<$button tooltip="View the top level of the Author Index">
{{$:/core/images/up-arrow}} Author Index
<$action-navigate $to="Author Index"/>
</$button>
\rules except wikilink
@@.cpredtext
!!! Press to Save {{$:/core/ui/Buttons/save-wiki}}
@@
!!! Sidebar Tabs
| <$fieldmangler tiddler="$:/core/ui/SideBar/More"><$button><$action-sendmessage $message="tm-add-tag" $param="$:/tags/SideBar" />Add</$button></$fieldmangler> | <$fieldmangler tiddler="$:/core/ui/SideBar/More"><$button><$action-sendmessage $message="tm-remove-tag" $param="$:/tags/SideBar" />Remove</$button></$fieldmangler> |[[$:/core/ui/SideBar/More]] |
| <$fieldmangler tiddler="$:/core/ui/SideBar/Tools"><$button><$action-sendmessage $message="tm-add-tag" $param="$:/tags/SideBar" />Add</$button></$fieldmangler> | <$fieldmangler tiddler="$:/core/ui/SideBar/Tools"><$button><$action-sendmessage $message="tm-remove-tag" $param="$:/tags/SideBar" />Remove</$button></$fieldmangler>|[[$:/core/ui/SideBar/Tools]] |
| <$fieldmangler tiddler="$:/core/ui/SideBar/Recent"><$button><$action-sendmessage $message="tm-add-tag" $param="$:/tags/SideBar" />Add</$button></$fieldmangler> | <$fieldmangler tiddler="$:/core/ui/SideBar/Recent"><$button><$action-sendmessage $message="tm-remove-tag" $param="$:/tags/SideBar" />Remove</$button></$fieldmangler>|[[$:/core/ui/SideBar/Recent]] |
| <$fieldmangler tiddler="$:/core/ui/SideBar/History"><$button><$action-sendmessage $message="tm-add-tag" $param="$:/tags/SideBar" />Add</$button></$fieldmangler> | <$fieldmangler tiddler="$:/core/ui/SideBar/History"><$button><$action-sendmessage $message="tm-remove-tag" $param="$:/tags/SideBar" />Remove</$button></$fieldmangler>|[[$:/core/ui/SideBar/History]] |
!!! Sidebar Buttons
| <$button><$action-setfield $tiddler="$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/home" text="show"/>Add</$button> | <$button><$action-setfield $tiddler="$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/home" text="hide"/>Remove</$button> |[[$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/home]] |
| <$button><$action-setfield $tiddler="$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/new-tiddler" text="show"/>Add</$button> | <$button><$action-setfield $tiddler="$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/new-tiddler" text="hide"/>Remove</$button> |[[$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/new-tiddler]] |
| <$button><$action-setfield $tiddler="$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/control-panel" text="show"/>Add</$button> | <$button><$action-setfield $tiddler="$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/control-panel" text="hide"/>Remove</$button> |[[$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/control-panel]] |
| <$button><$action-setfield $tiddler="$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/save-wiki" text="show"/>Add</$button> | <$button><$action-setfield $tiddler="$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/save-wiki" text="hide"/>Remove</$button> |[[$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/save-wiki]] |
!!! Toolbar Buttons
| <$button><$action-setfield $tiddler="$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/more-tiddler-actions" text="show"/>Add</$button> | <$button><$action-setfield $tiddler="$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/more-tiddler-actions" text="hide"/>Remove</$button> |[[$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/more-tiddler-actions]] |
| <$button><$action-setfield $tiddler="$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/edit" text="show"/>Add</$button> | <$button><$action-setfield $tiddler="$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/edit" text="hide"/>Remove</$button> |[[$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/edit]] |
!!! Causal PDF/MEDIA Display Configuration
| <$button><$action-setfield $tiddler="$:/causal/config/hidePDFandMEDIA" text="hide"/>Hide</$button> | <$button><$action-setfield $tiddler="$:/causal/config/hidePDFandMEDIA" text="show"/>Show</$button> |[[$:/causal/config/hidePDFandMEDIA]] |
<hr>
!!! Style Sheets
<<list-links "[tag[$:/tags/Stylesheet]]">>
<$button tooltip="View the session which holds this paper">
{{$:/core/images/up-arrow}} This Session
<$action-navigate $to={{!!current_session}}/>
</$button>
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 13.0.2, SVG Export Plug-In (SVG Version: 6.00 Build 14948) -->
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg version="1.1" id="welcome" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"
width="100%" height="100%" viewBox="0 0 850.0 600.0" enable-background="new 0 0 850.0 600.0"
xml:space="preserve">
<style type="text/css">
<![CDATA[
@font-face{font-family:'Helvetica';src:url("data:;base64,\
T1RUTwADACAAAQAQQ0ZGICIxfToAAAQAAABH6kdQT1PhQxDEAABL7AAABT5jbWFwSItk+QAAADwA\
AAPEAAAAAQAAAAMAAAAMAAQDuAAAAOoAgAAGAGoAJgAnAF8AYAB+AKMApAClAKYApwCoAKkAqgCr\
AKwArgCvALAAsQCyALMAtAC1ALYAtwC4ALkAugC7ALwAvQC+AL8AwADCAMMAxADFAMYAxwDIAMsA\
zADPANAA0QDSANQA1QDWANcA2ADZANwA3QDeAN8A4ADiAOMA5ADlAOYA5wDoAOsA7ADvAPAA8QDy\
APQA9QD2APcA+AD5APwA/QD+AP8BMQFBAUIBUgFTAWABYQF4AX0BfgGSAsYCxwLZAtoC2wLcAt0g\
EyAUIBggGSAaIBwgHSAeICEgIiAmIDAgOiBEISIiEvsC//8AAAAgACcAKABgAGEAoQCkAKUApgCn\
AKgAqQCqAKsArACuAK8AsACxALIAswC0ALUAtgC3ALgAuQC6ALsAvAC9AL4AvwDAAMEAwwDEAMUA\
xgDHAMgAyQDMAM0A0ADRANIA0wDVANYA1wDYANkA2gDdAN4A3wDgAOEA4wDkAOUA5gDnAOgA6QDs\
AO0A8ADxAPIA8wD1APYA9wD4APkA+gD9AP4A/wExAUEBQgFSAVMBYAFhAXgBfQF+AZICxgLHAtgC\
2gLbAtwC3SATIBQgGCAZIBogHCAdIB4gICAiICYgMCA5IEQhIiIS+wH////hAEH/4QAc/+H/v//D\
/7//+v+//9sAAf/h/7//6//3/9H/8f/r//L/9v/J/+P/vf+7/83/3f/V/73/4v/e/+X/vP/u/+r/\
7f/p/+r/xP/q/+3/6f/t/+n/yv/p/+z/6P/q/+f/0f+1/+v/5//o/7//tv/r/+f/6v/m/+f/qv/n\
/+r/5v/q/+b/t//m/+n/5f/n/+T/qP+b/+j/5P/l/6T/5P9g/0v/UP88/0H/YP98/07/Sv9m/tP9\
uP3B/an9qv2s/aP9qeBc4HXgKd/v4FvgTeBa4FjgUOBS4FPgSuAy4B/fd96UBWwAAQAAAAAAAAAA\
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEA\
BAIAAQEBCkhlbHZldGljYQABAQEc+BsB+BcE+zr7dfp8+jcFHG7ADfdjEbMcR8ISAAEBAZdDb3B5\
cmlnaHQgKGMpIDE5ODUsIDE5ODcsIDE5ODksIDE5OTAgQWRvYmUgU3lzdGVtcyBJbmNvcnBvcmF0\
ZWQuICBBbGwgcmlnaHRzIHJlc2VydmVkLkhlbHZldGljYSBpcyBhIHRyYWRlbWFyayBvZiBMaW5v\
dHlwZSBBRyBhbmQvb3IgaXRzIHN1YnNpZGlhcmllcy4AAADlAgABAAMABQAxAFMAwQFNAcoCWAKA\
ArcC7gMlA0oDawOAA5ADqgPwBBUEbwTwBSEFcQXfBg0GkwcABx0HSwdxB5MHuQgVCMMI/AlkCbMJ\
8AoYCjwKqwrVCuoLHQtSC2oLtAvlDDUMcwzmDU8NzQ3rDiUOTw6WDtQPAw8sD0oPYw+CD6kPvA/n\
EHAQ1BEbEXkR1xIPEogSwBLhExYTShNfE7kT9RQ+FJgU9BUlFZQVxhYFFi0WcRayFwIXJxeDF5YX\
7Rg2GGEY5hmAGZwZ4xpSGv4bcRuEG8wcEBw5HGEctBz/HRMdNx1rHYwduB3ZHf8eQh6HHsYe7x+h\
H/8gGCAyIFogpyC8IO8hAiEhIVghmiHBIfUiFSIrInAi/yMuI6MkHyRnJR8lNCVdJd8mfCb4Jxwn\
NSeAJ8soGyinKNcpHSmLKdEp7SonKn8rPSuLLCMsOCzBLPUtci4ELlUusy8KL1wvzDBOMOAxHzFq\
Ma4x7TIYMlAygDKrMyIziTP7NGg0zzVmNgE2UjauNwM3UzeYN+I4KzjIOXg6IjrAO4E8VTzfPVE9\
1T5SPsU+8T8pP1k/hEAFQGVA0EE0QZNCI0KvQwJDYkO7RA9EdUThRSV3DncOd4vx0Pi3i3cS5ewT\
sBOQ90/xFSol7AYTUHP3PxWj98oF94Eq+4EHo/vKBQ7E+GP3kxLR1NDUE8DR+GMV1PeTQgYToPci\
+5MV1PeTQgYO92bK9yLKAaf4iQP4GPdmFfIGlMoFJAaf9yIF8gaUygUkBqj3ZgVIBm77ZgX7EAao\
92YFSAZu+2YFJAaCTAXyBnj7IgUkBoJMBfIGbvtmBc0GqPdmBfcRBm77ZgXNBoL4MxV4+yIF+xEG\
n/ciBQ541Pjc1BKr40Hj9wu69wrgROMT2veR+C0VT5VQq9IayLXC2B66+94VwXoFtX6vYVMaSmBO\
Mh4T3Pdf+GcVhfc1+xqjTJAIy1xLB/sLMzT7APse9Gjxcx/7nQcT6iebcK+F9ggzBvtB7Ff3EIQe\
K7rrB933Hrj3PvcfOKP7HbMf94MHE9znep5dklUIDvjjhNH3XNGh0fdc0RKy0fdd0fcV0fdd0RPf\
E/+y+KIVK9hA6enX2OnpPtYuLT5BLB7RFsG3uMTCuGBTVGBdUVZcsskeE9/4JPv+FSvYQOnp19jp\
6T7WLi0+QSwe0RbBt7jEwrhgU1RgXVFWXLLJHuX4rxVUBvwf/WYFwQYO+AV812N2+RbXErfjieD3\
NeATtPg19yEVV2VVb0sbYzKo5di5rNi8HxOsqu8VcKZfvroao5XB1ai7fEtOV2ZlcR73e/uwFbDP\
nM2PvAg4BoJWh3x7X/sX9zAYwK/XvvAa01vX+wX7Ak8+OE2jar9LHhO0+xlAblYpGlSr+xn3R/TC\
tMO8HhN0zjkF9wYGDj/4Y/eTIfUSwPNYvhNQE6DA+PgVwAZpik9Xgx5cB9yeosjYGu0jBw6u+052\
+kB3Ac/pA/eB+XEV+wD7Lk77IvtEGvtPz/sR8/swHsYGO/cxUvcO9zka94S+6+D3NB4OrvtOdvpA\
dwH3P+kD6/tjFfcA9y7I9yL3RBr3T0f3ESP3MB5QBtv7McT7Dvs5GvuEWCs2+zQeDub4Q/ezi3cS\
svfK+03HE1AToLL44xX3AGlIL7xpzOnMLbytSOf3AK13wiJlBfcCT/sCByKxBQ73sqB292zUAfeU\
1AP3lPe1FfttQvdt+2zU92z3bNT7bPdsQgYOd/snufD1AeLzA+IWwAZcgFlhhx5dB5+O35v3HBrt\
IwcOrvd85QG394kD97X31hX7iTH3iQYOd4v1AeLzA/dT9RUjIfMGDneNdvmIdwF698wD97v5dRVJ\
BvuK/YgFzgYOeNr4yNoBsOn3uukD+D336hX7RF0hJiZd9fdE90S59fDwuSH7RB7pFvcmZPdr+177\
XmT7a/sm+yay+2v3Xvdesvdr9yYeDqB2+IfR9xp3Afed6QP3+/lTFUYGb/sHXID7BoMIRfc4/Ifp\
Bw6L3vix2hKl7jrj963pE9j3GPhXFbyOkfcQ9xkb8q9EUy9VbkBgHy5VBRPoLVRJPYb7Jgj4dd78\
EgaPpqDC68DUsxj0w97A9xIa9xcp2vsOHhPYP/s7dPt5Hw542ved1Pd22hKt5kHh4vdJiuZI6RPp\
E+33dPfYFZiZjJgbE/HH84D7CjpMVDH7F3vkzYgfMAaROqA62WIIea23fr8b90fV9wP3B+lau0eh\
H40HE+qxl7+25Rr2Otn7HPtNYfsTJB7hBryPk/H3ERv0pEZhM0lwRx8T7Xx7jIx8Hw6gdvc/1/hc\
dwH34OMD+J/3PxXXJPhcQgf71vxSBTX3x/s/4/c/BzH4HxWN+9P7dgYOeNf30dr3KeEB+DjpA/dF\
+O4V977h/AAGVvwN14cFtqy5o74b4tBUJjNSQjA8Tq/dgh8wBvsxnPcKZucb93Cu9z3Z9yX7Cdv7\
BkxgdnRtHw542vfJ2vdE2gGx5ve+5gP4i/iYFfcjgvsHtzob+2ZZ+3L7Li+XNbVJHz292HPBG+61\
prSwH7u/nr/TGvc8+xrCNStaW2V1HomNBdWo9073HtetXU2WHvuo+8AV3rrR7PcBrDVNLFVJNyRe\
3NYeDqB2+O7hAbD4egOw+O4V+BcG+xv7MfsU+5Bz+1UI7Aa+9873PPd00dQI2Px6Bw542vei1/du\
2hKx6UXm94zmRukT8hPs+ID4nhXdTu77MPs8XvsMQkmuW8d0HhPyQW1eVDUaaYs/01IeX8PVgbIb\
9zrT9wH3BcB32SyzHxPsqpvGqvAaE/L7a/tUFdvNYSz7Aj50S/sOb9/DzLnM7h8T7IrXFUtQtM64\
oszp8qdNYEtaWj8fDnjU90na98raErXmOOP3pfIT9Peb938VzNGpwaUfjYkF+0p1WDorGxPsQ2G3\
0oYfMwZHt/sM9zn3hpj31db3BnX3aPt0HhP0+yg9+wX7FB/7IOY09xYemNoVUDec9xvpwcvd8LRF\
O/sfMnZdHw53i/X3xPUS4vMToPdT9RUjIfMGE2D4mAQjIfMGDnf7J7nw9ffE9RLi8xfiFsAGXIBZ\
YYceXQefjt+b9xwa7SMHEzDz98QV9SMhBw73spb4eAG7+HwD+KzmFfwa9zb4Gvc2BdsH/Hz7YwVF\
B/h8+2MFDvey9wfU9xXUAbL4jgP4tfgaFfyOQviOBvsVBPyOQviOBg73spb4eAG7+HwDu/gzFfga\
+zb8Gvs2BTsH+Hz3YwXRB/x892MFDovx+LnXEsPj2Ows4+LpE/T3JPiBFbaQ9wf3FOyjQ2FSfX1J\
Rx4T7EVDcm77BhrjBonupaLKyAjPzKWtzRr3FCfP+wRWV3xsYx5CU4hLiEcIE5D3mvwbFSol7AYO\
+WF4yejT97jT8skB9yfU1933T9b3N8wD+Pf4LBU7S/sMNllps8TeyfcE5LexYFoequEVw3FioVsb\
+x8v+yH7GzTIQuG6t6qqqB+NjAVgjad3rRvk9yr3APdJ90r7M/cW+037Yvs/+zv7Zftp9zz7O/dq\
9xT3C873AcwfSAZFUi5fLhv7Q/sc9xv3RfdF9xL3I/dG9yv3FCn7NCBB+wFHe4aVnqa/9zOx9yAf\
RwYO+AWgdvdl4QGZ+RQD+HH3ZRXS+2UF9Qb7mPliBfsCBvui/WIF7wbW92UFqeEV9wf30AWNBvX7\
0AUO+AWL3veQ2vdx3hLV7PfZ7EzsE/T3P/kPFfdEBhP49wC0ZUgzUm8uH/tDBir8MhX3yQYT9Pcz\
4Oz3B/cPOKxcnh8T+NqwnM26Gu1G3vsaHvvQBuz8ExX3UwYT9NrkdyY7WFgmH/tjBg74PHjh+Nzh\
AbfvA/k2+IoV9wx9K/cH+00b+2X7Evst+3H7qPc0Ifc3wfdwl/ejsx8tBvs7avsBbUwb+0ZP9xb3\
PfdR9Ov3FtD0dPsSph8O+DyL3vi83gHc7Pgg7wPcFvezBveA0fdk9zX3Y/sH9yL7Tx/7twbsOBX3\
SQb3GN4w+0n7STko+xIf+1AGDvgFi+H3hOH3cOEB4ewD+PzhFfxF94T4HeH8Hfdw+D7h/J/9Yvim\
Bg73zaB299rh93DhAeHsA/dL99oV9/Th+/T3cPgk4fyF/WLsBg74dHjeYHb3wd73n+ESu+/4ZeYT\
vPgZ98EV93QGPntLTFkeYVZRh2cb+1Nb9yr3JvdQ9fL3Fdr0cfsRqh/pBvc1dvsp1/saG/t+IftZ\
+0M/jvsP9wIjH2ewx173Dxvt5bfPvB8TfKIuBcr4FPvPBg74PKB29+Ph9713Adjs+AvsA/i5+DkV\
/Av3vSr9Yuz34/gL++Ps+WIqBg53oHb5YncB5uwD91D5YhUq/WLsBg73Xnje+SJ3AZzm93PsA5z3\
exVEB0iw+wT3O+z3Arj3PB74oCr8pgdFclUxSGKq4R69Bw74BaB2+WJ3AdfsA/dB95AV9wX3APeP\
+/wF9xIG+8f4QPe/97YF+xoG+/D78AX38Cr9YuwHDovh+Qx3AdfsA/dB+WIVKv1i+GHh/AAGDvir\
i/cD+ID3BwHU6fiI6QP5jRb5YvseB/tg/PMFiQb7YfjzBfsf/WLp+DwGoInmzRqNBvde/O4F6wb3\
XvjvBY0GSIkwdhr8PAcO+DyL9x74TvceAdfp+BLpA/i89x4ViQb7+/jYBfsH/WLp+NiNBvgB/NgF\
9wH5Yi0GDvh0eOH43OEBsu/4iO8D+RP3+xX7PS37D/sw+zAt9w/3Pfc96fcP9zD3MOn7D/s9Hu8W\
9y8593P7oPugOftz+y/7L937c/eg96Dd93P3Lx4O+AWgdvfD3veN3gHh7Pfn7wP3S/kPFfdZBtvJ\
bjAqUGs0H/tVBjgE93YG9zDE9wXv9wg74fsZH/vX/WLsBg74dHjhSnb5MuESsu/4iO8TuPg49ygV\
1lEFfG1ng2Mb+zAt9w/3Pfc96fcP9zD3MOn7D/s9L288WFgfMNIFE3j3nPtiFSnXBdfYp/cF5hr3\
Lzn3c/ug+6A5+3P7Lx4TuPsv3ftz96DXyZ2puh4TePcCNgUO+DyLm/e43veI3hLj7Pf/7DLvE/T3\
TffIFfd/BhP49wiXRlBwjvsBmV8f9wubBm2chpqKs4X3FhiH72KdYpwIE/S5psGy9Rr3LPsKtPsK\
Hvvg/WLsBvkPBPeGBrvcgvsDIUB5RB/7dQYO+AV43vji3hK85kjp9+rmQekT1BPY+Oz4kBXah3L3\
KvuPG/tCQvsC+wT7EfVquoAf9ydpBRPk9wpvrnNAGiT7CnlFLyGx9xYeMAZIl0/IVB5uq79j9xkb\
9w/3Mrr3PvcGRcMuoR8T2PthugVhlWCfyBr3AfKcyNznaPsDkB4O982gdvkM4QH3lewDmfkMFfeH\
/Qzs+Qz3h+H82wYO+Dx44fkfdwHa7PgH7AP4t/liFfxaB/swJ2IyNCy29zAe+Fgq/HMH+xre+xD3\
VfdZ5/cQ9yce+GYHDvgFi/X4+HcBn/kHA/fj9RWJBvth+PgF+wAG95r9YgXyBvea+WIF+wEGDvka\
i/ca+GH3DwGb+iQD+Gv45xWNBvc4/OcF8Qb3UfliBSAG+xr83AWJBvs1+NwFIQb7NfzcBYkG+xr4\
3AUiBvdP/WIF8QYO+AWgdvlidwGe+QkD+CD4BBX3hffyBfsIBvtK+6f7SvenBfsOBveF+/L7lfwE\
BfcJBvdb97v3Vfu7BfcMBg74BaB2+WJ3Afex7AP4Eve1Ffej+EEF+wYG+2L77fti9+0F+wUG96P8\
QQX7tewHDvfNi+H4tuEBovjJA/jg4RX8Tgb4Tvi4Bd/8qDX4Kwf8TPy7BTr4yQcOd/tY0vmc0gHK\
2gP3IvkfFfcA0vtP/ir3T9L7AAYOd412+Yh3AXr3zAP3eHgVzgb7ivmIBUkGDnf7WNL5nNIB9xza\
A/cc+xEV+wBE90/6KvtPRPcABg73P/j02wF9+IUD+Hf3nBX7aPg8BUMG+2n8PAXZBvc/9+z3P/vs\
BQ77Eb0Bi/jAA/jA+xEVvfzAWQcOP/hq9SH3k4t3Esy+WPMTsBNI9z341BVWBq2Mx7+THroHOnh0\
Tj4aKfMHDnzUR9H4KdQSr+ZK3PeF4xO0E6z3I/gAFbORmcjvG96zbVRXcoN2iR/7JXkFE7T7Jnl+\
JV8aMc9O6/HAu7CtHhNsY46XY9kbn5iRj5ofyweJgYCJgxt5gJShH/fBB/ca+yyXYfsVOFr7EYce\
99b7SxUTtEU7VjlJbq3EzdCYtpEe9wGZoZOZlggOfNdOyPgi2vdIdxLF4/es5hO8E3z3IRYTvMyN\
B26dsFjkG/dGv/c79xH3Jzz3BvsfH06KV3JlVAiJ95kzBhN8/WIH+AT3mxUTvPsHXjQrUzSw9x/g\
mfcd9xMeE3z3HJL7FUgfDvdefNf4ItoBqekD+HH37xXmhFfv+x0b+0E9+xX7Nvsr4vsD9yf3Lb73\
Cd6UHzYGO3xYX1Ab+w119wPl6K7z9wHVsmFFlh8OfNdjdvhf2vdIdxKu5vex4xO8+If5YhUz+5oG\
iYQFqXZjxiwb+x88+wb7J/sRv/s790a+x5vSsR+NBhN8Q94HE7z8CfebFc6S9xX3HPcTmfsdNvsf\
NGZTK17i9wceDnzX90DR9zDaErPp97PjN+YT9Pg59zgVZIhaSzgbJljK9wEf+BIG901B9wz7K/tB\
PfsV+zb7K+L7A/cnHhP4362fm6Mfzbej1Y6kCPwL9x8V3MvW3B4T9PatQDqQHw53oHb4VtT3EtoB\
4eMD95r4VhXUM8wHtZmesZaZioqWHtoHjH1+jH4bNVlnPB8xQ0LT/Fbj+FYHDvtw1PcY1/giy0va\
ErPm96zjE9z3F/ebFc6S9xX3HPcTmfsdNvsfNGZTK17i9wceE+z4BPeYFTgGE9xBiQepdmPGLBv7\
Hzz7Bvsn+xG/+zv3RtG5rLCiH4+NdQdNjvsO+yx2Ooveex4zBvsfnPcNeskb94aM91DWHw6gdvhf\
2vdIdwHM4/eO4wP4f/f4FfcpJaw6N19XanIeifedM/1i4/exBvcY1bXT359dRB776uMHDj+gdvif\
d/cH7xLO4xf3L/ifFTP8n+MGEzD4/gTvMycHDj/7Ztr5Inf3B+8SzuMX9y/4nxUz/OIGUH6GVx55\
PwaJmJeKlxv3Fo/juB8TMPmvBDMn4wYO916gdvifd/drdwHO4AP3tffcFfdc91cF+wQG+3X7cQX4\
NDb9YuD3WgfUzfc3+5wF9wUGDj+gdvlidwHO4wP3L/liFTP9YuMGDviroHb4X8tL2hLM4/dw4/dw\
4xQcE7zMFuP3sQb3IuOrvcycVl8e+/7j99MH0bzR1dahWkMe++bj9/4H9ychqFM6aGdaXx6nfG3E\
LRstXk5veB+JBhPc1TgHDqB2+F/LS9oSzOP3juMTuPh/9/gV9yklrDoxWk5veB6JBhPY1Tj8n+P3\
sQcTuPci46u94aVdMB771uMHDn3X+CTXAa7m98TmA673mhX7G9f7Ifc79zvX9yH3G/cbP/ch+zv7\
Oz/7IfsbHuYW0aX3FvcS9xKl+xZFRXH7FvsS+xJx9xbRHg77Tnb3VNf4IstL2hLF4/es5hPc9yL3\
gRXgmfcd9xP3HJL7FUj7B140K1M0sPcfHjf8UBXj95qNBmaiuWrRG/dGv/c79xH3Jzz3BvsfLGNQ\
bXYfiQYT7NU4Bw77Tnb3VNf4IstL2hKu5ves4xPcE+z4gvifFTgGE9xBiQepdmPGLBv7Hzz7Bvsn\
+xG/+zv3RtG5rLCiH437muMGN/hQFfsfNGZTK17i9wfOkvcV9xz3E5n7HTYeDq6gdvhQ2jzpEtjj\
E7D3OffEFde/y90erOYGjYSGjIAbSV1iTmgfiQYT0OI4/J/jBw73XnzX+CXXEqvgR+D3eOBE4xPU\
E9j4U/gKFaV+9x77TyQsV/sHQ7tm1Hke8XIFE+TWeKh9YRpRUnJM+xB/zbSIHjYGTI6a+wz3Yxv3\
CuTM9wDSZbP7AaYfE9gzoQVHnHCVthrM2JSm9wOXVGqMHg53hNf4EdQB4OMD95X4VhXUN/cmM/sm\
RELS++0HTJ5a4pSkj46rHtBsB3loi7Qf9+gHDnzXTsj4YncSz+P3ieMTuBN4+H0W+J8z+7QHE7g/\
ain7A1JfqOEe9+8z/AwH+xHoZtjfuKvIsB6NiQUTeD8HDvdei+z4PncBk/h4A/eM7BWJBvsc+D4F\
JQb3U/yfBekG91v4nwUrBg74PIv3BPfOdvcKdwGZ+UsD9/r4KRWNBvX8KQXmBvcs+J8FMwYh/C8F\
iQYl+C8FKQYm/C8FiQYn+C8FKwb3K/yfBeYGDvdeoHb3tnb3kncBlvhzA/fG96EV90X3kgX7AAb7\
DftM+w33TAX7BQb3Q/uS+0z7oQX3Awb3EvdZ9xb7WQX3BAYO9177atpZdvltdxKW+HITsPeQ7BWJ\
Bvsf+D4FJwb3VvytBfsAYYZ+XRtzeZCPgB8TcDsHE7CHmZWHmBv3BZSX92jeH873P8f3P8j3Pwgq\
Bg73XovU+A3UAar4SgP4YPhXFdP8KUL3twf7z/wRBUb4StT71QcOr/tYzPgBx/f/zBK105TaE6gT\
+LX3fhUT6LmuVEAf+z8HOb5cvh7OzF8GaICxtx/3PQeF802sd5EIjQefkMmwke0I9z4HuJawrh63\
zEgGWFhcOR/7QAdCaFVdHg5ljXb5iHcB6dQD6XgV1PmIQgYOr/tYzPgBx/f/zAH3GNoD97j3uhVd\
aMHUH/dAB91YulgeSEq3Bq6WZl4f+z4HkSnJZp+GCIkHd4VNaoUjCPs9B1+AZWgeX0rOBr6+ut0f\
9z8H1q7CuR4O97L3SPcmaHYSyPhiE2AToPh799oVaHRyZmMbemiYolkfnGVkmmkbSGdfYXUfr1MF\
rpujsLcbubB6easfeayrfLAbwbSxu6kfDq77V/i30PES9wrsE2D3Cvg5FezxKgYToKP7PxVz+8oF\
+4Hs94EHc/fKBQ5411t2q3b4Y9eFdxK+4xOU92jZFWCmbc/TGvcszszgmZKLiJMe91H7BhUTjH/3\
Djq5Z5Wk7xhkBnEwBRM0j4F+jXcb+y8wIvtQ+xXIO9hmH2n7BgW0BhNUqvIFE5SHpJ6Imxv3OMD3\
FdSUHzYGInNQdk4beYSMjoEf9wL4Frp5mFiTahkOe95SdrPU92vK95rXEsbpkd73U+MTnfdg+BAV\
asd5s7Ias6ze9wLdwk84HoLjmgfzS/cB+yv7VVX7DThYpF2dZh5GTAYTX/YGknuXbHIaaHk4I0Ye\
vEYFEz2isLWcsRsTndXWXscbu7Cbq7UfYs0FfHxpe3AbEz1hS65NGxOfWl57dmYfjQeJiQXew7HU\
xBqnh56Gnh73HsoGDvsXjXb5ZncB+zr4hwP34flTFVIG/E79ZgXEBg6gdvdNxODE98R3Afd+4wO1\
99sV91Q2+1RS91T7TeP3TfdVxPtV4PdVxPsvBvdV98QFJwb7Q/u7+0P3uwUnBvdV+8QF+y4GDvtj\
2vib0fdc13yaEoD4lPwX99wT6BPY+In5ZhUT5JN2e5JiGzRUOkR7H2/7EAX7D0X3AQZL++0F+wB3\
eUlJG3Z7kpJ9H3w6BYSal4auG+vSz/dfsB/H99sF9xPR+wYGn/IFsZOfxsAbnpuDgqEfDvtT1/mc\
1xK23mDjQOP3UeNr3hPk+GD4vhX3KCOuOx4TzPsXWitBZJhzrWYfE+RRaWFkNhptllPBZx73Zvsg\
Ba50n3VpGkNHgGYeE9QxhNqsHzMG+xngVPcE4OvF8r12qmysHhPKvqu/t98a3Vu1Tq8e+ybhBV6m\
b6WwGsfAoLIeE+Toj0BrjR8T4oP7fRWtdqdsXxpYbHJlch77UfcOBX6Ta6PAGrqnqbKfHg7x0ff4\
0QGs1Pfs1AP4dPjvFVJTBaxlVZ1WG1ZVeWplH1LDW1vCVQVpZHtTVBpUm1WtYR5VU7hewsUFZrPC\
ecEbwcKdsLMfwlG4uFXDBa21m8HCGsJ7w2myHsLBBfw6+2AV6dje6urYOC0rPzgrKz/e6x4OIPhj\
95MBxtQDxvhjFdT3k0IGDq74avUh95OLdxKxvljzyL5Y8xO0E0L3x/jUFVYGrYzHv5Meugc6eHRO\
Phop8wcTSPs59RVWBq2Mx7+THroHOnh0Tj4aKfMHDvcA9+b7KHYS7Pcxu/cxE3AToPeS91gVItz0\
3AXjB/sx+w8FLwf3MfsPBROQ92HjFSLc9NwF4wf7MfsPBS8H9zH7DwUOrvcA9+b7KHYS4/cxE2AT\
oPeJ91gVItz03AXjB/sx+w8FLwf3MfsPBQ6u9wD35vsodhLj9zETYBOg4/cAFfcx9w8F5wf7MfcP\
BTMH9DoiOgUO916gdvhW1OrvRtoS4eP3QOMT7Pea+FYV1DMHE9zMB7WZnrGWmYqKlh7aB4x9fox+\
GzVZZzwfMUNC0/xW4/hWB/eY1BUz/J/jBhPs+P4E7zMnBw73XqB2+FbU9xLQRtoS4eP3PuMT3Pea\
+FYV1DPMB7WZnrGWmYqKlh7aB4x9fox+GzVZZzwfMUNC0/xW4/hWBxPs95b3oBUz/WLjBg73hNQB\
i/jAA/jA980V/MBC+MAGDvhD1/dndwH3fOkD99r4QxX3UNf7UPdnLftn+1E/91H84ukGDrbX99TX\
9193Afd86QP32vhLFfdQ1/tQ918t+1/7UT/3UfvU+1E/91H7Xun3XvdQ1/tQBg5391L3EQHY9xED\
9yD3zxVqbXFmaqdura+mqauvbKZsHw73g/kmxwH3fNT3C9QD93z7QRXU+dP3C/3T1PoP+4wG+yAw\
UfsYI+E79xQfDr/3XvfPAZ33zwP34ff7FeJE0jQzRkQ0M9FG4uLS0eIeDj/7KfeTIfUSwPNYvhNQ\
E6DAFsAGaYpPV4MeXAfcnqLI2BrtIwcOrvsp95Mh9RKl81i+yPNYvhNUE6ClFsAGaYpPV4MeXAfc\
nqLI2BrtIwcTiPc5IRXABmmKT1eDHlwH3J6iyNga7SMHDq74Y/eTIfUSpfNYvsjzWL4TVBOgpfj4\
FcAGaYpPV4MeXAfcnqLI2BrtIwcTiPc5IRXABmmKT1eDHlwH3J6iyNga7SMHDvcA9+b7KHYS7Pcx\
u/cxE3AToPeS93sV5wf7MfcPBTMH9DoiOgUzBxOQ9/73DxXnB/sx9w8FMwf0OiI6BTMHDvlSi/UB\
9wfz93nz93nzFHD3b/UVIyHzBvfh9RUjIfMG9+H1FSMh8wYO+VKEyvdOytnK907KEpLK907Ki8r3\
Tsq+yvdOyhP9wBM8AJL4rxUz0Ufh4tDR4eFF0TU1RUY0HsoWvbO2wL61YldYYWFYVmO1vh4TwwD3\
jfwaFTPRR+Hi0NHh4UXRNTVFRjQeyha9s7bAvrViV1hhYVhWY7W+HhPAwPfAFjPRR+Hi0NHh4UXR\
NTVFRjQeyha9s7bAvrViV1hhYVhWY7W+HhMRAPtd+L4VVAb8If1mBcIGDvfN+13X+LjxEubp2+wx\
49rjE/T4S6wVYIb7B/sUKnPTtcSZmc3PHhPs0dOkqPcGGjMGjShxdExOCEdKcWlJGvsU70f3BMC/\
mqqzHtTDjsuOzwgTUPua+BoV7PEqBg6u+OX3IQGZ91kD9yT45RXOBjL3IQX7AAYOrvjl9yEB9w73\
WQP30/lyFfsABjL7IQXOBg6u+OX3IVu7EqD3txOgE2D3OvlCFROgzS4F2wYo9yEFLgYo+yEF2gYO\
rvjy9wiLdxKH9+kTYBOg97P5ZhV4h4VwYxuDd5CTeB91lAWTdnaScRtIbltQgx+7BqWTl52mG5eZ\
iH+uH6WCBYKmmYedG8SvvMeSHw6u+QfEAZX3zQP31/lAFfvNUvfNBg6u+OfE2ncBmPfIA/el+W8V\
bIJ9WzsbYHGZnHwffJuGn4eXCFoGaI+YJvcYG+HJse2WHw6u+PDxAfcN5gP3aPlWFTAl5gYOrvjw\
8RKz5tLmE8D3F/lWFTAl5gYToPc28RUwJeYGDq740LPzswHWs/OzA/c7+YgVVmRkVlayZMDAsrLA\
wGSyVh9XLxWooqKoqKJ0bm50dG5udKKoHg6u+3W63LIB90vXA9o1FZx4BZGglI2UG5mtg2xmZoZ4\
cnyQlXIfemMFgKWsgLAb1bezv6t4tkp+hImJgB+5yQVcBg6u+OX3IQGq+A4D93n5chX7AQYy+yEF\
zgb3y/chFfsBBjL7IQXOBg6u+3W691d2AdTXA/ei+zcVgXJ8hnIbeGaQuL2dqN24H49IBy1xfEBr\
Gle3Y9W1s5uRmR4Orvjl9yEBoPe3A+/5chU8Bu77IQXoBu73IQU7BkkuBQ75UveE1AGL+nwD+nz3\
zRX+fEL6fAYO+VKL4fcP4arh93DhAfhd7AP3VPdlFfed+2X4guH8IfeE9/jh+/j3cPga4f0YBvu4\
/WIF9Ab37Pe7Fft6BvcZ9+UF7AYO0/fEu8C6XL/owN2/EqPKXcH3LcgTqRPb9+74WhWKhISKhht/\
hZCZH/dGB9wnknA3VW1AiB7BBpuNkbDQG8GheXZsf4Z/iR8zgQUTvSR/gk9tGlW4ZsnNrqihoR5v\
jZN0vRuYlI6NlR/7XboVYHyfqau0k6eOH9KUmpCRkQhkB2GOV2tVG/sCIhVb97y7Bw6L4fkMdwHX\
7AP3t/icFfsKSAX3nSr70AcrVAVRB+vCBfvs+GHh/AD3yQf3Cs4FDvh0eOH43OEBsu/4iO8D+OL4\
sxWrW5xLQxr7PS37D/swQ1GluGEecKwVar55y9Qa9z3p9w/3MNTHcF21HvdE9xUVbKk6MgXAVj2v\
+wAb+6A5+3P7LzmiJsk+Hzgxq2/b4gVXwdho9Bv3oN33c/cv3HXuT9gfDvlSeN5L4feE4fdwmH7h\
S94Sr+/4EewTa/iZ93oVE6f7DSxeSvsEWbXeaR56toO8xRrsmvdu91Xw000nHhNzkfctFYkGE6ek\
fF/O+wsb+49T+4b7IPtr8/sz91H3DLG+tqwfjQYTa0D4T+H78/eE9+Hh++H3cPfx4fxMBw7O98S7\
wL/3eL8BpM73Ss4D9+n4zxX3FjevQUE3Z/sW+xbfZ9XV36/3Fh5IFmJ/Qjw8f9S0tJfU2tqXQmIe\
+4P7bxVb97y7Bw7443zX90DR9zDaQtQSr+ZK3PeF3Pez4zfmE9qA+GX3wxUT6oDcy9bc9q1AOpAe\
h/sfFWSIWks4GyZYyvcBH/gSBhPWgPdNQfcM+ysiY11vbh6yZmquJxv7FTha+xGHH9wGs5GZyO8b\
3rNtVFdyg3aJH/sleQUT2wD7Jnl+JV8aMc9O6+nPq9i+HmefukL3ERvfrZ+box/Nt6PVjqQI/Fyc\
FUU7VjlJbq3EzdCYtpEe9wGZoZOZlggOd6B2+J93AerjA/dL+J8VM/yf4wYOP6B2+WJ3Ac7jA/eG\
+EcVxQc0WQX3pzP71Qc0WQVRB+K9Bfvn4/gVBw73zX3XVHb4UXa/13ieEq7m98TmE7YTrvit+JsV\
E3ZspUtDBbNkVaREG/s7P/sh+xtInkexWB9LQwUTrqpxx88FZrHAdM8b9zvX9yH3G8t6zGe9H/vR\
+74VeLOFuawaE7bRpfcW9xK+rnVtoh6gZhWcZpBgbBpFcfsW+xJbaZ6mdR4O+Rp810DX9z/R9zPX\
Eq7m98Tk97jmE374m/fDFdzM1tz2rUA6kB6H+x8VE75kiFpLOBsmV8r3AR/4Ewb3TUH3DPsrMExs\
QFoeYNJFrTiMCPs7P/sh+xsfE377G9f7Ifc7HrOK75O76AgTvkO80m7VG9+tn5ujH823o9WOpAgT\
fv2V7RXRpfcW9xL3EqX7FkVFcfsW+xL7EnH3FtEeDvfNfNpD00vL97bU93XXEs7j97rjUuYTnRM+\
zhbj+JIG4KjE9wXwvltLUGhV+wyDhYyMgh48B46XloybGxOd8c9oIiZOWvsBfXyMjXwfE11ABxOd\
iJmVh6gb9yP3Adf3RcZh3kGfHxOeoprGr+wa9wEw0vso+yo6OvsHHg6u98J297y523cB9yvSA/cr\
+NUV+7zS+DpTB3lGbIVBhghdBw73svfR1AH4bNQD+Gz3ABXU9678jkL4RQYO+05291TXTsj4YncS\
z+P3ieMT3BO8+H0W+J8z+7QHE9w/ain7A1JfqOEe9+8z/AwH+/bj93aNB22isIefG9+4q8iwH42J\
BRO8PwcO+VL5JscB90HU91vU98zUA/jB+WIV+wT8MNT3440G9xX74wW9BvcV9+MFjfvj1Pgw+wQG\
+wj7wwX7wvfDFfvbT/cT+/TU9/T3EwYO+DyL3veH4fdz3gHc7Pgg7wP4BvgwFftU93P3SQb3GN4w\
+0n7STko+xIf+1D3h/dUBvwGFtz72vezBve3mvfIyPdj+wf3IvtPH/u3+8Y6Bg74rIvIUHb3y3aq\
dt7F9y+5XfcSEvcr0veLx/dC0RObwPj9+VMVUgYTW8D8Tv1mBcQGc/joFfu80vg6UwcTncB5RmyF\
QYYIE6vAXQf4evvGFaSNj8/fG86fZ22IH1RqfFdvHk9qUWthXIczGffTyPuIBpminqG6pLiiGM2s\
wKzXGtlLuzla+wF9+x0eDveyi9T3W9QB95TUA/eU9+0V+21C9237NdT3Nfds1Pts9zVCBvtt/I4V\
+I7U/I4GDvgFoHb3TN73jd73C3cB4ez35+8D90v4mBX3WQbbyW4wKlBrNB/7VQYq+58V7PdM93YG\
9zDE9wXv9wg74fsZH/t29wsqBg74rI12s3bpxfcqdsXe9zK2YPcPEvdJz/c+yfckyxN7wPg36RX3\
Yi3L6c7FSPeiSwb7YvugBfdiiRX7JAb3IvdPBY0Gp/gAFVIGE7vA/E79ZgXEBm346xX7v8/4OlYH\
E33AeUZshUGGCBN7wGAHDveyePcc7tTu9xwB93T3HBTg+LX3bBXU/I5CB/dN94QVZqtsr7Cqra2w\
bKpmZ2ttZR78KwRmq2yvsKqtrbBsqmZna21lHg5ljXb4c/epAenUA+l4FdT3ukIG900E1PepQgYO\
8fgvtvditgHBu/dYuwP3XPkoFcC4WVZWXllWVl69wMC4vcAf9yYkFd5Myjg4TEw4OMpM3t7Kyt4e\
DvtOdvdU1/gi2vdIdwHF4/es5gP3IveBFeCZ9x33E/cckvsVSPsHXjQrUzSw9x8eN/xQFeP3mo0G\
ZqK5atEb90a/9zv3EfcnPPcG+x8sZVBveR+J958zBg74rI12s3bpxfcKxZbevL56nPcPxRK4z1LI\
v/cMgNFW0fcAyfckyxN1Tvht6RX3Yi3L6c7FSAYTvY73oksH+2L7oAX3YPdNFY37T/skBvdF+LsV\
Ugb8Tv1mBcQG+3X4QRUTc46PW5lavnMIgKGng60b9wu7ztDDbKhelx+NBxNzVqSSraXBGstWujP7\
DGw/TR7IBq6Nl7jOG9CWaHYfE3VWVmV9Xx4TdW5wWKIGE3WOssuKRFpldVFAgrquiR8OrvetyPfD\
xQGex/dC0QPa+LwVpI2Pz98bzp9nbYgfVGp8V28eT2pRa2FchzMZ99PI+4gGmaKeobqkuKIYzazA\
rNca2Uu7OVr7AX37HR4O+Et4yveyw/cbw+zKAX3X9zbN903Q9xjXA/gGeBX3YvdE9zT3b/dt+0T3\
NPti+2T7RPs0+237b/dE+zT3ZB/7yPgPFfdH9xv3G/dB9z/3G/sb+0f7Svsb+xr7P/tB+xv3GvdK\
Hvd4bRXXBvcH+1EF1Qb7EPdUBcuTvK3ZGuFYsSQe+zr8SM0G94kE9xvlB7m8gVVIWYdTHw73svds\
1AGy+I4D+LX3bBXU/I5CBw581/gW1/dCd6N3w3cSrub3xOcTzvcS95IVwqP3HvcU9xSj+x5UVHP7\
HvsU+xRz9x7CHhPu9+v4TxUTzlyzIVhwoGycapgZE95BZ6p+qnmmdhn7ClK1YfcQybVpuViiUhmJ\
iQW+XFaPahv7KTv7HfsY+xjX+x33Q/L3GdT3hB8T7vcUUfcSJekeDveyoHYBsviOA/eF95EV+177\
Xr5Y9173Xfde+12+vvtd9173XfdeWL77Xvtd+133XVdYBQ6u96LF9yO+epz3D8USkM9SyL/3DIDR\
VtET1IATuICQ+C4Vj1uZWr5zCIChp4OtG/cLu87Qw2yoXpcfjQcTtQCkkq2lwRrLVroz+wxsP00e\
yAaujZe4zhvQlmh2HxPVAFZlfV8eE9aAcFiiBhPYgLLLikRaZXVRQIK6rokfDvhLeMrfzffgzd3K\
AX3X7df4TNcD+AZ4Ffdi90T3NPdv9237RPc0+2L7ZPtE+zT7bftv90T7NPdkH/vI+A8V90f3G/cb\
90H3P/cb+xv7R/tK+xv7Gvs/+0H7G/ca90oe+EtEFVOCXWRMGzBT2eXovdLpyrlnVJcfzgbxfTzC\
Kxv7HDgp+xn7F+Il9xzn1snunh8O+AWgdvdl4fiB9yESmfkU/AH3WRPQ+HH3ZRXS+2UF9Qb7mPli\
BfsCBvui/WIF7wbW92UFqeEV9wf30AWNBvX70AUTKLf5DhX7AAYy+yEFzgYO+AWgdvdl4fiB9yFb\
uxKZ+RT8Zve3E8j4cfdlFdL7ZQX1BvuY+WIF+wIG+6L9YgXvBtb3ZQWp4RX3B/fQBY0G9fvQBRMU\
+wH43hUTJM0uBdsGKPchBS4GKPshBdoGDvgFoHb3ZeH4jPESmfkU/FPm0uYT0Phx92UV0vtlBfUG\
+5j5YgX7Agb7ov1iBe8G1vdlBanhFfcH99AFjQb1+9AFEyj7JPjyFTAl5gYTJPc28RUwJeYGDvgF\
oHb3ZeH4gfchEpn5FPxt91kT0Phx92UV0vtlBfUG+5j5YgX7Agb7ov1iBe8G1vdlBanhFfcH99AF\
jQb1+9AFEyj7F/iBFc4GMvchBfsABg74BaB292Xh+Fiz87MSmfkU/DCz87MTyPhx92UV0vtlBfUG\
+5j5YgX7Agb7ov1iBe8G1vdlBanhFfcH99AFjQb1+9AFEzb7APkQFVZkZFZWsmTAwLKywMBkslYf\
Vy8VqKKiqKiidG5udHRubnSiqB4O+AWgdvdl4fiO9wiLdxKZ+RT8f/fpE8j4cfdlFdL7ZQX1BvuY\
+WIF+wIG+6L9YgXvBtb3ZQWp4RX3B/fQBY0G9fvQBRMkl/kCFXiHhXBjG4N3kJN4H3WUBZN2dpJx\
G0huW1CDH7sGpZOXnaYbl5mIf64fpYIFgqaZh50bxK+8x5IfDvg8+3W63LKy4fjc4RK37/d+1xM4\
+Tb4ihX3DH0r9wf7TRv7ZfsS+y37cfuo9zQh9zfB93CX96OzHy0G+ztq+wFtTBv7Rk/3Fvc991H0\
6/cW0PR0+xKmHxPE+8b84BWceAWRoJSNlBuZrYNsZmaGeHJ8kJVyH3pjBYClrICwG9W3s7+reLZK\
foSJiYAfuckFXAYO+AWL4feE4fdw4dH3IRLh7PX3WRPo+PzhFfxF94T4HeH8Hfdw+D7h/J/9Yvim\
BhMU+xb6NRX7AAYy+yEFzgYO+AWL4feE4fdw4dH3IVu7EuHskPe3E+T4/OEV/EX3hPgd4fwd93D4\
PuH8n/1i+KYGEwr7r/oFFRMSzS4F2wYo9yEFLgYo+yEF2gYO+AWL4feE4fdw4dzxEuHso+bS5hPo\
+PzhFfxF94T4HeH8Hfdw+D7h/J/9YvimBhMU+9L6GRUwJeYGExL3NvEVMCXmBg74BYvh94Th93Dh\
0fchEuHsifdZE+j4/OEV/EX3hPgd4fwd93D4PuH8n/1i+KYGExT7xfmoFc4GMvchBfsABg53oHb5\
Ynfl9yES5uwu91kT0PdQ+WIVKv1i7AYTKPP6NRX7AAYy+yEFzgYOd6B2+WJ35fchW7sShfe3+1bs\
E8T3UPliFSr9YuwGExha+gUVEyjNLgXbBij3IQUuBij7IQXaBg53oHb5Ynfw8RKY5n7sfuYTyPdQ\
+WIVKv1i7AYTMDf6GRUwJeYGEyT3NvEVMCXmBg53oHb5Ynfl9yESfvdZLuwTyPdQ+WIVKv1i7AYT\
MET5qBXOBjL3IQX7AAYO+DyL9x74Tvce3vcIi3cS1+mq9+mV6RPK+Lz3HhWJBvv7+NgF+wf9Yun4\
2I0G+AH82AX3AfliLQYTJE/3WxV4h4VwYxuDd5CTeB91lAWTdnaScRtIbltQgx+7BqWTl52mG5eZ\
iH+uH6WCBYKmmYedG8SvvMeSHw74dHjh+NzhvvchErLv92L3WezvE9T5E/f7Ffs9LfsP+zD7MC33\
D/c99z3p9w/3MPcw6fsP+z0e7xb3Lzn3c/ug+6A5+3P7L/sv3ftz96D3oN33c/cvHhMo+1n4zhX7\
AAYy+yEFzgYO+HR44fjc4b73IVu7ErLv9Pe38+8TyvkT9/sV+z0t+w/7MPswLfcP9z33Pen3D/cw\
9zDp+w/7PR7vFvcvOfdz+6D7oDn7c/sv+y/d+3P3oPeg3fdz9y8eExT78vieFRMkzS4F2wYo9yEF\
LgYo+yEF2gYO+HR44fjc4cnxErLv9xDm0ub3D+8T0vkT9/sV+z0t+w/7MPswLfcP9z33Pen3D/cw\
9zDp+w/7PR7vFvcvOfdz+6D7oDn7c/sv+y/d+3P3oPeg3fdz9y8eEyj8FfiyFTAl5gYTJPc28RUw\
JeYGDvh0eOH43OG+9yESsu/t91n3Ye8T1PkT9/sV+z0t+w/7MPswLfcP9z33Pen3D/cw9zDp+w/7\
PR7vFvcvOfdz+6D7oDn7c/sv+y/d+3P3oPeg3fdz9y8eEyj8CPhBFc4GMvchBfsABg74dHjh+Nzh\
y/cIi3cSsu/b9+na7xPK+RP3+xX7PS37D/sw+zAt9w/3Pfc96fcP9zD3MOn7D/s9Hu8W9y8593P7\
oPugOftz+y/7L937c/eg96Dd93P3Lx4TJPt5+MIVeIeFcGMbg3eQk3gfdZQFk3Z2knEbSG5bUIMf\
uwalk5edphuXmYh/rh+lggWCppmHnRvEr7zHkh8O+AV43vji3r73IRK85kjpoPe3qeZB6RPJE8r4\
7PiQFdqHcvcq+48b+0JC+wL7BPsR9Wq6gB/3J2kFE9H3Cm+uc0AaJPsKeUUvIbH3Fh4wBkiXT8hU\
Hm6rv2P3GRv3D/cyuvc+9wZFwy6hHxPK+2G6BWGVYJ/IGvcB8pzI3Odo+wOQHhMk+4b4ORU8Bu77\
IQXoBu73IQU7BkkuBQ74PHjh+R935fchEtrs9yH3WazsE9T4t/liFfxaB/swJ2IyNCy29zAe+Fgq\
/HMH+xre+xD3VfdZ5/cQ9yce+GYHEyj7FvdnFfsABjL7IQXOBg74PHjh+R935fchW7sS2uyz97ez\
7BPK+Lf5YhX8Wgf7MCdiMjQstvcwHvhYKvxzB/sa3vsQ91X3Wef3EPcnHvhmBxMU+6/3NxUTJM0u\
BdsGKPchBS4GKPshBdoGDvg8eOH5H3fw8RLa7Mbm0ubG7BPS+Lf5YhX8Wgf7MCdiMjQstvcwHvhY\
KvxzB/sa3vsQ91X3Wef3EPcnHvhmBxMo+9L3SxUwJeYGEyT3NvEVMCXmBg74PHjh+R935fchEtrs\
rPdZ9yHsE9T4t/liFfxaB/swJ2IyNCy29zAe+Fgq/HMH+xre+xD3VfdZ5/cQ9yce+GYHEyj7xdEV\
zgYy9yEF+wAGDvgFoHb5Ynfl9yES97HsLvdZE9D4Eve1Ffej+EEF+wYG+2L77fti9+0F+wUG96P8\
QQX7tewHEyjz+jUV+wAGMvshBc4GDvgFoHb5Ynfw8RL3Y+Z+7H7mE8j4Eve1Ffej+EEF+wYG+2L7\
7fti9+0F+wUG96P8QQX7tewHEzA3+hkVMCXmBhMk9zbxFTAl5gYO982L4fi24dH3IRKi+Mn8QPe3\
E9D44OEV/E4G+E74uAXf/Kg1+CsH/Ez8uwU6+MkHEyj78fo1FTwG7vshBegG7vchBTsGSS4FDnzU\
R9H4KdTC9yESr+ZK3Ob3WVzjE6kTpfcj+AAVs5GZyO8b3rNtVFdyg3aJH/sleQUTqfsmeX4lXxox\
z07r8cC7sK0eE2Vjjpdj2RufmJGPmh/LB4mBgImDG3mAlKEf98EH9xr7LJdh+xU4WvsRhx731vtL\
FROpRTtWOUlurcTN0Ji2kR73AZmhk5mWCBMSuvhnFfsABjL7IQXOBg581EfR+CnUwvchW7sSr+ZK\
3IH3t2PjE6SAE6KA9yP4ABWzkZnI7xves21UV3KDdokf+yV5BROkgPsmeX4lXxoxz07r8cC7sK0e\
E2KAY46XY9kbn5iRj5ofyweJgYCJgxt5gJShH/fBB/ca+yyXYfsVOFr7EYce99b7SxUTpIBFO1Y5\
SW6txM3QmLaRHvcBmaGTmZYIEwkAIfg3FRMRAM0uBdsGKPchBS4GKPshBdoGDnzUR9H4KdTN8RKv\
5krclObS5nbjE6iAE6SA9yP4ABWzkZnI7xves21UV3KDdokf+yV5BROogPsmeX4lXxoxz07r8cC7\
sK0eE2SAY46XY9kbn5iRj5ofyweJgYCJgxt5gJShH/fBB/ca+yyXYfsVOFr7EYce99b7SxUTqIBF\
O1Y5SW6txM3QmLaRHvcBmaGTmZYIExIA+yH4SxUwJeYGExEA9zbxFTAl5gYOfNRH0fgp1ML3IRKv\
5krcevdZyOMTqROl9yP4ABWzkZnI7xves21UV3KDdokf+yV5BROp+yZ5fiVfGjHPTuvxwLuwrR4T\
ZWOOl2PZG5+YkY+aH8sHiYGAiYMbeYCUoR/3wQf3Gvssl2H7FTha+xGHHvfW+0sVE6lFO1Y5SW6t\
xM3QmLaRHvcBmaGTmZYIExL7FPfaFc4GMvchBfsABg581EfR+CnUrbPzsxKv5krct7Pzs5jjE6RA\
E6JA9yP4ABWzkZnI7xves21UV3KDdokf+yV5BROkQPsmeX4lXxoxz07r8cC7sK0eE2JAY46XY9kb\
n5iRj5ofyweJgYCJgxt5gJShH/fBB/ca+yyXYfsVOFr7EYce99b7SxUTpEBFO1Y5SW6txM3QmLaR\
HvcBmaGTmZYIExmAIvh9FVZkZFZWsmTAwLKywMBkslYfVy8VqKKiqKiidG5udHRubnSiqB4OfNRH\
0fgp1M/3CIt3Eq/mStxe9+lU4xOkgBOigPcj+AAVs5GZyO8b3rNtVFdyg3aJH/sleQUTpID7Jnl+\
JV8aMc9O6/HAu7CtHhNigGOOl2PZG5+YkY+aH8sHiYGAiYMbeYCUoR/3wQf3Gvssl2H7FTha+xGH\
HvfW+0sVE6SARTtWOUlurcTN0Ji2kR73AZmhk5mWCBMRAJD4WxV4h4VwYxuDd5CTeB91lAWTdnaS\
cRtIbltQgx+7BqWTl52mG5eZiH+uH6WCBYKmmYedG8SvvMeSHw73Xvt1utyyttf4ItoSqen3I9cT\
OPhx9+8V5oRX7/sdG/tBPfsV+zb7K+L7A/cn9y2+9wnelB82Bjt8WF9QG/sNdfcD5eiu8/cB1bJh\
RZYfE8T7efxFFZx4BZGglI2UG5mtg2xmZoZ4cnyQlXIfemMFgKWsgLAb1bezv6t4tkp+hImJgB+5\
yQVcBg581/dA0fcw2sL3IRKz6e/3WYHjN+YT6fg59zgVZIhaSzgbJljK9wEf+BIG901B9wz7K/tB\
PfsV+zb7K+L7A/cnHhPq362fm6Mfzbej1Y6kCPwL9x8V3MvW3B4T6fatQDqQHxMUkfhDFfsABjL7\
IQXOBg581/dA0fcw2sL3IVu7ErPpive3iOM35hPkgPg59zgVZIhaSzgbJljK9wEf+BIG901B9wz7\
K/tBPfsV+zb7K+L7A/cnHhPlAN+tn5ujH823o9WOpAj8C/cfFdzL1tweE+SA9q1AOpAfEwoA+yf4\
ExUTEgDNLgXbBij3IQUuBij7IQXaBg581/dA0fcw2s3xErPpnebS5pvjN+YT6ID4Ofc4FWSIWks4\
GyZYyvcBH/gSBvdNQfcM+yv7QT37Ffs2+yvi+wP3Jx4T6QDfrZ+box/Nt6PVjqQI/Av3HxXcy9bc\
HhPogPatQDqQHxMUAPtK+CcVMCXmBhMSAPc28RUwJeYGDnzX90DR9zDawvchErPpg/dZ7eM35hPp\
+Dn3OBVkiFpLOBsmWMr3AR/4Egb3TUH3DPsr+0E9+xX7Nvsr4vsD9yceE+rfrZ+box/Nt6PVjqQI\
/Av3HxXcy9bcHhPp9q1AOpAfExT7Pfe2Fc4GMvchBfsABg53oHb4n3fl9yES6uMz91kT0PdL+J8V\
M/yf4wYTKPcB+XIV+wAGMvshBc4GDnegdvifd+X3IVu7EoX3t/tS4xPE90v4nxUz/J/jBhMYX/lC\
FRMozS4F2wYo9yEFLgYo+yEF2gYOd6B2+J938PESmOaC44PmE8j3S/ifFTP8n+MGEzA8+VYVMCXm\
BhMk9zbxFTAl5gYOd6B2+J935fchEn73WTLjE8j3S/ifFTP8n+MGEzBJ+OUVzgYy9yEF+wAGDqB2\
+F/LS9rP9wiLdxLM41T36WfjE6X4f/f4FfcpJaw6MVpOb3geiQYTxdU4/J/j97EHE6X3IuOrveGl\
XTAe+9bjBxMSJflmFXiHhXBjG4N3kJN4H3WUBZN2dpJxG0huW1CDH7sGpZOXnaYbl5mIf64fpYIF\
gqaZh50bxK+8x5IfDn3X+CTXwvchEq7m9wD3WYrmE9Su95oV+xvX+yH3O/c71/ch9xv3Gz/3Ifs7\
+zs/+yH7Gx7mFtGl9xb3EvcSpfsWRUVx+xb7EvsScfcW0R4TKPfF+GwV+wAGMvshBc4GDn3X+CTX\
wvchW7sSruaS97eR5hPKrveaFfsb1/sh9zv3O9f3Ifcb9xs/9yH7O/s7P/sh+xse5hbRpfcW9xL3\
EqX7FkVFcfsW+xL7EnH3FtEeExT3LPg8FRMkzS4F2wYo9yEFLgYo+yEF2gYOfdf4JNfN8RKu5qXm\
0uak5hPSrveaFfsb1/sh9zv3O9f3Ifcb9xs/9yH7O/s7P/sh+xse5hbRpfcW9xL3EqX7FkVFcfsW\
+xL7EnH3FtEeEyj3CfhQFTAl5gYTJPc28RUwJeYGDn3X+CTXwvchEq7mi/dZ9uYT1K73mhX7G9f7\
Ifc79zvX9yH3G/cbP/ch+zv7Oz/7IfsbHuYW0aX3FvcS9xKl+xZFRXH7FvsS+xJx9xbRHhMo9xb3\
3xXOBjL3IQX7AAYOfdf4JNfP9wiLdxKu5nn36XjmE8qu95oV+xvX+yH3O/c71/ch9xv3Gz/3Ifs7\
+zs/+yH7Gx7mFtGl9xb3EvcSpfsWRUVx+xb7EvsScfcW0R4TJPel+GAVeIeFcGMbg3eQk3gfdZQF\
k3Z2knEbSG5bUIMfuwalk5edphuXmYh/rh+lggWCppmHnRvEr7zHkh8O91581/gl18L3IRKr4Efg\
bve3aeBE4xPJE8r4U/gKFaV+9x77TyQsV/sHQ7tm1Hke8XIFE9HWeKh9YRpRUnJM+xB/zbSIHjYG\
TI6a+wz3Yxv3CuTM9wDSZbP7AaYfE8ozoQVHnHCVthrM2JSm9wOXVGqMHhMk+0b3/BU8Bu77IQXo\
Bu73IQU7BkkuBQ58107I+GJ35fchEs/j2fdZbeMTqhNq+H0W+J8z+7QHE6o/ain7A1JfqOEe9+8z\
/AwH+xHoZtjfuKvIsB6NiQUTaj8HExSk+XIV+wAGMvshBc4GDnzXTsj4Ynfl9yFbuxLP43T3t3Tj\
E6UTZfh9FvifM/u0BxOlP2op+wNSX6jhHvfvM/wMB/sR6GbY37iryLAejYkFE2U/BxMK+xT5QhUT\
Es0uBdsGKPchBS4GKPshBdoGDnzXTsj4Ynfw8RLP44fm0uaH4xOpE2n4fRb4nzP7tAcTqT9qKfsD\
Ul+o4R737zP8DAf7Eehm2N+4q8iwHo2JBRNpPwcTFPs3+VYVMCXmBhMS9zbxFTAl5gYOfNdOyPhi\
d+X3IRLP4233WdnjE6oTavh9FvifM/u0BxOqP2op+wNSX6jhHvfvM/wMB/sR6GbY37iryLAejYkF\
E2o/BxMU+yr45RXOBjL3IQX7AAYO9177atpZdvltd+X3IRKW+HL7r/dZE6j3kOwViQb7H/g+BScG\
91b8rQX7AGGGfl0bc3mQj4AfE2g7BxOoh5mVh5gb9wWUl/do3h/O9z/H9z/I9z8IKgYTFJb3ZxX7\
AAYy+yEFzgYO9177atpZdvltd/DxEpb4cvwB5tLmE6j3kOwViQb7H/g+BScG91b8rQX7AGGGfl0b\
c3mQj4AfE2g7BxOoh5mVh5gb9wWUl/do3h/O9z/H9z/I9z8IKgYTFPtF90sVMCXmBhMS9zbxFTAl\
5gYO916L1PgN1NH3IRKq+Er8APe3E9D4YPhXFdP8KUL3twf7z/wRBUb4StT71QcTKK/5KRU8Bu77\
IQXoBu73IQU7BkkuBQ54nvlinvtqmvcqmgb3opb3EI/9CZgHHqBDeRf/DAnXCuML+MAU974VAAAA\
AQAAAAoAHgAsAAFERkxUAAgABAAAAAD//wABAAAAAWtlcm4ACAAAAAEAAAABAAQAAgAAAAEACAAB\
BKYABAAAAC4AZgCAAJYAoACuALQAugDwAP4BCAEiAUABVgFsAYoBrAHGAcwB5gHwAioCOAJqApQC\
xgLMAtoC+AMCAxwDOgNAA0YDUANaA2gDggOQA84D3APyBAgEDgQkBC4ENAAGADX/zgA3/84AOP/Y\
ADr/pgBB/8QAaf/iAAUAAf+6AAj/xwBF/84AU//OAFT/zgACAAj/nAB3/5wAAwAB/8QACP+cAHf/\
nAABAAH/zgABAAH/zgANACT/4gAo/+IAMP/iADL/4gA1/4gANv/OADf/ugA4/84AOv+cAFb/4gBX\
/9gAWP/YAFr/2AADAA3/7AAP/+wANv/2AAIADf/iAA//4gAGAA3/ugAP/7oAIv/YADf/ugA4/9gA\
Ov+mAAcADf9qAA//agAi/7AAQv/OAEb/4gBQ/+IAU//TAAUADf/iAA//4gAi/+wAQv/sAFb/7AAF\
ADD/zgBG/9gAUP/YAFb/4gBa/84ABwAI/2AANf+SADf/kgA4/7oAOv90AFr/4gB3/3QACAAN/9gA\
D//YACL/7AA1/9gAN//OADj/4gA5/8QAOv+6AAYADf9MAA//TAAi/4gAQv/YAEb/zgBQ/84AAQA2\
//YABgAw/+wANf/iADb/2AA3/84AOP/iADr/zgACAA3/7AAP/+wADgAN/4gADv90AA//iAAb/+wA\
HP/sACL/iAAw/9gAQv+IAEb/iABQ/4gAU/+IAFb/iABY/4gAWv+IAAMADf/YAA//2AAi/9gADAAN\
/4MADv+wAA//gwAb/9gAHP/YACL/sAAo/9gAMP/YAEL/ugBG/7AAUP+wAFb/ugAKAA3/sAAO/9gA\
D/+wACL/zgAw/+wAQv/YAEb/4gBQ/+IAVv/iAFr/7AAMAA3/dAAO/3QAD/90ABv/xAAc/8QAIv+S\
ADD/qwBC/3QARv90AEr/7ABQ/3QAVv+SAAEAQf/HAAMAV//sAFj/7ABa/+IABwAN/9gAD//YAEP/\
9gBN/+wAVv/sAFf/7ABa/+wAAgAN//EATP/sAAYADf/xAA//8QBX/+IAWP/sAFn/4gBa/+wABwAI\
ADIADf/iAA//4gBC/+IARv/iAFD/4gB3ADwAAQBT//YAAQBa/+IAAgBG/+wAUP/sAAIAVv/2AFr/\
8QADAFb/9gBX/+wAWv/xAAYADf/YAA//2ABX//EAWP/xAFn/4gBa/+IAAwAN/90AD//dAFr/4gAP\
AA3/zgAP/84AGwAeABwAHgBC//YASgAPAEwADwBNAA8ATgAZAE8AGQBRAB4AVQAoAFYADwBXAB4A\
WgAeAAMADf/xAA//8QBY/+IABQAN/7AAD/+wAEL/5wBG/+cAUP/nAAUADf/EAA//xABC//EARv/2\
AFD/9gABAEb/4gAFAA3/nAAP/5wAQv/sAEb/7ABQ/+wAAgBG//EAUP/xAAEAAf/YABwADf+hAA//\
oQBC/8kAQ//JAET/yQBF/8kARv/JAEf/yQBI/8kASf/JAEr/yQBL/8kATP/JAE3/yQBO/8kAT//J\
AFD/yQBR/8kAUv/JAFP/yQBU/8kAVf/JAFb/yQBX/7oAWP+6AFn/qwBa/7oAW//JAAEALgABAAgA\
DQAPABsAHAAiACMAJAAlACcAKwAsAC0AMAAxADIAMwA0ADUANgA3ADgAOgBBAEIAQwBEAEYARwBI\
AEkATABOAE8AUABRAFMAVABXAFgAWQBaAFsAdwCTAAA=")}
]]>
</style>
<g>
<defs>
<rect id="SVGID_1_" width="850" height="600"/>
</defs>
<clipPath id="SVGID_2_">
<use xlink:href="#SVGID_1_" overflow="visible"/>
</clipPath>
<g clip-path="url(#SVGID_2_)">
<image overflow="visible" width="4368" height="2912" xlink:href="data:image/jpeg;base64,/9j/4AAQSkZJRgABAgEBGgEaAAD/7AARRHVja3kAAQAEAAAAHgAA/+4AIUFkb2JlAGTAAAAAAQMA
EAMCAwYAAunsAAbw9AANNlP/2wCEABALCwsMCxAMDBAXDw0PFxsUEBAUGx8XFxcXFx8eFxoaGhoX
Hh4jJSclIx4vLzMzLy9AQEBAQEBAQEBAQEBAQEABEQ8PERMRFRISFRQRFBEUGhQWFhQaJhoaHBoa
JjAjHh4eHiMwKy4nJycuKzU1MDA1NUBAP0BAQEBAQEBAQEBAQP/CABEIC2IRDwMBIgACEQEDEQH/
xAD3AAEBAAMBAQEAAAAAAAAAAAAAAQIDBAUGBwEBAQEBAQEBAAAAAAAAAAAAAAECAwQFBhAAAgED
AgUDAwMEAwACAwADAQIDABEEEAUgITESEzBBBkAiFDIVFlBgIzMkNDVCJXBDFwdEJhEAAQMCAwQG
BQcJBgUDAwIHAQARAiEDMUESUWEiBBAgcYEyEzCRQlIjQFChscEzFNFicpKyQ9N0BWDhgtJzs3Ci
UyRE8PFFwmM0g2QVNXXik1SjEgACAQMCBAQFAgUEAgEFAQEAAREQITEgAjBBUXFAYRIyUGCBkSKh
scHRQlID8OFicnATkoDxgiMzorL/2gAMAwEAAhEDEQAAAOnn8fi9fk+lvzSz6V80r6V80T6V80Pp
XzcPpXzY+kfNj6V81U+kfOD6SfOQ+lnzg+jvzY+kfOD6O/Nj6S/N1fo586Po3zo+ifOj6N86X6G/
Ok+hfOj6K/OU+ifOj6J86Po588Pob86Pob85T6J88PoXztPonzxfoL87U+hfPU+gfPVfoL88PoZ8
+PoXz1PoHz9PfeAPfvz4+geAPffP0994I994BffeCPeeCT3ngj33gD33gU954I954I954UPevgj3
p4cX3b4JPeeCX3ngk954I954Q914Q92+DT3L4Q92eGPdeEPdeHD3b4RfceFU9x4Y9x4Y9y+HF914
Y9x4Q914Y9x4aPcvhK9y+FY9x4VPbeJK9y+GPceEPceIPbeIPceGj3Hhj3Hh09t4g9t4g9t4g9u+
GPcniD274dPbeIPbeIPbeIPbnij23iQ9x4g9t4dPbeIPbeIPbeIPbeIPbeIPbeHT23iD28PHxl6+
fxvN8ns+ofJSPr9vxu7U+76Pi/X9Pm914jWPbvh09q+IPaeKPaeJT2niD23iw9t4g9t4g9t4g9t4
g9t4g9t4g9t4g9t4dPbeIPaeIs9t4g9t4iX23iD23iK9t4iPbeJE9x4kr3HiJfbeIPbeHa9t4hPb
eGPceHV9t4hPbvhU9t4Y9x4Y9u+FT23h09t4Y92eGPceGPceGPceEPdnhk92eGPceGPceGX3HhxP
deEPdeEPdeEPdeFD3b4Q9x4Q914cPdeFD3nhD3XhD3Xg0914UPeeBa914UPeeEPdeCj3ngyz33gw
96+APeeCPfeBF+gngo954Mr33gD33gD354I96+APfngw994A994A994EPoHz4+gfPj6CeAPfeBE+
gfPj6B8+PoHz49989T6B88PoXz1PoHzw+gvzw+gfPE+hfPF+hnzxPoXz8PoXzw+hfPD6F87D6J88
PoXztPoXzo+ifOj6J84Po586T6J84Po3zg+jfN0+ifOD6N83T6N82PpHzY+jvzY+kfNw+kfNj6R8
2PpHzY+kfNl+kfNj6R80PpXzQ+lfND6R82l+kfNj6R82PpHzY+kfNF+lfND6V80PpXzSPpXzQ+lf
M1fpHzQ+lfND6V80PpXzQ+lfNRfpnzJPpp80X6afND6V80PpXzNj6V8yX6Z8yPpnzI+mfMj6Z8zD
6a/MD6Z8yPpnzI+mfMF+nfMD6d8wPp3zA+nnzKPpnzA+nfMD6d8wPp3zBfp3zA+nfMQ+ofLj6e/L
j6d8wX6d8xD6js+L9TOnF28XbzUWBQIWAoCAKhklEUKAAKVUolUllFlECkKBFEqwBSAVZQKFiKEo
CllgFCxZQJaQpQFBKgthaAEAWUlCUFBKEoBQSUFFSkAAAWUAFIpYoABFhalBSAAlAUgShQBCkSiU
CgQKAlAABSAUAUABKAQAAAsAClgQsAEpfP8AI97xvJ7OOZY8ujPXlZ2e18/6nfh6sX0ecEASiVAs
ACiFIUiwRQAACLAABULFMaACURZYpLKEpSUIqSiwIUQFlighKRYAAAkqKCCkAAlgKQCVSUQIAEpY
BUUkKssAiFEABKICwBYRQABAAEogCkAAgASglCUQACUYrAVEpYEAiyggCLKASxJQSwqABFIsABEA
lQAAQAAAEUggFAShLAFAACAWUJUAUAIAlCBQAAEpYAAIELFIpYAABLAsAAlSiABQAEogUAAASAAA
VKJ6nl+pnTj7OPpwDUBACkAFRKAFFlCpQoFBVkooBQQoAFgoEolhKsWxSUpUhQApAoAAoAClAWUF
IsFAUAFIoFIoAAKVKIVCUAKVLEsohSUACgFFIAUiwKSUUoiwBBVlWILJSUKABBVCACwFIUSgAFLE
AKVKIoiklFikgFBLAoBQEoAAw8j2uDj28LX08/k9eJlc5dnFt3PpNviel6vH0jeAACwAFWBCwlBK
IsCwsogLKIogAAEoASiKSBRSCwILFLAogAsihKIACKAJMoARSSgIAAJRKEoQEpSVCLQRC1BCUShB
YABFgAAAlCWFligiUJRAAJRFgBKCWAolEAlgWAJKKSkmUAIsRMpUVEWAEpUAlJCklEWBYARRJlCB
CUlCLACLAsAAAIsAlAAiiLAFACUACLAsAUABKiBQAAJQlFgAhKAUCAAASiLAFCEoiiBQAEoiwBQA
IqIogUB6fmennTj7OPp5woLBSUQsFEWAoLFUChZVKUEAlolBKKCKJQAWACgAFIqrJYWCgAsUigCh
SwFJZUWFWUJQoAUBVllSUBVgRQlFAFSKUAEAFJQBQQCyxVElFALBUFAAIWUgAKBKAJQAFIsAFQoU
oixBSKAWWwBAUogQFLEAAFWLAoiiFEomrdhL4Wju5vD75r2YS89ks3+p5Hd24+5ccvR5UqkygBKE
oAARRFJFGNsCjGglACUJRKCWFlEKQChAAJSSigAJRKEWBZYlEoRRFigkqIAABKAEUQBYJZSxFikA
FRYgKLEWAVFJAJQlEWApAAAQAAAEUIACWAAEoRRJlBLCywLAACAS0xoRYhSoEFhZEpUsAEUkKQCU
QCWCUklpCkBAARRFEABFEEAoACUJYAoQAACpRAAoAQBFLAAAAqUQAQAQtgAgKABFgAEoEWKAAAlE
WKAAABBKA9Ty/UzZx9vF14C2RQACWUKEqkKJQUCqKCrFiAKBRLYFEKQpFEUACkUAFgUAFEKAALLQ
KssAAWUKEoAKAChKKsFAAAUJaY2wLBQlAUkyiALBZSygAIiwWwtEBQAAAQWVUsAAAFEURQlEoFEs
oWKURRJkICygCUACwSgCUQFSiUIUKEsiyhjkPO8v3/E83rxZ6ePbVNmFl36NnTn7+/zvQ9PksrUi
iKIoAlQohKpKIsIpIoSiKAIoihKIACKIolCKJQASiKIoiiKIsEoAiiCwFikiiLACUIpIpYsEpEog
ABCyiALAUkspQiwBEoiiKWABBAACUIoiwlBKEACUSFWKIACLAEBUogQpYACAASglMaWQAEUICUkW
FgJYLAWEURYSiSwRYCkBLYQFgARYBAAKABFECgBAKAlEUQKAEAIqxYAAAJYpYJcZSSXNjkgWFigS
gBFgAEqWFgAAoCWAKAAAlRAr1PL9TNcXbxdeFFyAAsoAstBCqoCwUoC2UCksJQLCrKgqwIuNBRAL
BZVFSVKolqUAAAKABSUIqgilJZQABQFIBQKJRZQsoAsUSoAKqAlEWVSUAAsAsAEpJQWVQFlgACUE
olAAUSgAAAsLKWWVABSKWUBSKIUhSKAAJSIqkoCJSksABQAsgogMPI9nyuXfmmerzenXp6OQ37uP
q6Y3+z4nr9vP0WOnIKAAssCiVACwAJZSFIsCxCwAABQASUEoBYpAUEECiKIsLLAABKIsEoAiqixJ
QiwAAAiiAAAhUlQAASglIsBaghLKLEABYVIAACUEAsJQiiAAlCKIAACASiUBAAABKIAABAEsLFLJ
AqLACVCwIEAASiAiwSiKSLACLCwAEsAAEoiwCAUAAgsFACAAVKEogAUIAEAUAABLFY5c+dTnw1+f
0b+7h798qrtyQAAUBKIAFSyALKIAFASiBVQAAAen5np504+zj6ecLAKlAJQWZUELKAtSlKJSgkyg
soAqVSwWUSgEABYWwWUlBSCxUKJQpSACgAUgAqpYoAAKlKlAUoAFJQAoJQsWJSiAAUIKABQlBKJb
CKIoUgFWBQikhRKAAFlWUARKCxRSUJQAlAUlCKEoCCwKIAACxSLKKhKIoAASgAAsHJ16s78XHZo8
ns1c1kro59+s7uvg29Of0GzTu9HmSkAAAAAAASqiwGEZuLsKKAAAARSKIpEsUUiwFIAAAAsABAUg
QCLBYAEsoogSUEpYsQAABLAAACFIUxoiURZQsQUBFEBKBZEVSURYASyksqQAEoQAEUQACURQgASo
AFEAAlEAlEsICxZYAlEABAhYoECARRFgsgmUBSQQABLBKJUAAIoSwACUABKIFACAAWKIsAUAISiV
AFAARhNY8d0eb0t2HdF2nq8wExnHjfXs87vkyHTIACUQAKEAJQgoAAAECgAPT8z086cfZx9POFgC
iCkLSzJZSAFlWpRZSWxVRFApYpJRRRFJZQsAQCoKBZQFllQFoJQAVCgWUiwoJQKAFCFBSKFhaBYF
lAFlACoJQKiiKIsFAUAAFiKEqoUCUUiiKBSAUIoAlUgCglEEspZQAFIolAAIKJQlCKAAAAABRAAA
FIoSgFABJjmXx+T0vO8ns0advHjeWerO527dfRuen6Pk+t6PMprmlirKkKJQlgKQAADHh6pfBxz8
/wA3q+q6Pn/oPR5rLN5oEoiiLABKAAAABBQAlBKIogAEsSwAEoiwASqSiKIshKsSiFIAABLAAAlE
sQACUqFEABKIUSwACAqLACUEogAASLACLBZSWAsIoiiAASiAJQBLAABKJMhAQWAJRJaY0CACKSAS
wASwsoixEsBVkEsACLAUgAIsAAlAELBQAAgFAAQAAUIAQUBGM249vFw9El3ct59uGz1eQNZYOXPS
c1x8vp2ej5/odeGVjvyBAUBLAAFAEgAFAAAAixQHp+Z6edOPs4+nnC5FJSqBZQWUCVRZVihZSVQR
bYKBZQBKABQgKJQAsoALBZQAAAsFAUAAAFJQsUAAKBQFAAoKACoKAAsFSFFirBYgCwFqUgFKIoAl
AAoASiUAAKAAAAsCiKEolAIGs2Y+L5PLf1e74v1F+ia9vXEUQIoRRCkoSglLKBYiUAoIAKsKShFE
WHB4v0Pz3m9XJd2vl1xywwTdcOmzZ7nz3R25fRtO7v5wQCUUEAAAlCKXw8erw+Hfdx5Yceu36P5v
2+mPXHp8oUWQAFAJRKEoJYKAEUJYFgWACUQApARYgACURYFgFBEWUWIBACkWCUQIAlKCRYWUQACW
UWACUSiQFiywUliFiggCWAEUJRAAARRFEWAEspFEABFEABFEWACWFlgJZZYShFgWEURYJRFiASoS
gmUEUiwlBFEsJKRKEsCwASiLAJQEohSKIFACAAWKIAFACVEW4scdLrvNz64a8+jnrX15ZejzRZvL
G8uN4c7HzekMXf38nb6fNKdecWAKAAlEACpUQKAAAAAgoD1PL9TNnJ2cXTzrLqAVKLBQtJFKEpQq
hKpLKLKoBQlpFEKRYVKACiWCgAqFKsUgAAAFAAKJRKpjaIoAlBQFAVYLFSKUC3HIiiUACoBSiKIo
FIsFlIsLLAUlACgAlAUiiFIsFCKBSAWUlAAUlSADyvMxr6l87Jfa+d0auPVLOfVYT3va+Q+p9Xn3
jpgAEWVZQiiKAIolACUABSAAAFIDV4H0HicO/Hx9XHw7spmbNvNsXs2ad3THpd/lep6PNZWsRQIl
lEUJSyiRRCnF8r9h8rw9HPZeHbL0/M9rc9nI9fjCwAACLAAogAABSAFIBKIsFgASiLAESiLCywFI
sEogoBKEsQAFSkY0ASglEACSiiIWUWQJVERVQCUgLKQhQAEKQEqIAAAAgFgAgFEAAQFi2WCWJYAA
CAlUlRFEFIJYCAWBKIACWAIlhUqwqYqIoxoSypjQgEogAAIoiwCAWUJQiiBQAgFAQUxxl2Malkxm
rji597hdWN4623Ny6pl6PKGoxmnO5y56PP6IOWlnRqdHRL6/IFiUJSwACUJYAoCVEUsWICgAARYP
U8v1M6nH28fXzhYWFAotlQS1SxFhQtAopKiwZRVKAUogSxSFJZRMoCkUAFAqxYCpChKAAKhSksCh
KApAgFsqgCgEoKKlpAUsJSygBUoKRRFApKEUQAACygBYqhKApFiFgURRFAKWBSSgEoAGPgdPhcds
Tz+m3EWIWACX3fB29M/aXk6/V5gJQFIBUFCKAEoSiUJQFIBKBSAAnk+vw4381ozw8nqysS5ZY5Ln
08+3Tu9fwPQ7+f1UvbhCkWIosUShFEABj859L5eN/NZWeT15fQeD9L25dt83f6PL1LAUiiFIKLIA
ASqlWMVVKAEURYAJRAAARSRYFglKAliLABFEUQUKYqSKXG2JFEKSUACFgAiUQBYFLAiUSlQFgRYF
gABFJAAsUQqRYACKUkKSWBRAAJRKiggEqBYJRFgAAlgUYrKBEogEoiiLACLEAiiSiKIEiiASiSgs
EUgAJQhSABQhKEAFAAGM1hzYTh6McZhx7b9nJU7roz6ZzuEluq4Z1l2aert5w68mE046tNw5dsNe
eHHoXNHoYb/T5g6cwAEoixQAAEohVgAhKIogUAAB6fmennTj7eLr5wssUlBlBZQsqzIiVSWVSwpC
gpVhUWValAJVIsCiAoALAoAVQiklAAACyglQFBKlUBZQCKsWWUUAUUEAUUolJQLKAJVJVAWZSkUQ
IAUslJFAFlEoAAtAlIKRQikKSgAAWS0ADl6fnsXg0Z4+T1yWmKxLM8FMszU26yBPZ+h+I9Tvy+jv
nd3bllYsoACiWUSiKEogCwFIoiiUJQAlCcuzxOW9Ojkx83p6t3npfU5MvQzrztmM0353Lrx7PQ8L
o7+f3Ey3IBYFCALCUIonF3c818nr7OXx+/Z6PJ9H18/B6mc7+ei5AFWSkQqkFIhSLApQEsQKAAAg
IpEpYogAAEVIBKEoiwlAAKlCLAEiiAFMaEWAAgmUQCUEoAgoBKEsAQFECxAUCFRKWBCxRBYQsCAo
gEoiiAARSAAgqwgABKIKFiEoBKSLAlIsAEogAIEShFMaiFECygliJQlgWCWCwAJRAABKBLAMZcmj
TnfZr48efXq0apz63DLXz6QZqym7LDPecowqY4sX0d2jH1eTo16sW7iY3jMsc6147Msaw7Mtno86
nTklgChAAEACgAJSxYAABEKQKACPT8z086cnZx9eCLYlpKCqQqpaShKqiwlApKpKqyhFFQVYqgKk
KQAFlEqkosqkURYllApFLFEsqJSAoAIUoFlEUSqCkqrCgpFEoBLQJQABUyAVSAChKMVliigkWFAA
KAAFi2USiARQBLRKQlKAAKaPD+i8znv57H1Hn9HlZ+t5kasbsNNyxXr15zmwy27m+HV2yuTPZjWz
p4bZ7XZ85t6c/qNnzPV05e5fO6emOhLYFgAApAAFgAWApFgYebl6nHy+Jy6buA4egRasTp6vMzzr
v6PH92vGnq8+su3Vs29nZ5fo+jy5jWQAAEoASicfb5OOnj6bPJ78+vD0OmN3Zh09/EWbwAlEUkWA
AAACwoIBKUBFgAlEURRJSCBYoqRYsUkUQpFEWCURRFgFJSRYAJSwIlAEUYqRKVFIsEpEUShKIsJV
qSoiqhSShKIsAIsQVYEBUsARKIsAAEEBQIpIUkoJVgoWIBKIsAqUiCxLJbjksxWAIWKSkqAiWWCU
JRAksBYqCFEgFgKSURYAJZKa9c10Tk146duvjmN9WvTMb2YSZ3WLOqkVljlGE2YmtZKyxzjZsxz3
nHTu55cRm5zFZ054XrjLHHVLlNd566ezV0eryh05gJSxRFgEJQgFilgACpRAABAEURYoD0/M9TNc
fbx9eELYBbClCwKBYLKUktiiyiyqKAUApKLFIsqgCkWEolKsWFlIAAWCgKsWBSRRFEoCkoSgAUFK
UFCZSUsSyligCwLKBSVBaUsgUFJUBKAUEoikiiWiAKWKJQFiCwVZQigAIFCUgCgADXz9qa49nQXi
5PZS+Hh77L5Dq+juOni8f03PnXzmPr8PDty6uvHOuS79dk2apXVs4807ejzMtY9vs+ay68vpsfm9
28fRPO6+mN6W5CpQAFIUgGOWqPnfM9XyPL6VjHRFSbM7LMctIFUL6vV8/wCjJvz382m3s4NnTHuO
Ts9HmSrJQiiKIoiw1/Oev855vRhs1dHLv73o8vX7PFjTWQJQlBLAUgQCVABQhSUEohSCkoiwAASi
UIUiwShKIsQAFAikgAIsBSTKEUShFlRYgCURRFgAlgKkKQCURRLKQpJQIBQQFRYSglEWIAIFgABK
EAlJFgWKlCWAAAEUQCUAQUEJZYlEUQCUJRJQghYShCklElJCkWEoRRARYWATTnW/Hl1Y6dmvmmd7
tWE59cpJnViS1EWIthKItYoygZSyzXLJW7DoTHLLLU06OzmmtViFmRuG846t2Gbr6Xb05ZU9PmLA
AAQpFACAIogUAACFIFACAEogV6fmenm3k7OPrwUoUlABZSpVktSVZQVQUJVAKlFgWClJRUyEKAAg
CyqACWUJlFUABSAUAJbAoihKIpJVIpRQCllSgAUFIAolUilFhVIsJbBKEsAoWIqpSAoIAlUiiKJQ
AAACwslKAJQBAUUlCUCiWUALAoilmjfc3x+P6Pl4ejwse/n8/fj19uqXmm7XWFkszy1E29XFv1Or
ZyZbx6HoeHv3z96+R2deXVcctZAAFIc0ZeLo8vzd0sx1Rkk3zKaz1Y60VbZMoQJYq9frfO75PTy2
6tt/qeJv7cfZujf14SlSglCWDHPhzryPM3aPH7L2cnpWe/tl9njSrAEoiiKIsIoixEolUilikhVi
xAEogEqpQiiAAASiKIUgQAFSiBAIUiwlBFIACLKBIsCiASiKIABKIpMaAALFiCVRAEFAkKIKWCUk
WACURYFLAgEWEokosWFlgBKEABKEWBYJRJkqSiKTGgIAJQlgKSWBYAQqQCURYShAkBTXNTRr0cfR
swk5dcmCXPGJSJakKiWyCxJbFJQqQuWFTIVhMpLljjCoMsQAb8+7rx5dnQ7ctV2NZlLkCAAAAlFS
iCAAEsUAAACAABQAIqJ6fm+nnV5Ovk68CWyoWpSgLChQigUVQFJQWUJRRZRBQChQKlIolhKFFIUF
EUAsolAAUSgAoiwFRKUUlAUlFFgBQJShSiKFWIyLLaS2RJZZFEURRFgKRYFEUASqSUCkoCklEUJQ
AAWFAKRYASgUSgClAEUkUAACy4cfezvw9Pu8fm9Pk6+7Vx68Gv0NJxN01GWWFmWzVdTbhMEy6NGN
npex4WfTl9A+W3az9LPI7946Y5bHz+rh8vekz0hsSbpZqa5iipbbjkBLJksxWJYG32fC9GXbjv1b
zh7vk7uvP3GGfbzi1KRAY/P+t8zx76cV8/fP2vG+i649Knp8sFhRFEURRAJRAFgAqAApAkUQAEUQ
ACVUUShFEAAlCWIWKABFgCJSwIlEoJYJVJRAgAEABFEqFikWCWIWKlIlgURRFhFlAAkoSiwAJKLF
ECEpAACFgAJQlCBKEqApACFIAEpFgllhYFglEABCiWAECSgQFipYgCUJYa+Lfx+f05aWPH0W6882
pRELElqQqUAJRcRYhQXLGmUssa7JUAICmzHq1np2ns8QWAAAqVCWAAACWKKQQAlEKsCAqWAAAqwB
YAPT8z08avJ18nXgFlItlosFEqgKAUKsoShKKCygKCUKsoKJRKAAIsCyiwtIWygsqWFKgqyWJUtJ
QAAURYKACrEFpUACkoUCrKWrLaTKWCxUyhJYkq0mSMaVJYgApFEUACiUsUkUAJRCkBUoAWCygpAA
CkUoAABRFgKkosUgSrBZRz8PrY8+vh6/X4/L6fPw7JnfLr7prPn3dqMdWzVZlqyidOGNrZt1ZLva
eW59Xj6ODM5csasNpjsSW6RJUtGSSk0oRYJRi2dLPH28eJ1d3kD6bi19+pfV+Z7PR5vdat3TMAl0
HkeJ08nj9axnW/6r5z6n0cNks7cQsALAABKIogBSLAohSAAhSAikgAIAAohSLAsEsoABLAogEolC
LBKRLBRYsSUIsAqUiUIspKRKIAVYEgAIokoSxKlIsCwSqiiSgAlEsLAASwBEoSwASgQAASiKICKI
ACUEsCwSiLKCEqosQBKXGiAIpJRAiUIpARRFE17ObPTm07tHl9mOGevnq4pLsmFMpBUFSFIZRC2A
BZQC3EZSQuLIjIYNmyzTl2dHXlydebv55TWJZSLFABQBILAAAFSwAACAJUUokolCKAAEFAen5np4
1ePs5OvBZbEoWUFVYW2UJYFAFFAWUKBSUJQoUUiiUKBMkYrLKApQBQBZYFJRalBClIoksspYiyhR
KgpZUKBZSUCwKBRVltlWiFlFlVLBKJaEoxWJFUUYqRKWVRLSKiCgIoikgBSFIsCwWULCgFWSxKBS
UlqKAABYgoWIUlCUCwAY5l4ub1tfHt5bu0ce3FxepyZ15zs5DRmyMMyzKat4w2Wzt87Z3SeBcsyb
Oexs1KKNJQKSgAlDbl0RNDmRLNRYHbxRPe5PP7N47va+V7O3L6ZxdvTnPF9b5Xl25cLPN6KmZ630
fk+t6/KG+coAJVBKlJKLFEWWCkAKQEUQACKIAJJlBLAUFIACUIAKllIACUIogEpBFBJQSiLAACLA
sAqUECUAIUxoAkBFEACAoEWIAlEWUAQWURYARYAgCWACWAqJYoEoShFgABFgWBKQAgFFgEJZQCUI
CUkAAlEWCWIsALjyb+Xh6derLXw744Z4Y1iAJAUqotMQCiwVAAWkXJML1dHXnw30M+nPi37nTlja
1gAAABKIFABQgQKIAAFiiACAEpYUgAAABAsBVnp+Z6eNXk7OPrwosKFRaABUMkqqQqLbAsoBQKEo
VKtSgFii40qWJQiqlCUBSVSUgoKWUFCTKCyigiFBYEstikoAFlACZCLSVVlWCwtlVSGUyWASxBCo
KADFVksqggFSraS2WCWJFxpQBIoAFIBKFlCxSiLCLEAWUoUUggCgAAAKJQSgAoSiYbE1yed7nFx7
eFo693n9HPr6YauPu5k0Nmu3HLMzzenxaU9Tl1641yXWaGqJbApCgJR055Q5cddilJYSWMgLCelO
T6Ptz8r2vJ16x7vy/o8HLvzzZOfTHbr6tZ+m7Nez1+QLJQAAASiLAAUiwSrJSAqASiFIsAIohTFS
RQlEWAAAEKQUBFEWACUkAmUEoSiLFBEogAEsCwALAKgEogQFixEolBLAABLLACyVKsiwLCUIoSiA
QQAlIUgAAEokoJSAASiLAABACiUCIsApLARLKIABLBKICKSLAa8708u7T5vZhgY1MM8M3HGyUIZY
51nixshJbAAAFJm2WYt3X058vXsvo80VvkAlECgAAoQBFigABAALAASotlEsAAQAlEWAKAAAAlE9
PzPTxrPj7ePryWWwlJQqUBZRFlWgqUKlAoFlJQWC3HJSwVBQAWUlAAoSgBlLKKAFEBUpZRKFiCFl
SkoLApAKAoKoKsqxFCqQoC2ywsGUlJKBSLAAAihSFSKALcbLUAAVCpFLAgCgCghYUKLEAlVFJFqw
sRRKBYALKRRFpAFBKAAAAFLzaPQY6cPn+75nLpw8/To4d9OG3dHHo7OaxhcjZ2eXukx4/o/H1OW4
2yhaJQBmTsyxhxMbFLREREBkAFvqeZ36z9N43TfR5+Dg9byeHp1Dn0y93x/q+3LpZT0eYCUIolAA
CLAAsBSSgEiiASqiwASiLAsEpIoiwFIABKIABKIsAoCLACKIVIBKIUQACACKqVIAAhaQJREpYEii
AASwABAEsLLKAJSASkiwsACVABKIogEoShLAQoCUgEoECwSywAAFQAEqIssAlCASwKJLEASjHm38
HHu1bNfD04zLCWa7jjRArYmOSVcbFxlkAAGXRZo3dXT24cm/a7+eVN4sECUACKIFAACUACBQAAAl
iwABQEsAAEqABAAFAAAAen5npZ1nydnJvlC6gCqsAAKLAqgssygoFlJQAsUWVVlFJYqyKAJRFlVZ
YLVxoCgAFAAoMbCwsAKABCgUgFWKWWKVVgohZRZQACgsAgqUthbcRUJSrJkMaJFgsoBYqixFEKRR
FEFAgACgCgKQBFEoCkUQApKApACkUAAKAKAlIBFEoTHMurR2zG/L5Pe83j24eL1vP5deHn6dcnPt
27i9/mda8HL9F41xz3G1bC1N8rr16ovKWBVSFiMggAAqz0/Nyj1ezVz9eW/yezhm5Zlz69/1Pg/Q
+rzB05SgBFEUJQlEAlCZQlCKIoiwAixApLAsAAEoikxoAsUkWCURRFgAlEWAUlEURYJSRYAARRAA
JRFGNACLBKoIgoBKIEiiLBKSALFALBKSCkogARKJYAAIAACFIoxoIpCJZQlLAAAItSWIURYoACUI
CUkAlgWApASZQSxBqmufnyx8vrxxTO5gwxqzFLbNiMo1McWOdZSAAy22aNvV09OU3WevxhcgCAAA
KAlRFgCgABKlLCpFgCgAJUsAAAlCACgAJUJYFgUsAAA9LzfTzrZx9nHvkGoqrCwigKpQqSkqoUoA
soCqAAosFsqlRCWUCyiygsqwWABUFAsFuIyQCAWKAAApCgsoEUKAFlAosLbEUFQUAAAFSqKQIsou
NWywJUAUVSBSVFLEVSLBLCKqWVIoFIWUAAUAlBKABSFAJQSgABQBSyAooiiKJSIqooCGGY4OP29H
Lr5PH6XJ5+/NO3VnfJh06zdt4uvWfH0fReKzoNtZb+Gxt1SqFCCDIIAACrKAdvX4/uRo4e525+bs
yc+v0Pr8HoerzJVzKEVQRKAAEVZAAFLJSRRJQlEWIlEWAVFAEWAAEWFlECJSpYJSJRAARYLKQUAl
iAAsWIACyhFEWJFEUQAEABLLUAlEUSUkAAlEKQpAAkWUBFEWACWIAAlEUQBAABFgCAsFhUQKJRYh
ZKFJYAFglEAIAAiUQCWSmGEbeHbzY6apuy8/p5HZc64Nfdx43rM7M9PbxDbqqXEUBt1desdO/Y9v
hSzWAAIAACLAAFACEAFABQhKIsAAUAFSyAAEoSiUWAAACAAIVYAB6fmennWzj7OTfKUoUVApYoso
loAFIoAUFFAFQQoLZVWWW40JVkUAW40oUIABLClFJUWURSKAAFlCUFUIFJQFAAFFAoipSpSLCgAF
IoJQAUllAUBQUgoAlBZVSkgAIsCiS2yUlFIogFABZSKUUgQsUELCywFAAUqAEqgBYgABSLAoihr2
SXwuvTt4d2n1+ZfM09/Py68OHZhLls0ZXHNyel5cc6XUthRBBkEAABQAKlG/RD6Hlw6LOTXu5evP
6b1Pjvd6c/VlakAAWBRKCURYCkAAlWQAEWAEWJFEUJQBFglVFEAAlEUQACUQAiAJaQEFAJRFgCRR
FLFgAliAJRLAlCURYBUKkWKSkCRYFEUY0EoiwlSwoEAJQiwEQAAFAgSKJZSAAhSABEpUsCyhEsFA
AELLABLCyokok182N9OvRjz6b5pS5xslwxnZXDhhjy63HPGXBljLpmyS4Z4w36O7hQVZt1jdo6Lc
83Vy9+s99Pd88ACAAiwASiLAAAFAhZYAAAJQIsAUABLILFAAAlFSwAACAAoJYsAHp+Z6edbOTs49
80rWalloAWpQUAssFlBQBZQCygFLEFAKFXGlQUAChUoAAsFlAAChFCUSgAABUpbiLZZQCgBZQBZV
WCgWIoBSAUAABQAQtgoUCpRYipQBZSWUgLAlCKoWIoAAAWUiiWUKWKEsRZSUAUUhSKJSACwChYJQ
AsAEoAAA8/wfpfM49e3v+X9Ppy9Pn6a15Gn2ePj38udOjl1xww6mfB1/R+CmoWEAIAACgAALBZRf
b8LdL3cvXOnPj9Pzurpz+q3eT63XAAAAAAAAEBUpARVkUQCURYAJQlEUQIlCKRZUUJRAAEpAJYhS
wIlEBFVFhYCURYFglIlLFgCQFgASipRAiWAUgWAipAANezxM72TxHm7/AE3V8n9L34dMrpiCxMoR
YAAIAIBFEWACWAACWFgAQpAAARYBYItlRBRKkKsAlCVE5dvjcu96t3bGncnXkFzjz9XLz6a+rk6D
y2GXn9OWGWMsxslkszqY5RPU8n1/JIZLZMzDdrxs2+lwel382ynp8slCUEAEWAAEWAAAKAlEEoAA
ASgQKAAAlSwAAAEWKAAAAEAsoQD0/M9POtvJ18u+claiygCyyikoFACwUAFKAAUKKRRAhSggLQUF
AABZYCkoCiKBBYBSgVLJVgEqkoLLLKChQopKApFhSgQURRKFSgKSoAClEKkWFCglSrUsVKoIsLZY
RYgBYUBRKAEoSqQqygURRASiAoFALEqCglAEUCkURRKCUAJRFAGjw/ovL5defknrxy+z89nvH0DX
t3nn4/UY6fJ5+74XHvu1Y4875Wvs5LIEAAAAAAAAAA9Hf5P0Uvl9und25en6fnel25WFiykAAAso
lgAABFgLZAAJRFhKAEAABFgAAlJBQEWEogLFgBFiAJQlgASgCWBZQEAQLjQEBQEogEsQCFIBLKAA
ENfyv0XzPn9MHn6ve8H3+3H01nq8yWBRBYlEAAlEKRYJYhRAJYWUSUSgQAARQgFgi2JZKAlEWUEJ
RFlJZLxaOzLh33o7+cKllJp2zN5tPRxcPRz5a8uXXZjcBElkTOkQ9fyfX8iybNe0wxzi3GZRl7Hi
ep383Us9XlSxLLAQoJMoQAAEAACgARZAKAACgJURYoAAASxRAAJSwABYAAABKBPT830s628nZyb5
hqACkULLKKAUKCAKCgAqkCrAsBYlBYpKBSgCgFAAAAsoAAsoBKCwCqEC1LLApKBQopRjaJQlBZQB
ZYWCgAS1ZQlACUQFAFJRCllIpSKWWEqBKAAFFAFQFAFIoAAAiiKAAColAUgAACiUACiAKIsCwKIo
mGxL8zu9H5fl0+r8ns7tZ+f9/wArjzv6m8PV14vJx4sb14dXvY7fET6n5rLmnRz5oQAAAAAACAAO
nm3L0bOf1949nqw2enjAlAlCUSgABFEKRRJQFkWFlEAAlgWAEUShARRFEWIlEoRZRYRRFhFEWJFC
WAEoRYFgFJRARYkKuKwtCLAACAAiiLEiiTKVFEKkl1r43kdPP4vbBg9zw/e7cvUHq8yUJYhSwWJR
LAAlCAAAlJFglEBKBKQBKCBYWAlIIoBYJVIAEWA8jn06sfOvDt7zDP1eQstSkiw49fRhw9Hk54Z+
f0Z4ZYWSZY51McsZUsPY8r1vLTVt1bamOcrHHZJdfpeb6W+PePb4oEiwLCWUiwAlQAiiWUgAUAIi
wBQAUAIiiBQAAAWCAAAEoSxQAAAgB6Xmenne3l6+TpzKsAFIpRYSgBZQCgAoFlAACiFJQKEoAoVQ
LBUKCywssLAoAKlJQLCyllEAKWUgKWWAKC3GrQAALKSgEWWFAoAKKAAWFBFECWyqLCUSgsoClxCx
FAAAVUoFJQigAAsigiiKoIiiKCwUAJQlAAACxSUJQAASgAoiwni+3M6+W9rX42NfUeVy91nk7eri
5dct2j6bpm5HXlPG9rHOvhtH0PieftyrM0AAAAAEAAVkrPLG3q9j5r6rrz9gvbhFEUQAAApAAJRF
gAACAACUWAEURRAJRAAAAJRAAkFJRFgliCkUsCARYARYASlSWmFCFJbABKIABLEsBKIsEohKsonk
+r8vy7co8npgTL6bwfqPT51TvwASkiiSqASiASiKIEAAiwASwAASiFJKIogQACUUQSrEsAUx5M6w
5MJ5vVr9Ll6GO7Pl6vT5g1AVKjDl2c/LrzaPU8jzeralsmOWGdSJKQezw9nLZx7MKZjcSyXV6fmd
95+kl9vhhUiwAkoAiiFICAASiALAFASogUAAJQIUgUAABCAAUAAlJYAUAAIel5np51u5Ozj6YWWw
WAUsCghLYVaAALFJlKoAolApAigAsoAKsoKCULKBAUAKRQAUoQUAAACkFlEqygoBQtlABZBQAWCw
UpKAqxYUAAChFhbLCykAWCwWwWUsUkURQlAKAsoAKSkSgAAKQBYAFAAABRACkoSgBYBQABKEoAAA
YfNfS/O41w9W1jp148/q75+Tu9PyufT3NnzvtdefQNZ5/mfrODj2+O1d3Hy3iJQAAQAABbVb3PWW
sjL7H5D7XtjtHbgAAAAAAlgBYBYJYBYAEKLFiFipSRZQCUARRFhKEUQEokUQCWCVUAAWEWAIlEWA
EWCUCVYpAAJRFglCCARRJRFgAlEY+VNdHzeevyetDFGVev7XN0+3xFmspRFEFRSQpJQlgWAEoQpJ
YlikKQAEABKEWAAEWWACCxKAa+OXt5NXVnXJu7VaMtqzj0+lz43yTZ5PLp9Bs8H0evHtYce89vNw
dHProrXz6ep43qcmNczDLHRhcZUAI9fl6NWs+fs17JchuJRq7uLruPUp7fDAJYgCURYAQpAAQACW
AAAKEIAKAEJSpRFigAAARZKAAAAikAWKAA9LzfSxrfydfLvEVpKQsoAFBCylKssJQAKAFCgLKACo
CpSKKAsoBKCyiwFgAsFAsqzKBSFlJZQAVZRJQiiUFlFFogoi0iwWUSwssFCgAUUQspJRaAAWJVJR
QSKJQlUFWKAAIyhFhQACgQAAKRYFEoAFgAoASgAAAKAAAFEUQApCkURYFGvzfW45fE0ely8e2e7L
XrHt6vI9vpz8jl+g8bl26dnmZY10c+vbnWHh/XfOnmM8ECUAEAAZ3Jcd95BBBV6PtflPru/LcOvJ
KIolBAAAKIBKIogJSyUlIqiAEolCBAqKEqIqpKEsAAEoiiSxAIoxFALKRYQqQCURYARVRURZSUQA
A4pex43rxWGiupxa830Xk6M69yfPYZ39G+cH0WPz+mX2+DyWNbtBjYRAOrl9fpz9u17PIlgAAlgK
SVZAAARYACAIlEURYAAQpJRFEUQErVLsc24zFyaePO+zjy65ri6uiayGspYgDHJNcOjfjw7eVh9F
4nPtlt4OyZ9jzbw9MbccsOfXq6fP9a5+fu3Ty7WAEAenrw0WaturbUyY1klsw6OfbJ7Jff8APSwT
KISkIWWUEJYARRAAAQpAAAqWQCgAAoQBAAoAEqQKsAAAlACAChE9PzfTzrdydfJvKlhQAspKEUFK
WIoAWwtlpAUCUAWKACksoBZRKoAlEUCiKAAVLKoCkWFAAoCrAiygAAosqiixCgAAKsUkWCyiyhYo
BQAALACgAUAqykoKRFCWAUVEWklEVUqxLKAACklEoUAACBQLKAJRKCUCkKAoAAIACgAACkVE5unE
8j0PL9rOvF1e35Uert+c9zeNmeUmsblLJ5Xr68b+G5fd8fj10jNBABRsuxrHLHnsCQVXv7PoO2Pn
/f8AC9/fPIu8JRAAAAAAQpKglEKRRFEAKQBRAAJQAlgCSlRYJQlEoRYRRAiUYzLGkomUFmUIsJQS
kiwlCFIBKpLIAiqnhe785z7eT6XmZ+Xv6OOmZ6b5jSoEmC7JhDPn26ExFwliLAA+g8D6bvx7Venz
QEWApFElEUQWRYAARRFgBFgCASykAikWADXl5HPt05+dv8/o3cOzlx0ZacI93R53uenya+rZO3Es
sSiLABOHg59Pcx+e18+nrvJ9bN3dHz2NdXFljx7rMo2yXUen5nUzu8n1vKz0ggEZ4523UQ26tmpk
NSLJVwke7nw9vu+dZWsxS4qSLKCAIAQsUkyxLKIACLAAAFSogUAAAJUoiwBQAEqWLAAAACVAFAen
5np41u5erl3kLKgoLKCwsoFIoiyrYi2ChalCUKJQlACgLAEoWywFCUAolCgQoCWgUAFlAJaCUiwU
ACwWUqClUCpYKUAEBSiKSKAUCoKBZQIoVQlAUAAlCpYsAoiwWAAojISZQFIABQSwFAAAABQAAAoi
gFLBQiwAAKJQlCLBSAAAPH7Nnmy+1p3LPE4PpfKxrs7flPa1n0S6kLLwfKfb/P8ADt83j082NBAz
WbJvWasdaAgzWejv09On2DyfU7+Txfe8P3ZKNZAiiUBAAoiiAAiiLAAACUAAAAIogAQAVcVJABUW
CUQEWBRMcsSLLAKUkoiiBAVKMaJFgAlCUIE+U+n+U4+nlHn67M9Oc1vy0pd2OstxgyY0uvO1odfX
05eS+k6t4+Sn13jy+RcnPWX1fB6vq8sldecWIWAKCQAEWWJQlEAlCWAAEACCAACAIvn8G7T876To
0bsbw5t2s047MdZw7+Df04/Rpfd4BViwS6Vy8bRo8vqDh1Ad/B0az0ef7Xi2hirM1txtXLAm/l69
EuoIyZWtYgQBlnqum1LqYY7MM69Pt5Ov2/OLN4iiKXGhKEWJFlAQAsQCUQAEWAAKEJRFigAAoRKg
ACgAAsEAAAJYtgAPT8z08a38nXybyFgFoSqSgKRQAKAKKoRYiyiyqACCgKsFAKJRKBYCgpKi0sRQ
AoCqCCkUopAAhYqgCALBbBQqywspULQAACiWABRFACyhUAUKoAAACgsFgABFEoLastsYTOGLKVFJ
FgAWBRFEKEoKsCCkooACgAEAAQtQqAoAAQAAAKYeH7/jS+rnx9ljDMeT5X1XBjer1Plu2591z9Gs
ubp1538f5v1fzfn78q7Imc3rOWRAQb1w93bd607NOV7Pa4e/p5vP9zxva1wlNSUSAFWFSKIogAIo
iwAAAiiUEogCwLACUEoiwLAogEpIsIsqKIsAIowWWRQsolCUQAEBFJFgAlEAlHB859J8z5vboZY8
qQmbCrm6MK1tvUvDs9v0OvPxfT6nbhKm+ZYAabtLBcpYJSJSxYgqyUkKYqSCghKqASiLCwAIUgRL
AogGGnVjfHr9XPz+vx719fPfkavZ8jO9WGzDOtWzCb5fT58/T9D5sqWl4M62+Hqnk9YctgALB7nk
9eXTHmDntnjk1FgRZeviyiZdHIZ4AAACANmtW3Wi+30cvV7vnJW8xYAAQEUklVJlCKiFIsAIsAIo
QAAAWLJQAAUAISiBQAAAIslAAAAEV6fmenjW/k6+TeSrJlKLKoBYhQAWBQsq2UsUgJKCyqKSgAAW
FAsoABQqUii2VEspLKLBQAUqygoEBRKCUgKUkUCkBUqiiyxQpQgWBQCkKQAAFAqRQUFCiiVEVUoL
EZQAAFgWUyss1TZGvHbjWKiY5RILAAWgihKJQAAALAUCAKABKEqoAAIKJQiiUAEoAcnXieP7Pge7
LkqxKPN+Y+1+b59PM+t+a6836bxebz9T1ODS4dtenZC8SIAs7Fe/i3dfRu9bpnyL6zWOB3+ZeeHr
eb6SBqFkJVgKEFgABKlAiUARRFEAKQAAAEWABYAFEUSURYkWACVUAlgBJYkWCxVVEFJRAJRAAJRJ
kIQFPN+Z9rxPL7Jjljz1Ayzwtn1/T8t9L6/JsG4lEUkKSUJYAJRFlAJRAShAgEAlCKQWJRFgBKCA
ABAhq1Z1nrvRNcvVlNYSrEmEuzxvS4PP6+PDbr8nr04bMNc/b9Dx/Y9/zx5lmfiJ5PYHPQAAAG70
/I37zzLMaZ4VahYVG6arLjZKUQAAAIAB6/bx9fv8Flm8AJYFgBFEliAJRAShAJYAJRFgWAAKAlko
ICgBKlCWAKAAAlkoAAACUs9PzfSxrfydfJvNstiirBYpFIAAsFKqwUBYJYigKLKpKALBUpUpKpCg
AAoCixKAEpYEtBbKoCoUFigpFRFEVUoSqSUFAsShbC0QFALLChFEWAtY0BSVYhQVQChSEoAlAolC
UAFVblMpbnhtzbr261147MbnCZSzFlKiiUABSUiUJSkoCAAFAACwACwSwUAAACwKIsCgADxu5w51
7I1mMfKj0/B8zk4d9uWnPnvs5fQsvj9mjSvfw9fTq+JPT1M8N9bts8v2PSdcc/qeV7G+YaxFpPG9
nw5e7u5eqwABKJSggBKIALEsAAAAIoiwKJUAAABCpQABKIACLESyksBSLBKGGeJJSEpQoEWIlVFE
UYqIAQBBg1855fVy+P2yGZAi3FM/o/muzry+sad/q88KsABAkoAQAEmUoBKIBKSLBKJYACUSwCyA
AgQVYnBnXZy7N0undsmshZLeXOt3n8vF5/Tsuhx65bdG3O92GzHnvRr6NG8dPs/N3v5/U8u3HTCZ
4ZoIAAbMlxw3Y1Orh67nr8n3fDzqBBsWZ44USwIVAAAAACNuHsdee/M9vilESwSwpABAAikkqkqE
CCgggAShLAAFAAiyAUAAFSogUAAACLJQAAAJ6fmenne/l6uXWbKsoFFWAEWAolBQpVAIFCUAKAUi
iUQVZQUUELBRQFAIAWCxbJSVZRZQCygFoFgpYhSUBSKBVihKCiWUAFAgUBQAQBKABQChaAIqUAWC
gAAApVlqLYXPPXZd7TnKw2jnbddzMcla6WEosoEAFgAAKRRKApKAAEoCAAolCUAAAAAAAYeF7Hyv
Pf0XleHjy69fPqnPpYyXHo17jVnntzdLoxlnZo7uuNGec3jq9fl7+3Fy9WnWPH93wPoJUqyUHge/
4Gb7O3HPUhSWAolCUIsAIqoEASiAAAAAKIBKEAAUiwAAAlCASjFVkpEWUAIJYSWEolsqxRARYhRJ
VQETijux+b8znv7G/E3OvufG8bTNIce8CQJbiq+3wdOufoel8Z9J34+hU64LAARLAAiwSqSoSiSq
SiBIAAlIACKILEsCwlw0S7+LD0c64uvZLA1klSLF0eR0cvj9+vGzj21zPFG3TujdDGsOfo06mob5
MpVmNiDJMbsi1rhYJYCwehwdupeY2ImMtsIWCywKIAAABnl7PXlr6z2eOKsgJRIBKICWjGoJZYBF
gEJRBSVCUQAEUQAAKERYAoAKlRFEUQKAABKkAoAE9PzfSzvo5Ozk1lSwBQoUoigAsFBZQABYLLBQ
AqUUABSKUUlAIFoIAKBCgCioLCgUAFFFIUCKlClAAWUAETJC1KAVBRFAClEKkUoJFEoKKsoAqwAB
KCiwqRQBasoKoLBc8EbbqLnrsRFMZlLMVlLKBFigpFEAAUQFSgpKACUQApFEKRRFEoAAFgIOPk+c
5dNnCw4dyVWWWUt2urOuee/39uXzfqz3t48N7mOs+Xn6Szx9Xujg7q3iY5xPnvofnvoZaLIsHz30
Pz2b7+cupFgKRYAAARRFgJYAABFEoAAJRFgAAAlACWAFlgBFgABFiAAJVSZYmMyhKFspAARYJdSb
J5PjY19H5HiTl16Oc59Fd5o0dvLbrGaAAlJFg7ONZ1dPLt3z+rvD3erzBaBFglJFglEUQACLSWQl
lgEAlCAACQtReWXo5sM83R2bVSpYFkUSUjXnrm/Daej5v09Gjs5k2Ydlzrg3b6ahE1bcbOfHZjvC
ZWzVty1pnjhCwQAAAB6PnZnVx+55a8zKmDLEAAqCwACwdGz2O/DHYerypZYWAEAmUSLAAlEQssSV
KFjFRjbCUqWIAQCwAgCwABQIIBQAAlASwBQAAEsUIAel5vpZ308vVy6ysWVKLKCqAAKAAUAACwUA
AoAspKBSyiAoJQtEALKAFgspFAoCigIKAtlACywAsqpYllCgKsAoFEUCkpAqxSAAAAFEWrKBbEWi
rLiyGNKjKGKkAFVVgQqQzkFQVBWNLcRbjTLEJMpQQsoBQAAQCoUBQAAAAAAWAABQCyiSijkjd8/y
edw7Z87Hl2QuZcouXXl9fuef7Gbv55hmufnfoPA9yXbK1AAIpID576D5735cxYKYeF7ni517qzWS
iKICKEoiwASiLAKSkiwFIolQLABLAUSwsAUiwSiKAIokoSiAASiKSFWSxMZZQFoShFhMNXzub0eN
17vN6fKx9LmzrmZYorpMejfbrk5OzkzrEXAAFlgggI26le97nxP0Ho83rJe2CwQQCLAogEoiwLAB
CkUxUkAlhYA0m7RwdedatvTkY01ksRCgEpIsAX5/V9F4/l9hxPJ6O/LyrXqvP2ZuWOdrVMrZqt5t
Z26cVyCAAAAAFLKDs4x6/nOmuCbcIxCAAo22a92/k1Me/Z6XbhLXp80BYEAIgACWBYCAWJYJRLKR
YEpAiWSgARRAAQAAKAlRAoAAKAEQAKABBKAWD0vN9POujm6eXUCxQUVZRYLKABSUFgAlEWVVgWCg
FAAFlUUAFJQCCgBUKAUihZVlUgShRSKAKWJQFWLBQKJQFIsBQIKBSUUsCwssBSKJQWUKgoKFJaQs
ABQkyGKqUCUQJYSpVsCywoABQWEoxWkURRKAAEspKBQAURQBKKAlAAIAKqUQIY+JnW753Xz+b0XC
3O8Wzac+zs9CvI6fpODpz9jo0b+/CVLKI8H0+a5vpyzUCgARLD5/2/K9LN6ZWpKGnyPU8/OvZGoC
AJRFEWAEAAAlEUQWAAAQpCkBKEWFABAAFgAWCWACURRFEAAxyxJLLJQtUxUQ0nhcezl8ns2zk38+
mrTu6bnly75WvOSWyM6nL1SOCdmnedBLzqAAAEAZYj3Pc+I9T0ef6Vp3d+UoJSwIBCkABFEFRRjZ
SLElQLrM9PGzrHb2ZAayCRVQAEAlEUiBMoXzPD+r8Ty+zzrt38d8vT29m8+XfZnXn4nJ9PwZ14bb
q49AgAAFBBal6OjU4Ju046QMrBbB2aNPRWnHq0mtZIOitm7Hi64eps7evCDvwAiiTKCUSUiUsUJY
RYiUQpjSyFjGlQAhYEAEAAQACWFlgACgQQCgAAoRFgKsABFRAoD0/M9POujm6uXUirJVAVYKCgLB
ZQBQiiAoABQACgsolAUBaIFJQKUAEKBVhQBZRUKIKJZaCAKFBKgqFtACgAAogChagqCygAsBSKJl
BSwKFSrKCiZFxWJKAAAlLAlgCKqgAAKRLQKCriEAFJRQQBKAAKFAALAUlAACKAgA4fKl+g+f4/J5
79zn8nZz2xds6c+71/Ys+e9X1725aNxvm8z09Sef6vzv0UBoBy+d6/h5v0EqwKAlCKTyd0wzfUGg
HFybteb60qyKsQAUEASiAAiwKIogJQSkgoCUIogABSAlCUAUCLEAiiFEogEUiiY5YkliC1aEA8L3
PjcdNvD14eP2cvTvyMc8My4bNZiTNIBgXhuvfNS5gQAFABAFg7PpPj+jry+ycHd6fPRaBFiJQBAR
RBQEBDFMsePkzrdr9LavPvq5gsSiCxKIogCCwBzy9E4OPGvR5Z1y+fv9Kr53Vvmsym8AkqGHN2Jv
yOH6XDn0+X3fQbMa8fo9B058k7Gp52PpyXk37LrEWXOrx/dnPv8ALY+15Xl9WpZgBUG7p4Mqzw9P
XZ57ND270+vyxZ24gJRJSAqCAQACAliAAICUkFJURZUoRZCURZQCEAARYAAoCVEWAKAAChEACgAA
QQ9PzPTzvp5erk1KWwVYUlEAUUCgoJQFIBUKAUllAUpAKFCKBZQChVgoACwsCgFIoWC2IqUlBYKg
oAAJVBVFAFliUAKAFFIoAqUEKACpRSFVVIqDJC5SAokyiIBLUWAEUkqqAAURQsQqgqrjRMsSUSUA
AWglhCgFAAWCgFiUIoiiKBym/wCa4vM4d9mWjo5dsNeXZZ53R6/0Xfh5Ht73TEDIACUfO+5wpfUV
ZFGPz30fgS+3no32JRKACZQ4+P0vIl92VYKeRu5+2XrFgCURYBYCwIlEKRYAJQlEspFgAlJFlFgW
AAQCxRC2RUsWAIllFgKQCWAADDLEiiWWyrIA1/D/AHXxfPrPQ8f2vJ6sNe7XNYZqY4bMZcMdmJjj
nqRxtfTmsXOWNgCAoAAApAgGz6H5rPpz+2vl+p6vOFIAIBFgAlgIVxcMvZo6eyXl6jWYspMokWAE
UkACpcSuTjzfT5OLpzvXq9XccPVsbwmU1mVAACBABChYESwsKCEsCwNOuXq5+TZjr5nN7ujh6fIb
tfLWLd26z5/q92z0efHHY68vK39rOpTpzgRYAEsAUhAEogC4lgkWACUMclY0SLIAiwASiCosAEsA
gCAABUoSyAUAAAJUoiwBQAEonp+Z6eN9HL1cupbLZUqpQWBSSirKLKSgAUSgAoJRKCiikUFAQS0E
ZBQFAUSgEBQBQBQllFSLYCiUBSFBVigCgUgVZQAUACiUAAACiKFIUFgqVQFgtlioAIAKiwKJQAUJ
QAAFhQUUCiMVVFCUSgAACKLCksoAKBApCkURRx/E/oGvHT5nd7/xeOmiZdfHtj9hlv8AT5orfKKI
sACwKNHhfR/PS/Qte2yKHjez58s9DxfbIqyLAoSjV4X0Hz8v0FqyFPB9Xxfcl2qsigBKICKqAixA
AAIsAAAIoiiALAACUAEsABkYrVwWIFgACUQCWCykxyhioxylsVJQHzf0njzXyXo+ft8fp9zVdnPr
rmWsYa+Czu18DWN+mzWSUgQFAAAAApAAEBer6z5z6D0efoh35ARYgAEAaPMl7/P6fQl4e6tSCwoi
wSiSkiwihNfFL6OjyMsb38/d0y+f1dd3jG2awJQICgRYAgKliARRFgAjTLvnFqmu3Xp6JeZ2bDh6
ds1nHIuRFSgESqgQFgEohSBEoSwSiVBUAIsEohEKEAlJMhBUCJRABEJQACURYBAgAACgRZAKAAAl
SpYAAoAD0vN9LG+jl6ubeZkpKAKoCkAoKAoKsUSiAoIWCwUKoLBRChFAoKsoAFAFAKQEoCgAFEKA
qxSJSggpKKAAoW42KAFoAFlCiECwtlBYAWCpQBYVQAUKlgUgEoiygBSUhKoAUCABShaWDIuCkkog
oAAUlQoABSUgoALAsBSKMPL3fJ4658m3u8/o836/X7HfjVdOMoSglECALKTyvW55eP1PmvpSKsmj
owl+e+j+a+lLKsiiKEpZ819N85H0RbIsPmfofm/ppc5lLIoiwFJKIsEqoYxPN5uPj09L1/n/AKDe
A6ZAigBKIsBSLABKIoigCKIbDb3cWv5vt7u3jy49+Xh93xPd5MZXq80WUCJYFEABFhAACJZSzz/R
xX89vbzeT1dGzn6+XTfp3aGdXP0y54MenRqSWVd+OJg7OY1rEBQAAFQAAAMler7/AIX0Ho82Urtz
koBEUjDzJfR8vP0ZrzvTzWQWQtkAlEAlDG8kdeHk8+Nelx4d035nb6WdnPvs3zWLAsiwSiKSABZR
IAsVFRLqXY49Gdelq4t0uOrv2J527rtadlazAhLUWBUQUikKQEUQpJYgKBCkUQEWIACxYiAlCUQp
isQBKIspKIshLLAEogBCywSiLILBLAAAFSoiwBQAAAlSiLFAAel5vpY308vXy7yFUQKSgoohQUFB
QoAIClEqJQqyhRKRKCygoBQoFAAoAWBQJQAUAVLAFQUKUJRKFlABRFCULLCyqAoAWyxjKqFGWOUC
kmUJSgigWUABaCpYKJQiwSyhQIKEsoUKiKIUKUUWZRbCyWElWAJYUgoACgsRQKSglAAABSyZDHJY
iqlCAWAsAAAQqX5b6XyOqPRFJR857/j+lL0rNQogCifP/Q+HHsZ4Z0IfL/T/AC31UFWQFlEAlgWE
MTR5+Ojh023i6cb09WvHWevo8jXrP02XzW/U954/obz0DUKIAajbOfogKLAAAQsonTzcuNcWnKfP
9X6B877Hz3m9PoaOnR6uOofS8BKQWAAAASUQCURYAAeF8v8Aofjc+vyjfo4dNvd5vRnXXhqxy386
1yulqc8zwM+/zcjPD0OM0ssQABULBAUBsm/TG5Y6vqe943td/JFnTABPNjv8/T6MvnelvlBcgJYA
QUNcbJxc8vbx1nWjDd2y+f29s1EreEpJKIsCxAoQSiUIAuo2Tn55fR1+fnnW3n6ek8rd6Czn3rZB
cpRBQEUQpBAEFJQlhYCUQCUiAACpYWAQhYAqWIIAAICUiUSURZSZQEhKsSiAuNgAlgBFkAARYAAo
CVCWAKACgAJUQAK9LzfSxvq5evk1FLBQFUAFgUAKlClAABFgoFlWKFgohUKAUFAFlVFQFWUsCgAW
WEoFIoAKCUWVRCgqCgoCUqIpSKJaWUAFlKliTLEFJlKVBZRBSxFSigUoACwWxGUFikiiKBSKIoll
BQFFJViWiVVAksSCgABSUiVSFBSKCllCKJQSkigpZSABSASqAlAAAAsjn8T6P5aX6ljnZAeZOzTz
33Z9e3O+LT7HkxiPRxFieR7HmHV0cXaSZYJ8x9T8x9QqWAUWCOaOl5enN9p4OZ7HiTz+e+ri5c+P
bZ6HmehHT5/TwXO/p49lY47cEdenTZ3Xzaenu4Gs+pu8DZXocmjLGuv0/L9nvzyMOnPOcHmc99Wn
DRx6bfU8jNenlk4b7fT8TZ1x7vm9XndMvM9Dz/H3+07PD9Hz+nl9W7OufFmqfW+ZuY5agIllFgAB
AJRFgBFENI4dWPLfNq9bKb83ds8fnr2nzWB9Rq+Z6ca7eHey8+dOnTXNmNPS8yxs1+twVzNmtAWw
AAGU3Vdsx6pjuuXt+n53o+jxpePU6uLi7peH0elQWJQlJAARz8kvpcvN0Lx7fQ5Y283J6EvF2dbU
ll1lKIsCxIoiwShFIYGc06pezDzd0bNO3pOLD0avNntayllyC2WAEUQJFgAlVFhYQJVSkWIlgWAC
WAACWACAAAlgXEsoiwAiwAiwLCKSSqgCELAsIACKIQLABCAIoiwABQJUgsAUAAFCIAB6Xm+ljfVy
9fLoLYCrKACgAACyiipSFi2UJSAKAKKShRBQAoqygAoAUAAAUsSigjTn8vl2Z+JOPb7Ho+L9rrn2
rLvBYFAABYUQURQUS2LQLKACgQmUIoAAoJQiiWUpVJQBYKAIoABSKJQAKWKSUVKFWCgVS0ksMQgl
LLApFEqkUQoKSqsUAARYCkUFRKEoFglEURQABKAADwff5TR6Hz30IUauD0/D8/f7TS3+H1a+bf4e
8dMr6fhZYi8Hdojj9PwfVXo17dNnz303zf0i4eB9F4WK9Lw9ONfWcPgak7sMGNte7izduXJ3zWzh
7tep5/blqavTo0czVhmbNW7I871MM9ts5tlxtaIm/DXJeqac4y3ZdK82Wp0xt189zWOeGN7McVad
vPnHW5+nlrn3ZudmvZF2cvVyn1ns/K+9y7+d6/k+nL8lt5p7fJr9rwur08PcnLu78tnL1eUvpZ+P
6sZjUiywBAHBL3zxPQy7Ibjk3eZjXlep5nfy6e3xdPmdufz/AGeXq8vo7OP0t0vjMpY7OInracGW
vV2aV55sVj7HkQ36PX4q4lgAWCzZV3y9GOtrzfY5fY8Ppx93q8zq6cefq9C6mNNSASiFJHJHZr83
Jcuf0t8ef2589b+Tm2Ro6e20hrIEWBYAJYiZYlmnml9DHysprs58+pOPV6Y5d2xYS2JZQIlgCAoE
KQCWIAlEWUERRC1ARYgALFEAlJAJRFgABAoECCklgWEUQpARRAQIFRYAIsQIlElVCklCVElEUQEU
QAAKBBAKAAACpUQD0vN9PG+rl6uXRZbBQFAoLFAIUApVigAAsKAAoAsUCKFKARRRSUBQBYKIAoAV
hj8tmer8/lh5/Rjsx2Z3hlZXu+38P9B34+yrpzELfndmL7t07tQKCLLCgUUUFACoAWUATKLFIAIW
UFEoKKBQJQLAApFEUAqgAWAFlgqiyxSqVAhJcbAAAFBYFlBSUUUFiUIsCiChYKIAoiwWAoiiUJQA
ASgUktPlPqPB9WOsts8X2vG57+s8bq8T5H0Pa+U+r5umM7ycn0vD1Twbz37nn8jN8/u4e6vc1eb5
+8er9F8L62n0nFwcqadfThy6YZZ6rM9WGuMtmGS45Gbjq78Kx1XZZow6/ORl52y59Db5O9PUx1ZZ
6dOm46zNmWWsY3JZhhuk1zbM8cau3lZ3li3Wcd7cdTn2cvNOvra3VM6NmGOLnnp3efcrVG3Tu019
d7Xzf0fPr817Xlevnp8Zw9nL7fJq6vL9Hpy2bOW19Lz+X09+XLNWjh19/DwtnXn7GflWvdz+d6tZ
9jTz+fWeDDj0noeZ0S+7weXv78cPY2tzk+P+68Xl18vv87mjVz+z5XPeHbly877Xl6vUXxZ7Hk6T
bptzv6OLKN2nLWplTH1/Mxjp5vU0aeY26hZTLfNvQ1WaY3LVl3zm+q3w35V35RYkWVF0G6efhm9v
Dv6l8vs61kXRZu1eczdfR32tW1Lm2WkyhFEAa+SXueThL6fLj1R599a153TvWJVyIAJYBYCxYgAE
AlAEAAliACChASxQpASykXEBAAIsAAWBCwSwABZQixAIBKIAACSiLLBAshKEVJKqLAsiLjRYAEQA
AlEAACiBZAKAAChCWD0/M9PG+vk6+XSVbAAFFAoCliklAsKFAWUFJQlWJZSKAAKVQARRVlFlJQAL
CiFlAUuKeR876HmebsHLssVbiNvf5d3n7bp+N+q78ejz/Q+avPxYeT19Ps/Ot5+92fHfU9+XQN4o
gUKUUAsqJQCgigspUoiiKSKAVQFEBQLAqACgACwVQoACwKgBVLVlgEsrGWIAoAAKpFKUBAUpAAAA
AoKRYAACkWBRFAAKCApRFHH5nvfKZfV6fH5uPT6Li03GvR2fN9/h9vuavP8AbxflOXLl9Xkx36s5
d3O07mju8v0+mdnN0cqzr4uixc9ibNDTWXPs26xhsZazFJJmXk2b8s6wyss1eV7OJ4M7+JNiE3be
fcnX0ed2rtstqUkZYiZF1YdEzrRN+Gdats0Z1r3bdVvTr2Y5uGeF5623Tv46xmeB0cndwx9b7nxH
3E38/wCv53rc+vw3m9/P6fPjo7NXfnrz1dfXlpnXosZY542mrHpz3uXDN6Na51dfRzrr1Z4zWya+
9HZza+nP3t3i+r6OOzxfa89fP8fLZ5+/Fr+u+dl1Z6cuVnPG5v8AV8OnTy+rkeTnjnZt0ywuq29D
Rnln6Hm4V3cHq8i8drU68WHWNjUYsPo856e+vT5hKObjPT4+Xtjh3+lK1bKsi8p0avNmbjs9LYad
yakmWKALjyr2zyteb6/Hz9ZxT1szyurqtmKtRLAsEsCxEsAqKSLJQpLEAAASwAixABAAABLAKCIK
SiSkEAEoiiWAFixAEoiwASwLABKIABLAlIsARAShCoUiyIqyAiwShLAEhZYABLAAAFAiogUAFCAJ
6fm+lnfXy9XNUpQFAAsFBQoJZRKAKsoSosLSwAAsCyqCLKACgqyylSgpLKACkpCitW3jT5Xn3afF
6glAWLdtxy1L7vz23Wftfjc+K84OPYB08yvtO74P2fRx+jvF2dMUCyiwUBQLEWCyhQKsAUShFEso
AoFAQmUAFgoURFgySqsFAsFLEWkBbKuWWNlsykYzLGzGZSyAUAAFFUAhQAKIUAlAACgBQJQKIogL
KJQKIoiiVY4uHl4vP19rwezmxvZv8/t8/TLZqz5XP7b83++vTh9PwfU49fleD7L4r0ebok16xvmq
dZyb7u3NUx5LfV2cnXrGOvTU1XqyjVuyvTnjVAoASGndxnJhoxk6dekbJLWzdw5R29Xm99ndcMmq
BFslJQGNsuuat3Prsz5sZrdqzsTpx546Zi47YbJzrXMq9z6n8y/R87z2eV6GdfK+F9F8/wBueDfr
7ctbbOuM9WXJrO+8+ebZln05a2ds0t2rn0uThz03YbMrOvLn583fjz7F7Lr5+vP1dXP63THXq35d
efL859b8zjXN5e70PN38V18lAjo5x7vl8/sS+M9byqvVx9EYY+mxfLdvLqa/V8v29PN19/Fszx2d
Jjy3uxOn6PTo7+bv0+fs0mv0804erYqBCcJ283FumuTp9KpqzrUS85vedrj1OXj3rr1entjxuvuV
q2pZZSQUlEWCUkWKAlglIQAAIpBZFhZYAIBCFgWAAEWACUQWBLC1ARYgKCQCUJQgoJFgABFglEWA
ACUQAEAliARRARZQCEAgipVkABFhKEAECBYAAoCWQACgABK9LzfSzvr5urltUsAAKJQWUBVgoALL
AUFJViUVQAlsASUFAVQFlABRZQIKCUKUBz9I+a877bDl0+Fx+01438hPpfFzeRduN6s+3UaLlhWO
KQEgAAGXr+M3Pt+z8/8Aa7cvpr5/b0xnZRZQIUAUsSpQVZQAssAAKBVhKJQAAAALCWiKWUFCpYUK
FKFgtli3GLkDFSYzOVJlAUigAUCCgAAAUlFlAAUiiKACwALYxudNd2yXCX52Onf5Pp516Zq6Y0+T
hyefrtY83Lp06eDqjT63kehjXTMcvPvz/wBE/PPf3fe16+fz99nxH6P8T34c2GzLXPXtc+rrzx2d
I4vS5t5w69fRqcuTO5bjfOosESpQFQLz9HMeby7MJaVLnpi57+bJOro83qT08tPU3iyLiyGOOyGF
izKEs078M6093N0Y1w56O2b19HLtzNnP0aV25y+berJLNv3v5t9lnXqdGHBjd+D/AEX4ntnz9sx9
PC7pNTDk69Ny3MuvHGZS5xm1WvmxZ1y9nUl5504pyTrmdcuzPXnc65Fz1O3Wef0/My1Pf5vH9Prz
+O0+x4/Dr6OXl7s61T2fNrQEZbfr95+d9L28umPg9f23z3Lrn0+X29OefPv6LPE7/J9Lj2x5OvR1
jl2YZZez2ep14eZ2dE6YiywCNHnR6vnaPQl8z0O3Gplycietr8rYbuXv6F8fd6azn3ZSwEAgAoIS
qgEogRMosWAJFElCUQpARZYASkAlEWAICpRAARYgpisAoIiygIBKSKJZSBUsQsEoiiAlCAAlQAAi
iAQIsQsAVLESqgAiLASyywlCAhSLBKIJAVLAAAFASyAUAAB6Xm+ljp28vTzWhYAoCiUAFi0ACygA
oELKoCwUAIWBQC0CygpFEoUAsAoolAoKSgUs17UeZq9hNebh60zrxeD6rgy+Pw9Hj5dNTZrzQgAA
AC7dKvW9r4+9MfoWfwXo9MfWuPs3kCpQABZSpYCpQlUhQWJQFWAFIoikilAAAUBSKFiM0qgEFSlS
xbjVBLASiKqFgAUhQUhSKJRQACgCUAHBq8flfW9fx/Y0MZqZublj1cvOsvp353jxr6Tj8DXjXped
cue8fX827no+bq2Rrzw143ls5dib9G/XLp1dfLm921fPvl+q+bun6J4nqeRw6+r8V9r5PTPyOerb
04spot3ascusvN6Xn7z267lZrad28bLjd87hj553uHKPQvn9y5wtKlx17ocXP6vHHm49HNcYGTWM
2Ew6b2rl18u2XdcclsxlZzGkiGbBGcmBnhhsluzHHOpZuzXLuw572Z4buOsNW7Qk9jxvYl+t8n1/
m+Xf3vkPtfG3n5ar7PNqi6zs5u7i3jZV6caaa2+fO+a1dBci2Y69umMdevVnfVr1xOnPzbnXq48+
1cmOMrPXlnfP5Xq8NmiWWXv88ej5/R3y+Z9x8P73fj9DMPO78u/zMvTzryfn/usOfX4jP6X5zn0v
Zxeplwc27g6R9Fw/U642p35DEyebx5ejwbNy4d3mVe/i29dnlZewOLqzliWWAQAIlEUsBFJABQEA
lEAlECCrEqJYARYARZQCCAJQlgWAACAAliJRFCABBSKkWKABAJSJRFgIWWAKlEWIBAWIWUQCURYA
JYAJYkpUCRYAJYARRiolCLAQLAsgQsAFAAglAAAel5vpY32c3Vy3QtksoAsoAKSigiyqABbjlAKA
WAJQUKAoRSALC0FAsoBURQKKsoUEpUoLLFgFlEqhMcxp8f3kvxHJ+geNy38u26uPUIAAAABer6T5
F0x9Du+YJ+hbfk/oO/Ptc3TYsFSiwUCkSy0QWxFBQoAAoSkURRKAFAAWxAW41QQApQLccoLBKCiL
AUlAAolFBAWxRKAABSKGOXFl4fLvy4ddWWWgzyxyzpNeUu2as5crEascttmvbqyszY5HPljslxu2
amjk6k69MYcmfndyzPdxdnn24u3ytT9N4nZx7cnL6PXH5nt6uTvwueqbm3j6efrO3zfQ886PS42s
65c95lqzXx+hw3PLdmtnb6PnelNZJhbtc28rHKVr06Tq19HMePju0JcsRn18W2y9Xn5R72XF1S7M
tdWwNeO7GNU26kw0dOK6L3VcNl141h28OWN7a25szl4anP0aLLz9WOn6By7tvHrh5ntc1fCbNeff
lrnVr645stezvwqefvGc39Cq03O6c+abbjpXZybdsvl69/MZMab8cSbO/wA7qOvRo3roz6tedZ8H
Thz6eMs1AGWI9fTw/XnzH1nmeN0z9zzeB9vc/Mel7Pz3Pp635z9Z8ly9Hd28e3rw8vfp+t6c9+9w
+jh3aPG05vXx93bL4/V7F1nzOroUlWJYgEWUBFEBFJAsUiWAEoRYARVQEUQACACJRAJRFglVFECA
QCUJYFglBBZRJlBKSASiFISgBAEABZKRKIsEsCwsBKIABKIAFikkoiiAAgQigIsRKIACLBKIsIsA
AEsABIAABQEWIAFAel5vpY32c3VzXUqWKCigiigAgFCgCgsAoolEUhRKqyygpFEUSgsoAoCwAKCq
BQCkoLLKBKFAlAAoByeV9Al+Q879A5uevhH03jc+nEyxxoIAAAAWLc/R8u6z9xl8rj24fU9PwvRN
fcZfFb9Z+veD116ji2nQwyKAeQew8n0F3WCgsUAAAAWUKiUBQgqCgAWVaUhSVYSiFJQTKEoAAVKL
KqUSgAURRKpCxKHH4fp+Pw69GjfpzcMcNmN9PJlqhtw31hs05Rnlqzq45tZwuQuFZ1pziUz1Gh3a
xlp25u/Vr2S+b6Hm+hhu83v5cX6L6L5T67HXyvU4O3lv5Pyve+f78Lc28uXp5es3cvo8mptymO85
W56zruFM9OzBOG7+Rnq6deUt5+jJea9GRo2bMYwtwNs0cNnZ4vQs5noZnFn1ZycHVnY2460nRdFX
dNMjdjqGxBcc5Ndc5Om6zxu/bRt2bq18fdqxrDPDPydZr24Za9W3Hb672vi/scdNXXzdEfE+f9B8
71zMdePXnnOZ0xh6E09ePVjnjrOXJ1415HTqyy27dPYMcprHl4el57WnDPCayyxyjLp0ddz0XZdZ
1TdrNbLTy68PD7PJjpwt+OpqvT0y6uj0Mc3Pu87s64+b1e7xY32c3m9vO6OfZr3rq6eD2t8+To29
/o8/i+h611NO2zeUpICKIBKSLBLKAAhSShKIESlSiASkiiCosAEohSAAgAIsIsQsAqLESgFRSBAI
sCCoLFIEQEqgiSqiwBEUkygBCkAlCABKhZYoEUQIlhZYoEUmNCKIUhBKRASqiyEssASwuOUAIBLA
ACAAlIgAAUBKiABXpeb6WN9vN08t1RYBQChKoAqSilAQAstBLQFgqpACgKoAAALKUBULC0AoBQCg
AQoqoVBSkUSgURQlEUSUcXkfS3OvhOX9D4Oe/ino+dy2GaAAAsyX2PLz5rAVliM8tROzd50s9fLx
lnv9/wAis+r5PF6a9bVz0+i6/gNVn6VfgvY1n6Zx9WpkoiiUIqlIFWKSVCywUAFVYsKUSoWUAlAA
ogFCKJRaAUiiKJVIqJVIoaN/hZvHwb8vJ6NPRdcunZhnEx6FYXKWY4dGvU22OmAKZEZDFkl1zm38
97OfbM617dGRsuGqLjt4Y9PXN3HfN97+efoM1o7cOXh1y+E+7+E78OlM2cObq5ejr49vPplncd52
astm+ehsxkYJIz06NTsvCOzFndYZbMFmvZiacdmVnHn1VOTLs1nLemnJnuxIsjnnSrnnXicrqhzT
rhyZbMbM8+fKN/Zq78787HXjm9ntcHV0miYZ51lq38/Le/GvPvRMstTb9x+cfUzX0uHJOe78P+if
Hdc+Pw4+17PN53R0Y7zqwz1zO7PnrPRy67bhj6PBZv2JZZOWzr83XjNa2wa2O2st+rbcehs5d6bJ
MLWnn151qx9Ddjfn9e2TWq6NzeWzVhiTnzyjR2TdjW3x/R1V52/s95eT3e/s49vnG/R9f5iWaylV
AkWACACRYFgFRYARRFglCWAEWAIlEFARRAAIpARRAJQxsSwoIFMVELUAggCWEUSZYpVgCgRYiFFE
lECAqWAICwqQCBYAKlECAqWCUkoAsIihFxLKIBKIBjlERagSKWKIsiCwQSwUIBKIAIAgAAUISwBX
peb6WN9vN0810LYAAWFCgLKgLRAApKAoC0BQAAoAFEUAFAFSy2WFAUKCwUKVEoAUEoFgoLKSgAAU
RQAVGv5/6RL+e6vv/H5dPmHpedz6QuUd/O1o3a8k6OXfsrjnfTz3o7Dynr4WeW9OR5rt0roZ4QCN
uofQeH6eGnljLLv85Xv7vmms+/3/ACXTZ9B7Xy3qdcfROHps2sBmlWywBACwApVFBYlVSwhEoClK
IAEKWUCiUAFIAUB50dnn+br49enjlxqbtWvOsmWrN3c+W3eZcrvOOOxGOWWVmLOWY2WqmK5tKN+O
pKjXm3u8/dnXB6kya0ZaNuZPO7MMa6Ojm6uG/K+3+H+u09rl7vH4dvY+D+7+N68uDbo3655cnRy1
1c2/D0Z16/R595yyww1nTp7uTnLdZNsxJIhkwlXbplvTu4ca9Kef0t7rji1saszLHKRUxM5pyq47
Ia26mhuGrPbokz6uXXL07uPKzdqkzWeMN2PPmdOOeO8Jz9fn62WcN6sorTr7eLefvfB7/K4+j2PL
1ez0z8ru2c3s8vRwdcNHnbcby2Ya4nbs4drPRjNNvbp4uhrR0bpZlq7Lqeby+15Vcdxq7+jkzk9T
m4Ow09HVnc3LDRXRjyWa26ctmN63bta48dnBz11zj2crG7pl057NWLzfe/E/ZZ36uhnz6+Hjnj97
5OKy5iyhESiAAiiBEsAqLAACKIABKICLEAiqgEoSiAAhSASwAixEoSwCkollICKSFIsJMsQoSxAC
xYsQBLKBIpYUQIpIAFixEsKlIFSiLEBQSABQEogRAAQAAIDGlkoQAElJCmNlIsAIEAQUBKiKIAAB
KlgV6Xm+ljfdy9XNdBYBQopLKQCgoBFAAAsosq2BQVKJQBUpYsRVJQBUsLBSrKAFSigssoAFlCgA
AKAAKAAQFVBbEWXmOi/J8vPp9f8AIc159ObLu4Oe2NiWBQAqoW4kz2847c/PL6eHnxO9wXU7NWG0
enxLnhx785fNepiea78JeN0azt6PL9DWeXo5teJ9V7P597HafY3yuveeqcXSbYtRYKBQKSkUyWZL
ElhJkqKJQsUiiAKACgIAUAB42bs8rF5+2M2TG5g5U7ufdjZmTpnPKSylMbckoEvOXVjFuUzWWUm3
VlGWjPVJo7PG3r7GrR1c9yXDG8jdHD6Pmelyvn/Q+H0zX3fD3cXPp1eT6Oyz876NG7pxy0brucW3
Xer1tOOzePK0fQeGjq5Oqzhx7N2scN72dcGPp6Dmu3WkcWCdl4dx03T6S+bM+VPT2XY6cG/p2Ly5
2rqyx6C6daXbljrN15u2MGuGuXdnWGrPbvPPOjdZx49mRo69XHl2uGs7eni7MbLeO9eGzXnW/hvL
vP1H0v5962Onrex+c+qbODKe3y8XRj5hvuXTXLzerxb5cWHT0Yvm7OjmufQ3+Pkz6c4avZfPi+rx
cC611Guvdx91z0tW655p36a0bZsjDPbqs2aNGGN7dHUx05ezduNMwz83TDZjtjHn6OY0foPyX23P
rjju5WvNh9z5MllgVJSRYASwAJYAkWAUEoWQCURYAQBYJRAiVUKQBKRYAEACUQAEWJKEWAVFiASg
BACAEWIBKlABELUWCUIAEUQpJYgACWBcSywAlBKCVYsIsLLEAkoEAAEBCwCKIBKIEgALjYAQIQoA
QBAAABKBPS830sdO3n6eduDUUBQAAUlIClIWCygABRVBZSUACwoKIAVCpaFiUVZQCglBRaIFAFAA
BQAoAACoAlKigITIvLnvs1p2ZE8T5L6j5fhuDn1ssKlAUAEBSiSxAAALlgM8tSzoy5R35ecPSnn9
FmvXt0y5+34WdvZ0+37m8/L+o8zWPpdnzvuazuLQpFQAspbMlpYgIQoAoUgCwAKAgAUcvV5cvM5u
Th09Pypu5728+zCXLOWwa6mWO7WdbO6hlLFIoGWOovOqspYq5KxzWGWZ5fD2eZJjL3Vl6WnZy64Z
7dmbz7LhZo6dOrjvr1dPDz1+jatXXjpyeh5Poy/H+b6/j9OOzXt16mi6uz0ZueGOptclxrox046z
nlotl19OVY6djPTYy0szbhY5tvRx25OaNdKVOm6crjLO8VdePP3ZumbtcujR2ZN+fPQymsZ08+Wi
+l7Uz8w35Z3y9HdwOuL6D33H4DLblel5vZ83HXb6Ho+zPN+ddPVonfbweh596/ScPH9LnzfHc3Z5
2uXd347Ojz+v3/U57+A4dnT2vlenu8/WNnF7OjfHyeze3jHHbpzro49miOfDfy643C5LMQ1ZdazT
dyzo3c0jt1aNq3fxS3t5c+mzh29S51bNdtunow59MNuWvOs9cY1lt14cNbeXp56x/Q/hftuPXs4t
vm3fl9vjd/2PldMrriCyLBLAABKIBKSABUogsEAEsAEoiiAiiBAIKAEAEsLASkiiEFgELKIogJUo
BKSAEAASCooiwCApABIpYBKEBKSLACWUgIohSAAAgAVBAIACAAiyxKIUgIsARLFSxACUkEssUBLE
BQIqIAAFHTGft9HD4/f5Gj2fO3mdHm69z2svM2759zz99z0schQAFJUKVQQCywVVllAFBZQAUigC
iUEAWVVgoCiyiUFllKABSKAKACgASiUKIALBZSUAKFA+c+a+h+e8/SWXn1uNhQAoCykACQAAAAAA
AI36N1uGFkMsafTex8T7PR9Tu8XR14+l5/od6eD618g+hfLe0veVAFFtgyYoziklhFlLKCxBQpKs
RYKEoAMNmuPn+Pt5PN3zxz5s6ud2akWaxZkrC3JMWSoBEi5acS6rZZlaKLcrkkttTDPFOLyvW15u
jpmXPXV18Orbo6fP7cb08vVjG/zPS4Y7vP7uLz9PqPqPgPtpdOXm7eXbw/G+n+Y78N+WOUnB1cuX
ox6byc+mPX5+LTZ7PLnqm92rDa1qt2zWjPbqlxx3evmfL4ehtzeXb6fnZ76J09Hbz82jZtzvKen5
3P05ezj9PnzfAZet5Ot9fnenz8/a+v8An/p74PjeT2PHa9Dz869Xs/R/nnbfD3+TOHW/Vw8bvdfo
vX/PeWeb6Dk0c+76efi9U11d/iRzy07eTUbs955/Rt1ZaNE+ys+fy+7+P59dDHh6Y1ejxd+tc/6F
+d9eH3/wmWOOnZydXJr2aPJ+x39Pn/nXp+d6PXllu28ebs4fQ23HFPU4tctN11NmGGxqa7mmnZsM
bN3LLO/HkHbs47q9WOO1deeeE3jht0Y6bdeeOLnqzw4a28/RpNn2f57+gY6eh5PseZjr8phq6fpe
D1N/h93p4dq49MWUQWAAQCABAgALFiBSWCURYAAQAIlEAlEVUBFgBFEWAEACJRAARYRVJYAiWCUR
RARSQAhZRBUUJYASZQAhUgJYAAIogAEoiwASlgSKIsJQgAIsEoikkohakoSwAiwSiSkhSSiBEoTK
SxMJdjAuSEqCoL18/Py9Ge3i6vL6/oOfXr9fl34srnDX0YnhaOrp5dOC/S8s15/R0M9PIx6eHl37
MvLm+X0fFxer15994nTh2sc9QlCwWUAUFgqUAWUWBSUoJUBQBSUFlKlWpYAVQlAACwKKAWVKSKIq
pSKlUVIsUEWUBR5ieR4G/R5e6y52lgsoCgUECJYAgAKAAACAOjn6LdEshYNn2PxvtOn2TR1eny+X
xfRE8j0MfOPV8bDul8r1+3zj13y/o17DXmUJKCyqsFQUFN2bodnLLjdmGoCANT5jGu3q8Ldy37fH
4+6PT4sJne/m6OVNe7V0VsS6zGWVkWUlgMYs04rs0Z4mOcyi5CrcjHK5pbLUmHPJu5GGNZrrxctW
r1GvK0ehLrq2XDNzZSy6eic7zTHoxdv3P537OdfQbPkssb+o/Purh68u3bq9DPfxcNGr1cPqej6f
b59/nfa3z0a+L0eHWvc9Xn9TPm+Bz6efW+/zurRPR7X0nwve8WXmuW79Hi8ro3vv7PD4+nl9nXzZ
S+jr8ru59t/Z52ly2685XJ2qY8vbnGnDejzuvfDm1dOSaL15ZvDn2WOLLpsc+W+y8+e1GGjXhuaO
/djLOXfzmrm6sOuez2vI8qvrfje+R9Nq4px6aMOlc8mrvw3OHj2Z9sd/m5Zr9zh8Pr5b6PQ8b3de
vz/Q1/W8+Py/B6Pj9Mc2j6Pk3rz+navHHq5su3DTwezxYvAenrn52fLLjty49idDTtluevFfSvD2
unNlvZ6a2ezjrDV06OWox22aPrvjPq5r6XzPSx5d/gZv4/oePbs05dOfThy7Lmez43TvPqMcu/JL
LAIsIogEpAWLBKSCgEBKECWwSkSxQILCwEAIsCwASwBEsFhQAIEAiwCoEgLAAgARLBKpAFIBLBKE
AElBLiUCUQABKSoARYAAqABLCAEACWAEUiAlgWUlEAlEAgiKQAkta5m+pxfon535/fx3dl081kJl
cLNbcGnl3yxy3axzWaOfWdXmenjWjm28R3bNfpGjb2zWPG9TXyW+/j89t7cPcvldG52c3D38fT5u
n0fG8/o7XFq3zu3Ht3x27ObHpy0/RacM9e54k68PbvE1ntc3SVLQCygpFAsAtBKBRKpCkUAUKoCw
spKAAoAoKSgAgBQlEotABFECLKs+b+lxk/OsfV8ry+hZZpLBZQFAoIESwBAAUAAAEAdHP0W84gB2
8W+X7H2flPquvPZTrgsMPP8AUHzWz6HTGjRp4UvXs3L2bvm8q+icnWLBQLKLOjNbunn+d7dGn575
/XP9U8/4v1e3P27hPTx5/M49fm7b9F1Y1uy5ttZzVtlvVzY6zqmHVNad+d3jXlVmcxlmeKrjM8Ca
d2qXG2kuQxyuqNuXJ0rtZY3NY6o36NOnF2zVJc5rzzqaejp1fP8AQ341hcdeNbNN7rHTwedc9+3z
/Qxrmz+q8rj18Tbr9K52z6vz+Xbg8z7Xh3j4zq8zn7Tl9nR6esds4sOWtGfXNTz+3fmefq9W5cmv
vL4vZ3JOLT6KOPLpRyTqHJdvNZuvVMXnnTJdGWzns3ZeVu1PQuGfLRNFbuLdj0zlu5erNTJm68rC
W4pjcLpnZqjZ52Po7mnoywzcZYuvnz4Omc9Grq789Hr5ZS83Xwepx0uU5bwmemzLVz83TPk4evj6
Ofockc96+/RyanS4++uPvyS9W7y9U6+9r5ufHX9D8X0/Axz8udue/Z4+fZ7O/B85rz5umdbHoTzO
vf0a5aLv0VnybOlOS9PPLju83tOzl7tOl17cPF25erk668z1ePr1Pvtmjd5vR8v5H03zf0PNjz9O
v0efDl3Xh15+nLM6PT8b0fTw6R34wAEAlCUICUQIhQEUQAEUQAgmUIqyASwAAiiAAixAUBLAESiA
SiEosQCAAiwBEogoCKhKISlgSkgCxZZUgAIsLASgFREqBYAVBAIsLEKCLESliiTLEssEqyFICAGO
dZTgyxvtnMuem+djjr6Gngx5devRrnLtkTOu3DlJ29PH2+rxQejzgMcpN8Pp6Ozy+vt9X468O27o
8vuXz/P9Hz0iibdY9P0vmWb9ds+NS/Z83y/QelovVbw4+tsNPpasJq+NMd4+l1/a/G+nz53B15c8
6kcHL7SXxns6pfE7uvE0beenpPI5z6F5mNnquLos3XT0VizzzdN6s5rienhHn249MKAApKACiiws
oBQFAEoUQCigAICygUAJQASjHLFPi/M9HzvL3WXO0sFAFApAESwBAAUAAAEAb9O23SIAZYk9L7b8
7+w3PoLjl250BRAJSef5X0tX5ju9XzI5tFh6vo/OaT6p4XtVnQet5XqcOu7i7Pm/m+zxfL9jwe3H
0GGSbfb+e6OmdWtny25+3l6509GGnTuy83pt6dG/mmdu2cFnT0ed0V1zJrMthccMQxylQFbTC5Q1
cfd5udPR83Jr1MOYxdUz570XLEzxx21l3cua55a8qnNu3HHl2Q083p+nz18737nP0XT28ufR9l4n
H5N8XP2eb36x9B5ez3eXXzdX0HafmHN7fz3o4et1+B7eN9mdz8u8LlJUqIsJniTOYiSqmOcMJlo1
MNOu+jn1crI6dXmbjbbo651dnNbG/R0y6pvZ1r1bdlvDs7qvJ36dPOd859/KsMtROecXbOzR6G/p
ebu0a+d7tGvhxO/l0de5y6sr0m7fOKz0ebCR6PRzc/Dfpc+jdHPO3ytzo4/Pz749G8tl2aO7rxfN
7euY1jZcVjcLJNzTn5PTw1MeLsw26Ly63X636D8y9XlnO8HoT3cHH632m/P+e5fUfOXHPj2ces46
+rT145Z6Ndz1581s6NWV1kxefpz9OnZy3z7nn7n3nr/NfTeP08nx/wB58D7vPnjb7vFzXLTx6ujR
jjXT6Hleh35d49HCKqLBLAsAEogIpIBKqLBKIsAIsBAogASSqlCKIsJQhSAlBLABLAEihFJLBKsg
EsKgFMVECRRFgABFlRYCklJCkAlEosmUQgAAgAAWASiKSFXFRAJYhLKYbFxa9kGvoXXMcg1I2TVl
Zllp6M687i9Lk8vu5HQy59nodHTn5XX2Xpjlz3ddxyYe98z5ff5/VoZnrcG7Prw5evPV257pXThB
04k0TWzXhjw9XPhu5vN6Oj0/C9eNHn+h56AtQVBUFgCkUOjnG3by5L9b43kzWPoez5K9MfbX5P3O
3P0cPO8PM+r3/O+jp6Fxy1FDG2mnR27Mb83V9H1+f0fJdv1Ovnr5TZ7Wvpjzenzubpy9rX4U3n3n
kbdZ9NzbqzURVgpFASqCgsoAsoAVEoFLKACyiUEoAspFgAIXDLFPiODv4PL3oztKAAUCywBEsAQA
FAAAABLu07rdAkBQNnqeb3tfe5y+nz2wAUEUAgDRvq+DyfUI+Ww9/wAiXZ7nyWs+37fiPosb9vyv
a8z5ft6fz/6zxzw+nTt68Neay7GGWay07O+dnNt2anFubdOW9Ok2+Ts13OefNsPbarZcGIz4Mjtu
W015IbJq1Ru180xrLVlg1r6c89TTr3bI4dme2NO2yujXsylw5uhnWrZslurPpxm8J16uXu9H6v4T
6PPi0/Pac9Td53pbKs29PG+F1Yk+n9H5/Xy33Y/O/Lerj73Hvz3nz/b8P1s319mGfh7WVKlJJr5d
ztnDts6McsZbljRyTHtnRn0au9z1WVj0ayTk6Jcc+O7YvPp27k5unoq614V7tO7S1r2cGetd+3n2
Zzjlv4MY7ebztia/Q5O7pYw89r0+S7Y057ufOM+fryy18XT5+5xZ6s/Rz39WfXx1oymE106eit6M
ceXWeZ6/Rc8nqzZ5d443HNNOjU7ZyZVC6nTePsxYkjLLn6Fx19MrldU1PK783Sef9p8rlL6M8Psn
f0e/wfp+eun4/wC0+N1nLD0eTp282/R+b18HBu0Nc89+Gzz9OSZ6c3o4+vUb/t/zz2sb+u8H1+zP
T4LK4/W+cwzuuenl79OOmjt8/pxr3nnej6/PBcpVQAEWCUShFGKiBEqoCUIBKIESllQASrIABKIU
gIogAEBKIEAgEqoESwShKIsAIAsSUIpYsRFqSiLEBQIUijFRAGtGbVDc0a5euc0XqvLY6Ly1dzTk
mQXPRviYY7lass5GrdC6dtlkygxzkSkqwsijF08vPpXATuaNusZSzWUqoQZbfY59eScHB5PoZamW
euOO6yaurk7fV8xZO/Fu1Y+b254a+fXPbnlu3y18vp+RnvjhHl9GPqef6knD5/oefAAAAAAAALbM
jAFgLBnv5vR03XXh24+j6fg9ep7Dm6unNhh5PHp2+t8V6Xj9v6LfL+h474fL9Tw/b4sNVenlcWZr
x24WcuvuS+Nz/RI+fn0OpfN39HCno7fH5D6R4Sz3b5XQdrDOlBZQlAKCpYWVRSKIUAFICgFIoknM
nU8zgl+ix+RmdXyerHh15SZ1UoCikBYACWIAAAAAAACXbq226RAAp2+55X2TfdT0+YUiwoAEoApS
LADHzPWR8v533XLjfl9fBxeD1fadP517nLp8zu93wunHoGWc3apWrdq02zbp7TcwtmnDr8+zHzcc
N5z6OPsl9PHzYz9Dpz2V87v255vVu5somOTF1Z7cZcZlz1MtMa78cJudGWrOyt8Mc9GPPe7Xnol3
J1Y78v0nz/12cbvkfu/iMturLy99urZr6L5+br3b/PrRvycNWGL53L6/zvq5/U+V6HPw2+V6voPo
8PPw3a48Tu499n0O7k2eLttz8f0E3cGezrebZs19tXl68mefrnLyz2auXpMGGztdPJNW+mXo+Z3p
s4ujKc8scdi4GK8Ppa4xv17cGjlxt38857pXVq9Ey0Y5Xlx72dHTxRrp5t245tuO6XLXp5bnL1G7
ztfLu0nJ5HreT6uOHRh27m309HV5Ouvmw17uzV2umuP0Ofql17ebDOO+8Lm3c/Rju6tjXq7bqzNe
zXumdOHRxs7Mbtsw6+aS+g07eG8phYzcvUMMrWGG5ucvL6bbl0d2jU7dnF5uev6n85yeRmds368f
R4d323Jr53xnN6+i6125Yx5vXt8nrj9B6/nfofL31fI/WfI+/wA9uN9viuOUOXHdp49tvr+XN49y
Y5ejiFkoJRARYAEpARYgAlAJYAShAFhAAiWUAlEoRRAJYARRAAkWCWACWUAliARRFLGEjNqwOic0
OucsOu8mB23kS9M5M7N+uQy17cpefHqzOJ2DindLON2Djz6RzZ7RrucssECyUJYCFpCwJREFBEsA
JYCwIL6XP63k9fieJ0+T5PblY6c2WETt7vF29uPszn6vT5MpjK+i+Zw5vH9Pds5tk1mzylnF0+dc
dm/X2b4cXD62vpjy8/Vyjn6deOOu3t8rXvHf5erHl2wucmc/U8f1k4vP9Dz8UAAAAAAAC54Z26xI
ABfsflP2G9/ynR7fmdfHh0687rPdzbo4tEw8/ouWf2Wd+B9/894+H6JweD7u8ebq9fzvb5NHRo37
mnEuZSwFFJZQIaN9OTj9ey+Dp+kR89t9vQvJ06eY9fP52R9G+c7LPYeV6Fbbr2koLKACCvPTZx/J
6uPX73p/OqfoXJ8RD3OLz2Ndl4kuzCJQgBu0l6NEA2Gu7C62WIAWJUNWCCJQJRFEUQAAJls1526h
Cyk2YdC+99D5vp9c9mWrZ05UApFCUSgWCgAKSkOD0PH57x9jxOz53seL914/Lp8b536j8l35eV0+
Vu1z9fVq2ctZ4567ZsuvtnZqyys2eR6PFqeRbj0zs3Zd+Hkb+3flhnWbjllkuvK4GWWGETX04xrr
ddZVlo15ZWcm/ZZrgdWM7cnqa/Q5tfL+h+DjPgdeqPZz9fhervyYcnfuznDPft4a07l4aWXNlAB8
79F5HbHU5OHc3etydHox53Nq8frj08turL1evx/T8/Xj9bLCsJzdXTrjxZ8e9+tz9OvPLHh7lzz9
ujI4e/RnWPm+54m+mOWLW995eyYy6WvHOXDBnfx57WJ0acHTb5/oa7rghvpezyNTl6uGjdnGjp6d
a49UmdY3px5508We/WdPfNvOrJz1z6NvL1znq8j3e2OLZo61s7ujjeB6HnavVs4t+rt1XddaNnPk
aOqVnHGbGvH1nXr1duGWOXP0YZSYc3oa5njb5Zsz09cmvVvmaTFevZ5/dyuSsWLSWUstNOPTdTh4
/Z19Jx83pc+30nn+TyR7PR5+nPu+n9Dl7OXn+V+c+h8jpy973PnebG/ufkevxfVxzywnu8WdxGOp
rm9mO/m476fV8L2OnPqL6eUWCWIAlCUQCABAiVUAIWWAEoQEUiWKWAJFEKQESGTXgbnNidc5B2Tk
wXucFO5wI7sePKunXhRjsyOfHsyOB3xOG9o4Z3w4r2F48ulZqbSY5JQIliABUAAlEWAgAAlgCShA
IAAALFECRYAoCWIlEURYen2aPovmfQ/O/G+1+c5erjnfrs49XpY3PDenn1L6Hm57x7Pn6dXXGzbh
ux2xWWXKE55lgx27OOL6GHBnXU5tS9GvBN9GFqavoPb9vnz/ADnl9zy9dOX1fP8ATmfH8/0PPmQA
AAAAAAXJcagkAA7f2j8f/W76Py3l26uvjtmVY7ufbLz6bn5++PXybc67ftfivexrHP5/Fn9A1+b9
Hq/P4dvB7/Ds0m8hQJQoCkCkoLKACgGvn7EvlaPdR8vt+kkvznX6XPGHT5/Ev02XyOk+zfM52fRf
E/Q/I5ahw9AQAFBIAAAAA9Hzs1xuI68+LeuU3brPLfSc9eR6nNsXXy+nvPAx97kTzGXpR5k9LlNC
4xUolEWAJnlLbqLE3a/t9X5f2Pp8NZ4+q7N88N+OyyihQAUlBKAAFnNHVPN9OXzODPHx+nkmbnr6
Lt8f3/L6Obx/a3Zv5zls5/X5cce5Gvo5+XT1JwdXXOWWGwXDFLx53Mmybs6w178ImeGquzDRdZzj
VNbnLrXs5+zz63+h5np6bbeezLHHdjTT7nsc+nxvdx9WPbhsvBM/afK3l15NuzHdGGfRs4a17LeG
scllgLHLZ1YeZO09DPz7Xp+VtJ81z7d3u4ex5nrcnHXiaerl9XL1suL1+W+X0fN6+O/X28fbw6eb
u4dXpvsYcw27NWk38XVlWOzz99aevRsZ2+Rv599cWOW931PM9LHO6t+nPPVllzsM+XorHq05Rsw2
6bvgrHfXRx968vP9HVsrds0dWOm/fz6ccuji2Jz78Ofv4617DFmrdw6Xn0cffHfs0dFv22XlTx+j
zs8c9ctPH6OvU09GWJnzehwehqmWi67eDoSZa752t68M8OnTs6fK6s8+rZqzxmXCyc/br3GOGe85
ctfUuq8Hdbv149nPMYZ8CyiwLLFyxq2zKMsbzEvlPRj1tPJdM8Ne5NHs+P62Nfcef5/v418f5H2H
x/t5tmrP0+PJZrOPF2cq78+Tsmuf1fKz47+gvL1evzJWkBFiAAJYAQCMUyasTfObA7HDF75wE77w
xe6+eO+cNOzDRTZhlUw19WRw4+hTz56A4L2l48e4nFl1K57vGtshKWWAIARRCJUKCQUEiWUlEKRY
SwWXEIKwhsmGJtahtc+Uu5zw6GiJ0OWr0zmp0OfA63NDqcuZuaIdDRjHTHOvRNWNzvmqrsa9kYZQ
S2LhkyNVzpqu6GGHRiaZvlmjPYNGzIbPW8Ts83ox8/1vJ8Hvz5t+/OvA6PXz1PJ1+vrl82etw15m
O7V24579ezoYZzUyrczx69+pLbnLqnToXGWrju19EuHped9hM+9p+Xy5u3ydhvg7MO25+b8/0PP1
zAAAAAAAWF2a9mugkAGS/Q/f/J+7evweOzHr5sM8MzXhs1ZvLlJy65+j5tzr1ve+T+yxr5rz/T4t
Z+0+q+c+sz14fD+p+b9Hj8yZ4+vgBKUVACyhKAAUCgAKCwwz5TyfO49Pn69mHFJvZhGdAbctO25m
GzFdayUICggEAAAAv0nzX2ud/I6vpfE1nn69/Z0nX7Hz3X15fQX53Xc/R8nkdEuXL6nZjfx3L9x4
ub87l9Vivz/T73mWc2OvHNz17dNuPL0zN8+d6Xz53a41TZvrhv0Hs6eP9mdeQXNSkxzprzsKAoKi
KEoiiCsfH9LyfN16Ovx8prPDbz8Ou7ky6ZZ9V8hs57+k2eV3+fr4Hi/YfBejz+lNW1ndrzymtPH6
btnyeju49OnHy2s+pk0xcdWPPe7Xz667c+LZZ0YY69Zy0Ta1J6smspw9E10Y4enHJh3Y56atXqed
z9f0X0Hw+TwXHy/W283o68sOfLtvn1rvn9ObvsuLbLmgQWa9fV5vrnTi19emd83sa2Tbc8+Tru7y
5vz30XCfHcXsef8AW8mj6fze/nrm28+/lv0+vze7z9Onm3M3hxy6+uXo8GeLxXu830nB6vk765yN
XVMsbcaq5+lhtxxx0dPBnlovoaWZr6NcvPuuvWenbprpp8/1tGt687lJu4+jcvk7Eqejp2Zxnr38
+Jsy1a8Mrw9upo1+pyy6unj4umfd9HydXLr+hfMdHl89XLkyvPdo3c+ph18m3rN2uZ3fD52Wvt27
seSmTC3dwdbHVkx5csWG2Zw05Z3G7fo6Z05MuTDW+/m5dWtOrm9Fejq5unlxw2ct4zdy5TTXlty1
d2fH18WVlzcrjrh5Uy7ZmNm5o7e/bqbbgxr6rwN3ic+nofS+D2ZedyvtOnX86nt+P7vn69unLv58
+Dpxs5t2izXVo87Pj29bb81suPt9nx3p9+XuYeRnvPo4cmSbphSzIsWEmRMVlXLWN2XPTpy5Fnbe
Cnc4JL3uGWd046dTlp0NEN7RTbjgM5KSqY47Ia20aZvJzzpHLOynFO4vFO8cE9CHBPQlnDO8ee9A
efPQicN7ovE7RxTtHHO0cF7ScU7hwXtHFeyVzOocrpHLlvJz3eNGW0mq5qmOYSjHJCWWwIgpAssB
EAAEWywAQQFAgASLAsVKSA3er4nTx79fHu1+b2Z6cOm54Nvqa+XTinV7fLr8Nfq/D3vyde6deWEz
ysz9Po05z5/Z4PrZvZs8+Z36E4ME9Hf5W1OnVw703d/nbDo5e7py8zg93zbfI9O91x895/oefvAA
AAAAACwuzXnhQSAMsc1/RdPrfK66eZryx3wmzDYc+jdu5dOf6Hd4nHrq1fT9cvyP1Hq+UnxP0XT6
DXtd/wAv6+b7vyn0fyXu8CV6OcWCygpKApFEoAShQCgAFlR8l5H2Xyfn7c0sx0AAy2attjDboIJR
SAKIojLZWl39Nnj32NKeY9TScnp93panocufXvHBfSsvk8f0SvldH2WMfEa/tOWX5Xo9XhzeHdp0
S+m8tZ9z8vo+mzrzPqPmOnU9/wCVx8lN+XDePbs2cvVnWOH1nid+Pi/efKfb7WVvkUQBRKBRKCyw
UAAATm1eRy306tG3z98MpyR26ZsMkuNzp1Jfa6OjDy993wv23w++fF06ejtxu7l6eemG7VS530Y0
Y7LqeXr7dMvBPV86r38nfXXqz1Y1vurZKxx2Ry6PUTfndvbtx7fO/QPgPpp5PT+J935Nn0fP9Xo1
eXpzz5TDPdj59JZy0ODrnq0b+X1Y16u3Kb4Mu/OuXqw5OU9GeVsw6ubPL0zD0eXbevz/AHejjreF
155xvzTx5yi8r5fNj4v0vP8AUeP9Z7PPr+Z5+54W+fV6Pld/PXbnrz4by1bOLWZt5uv1Y3+f2cd3
t8vt4N9XoYb0yw1TOG3VsWzCsbuPuXno5O/zJvp1XVm7t029OXP53q/J6ns9Xh+m1t68dOdbOLPo
XLbjJo1aWevlw2py9dSTqw24OfPVnWvLh6+uO76/4H6bh26Pn+/x06MdvL155sN8uG2ad3dwen5W
t8fd1XWtPJ29MngO7ZvfF6OzVjmw1885dnHtzMzeu2a85vm83o5+vUdd125sOPHDt5spnW2U0dM5
mt+XD1tdNxx5c9fbzb8Hk3HcJ07zh6GmVnuwubejdfL0vhe7Y8vq8bv9nPi+l87xdPcmfBn6fJNH
T7/iatmDfLTo3cmd6+rP0Y8jo7NPPefueB27x7O/T9B0x4eHV5mde55Xt6+PXxndw+zzQb5wAEAB
FEFSohYJRAARRAAJYABUAlJFgAIARRjRIogEsoogIAQBEsCwSxApLAUgEsAQFSiYcs49uq+ZhHsP
Eys9meZnrPoOTZrO9pzszlEWWAQqxRJRFEIDEyYyM2EM2EXY1w2tUNuHHycPT6G7w9nHr723xNfT
l9Lz8kr0vpvlObj33+floz6JhuwuNOWXZlw9WnXM6fYx4pr0J0dM3x83ZrtwZbDXwerizxel43ps
unZz5bufbU4O/HuPkvP9Dz+nIAAAAAABWSsMsUAAbtPa6/rP5t+pflm+fPu13WGVkurDZOXTv7uf
6/z+j1Ob0PC3w8fh7vGzrv6/mfQX1vqfG7N3DyssfofODQsBSUAFlKAAUioigsClALCfPfR4Zfnr
2vL8/fld3PNaW/WYMxg2fS6nzGf6BnvPzHD9y3j89z/QLm/E+n9G08rd6C55d+SgRKNfJ6EjyeL6
MvynL9tjHxnV9LyR5/TOWvV6PmMT7G/G9S/UPnetPX8zZ0nj7fZud8u3bU8bwfc+L579x43Xy69e
ndlZ4vX0Z12cfpS57/b+F9Drj6p43p6xvFACgsAFACwPMy87nr6JxedXfycHZx6aNGe3l05c+/Vn
WudOuXo4pkmKQ2atkX1/V8j3vL31eB72mPzrvy0+nzZ9HPvxrbrz1lujP1c+vl6UePn6LO9WO/VZ
o4PU7a+b6PS512b/ABbl1dGPoY39T5X0vkc9eD08eN7cvocXob8+rfl0cLqzznn05ujXXDn6mn08
+PV6tPM37d2rw6+jG9NWO35/e/Y3eDnd+306M+fPf53ZXPW2cbpuyyxNc6dvHPNuxwuOia9nl3cs
Ms3k09/x/r5fR+z8d9rvXy/l/pn57c6u7zutPS03dy02eTs1O6Y56Ti7OTrejRdl05+rTJjhlbhr
37jm3bsWcpljZhz7+c5JydFnoOHMnD6ujU0+vOCNmPVtXmxnHNbebb1LxejjJNF1p127uXczZuxz
jXq280s6ub0dO36D5v3vP2+RTHtyuWGrV29nD6RoxxumzXC4bcDLCwzxw2Kyw1OeOy6Wst1wji9L
C3OfP08t7eaOvZ08w9pwZcuXdqw6JNmGrLM3461uW7j3ydGceeXzM9G4wcfXHpavEy64+g38fp8e
m3s1Tx9dzRI6bxjr+c9XHrmZeP2+rnxdXFv6uX1Ort59vlXNw+nx92nm9Heenr4Ovpzz1458eund
duL6Xn6MuGs9eVxvp9b5bv0+7+d+h8ztPPJ9HwAQEURRARRBYlEWCURYARYCkABAgihQJKgAgJQl
iAqUQJACrJZYlEUQguEkza8Dc0Q6HLF65ySOxwl7nnyPQeYX1Hkw9eeTZerzdd8/o2er8/uuPf1c
m/vzaN+zOuLX6vLx668eP0Muees7c/L39OuzTs09KYTpdeWnHerQ32Xny2jS3LNV2QwZVMVVKgsC
xZjzdc49+Lfuy8/fTjv6+fTg4vqfJ3nyp3c3PphlrwzvPVNdmz1fDp9Nhzes15+XVNZ8f0MObM9X
Lp3cuvlYer0514vZs5U1eJ7Gm527fF9Vm6PW18d+Z6vN17x8l5/seb6+Olstam/VGKxAAAALswzt
1wkAAe1430r0fofwX03zl83Jo9db4mfqarPM7+bZjfpfReJ3eX0/U8mns1z5uH6HzunP5jqy09+f
pedjj340byBQAAFAFIWyqsRYCyiwACgBYfPeP9J8xw6ZaMMOfXdnpHTdHXU+n1et355jXMACpRZQ
CKJYKAAUKgomOY5+L1avgc31CX5LX9jgfJ9fsc0cfTOazZ817GXLr4l9fPh38iertTk6PW9Dtz5M
fUy68vC4/qKfH8n2+nN+W6fQ4D0PQ+S0n3d+I7bPq3h+lZ13HIAlBLieJwdGjz9sOnRly6bGrZm7
9s4s6lmWph0TMpqjLV2+NL2xnWf13yP0/n66Lv6eW/zDZ7XierzdWTDF6cM9dXDpennjGCXHLVjc
u2Vljtk65d2X1vO/Bbvc8bn08vzPofN7c2PXwufrbPA9vLq6svK8vT1Nmnj531ufzdvSZ49V9POc
uu3Wi6+qXZnt2r4u/tzusPD9bxd75s8Munp+gz4enh5cc9+qTbw9nla16m3n6MZ6ubp4ufLN5XTz
12uXfy1nfL9Gtvl+nwJr7/g/q/dz9r479E5OW/hd/Nu6cu7q4erOuvg6uTUk28zt6nmepxa545cW
63sw05xns4cnP0GmJvaVbpo3Vr+X+k+Q6YvXo06n1HT5HJz3358e1HpeftjZj6Nt5M2uMsMsc2as
DHVpxwdFxO/s8W3nxxx4+vRZx+znrX3eX3/P4dvlWOvvnfry6Lnk9Pg7LicXfqrXlhmuOuRyyuGF
z2aJpXfoabnv1ZYY757tfUc/RkTRlp2Xr5nR36tb8/n+j8K609vJ7Q2XZy4vC7OBhr1bO2Ojq7Nn
n6cOvp5Tmz2ZbzycK9sYdnX2Y1z9erXjfZn5urD2M/L7MXq2cUzfQy8vGPY+e6MOmfT0+F0d8+zv
+f12/QfMbtc9Ds2cXo8Oe/lw6Y9nzdPNw6+zn5exOnPgy4a9BwpdvR5l65/Ub8xz8e/dPAfW+Z77
wFe/PByPceJinuPCp7s8TYeu8q16k8zFPVeXV9OeWPUePie08WR7c8XWe88LFfevz+tPo3zuC/Sv
nqnvvn8q914mR7OPjVfYeRmnpY8MT0MeKnXObI3TCLnhaYTZkc86cjiw9LE82etE8rH2Kvi32R4j
24nivaV419dL5GfqE8vP0ZZ52fcOHLsVx3qJz3eNTarXc5ZjlKhBh5nd4XD0ezhl2a5+H0+n5+On
bl4PodOXcl688fL9Zy7eD6ryeW/fx8jd059GnRefX1R6fIFCKABFglIliiJYCUSykAQXDISnn9Mm
bzerTp7HPfmavXmpxdie/wCcsvTnBjpl7vk8Hxvp/RatDzdsubbhqatmGvc2eZ35dMatnk9Ws6vU
8X1u3P3ubby983ix+s4dvh+L7Xdvl+fX7vzpr5ee5wLw6+vYebj7WMvkT3PNTn2YbbnnKQAo93yf
cdd02Ya49unR3Z1ju6PYl8Xm+slnzmPvac3z/e8bc16fja9Pq8lh1wsoAoAKAAAFQUoEqwUAFSgC
wUGHyn13Bi/Fa+rRw75589l3fRfOfXdJ6w7cEpJRbFIoFBSKMVBKLKCksoEUCilAAADXtsvHz+pT
xNX0A+X5/r8Y+T6va5jl2XUnX1eDzr9ffj959PzeVtLxel1y/J6PveaX4/q9Xkjo9T5DRX6Dfg+4
+u8vkRyTZr8vozx6eLOtu3yPTXXlq2pds2ajKJLhnJRiaqxzq/X/ACP2fLpj06sed8n5P1Pn+vLv
xzwjoY7M2ZMu+WHLq1O7Vz/UTXg39A+Px0ujbr5/Rx+h+L9Tp8zk6NvRGrLv6OV4d27Hz70eZ62M
fKvrfM9XPz+3h5+097VwepLqZ7Tl7cdzfjb9mTPRlx9jfC9Dnt8rz/Z8jp2xuN11z7eCzPqPKmcb
8MGunvb/ACe3j5/R5blnlweho58Z9LHy9vLXVrw3dZ0YY7PLvgz8nw/fw/R/L6ve4d/zPd6nkduP
Z3+b2G/iw3OvNd3VXL3edi4d0uFuc5+lOfn2XMc2OOsYdvH3Lr9bhza5vI9XdZ4Pf0c2rycvocur
j73ifT3OWTDFylhpw38udZ8nZw5Jg3x29XLnOurfo6Z16Ofp5eWdfl+l19c8XXv9TG74f3fxfPfB
q2O02YbebXDLdpyY7MM8HTXhnrYmOXHrO3fhmy1dGS8GDy9vR6/D7mvQ9HzvQ5byqrxO3S1hlz4r
18W/qu/M7cR2cvZ4U58X0Pld0jq2OG2rLz6lbemdWzzu7bxPa7+vDXq7NXPrp25eZ136O/nSbW3P
z8+WdjnrjnZLOOdlODw/p+LtnPT4nV6ufn/f/nXZqfbfP8XZz92nVpvr+bv8zt4+O+3p8nps35c0
53tczF5PoPn/ALC36HyvU8hvy8Ou/V+b5+Hq5Hiz3C+DPfh88+hkfPz6GWeDfdHh5e0PIw9lXi4e
6jwnuK8PH3keDl7g8ae0ryHrjyJ65PKnrQ8t6eNefl24pyZdGMunLOEsxrPLRE6byYndeAeg80ek
82HpPMHpzzoek8vKz0p5tPRecPQcVOxyW56pzZm5pptmKzJKCkBAAJYJVkXGXzefD0vP36rZ6fMs
suHl+s59fn/V2eTz37d8T1uvHbxdnlzpr47t8Pq5t+Guz6Dd836Pq8vpxfRyAEAEogJYQFASxAAW
AgQARbASxEsRRZt12Z+x+V+n8/432fk/T08Menp6KmGvzddvdnOzvy5OT1dHq8+j1fP9HpxnN6fD
nro+7+I9Tzenz93Br68vYvkZ2+zt8TM9jVxJJw9nscu3xmn7z5/OfmfZ8n12/Pc9z0zxlSVK+i9X
4n0o+g9fyOm59HxPrIz8r9bu3teL5/LfZ4LJd1YKVZSFlKgUFlALKEohSUFBZZQABSWUFIoFBTwf
mv0H5jnfGY48e/T9t8B9n1z6g6cgASpVFAFABKIoFAiVKpYlUBVgoBQWAFlUUlCUEBKOfm9Gnjaf
fsfNcn2A+J6Pq9K+Lu7eWTbOLlX0eXhsrzvX4+e+Tdqw5dPR7vB1Zv09+a2HuZ+P2r35atzKEqXV
FvN023T18eNY/YfG/TY36V17uHT4nyvW8z0efqx2Oesd/L0xef1+vpfmfW5/Qnr5PQ4uWY+9+Jw5
Nebu19HVbx9PrdfO+V2bMOWteOWHO69e3RyqzDU0b88vc5sc+bdef6mTPid/ZpOrHxuRPptXg+4z
yZOqTVqy2OmzLl6l+bd3F29RV1N/odvPl5HJ7u+T5n0u6phhnnjGPP6nBOezm2YTe3Nza5dVzx43
ly6PjdZ+x9H4n7S7+S5v0X8/1jLr4+izS6uF36ezTWOTG4b8ezt8/fN9Wjdsx34OXo8/pw7NGeq5
6dOqnXdeua0+p5mZs5s+p1z3Ya5z9Dgb63b8M9WLDDl6pLx4bJi8+S75ZYbdM65d2vdx3hp3c6c3
k+56HbHF6Oj7Lh2+W5PpPnMvN2Tf6JqwS+bObcZvbnoZ6actezr59G/RLnTvu41Y5azk5PZ5da4O
zd3rzbPR041p2XFcsyWYXna358eu30+Tm5Lj0vI5/X1js6NXT5uu2OPnrXJj1zq9Lo4a9Xs8b2+H
X6r57q8jGeFu4vRe3Tc9dLy9epz6NnmXhPR18NX0MvP2ZdPJs2SbUnPXN8z9dw+jnzeZs931Y+P/
AEz4r6jOvS+K+k58+rxeT09/TyfN9u/Lty5Z06+eteHXq5XR9J8191nps8P6rwfRz82elPo+Dzno
jznoxPPegXz3oQ4J6A896A8+ejE856KvOeiPOveOKd0OHH0B5974vDO8cTsict6Bz3cNF2DXcokI
ZXCGbXjXQ04y73PDqcdOpyrOzHmi9V4ana4sZe9w2ztx4odk5EvTjz5Jtw1xrbNaMpjRjauDZExm
1GuXM1qLEXJjUz1Zced83sed2SdF0Tpz6XKrsnFiejPO1y9Hk9/ncPR6HF2YYvFs3zlvju/Kprzy
yx9by+b0cfonn93fjVakZ4mFzxrDLPBNezPA1bNuBqz24GnZtwNOe7Wadu3A0bN2Bpz24mnLdiar
uwNWWzEw2ZSXZrzwItMZcl06eqc+nmb+nLF8j2OTQdvXy3pz2YR24wXOPped6Wdd3P0c+eoUVLu2
8jnvm8v3U18/l7XLNa8rhHPzYfU8O3ymn635TM52XS6ct6Mprnz7t55m3v5U7PY7fo7n5ftt6cfU
8/g5+vGU74SwpQVVlgAsKlFBYKAUihKCoBQBQAolAAClGvYj5XyfvfN57+U+3z6NWK1iWUAKAFAA
CxSUBYFWBAWpRZQBZQBZYoUCwFAoiiLTG0BAAFAA17YcnzHs+Zw7cU9Lp8/b5vT7eJ4U9fPrnzM/
V1ScOzbpTbu4NMe9p8Q163b873r6m7l6kw9fyejl06dnoerw6/H/AD36X+X9OXp3n7OfTF6XJPX9
N6Xx/Xvxebz+b7e55/R29McXV6vZi8fTnhm1q1Ybdeu4ubTaaerDF5+fs5OuemZz1XycrNds8LZn
Vy7NN16Hi+9404cXv+D7TOzDVrzvq1XicunLi3W+rOZO0uW272NWrONurs5meX0MfPuu+eL116zD
Znn492p2x36odPp+F6Mxl5frebwx0el+cfY+nPq/F/oXgY14e3Rs3z7+Lq5J127uPtuuHE34d2zD
rz15+jmk6dvn9fma5a9+7cnkdXNt1NunDZNubr57nPbs5mens19cuvfxbLrfn5vUb5VuKw16t/LG
jCZyMtfdj0ZZRyxr03Xt5fd6G3rng9vq97z9vi8dmtnh2YuzViz35d+vV0TeGns8/U6efLpuOPDu
85Ou6NknVNexZu1Yy9WXPuaxw35NaOrXoXdpyq6GVOW9Ww4+bs8e73ezze3McTm3cd4ci6mPD7Hf
ZpvP9Py6+rr8jVy1jx7MWd3Rydms8+Dl9HTZmsYXPTVvz2fTp7+3VefNxez5GeXffOvC9+XP1Y15
/j/U+b2xl5U9b04eh899Py9Lx/t/jdzzNfdz+jy4Yb+bldurdq53n/Sfzf8AQM73eN7Hj+/zwezy
QAJAARRAJYJlKixAIogEoiiAIAAEsEqxLACAAiiLACASxAoFSxEsLLAACLAhCwSxAUARdXh7tHm9
HsdMvp8thQCABPN9PyuXbVeHPxerscemPUy5bL05cuB2vManpefu6d44M/Tx688M9uzry0Z4q3XH
LfMKISwEsAEsALAAQJZQQLSAAiiAkolELFikx9Hz/Qzvv5unmz0FoAUAA5d+xjXma/Yc9+Nw/R5e
X1/J+p9X26fE7vV5OnL1fG6Obrw6eDPNr0dXm75ZLlrGrD0JZwX1ca8/Lu1JquRdmfDE9LLxqezf
FL7LyNtnpXztp2ubYbbr2AsoJKUpKBUoAUEoAUQAFBUFuNjJC0CygCyhYCxFUEUKAAoFGNsIpFha
lKABSKFWUAWUAAWCiABSUFAB5Pb4vHprZc3Drjh2+XNZa8J2mXoef2x35S84qE1bavH5nu8VeHh6
OGtcO/09ec8Wnbyp+hZ/CdHDt9Z5ez6Tj1/PO72vnuvH0Ofj9m9OX0J57n6PN4UufqPb+R9/z9ff
ujbvLj6OPlq3DVizzOn0+88DT9Dq6vL9LDh556cvG+j1fN6Oa6epy49t342zLC9dfN2+ddeh4XX5
bz9vuaOXOdmeXZnnya+6Y68OrLVvPblhsxrQ79dY1ljVxuo87ysub1ctX0vzXt6z7s3cHCzo885X
V6PPPVh18u6Xv59uXjeV6nz/AJnu4/pXzPo93Lr8HsY9uPZ2+d2ct8nbPK3rbj08m/PllhsY7c8J
emjn7pM7Mt1uvnt+zFmY9WnLThqz3dM2zWezVyb87z9Pxu+OnLkzs9BjbWN5DPizwks7eLO+ndMu
O0ywTRo34bnke827cnrfRaOHb5myufPp6+XpdUTr5unfhuaw0dednHuywZ5tPdpmdePTC479K5at
2tNWe/nOu6uh0mUym9Xnd+E1ybLw659G/g6Gbwcvob7dnfydXHWPDljnLVj6mp4HtdO6bz+y5fle
Wu/Rxa5O3z+jPpMOnDDerycF6d/e1cuOOff4Tn6dsvb8v15nW3Y8+OHm79euXXz59Fx1btO/w9xc
Xy/P+l8f0c93Lzeh6c4en5Hoc/Tl436Z8VvHjTdj182F17/Prl/SfzX9Ea6vnPpvnfZx1D3eMQBA
IsAIsAEsEqyLABKJYEogABBZSAgpLEAgAAEsLECywCLABKIsEogRLAsAIVICWUgHL08eenhel5f0
Pn69KvX5osAIoYY+d5+/Rq0Zeb0eam3OsZjsS9PP0Ysxuo5t2ru3MPTvV6fNJXq88AAlgUQCABFE
AIAJSJYqWWUSgAFglGNABKIACej5/o41283TzTpRVgVKALKLBbEbdnOzvfpluVlopQjG0aMepHJz
+nV4Of14eVn6WJ5/Rlka+jiid+Xj4ntzyC+xz8m436tw5sPSp5L1yeJn7MXxs/WxPPy6sDDdzazv
2+TiexfEse08PdZ67ytq+i4czrunI2QAFCUFlJQWWKhbZQCiKVYAAEUUUigUAAiiUFgoAFWAUoAW
CgAAAtxsUAFSljTL5XFZ4/Tjo7ccaw8ndq6MMd+jti9nFifQbvM9HiJlLcbxLl5+zXrerr1+imnf
nr5zT5PseJrPR9Nt9bh3+Yx+q+Yy8fi+u9zU/O8/ovD3z5tmnCzo0bNmbo9Xyx993/C/Y8Om7B8y
fQ+fsi+vo7dXonma2c3r7OXqk1as8DZ5e+1j183zrPq83yE7Y+vy+P63T3vE7vJP0Phyz4zy/Y4e
7E4elhjphweh4O8e1u4O7nu5ZM3KYZFwyLyfLfYfM+jlx9vBt78/tuDbx+dlC8ctvP1Z6cnTzduf
X1568/HnzN2z5/2cvs/R+J+inT5HV9N8x15dPVxdWL3ce/fjXi9OLteTZq268vds1ct16XF3YJvK
1zef7PmM6efRss2XdozrXqk3Oi6sp2z58/Rk4t/q8+c6e7DPU18fVV4OjHkTv3atnHdS5t0b+cx8
/vw65me72JvDV9h8bx3zWLjHh9Hj6Z1NuPXnt6uTrrJca1XJM6sduaY0tmrPGZWZGPP18sl7uXrd
JjMmsOXr4cby59mvXGa+iN+HPZ7d3yXtcfO+bl6PHuaMvo+SXg9r6Hfx6cXzOfPq7uvg26z2bPJz
Xs8rPHfTQ28/Tpm57bvaOyMPSk58urHk25xj18vdM6urh2MY79WPO9zn6eG84c9ePz/Q+H6efbzY
eh3z1aPL9DHref8AaeFvn851c/Rnz+b+gfB/Sy/Y/P8A0Hz/AKM80s+j4AqKIsASKICLBKIsAILA
WLELACLAACAiqkpIsAAIsAIsBAECosAJQgEsQsEoiwSiKJKIonB3+Nz7cf0fje1MB25xYFgxy5Of
Tk02/L9117eDo17tG1Ne/X1GGzFhePo5Nt3ocPq759WVfQ8UFiURYAAQCUJYARRAJRARZYLLKpJk
WLABBIUTKLJSJQA7/P8ARxvt5+jnmwsoUAULCxQUCKVQKRaCpYWUAAFIohQDFlTRz96PO0exV8nT
7kPKz9TnOXdv543bvN1L7F8OWe68LansXyOg73LsXdizNevoHJr9AeXq9pL4/N9FE8TH3R5GfoDi
3wOjz8T1MvDxPfeLrPevkZHqvK3ne5M16WKM7KqUSygCgUCkUJbEURRCUoACikFKAABbjSyogqUF
AIpSLoOXj5sPL319nPjx6yZYacDq4+0x1JvBepfS253hbbxDQwnRN9q7plMXTjsxrm8j6D5xv9W8
fy9nJ5f2Pjevm+r5uHm4ejo83Z1xv5PQz0+e8/7DTZ8Tj7HHc8nrcWiX7vy/A93lvvu3fL5Hv+L7
HVqx1cDXpa9PZM8+3f5x7PzvT7WtfKfL/Y+J018ys7eEE3a8fbm9nn/V/M8t8vbux6493v8AifT5
79P5X0vFj0vofi/ezr38ps8vbXht1S5yQc+9qfJ8v2HzPq4+p7HyHr899mvs86ct3Tzb2MOni7ef
t6M8L5c5eF7nmdsdnv8A5z9L2np/D/ZfPHL0c27We3Zo28tbODs6DzNO7o9HPlx68jV1+f27zvjC
XVo6dqeVr9LRM8uj0uGuPDLrs0TDdLr7PO6Ds5+TKvoGndjpjzbtLfPljbju2cvV594VC8+7TWnR
s19senhfqOPXh8r6P5nK2ZXLn28tmrs8z0fRl06tzOWGemzJhE2TxOHT6qfNe3JvLmY5YUmjpzjb
lV64LI1a8eJFwzjfo29mO0t4sMc8dO87M/P9beeP0NP1nLp5W7l82dtmjt3b49XN6Pj27rjm6aG8
xr159F3830+zr3vXlqyxx58de+44s+jvTg1+lk6eVh7eg8vD1epPn+j3pwz4mfrYYvBn0azwPT6f
L9GPT4OPf1n3/l/D9ePR6XO4md3oePn08/6n5Pjyb6HjPpfP9i+RknqPPzrtcuRvarWxjQCKSAAg
EqkICyLAQoIoxWAAEFgLCpAJYJQlgAliFEFRYARRARRKgipACAAGr5/1/E83q9vsxy9HkF0gJUL5
ff5fi9eNs8fd5fr+d0a9mvHU69ujqzNczxzrTzbb0z1epwen6PNlK9fniwAAixQSLABLAABLAsIs
AFFUhLKSwATIY2wiiSiBEB6Pnejjfbz9HPNrLYC0pALKAWUBFC0FKsBQLLBRKAABRAKAoAsFBbLK
BZRNW4aOfuHm6vXp4uv3qeLj7uuPK2enzGrbNJ3ZeXrPavgj3ng5V7d8XbHqvO2nY0Zm1MlxmY5n
TTz9fp08jV7tPCx9+x4OXuQ8Tf6fOc3RjrOzLzNR7rwcT6B8/V+geHke08nZHpuLcb2GQKSgAKEq
1BkgAKFIVCyiAAXGmSCyjweP2fI8nomllx65bMdvTPF5vdwdswmo9zw/Sxr0duOvmx5s5NYYbcWr
ux3M4atmrLbejZp815W/m1nt+t+S9zlr2fR+bz83T6DR5Gcem83m1Or0PG5+mfptXkzU0eb53H15
/T6uP2eW/L2bOeXs6vN9/Gufl+n2r8r37+Ozf7nyHXL9Vh836EN05dvS8L2MNdfjfD/RfP6cfiXT
zdvO26h9x4XiM6+gw8v0Omc8SzHi6/Pyz9TV9Tw6tuOny+jbhszjDDZjLho3wmnoyr47H6T5X18P
pPQ+R9eToyuzlNHpeb6XH17EvDGXH14nje/4PP7ef6J8h1aefT5/Zq2dePZt5t/PW3LHLF2cHZzb
mjdw7O87NiJ0GFzssEqHN4vpZzHHp3azm6NOvWsN2GLv3dWHo8+eFsNGnbqu8dPVgw7dO7jqY5YQ
07NdcnVwZ98dnr+b9Xw7eZ5e/RnNNNmPH0auucO3Tu6a27cNjlebfjcz5r3/AJ7SbfS9Rr5+/QaY
ywyTGvK5SXfrzdKnCvXx6tMXbjIx27enl1xt15aeebumcN/mN57dG76DOsuD7r4Xl357NfTzbunj
9Jno8/v13py5d+gXpV5GztNcGXo8Enmc/XvufO6N3e1lq0bp05NPHr6+j0OLDpt7fT4+3j59uuXx
5oyJVsnldc8PZzPreTPE6ZuNazhNkMGUWzXhJu6ODr1jPD3895+dw+lh83n72s8TL19Jw3p1rjMo
kWCwZ5ah0ZcuR1Z8Ns77549K+QPXeQPWeVgew8mHrPJys9N52R3uLKzqc+RuYDOURZYBFgIFglIl
LBZKCWACWCUCAAgWJADWvjYc3r+T0ehT1+VKqLAJeHj2avl+9iuNZ+r46uLXtvWOrl24vTqmUnn9
nJ6O89Xbp3e7xJXXCURYSglgAIACBYAAQABauLIJUQEWVKBKRRAMcoQJAj0fO9HHTu5urmm5ZbFF
WCyhZVAAFgBZRQUUBZYoFACKAACwWUAUFlAKJaCyiSgolBYKAC2WCVUo1a+kcWr0ieRr9weFPeHg
4/Ql8DP3MDytnfqjVt1YnZt8jE9u+DifQTwYe/fAyPdvi7T1XDtOpp2qmWUcuHaXztHsE8TD3bXh
X3EeFfdHibPUwOTZlrOjb5WB7Twx7t8Kr7jwqnuXxdy+o4t0b2GYoRRFlCg8zN9TX5vn89bOLp0e
ftq6Obtxu47NfTPm6e3j6Tnmxub/AGfP7+Wsuesax2Zk5Ll0GjDoubzL4Tt9L3/M7by4+/zvXmev
2tPX4u7bjuhTc16urZXleN9V58nhcPv9Op8T5f3fg+nl4nX5+PbH2XD4/wBb59+EyZ16H1v5/wCp
z39dn430GNa+b0OLU+V1/ccms/HY/RXbxfT4dd17bwy+l8N9fwbx8ms7+YB3cPWvZhhzru4Nuu56
fo/k+vOvqvGyxzdHtefdT6fL5b1vP19Dg04Z16PTwenjXB4n03LXw3Tly+vz+v7Xy/s87u2+tz+X
vR5tW45S+Ls6fP8AZy+u7vkfW59fj72cXfh0b+bfjXTljnzrm3atzj1d2vu7NVR0mWueOcoavKX1
vnezlZ5tkumjqtmebLLrdct3P6ec5VjNacdmF3smTmzWcpMc8bdOvPXqYdnz/r9c9n0/z/V5+3Jo
mVxFI5d+rrde3Xs3nft0b9c8cMtaef42XX0ezuyw47z4enzK0e5wdtxnk2QI1q168ZcMdWZ1t146
RMax4d2jpnZpz5dz2ebm6Jrr9bTt4dOLnw19cY5OnXDn9Pk2c/Ru3c7fLoz8vWz2+h5/fq5Rz3U8
jLll7unh6860devdbzeTNXb1M8PWt27NnPx87sx4M47ueXDHu0MuqzDhcPJ1bPqeaS4+nlWKrcZZ
njlqNPHy7OW85pkb9aHt/TfC/aenzbh0iAAggACWE1boc+vti+fh6Y8nH2EeQ9YeQ9cePs9TA4M+
nUsyw1pvvHiduPEOqcw3Ya8izPYc96sk456Vrg1+oPOehDgndknDegum54pngwOnHnV1ziyOxwbE
63nbl65525Ot52063DkvY4qnW5LZ1YaNedaNTj83q3MfeuPJ6OzR0zM9GuTv2eXqX2tPlCMb4fSk
hldeqzV1NhzTr5tMpq0rs9TyfT1z9TKX6PhliyyxUsQCVACLAACUEogIsFVVSAEoQCKslAEsFQAk
yiSUPR870cb7ubp5p0UsAWUAFAFiWhFFWClAWiKlBQAoihKBSFIBQFABQJVlKAAAoFIolAClliwU
CUWBQFJQKJQUlWUKJr2jmx6x52v1R42v3qfPz6FHz+z3R4L3sDx9nfiaNuOle7LzCerl4uB7zwYv
vvAJ718HI9y+Lkew8mr6Wnm2F1dGUcGv1sq8bL2co8V7drwsvbxjytvZoObyMufy9t+7g1c+nfhx
6830OvzLp6mXmZ3OGnLftw59GjT0uLRq59Ozp1Yrt69OyYy156ovTw/W8uvleJ+g/nGb63k+jw9e
Xl/Z/A/quLoTn8/Tsz4MT0p5/lanpcU9Do4927C3Ts492HbxZ+7mfE8n3XmdufFn8l+p3XwGP6F8
Flza8s7n1Pb+L7ue/t/Gy3c9cW2+j1z5nn9u228W3Y3xu/svP5rwfuvn94+b0fVeR15+WOmO7o9n
zePbg4fY87rz0pbNkm47vU8TO59/z93s8+nlef06t54ujZD0vd+M6eXT6rV4/Tx3o+S+5+R6507+
PZ1x9R6Pyf0/g77pXm3MsRh899L5vfHV9X+f+3rWz5T3/C6Y2b+bok689ezlq79Aw5e3i7a3Za9m
878uXovO6/F3Ju4vW87N4GeHWps3SbNPq6Oc8nq4+npr2d/H14mPP0RZybi269udZ3nvKbZlgaNO
7h6T0c/mvb0+s2+VwcOsywyvNLpXHDLDu2bMOLXLz8/X8zpnPmvFZn6nH7GdSMM2+flnXTlszTC7
9scWHpc015+vr0Lo6+nq5vOelMXzsPS8/Tj3dWGpy+hx79NP1Pk/QcOvl+P9F5K+fr9HX18/Jv3e
vXzHd6vmxrx6eaZ04dnZc+bt7+C66PL811dvp+d7GdR6DGvNx6cnX599FxdN+X6HHLv0dWzfz482
3dZOS6ei2b7njOvn28/OeZ6Pk+p9DzpHTNxuIsG3Ga5eXz/a4OeuPfuxTXjrzS/afKfZ+nz5w65B
EAQKIBKIBKIKSkixQBESiLABKIBjRrw3jjnbDzsfTS+W9QeXfTh5+XXia8teNdd8+x3vLzT0Z5uy
u5y5m9hnZAgUgMchgyRrx3Q1aeyTfzPR62nz9/Kz7upMOrxNWsfQTk7O/LT5/rTn08DZ6vleX0ZZ
+dny16WvSxccdkMLlnLryU4tfqc3Wc2++r05bNtnv8QlAiUQCURYQolEAlCWAAFuNUABLACKIoSo
iykoiwIEyxR6HnejjXfz9HNOlLQAAFEKKABUoKLKoFBUsUCygpKACUAAAUAFSgsoFspZUsWWAVBQ
CgkWwtSgCygBQKABCiikoUBRFEULEVKLjVoBSKACiLAAUxx2DTj0U48PRR5eHrDyMfZHiz3ejOvm
3udcvy767hPnfN+s+YxeTV63L5+3LOzfXlu3fm8Ofo56nma/VxPJvrWzwuH6Dyrrg6fSymvLy6ug
493bnMcG/m7ufe4beXHo9ryeDdvx9Hkdnjbx1/aeR73l6+h0c/QujT16cXztXZ41fVcnjev6JzvS
0Nc/RNSaNvJv4PS0Z3F+a93f8v15+r1+D9k3858v9z8LrGent0XPodvzfbjfv93henqc/O1Om+Zb
10dPF0XzdOOjaujm2b+Xf53s9LLeOj5H6D5XpjZ08mfo58elNZM+w4+zRlXX7Hi55fb/AC95ee51
c2/pnZrdxt3eVca6/J+jy5dPgLs0ejlv9nwssX7Lp/P/AG/L0+lng7OWvc8rIvlfa/Ou+fu/z72f
Jzrh3cvRvn27dG7jvOSw5d+vrrV0Y8/Tnu7Ney51bmlNPk7MWeXo697Xhb7s1n0Orj7+euDzff8A
Hrt7PP35u7LXdXThsjS4prHK5Yxsw2a8XXxdnkdcdXofKZds/sfzfyfoeXtu0Yt5zz5893c5bqbe
TR6euefD6eyOTj9iGraxl18Pp89nzf1WHTqKubp0dKXz+f0tE3xbO3PN8/b33nPPd2k5Ncy6S455
16+vyfd5dPsvn+HzMXq08mnpjo6PO9PfLuyw511ed73I58fJv5W/S38HXjXr+F7OWnwfpfW6+k8n
r3Z5vlbPaL8t3+h469bn4L0vrcO5rqw09XHnrz5fnuk+i4PDd+fXq0tY6s+Kxv7/ACOrc9LmuW85
7OCnc5tsu268o4u7n86X1fN69Obx3LZrPve/xdnr8llaqAliACACURYAQWAoAgAgiBKAEWAUggEW
CWFlgAlECArXsHPq7ZL5+v1IeZfTR5mzu1mrPDBeq8GNnovOHoODYnXOfbZnFpKSaOiZ34er6HTx
7c/d4vMz9Hwzu1PM0exPJ3+cz97g1bfL3cN9fb5mvpPU5fK29MdmPZv68vP9HO9uKHTCURYhYRYA
SoFhKAhYpAJRLKFKlgBAACkAEIUBJRAkmUHoed6ONd/P08060WAKRLFKQspYqxSRSgUFsLRCygos
osoLALKWSglEoAAKAossopUspRiqyUFgsUCBVAXGlKRQAIWygoSy1KAAVKUABUSqRSygqkAAqFQW
URQBQUsQCt2dbM3h+D1cvR8v0759Pd5DGvovEy02XHXndcvZpy019erdlvaeXb0sWCbue81unHfq
nTJr6ZW7o0zm07pnf0noeD6XHfyXb5Pp9Oert1+3yuzy/QzxrRt3WOffp4I9yeLt6Z7/ACsvNN2/
zfqumt+nj36aePo6+c83X6mrFdePRGN5vPTH5f8AQfm+me/1uDjnTz/nf0LytY+b0deGsbPR8P0M
a7d2nXd92rzux0u2ab5ruxxcstfXzY75cnXr08fyMNPr4bdRqFhd/OPVz8fM7tmHRVwqz0/oPlJ5
+vr+N3+oeU4efpnq7/C3WXyfV8tcdmpZfrvlPoeGvb17HzvTy6fQup8vs9PD18+vzcuO3zPU8X0O
nL1Ojk6PN1yuvSbpz7eu9mOduNu3l6rhz9HPJw8e71bnk9HR01o4PW445ezl6Yzuvp1rRh1Q147M
c65bjtdGyZ8c4zKRNW3nrDzO/wAzvz5Oiu2evfezzb5MunCXHXno3dc87X25fR9uvbz1JcRVJhnr
Nmu43E26s024bMJvDT0YS4a9+E67Ml4BIx4d/N0m3l39NnH6fL6E13fVeJ5PHe7mvDJlu831fTz8
7s6t2s+Zr9jmPC5PU0Ofnb8evTRsy9DLzu3o5s69HX5nQvrdPg3d9LHz8F7ni4t/Qc/k+kZt+zlP
Lz7/ADcvm9DZ7uGNuS62VMLcaxyxM9XRx7OmV05ww6MZcd/Pqj0+bi6F09G/lToz5Nms+n9B8l39
+H0rj3bzuirBYgARRFglEFJYFEWQllAJYgCUQACWAlggsAACACJRFgFRRAAJYJRALBNe1Lz6u2S8
GPojznoU8/PrwNOyajp1aNed8/P0aPN37e357DfP6nk5eLpjb07u7N+ZfS2vJ9SuvFLN5LBKIEIA
EsCwAASiEKARVBKBYilxWWJQIWUQolRFiscpYlhKpiyhj6HB6GNd/P0806LCUKKQCgAUAFgsosos
qhCyihQqyosossqUAARZYUSgBUsqygospRKWCWWACgCxLQAUCgKRRKACyiyyikoKoEoIBbjVqAsG
WNKgoAIoUIoigC2IoL6vld3n76vjvtPifJ18vs4ey5yWctzo5+mpcdO3Rx9OPSY7pI1cmzV3nqef
s0y9OevGa26NVqdvD6MR0epjPBfc8Hy/U4r2ae3z9uT2+TX04b+V4PJ9rzt57+Pi9bd87f7Xmatu
vbbq8z6PzWdnd5XpW5YbOQno83PmdXF3eZM9enHrxrd5mznTHt0dp43zX3Hkbz9jz/JdDfkT9D8e
T5HTh1bxt9b57vxrsyz6cdMefmz6XLdOW8OmJebw9nidppHXKzJWNgCArPAer0eFsX08NHXZq7eS
8unNnjp3no38WVdWPTyXPLo68peH0ObKPtsvInz/AEetw8Hcnncf1Xldc6/qvgOnd9ny+Lsk3b+b
fi9GrLPNvN1bbeHbo2dd9VS8dvm+nqudPT4nqpuyltiyNGvr1pM8cqTLGWYZyXj265Om7ZzuU6Jo
35Y8/Rorm49+Ho58OfXjp3dXL0+beV50u7Tlw9XJ7fN3dMZZYZSJYWyk1bVmhtM4bcNhlLhN69LZ
Naejn3TpneXo45z15cUat+OnpObf3atN30/N6fDrv+WyxxPP1e55npxq9Hn7lzzx1XGvxO/ZOXm7
ezRHFljjrd9Lysl9rg0bcOrfr9O45e7Txb308TbLPnvoPmuk4d2Ovty9Tt+e7c32fF9by8a4tmef
bPM6dMuFJYSxjnEZ9mjU592jrjXqwIEAXq5FderDqrXv0TWPo/S+T3deX0M4+rec8cumuXZ06i58
uuzucKXunDnZ1zVtFCACkokpIACKICLABLLAIACKIACAAASxApLBYLCIspFSFWAJSAgQQsADHHZF
mOaXh370QbxFEAlEABAgAgACpSQAEsBYAFLABACUJZQQACkpLAIBZMsRYg9Hz/Qxvu0dHPNyiKLL
KJQUACksoAAqhKqyixFsoAoUAsopKhZRFEUQWFKELKKBQpLQJRBYEVKoFgCigoAKQAAAsqrKUFgo
JYAAFsRSrFEsoyxyAAgKFEsgKAtiMvU8ro49tvle1wfN9f57l7/g9uXY0b+bDOF2WbJdeG698tWz
GzztW7T6Js3ce2azjdNYYY4Wehl0auTd9P8AAejy7foP5x6ics8L6+LszmzjrXls4LMOC5dc4ex5
3rb1h7Pl9mp4PT1/Lu3td/ibZjV6Oupnz9W23zt7ONOjo6pPFx2zE6MObputfP17jzdl2SeZ6uHN
ce3Pzz9La+c8/wC7+Lk4ea9O87PZ+X7OevpeTT3cN+T0erwddaOHpw7c/m+Pp5vV5QsuSLiEABQB
SdXKPf0eSl7eHd1azzbsdDXZv8/0U5+HZojLp5LXqex5HZl6Hdo3fM9WXga/J9HOb7y+nn0d3i+z
m7N2ndw3v2atmNZ56bLrxyd9dSZXlsSpy8PsKwylsJjGUYVcscohFWWPI3cW9jp3ZTj1ZGLhp269
OTLn9DtjZ9v+f/RcO3DxdXn3OV2Ybzzc+ns7Y7s8Mo2ZYZE2Y5CAmUJVSZY4GzRt55vDG5zpq2b3
LOvZlhlr4stu5j5Pbp64y677/Pp19fDwefbXNWmWMy73f0reV17NEm3Tz6Zngw38W8tHVw7de7r3
ZvF6vD35x6uOXPevJXNy6XTi3w3cHpce+nzuHRo9HGZTI9DXhu1OF1c8uO7HbZno6dtnlz1uaXTz
9nHL05ceVnXqc8oSAAAAbujhtdWyZ9Mc3u/P4x9DNXJ1x6/Rqx3jq2+dkevfEh7mHlb7OrTu3WcL
uHn3vHJvmg63BDvcMO9xbTelqLEAIAEsAsASwAiwAAlCABIogEqpKIABAAAQASAhUgUsBKBEACAp
iWWBYkAlAhZYAAQpFigRSSilhKCKQAhUCygSpYCAllQJYAhSu/g78a7+fo0Z6Sy0ABQAKgoALKAq
hKIpLQUFiiygS0CwUpFgURRFgAUSgspUpUsqwUpJkMaApFhQLBkAqUUiiUAEpAVZSgAWBQiiKFlh
RQKUlAWAAJQKIUhSUGeFl7pz9ng9nnfI/c83l6/BdHHt78OqHPWeG/XWG3l6+8xY43PDu2YdNcXT
hvupw5dVxzetnhy3hu+n9Lz9vzbt0dW8avRvo5xenblx3p8j2fE1nDu5Meudmm8q9HJ7HpteT7HP
gvp/KfX/ADu9eTnMtdt3d5e+T1ujyvSzjPLdm56PP6pGMyyXz+Lsyxnn5+nfaz45N7NOzbJhns08
8ef9h8b9f0vn/HfbfJs8/N6XNrO70/nuznr6LLg3effb833/ACfs5cMPZwWZjDLEBAAUUsywAAAP
Q9D5/dZ6bm32efh7Gk8qd+iXX7Xhd6/UbvO9L5fp4fJ6/Z2+V8r6Xl9fHx/X6c865N/L1c7u26dv
PV17tmnn7Nvj9efu7ebeuxBnIohJz7+Y6pjayEIKwvDHBls1Xl7ck4985ljz1q0bdG5p6+fu6T1P
d+Q9fh18fTuw1z4+P0/J78uHPd6nXOPTyTGvR2eTuX1HnQ9Gebsj0cePM6ctG41sMoz59/PN3djO
dzHNNG3g2ZbeLeeXhyy9XLp+j8Dp4dPb0ed1ct7tXbz7s9Hm6mcrdMmejhqdXBv5rjlz5ejUuvZ2
TXpeh8V9lb5/o3GXLjz82zdxbtGZlt58p0y19GWd+T5P0HL24+PejR2xMsLZYxFg7erz/S6Yk2Su
fV2YS+bNmrGsRIAAAAAA26ld/Pr3V3/QfM+r24e1n5Ozpj0tHHsWt+6vL1+zrTzMO2S8V6xj1aNV
nozzMz0HnZ2drVtMdW8cM7hwzvHn7OzUNnNpO+cI7px5WdV0bDJYJZYlCAAWCURQlEAlgFkWCURY
AIAJFLAkBKJFgKsllAgEAAgJQlgCRYJQlEWCUQACUFLAFgAikoMcpEWUpAKiJYlWUQEWBQWQ7/P9
DOu/R0c+egtRRFABQAURSgFEoAKsALBSiygsoCyiwUqxSASUSgAKAKCygDLHKUBKIoAKBQWBVALA
ogEoKJQUAKWJbAsWLLFIUWpQolAIWUAFCUAFJYFgu/Rcb6+fpy+X7vnPj/0L5qTh2cXbrGHRz7Zd
ucy6zXh182pj5vrcPRhsw9Zeffrz53LDHr4/Q3+h4Xl3y9ndeq8b6GW3jvWcyZ+L6fm9cbc+bn3d
2O7RHvdXi1r3MPHV7WzwMTZ432Hna6eZNu5vv269+eXjYZ7czR2cW6z19HnWsezfyZzq6eXqYnLn
yXM69fLn07fT87bwufyf3HDrnfe8Ty+l6Pm+zyO3L0MNTpjPS59TVzZYagJc5FxCAAoDKZExsAQF
WZCjUlF388T0Onxbc+tr5h6fr+Pn4+/pbNfV5+nk+D9hq78+j675Hv59vN8nPV05b93PuzdmUyy8
7j7Ob0+f0N/Jvm+xzbWtt57ZuaKY69uJh1a6uc08qdXPhwM9m3zNierz3fnc3a9/HcxzwzrRz7+b
rnk9fXnb9h42/j4dODRuvTlzcnbo7Y25eXlrHp7ubKXp5dlXnnXgaO/hlvfq5bFw37jzs+7VHmdX
D7Ws+bu69fLps2cWrN36uTPc3+X1aeuOH1OT2jd1Y6/J1x5+rr7X57o+i5dY0dfDxSe7n8xkn0nD
wc+THZo3jZrxzvXPXhtY4ej2/O3O+4ceNdWXmo7NnPux039DRyu/zZs1MMcte89/Lr6tuDn+2+d6
Z8PR6unpz86+l0Wc/Xk1MGatfP0+XGjExoEAAAAABQS56y9303hfUejz69cnXGWrLKOXLtwXldBO
bZu2K3c+iujVjEzmsdOXKWaeqSck79xw78ua3vy8XWe9PG6Ln0pybDfJlYxo1auocLtHC7hybc9R
uvHDtcUO6cmw3sNlkAlCAlECQUAIWURKRYWAlECQpAJYAQpJlLJUAIoSwAQQlVLCywEQCALAsALF
WBAUAlAiS7U1ZdHznL0exr8GZvvvn5L9G+cys+hvg79Y9dwdO8bRqAglT0fP9DGu/R0c+eqlgpFE
oCgKAoRQAKSqBFlEoLKLKLLALUoKAUAApJRFEoLKCkoFktKLBZQlBRKAC42LYW2UAAigUigAUlCi
FFSiBBRZVAAWCoKlLYhZQUSllAAsBTo7PM7fH6MfnPqtPi9H55lx7uvPfjnoze6XLNiY+nGOzh7j
K5ytXs+Z6vH0ev8AHfa/DTn6GN2amz3J387z/L/TeBM9vgdU3GHX7+tfH9mfedPXzc9vo/MfVeDc
eH23ZGGvq0Tee7LlZ6Nvj9NdHoeYMuvHem3SWZa8hjo6sM3i0O7lvi19nm6x2bubq1z14zZj19+X
L1eVwfG/oXz/AKOPyN37vfy4+rRhZ06c9ac2vPOzSUyxgAAAFMpcSBAAW2VVhRAlZstOjn2YL6mn
Qxr6jr07vm93zn0Xn9M+R+g+D1716/wPs+FZ1bufez0ac5i46OzHtjj0atXTl19Pl9Zt28vTN+jp
0dE1jgkuWerImjZwXGeWu3GGzdtm9uqdOOuGHRMTLC6s6x8/0OPrjR7nL0NfWfK+54HHfPhsy6c+
S9OPSeZq6NfTz78uTe16WejLHRcNpnjhsMrr56y1Zak16PT2nh+xwdkdd4cePTv4tOvTqxx5tZ2Z
d3S108X3/wAty34Onrz3nz9fpd23k5eh4Fx6mPET0OHnwXrw0kz87s5+mcMtXqHs4b+fhuycl3lu
nqYnJl1a+W+PV2eXvOjV2Ydc4Y8vt6nlet5n1edeP7nJs5du7y/J9DedGnv1+rzcGe6bxonRhWqZ
81Txt/NkEAAAAAAAoI3avQ1Pc9jXs9fmCmOQ49Xow896COO7ecjGl2a8jqcmJt5tlOW+jsPI0+6P
E6O3ni7eXQvq83lU9HXz9ict9TCuazFM93NrPRy83dXW41nW5txmUlixo3pePHuHDeynBs69Zhs0
6TtcMO5xQ7nNtszKQWJRFgAlEBAiwRRCACVUCSoWWAEAAlEAAlGNESwSgARQQAFsABYipDP2Z8/5
vV5PFi4egEACARYNvRxN59fo8C9Of0l+d6enP2fR+d9iz2ufo0XULZKApLKAKShQApKAQoCgBYUA
FBRKoLBQUCkBQEUQoAWFABUS2yigKRYVKAFARUqrKAFhUFAAoFQBYAABQqFAAClEULKURQtlEoCx
FEWF5unyePT2N/P1fN93x3i+743Tj08nVqjq28PWue3m3+jnqyyxmmWft46eVpnTy93kepzdHX5+
f0+jvxbjlqudWrT52Ll2/nn6v2eXuy714tMxNHR6WyzxuadGN+fw9HUnJr2WY5sO70s9PB7ObZvH
bly9Gs7ssMyTNcrr2KEvJw+vljXmb+tXm6/X4ZnlbtOddPofPdXHr6/jevxTPzrVw/V83Zz8Ms6N
eFjTsmUTTs2nLd+ixKIABljkMbAEAWVaVpAksRZkk6ufdXP083o5fTXy/Z8nazVu8vWcnXjXyP0G
Pd6uf0/zvo+Bx6+du593XluzxmLo2Xduc3F03pOTLujPFu3YTXXp1bmphjsYyy5+lrk2ZczruurY
SZSa6d2rby5WWZk0bdOrPI9bDrjn6d8Xdr26sLFyw4Onn6dOrHkm5uy1Rnv0btTHTNG8uGrIxvnT
XLLVcrm+5w78dcujRs49GF0S6MpNzVjyej2xwdOz2cuTdhxc9b3nd3Sej3eL010+f6EzPJ836VHy
fX3edrp069vmuO7T3ejZ16eic+/BO7VdeZn7G3GPMx9fTy151y4ekuWWned2WXBp3anrS7fpPnfG
5dO3n0eX35fZc/yHRp62OTpnZu8/p1nZo5dmsaOTs5NTRz9vPGqVjUCAAAAAADOt3V9F6Pfjo3nX
mlVCkIFiJSpYTm6oefe8nBn2a5efBimTHJd27mxs7HEO1ybxzdkPOx9MeTPXxl8vo6OSXZhya5e9
wb068+DGvQnFts6Jhtsuy43Oxzl33mpvacrNgEADTp6xwzvRw499Xi27tJsz48E75wjucOddc15p
ZVQIlEllFhKiAAJQgJYWAAIFhKJAIpAAJRKCC2UQpAAj1OH3ePT5PxOn3fL9D5N7njpqZyTFliAB
IAAAA9fyPXX7DRv0e3yxVgqyykogqhAUKBApLKCgAFABQAWxCirKAW40oiyiKWUIqyLCygAAUWJb
ZSygACiAUolCgKAIolQqCpSpSpYAAUVZQAoiwWUAApYAtxq2xFQVKAQ4I6/E9Pg4dfZ6/P8AR8Ht
8j4b9A+P1yx38XanB63ldm3Rlw3vz7Nfn9XPt9L9N+Za+evY87Dbrnn393u514Wz6O3PzeHvaca+
e8X7vyrPzf8ASviu/wBGPvOv5/pxv2HN07w2aicnie3yc+3Ht3bpfmMfW45znH9LGvG5vb8xPI6/
nvL78f0bs/Mfvc69Llz18nTsb93m4OrTzzz+nw991XNF6sc7b8n890+Z34huZSAEAZ4ZL3bOS50x
y6Y6tHN3y+Ph7nm6nLMprMBUAIABbK1UKliQqWZDo5enmNvveX2Ydfr8/f5+3Js5sOHTsky53xPJ
+i3+vl7Xq/N9PDv8rno29+G3PVsxrLJsjX5nq8Wt6XRN70bc1tyw2uWMbpnCZa61tldMccuqNG3d
lyxow6NKZ5a8ow15Y1q8f1+nrnl+n8P1+XTq+f8AV8WLgusXDct4Z3cfTph1cvYzdczc9eWUuscb
pnTp1c9Tdlo3p0XY541bsLm48exqZY6/M3n1ufb0tOjo0ZnLwZbt8un2dPQ6MN0087n9zUnleX6H
lyYb+vWc3m+/5HSYe15/VL6G/wAVz6et0eZ0cdeg5dnCvOx5u+cehz9Ju4ujo1nLb1exw69vid/h
m3m6cenLn8D6nz9a+fx6+bvjv7fM075+xn5EN+e3r3lw7cprgvZwRw5tcZ4bIa1iAAAAAX3vN+07
ctpe/KCggKiwAiwSiAikgEoYZDk1egPPvdDlbuciQy24U6XGOxw52dcxzNejqHJh22XyZ62qXzdP
ppfEvt4xwdeWhe3d42qz28PJ2Wd817rnXelWrK6zdOYdLltnS0bDOUQWJYXXsRza+2TXC7hwZduK
aNk1nQ5YdN5VnQ59psi2RYgUlgABFgAlEAliCFlCWKlIQWKsWAJZYpfUy7/ifruDx+70vMfI56ac
CQAAAIACggEev5Hrn2Gjo5/b5iqlABSBSUJQFJQRQBQFAACgUihYWiKlAFCiKVYCgAiiLAssUUIo
BRZRSClgCgAClJQKJbIjKVJlCKFlFlgsFUSxVABYKgAoAAKmULKVKpr5zteVzR70+Z1J9X4PB5/L
p7WPkdPm9Ho+n4f0vHr4/wAv+mfLZz8v0avX7c/J7N0xrLdtlrp6+LXTPz+H3Wb6XL7HB1ZW6mWr
VzZuzXr4ed9Tb83ek0fOe1856eX2XZ38eOu7f5HuG/LHu1nlw7+ezzOLDlxvbza+dn3ubyfSueXy
/e8pn5vD1/e3Ph/q+7283wPWrg3adM3pjllZ06teOWHN28VdvV4uyvJ+Y+y+S741DpgA3+zjXz+3
7r0fN1+A3/d+VLnq7ua65eH7D5fWfm/Q6d3THm9eV1PO8/6TVZ81O/g1AsABFlWlaiwREZTJMezl
31zbdfrZY7ct3CeprzvL17M5OTDbhcXX8v8AWeX2xp/RPkPSm9fzXd5u8b89eSdMxmbmxzLNljl0
ehp6XVM8tSaMtkzt13BV35S4bV5rJjm3GyzHHLTpGGdcvk+x09ca/rPmPpePbi8b1/MmdebHWdmG
SLp2Xbi3547mO3TlKzvNprdGE6c97s8uDvM5zlxxJqy5tSbpp0y6/n/U3PV+2+N9nh08ziz87rzv
oeD6O+Xp9Hndzpuzga9eR4d1/SzOvPY1rVNnmZurHfg3xTo648LZ72rlPL49uHXOG3Rq3N3t/ObD
Z9N5P1PDpsw8XHN1a93f246dPr6rqbdUqeZ6+Nnz/hfbeDueTjsaXdxrn29vjbdZz5u2xyz1NK+N
PQ0nPhvpyujQkCAAMsfW1Pc9Xy8/V5vRaN9ShFEFJRFgAlhAiWACUJZQEWIlEqLr5u2HDPQRw7d/
OY4yGcm0y3c+s7Jxyztc3QXXsHI64c16MYNWA596Xh1+xsa8S+zqTj36NEvq5/Oj3tflb7Otz7TO
5bbnVnddbnON7nldLTsSrLIUkEAa9iXROgvK6Sc16MDHPXgdE5KdTg5c79ri8PTx7fRdPyexn6qe
R2duPW07t5SrIAAhABtNenf4/L0d2zl1Mem8bps9Bp3bxu9j4zb5vT9Tq9H4jj6OTh6OaAAAAgAA
AKAev5Hrx9jo36Pb5QFCUABQBZQAsACgUmUFlAApKAKKSkLBbBSgsCqKRRFCURYCkAFLEWyigpFK
sUQFQVKAUFLKKRRiyiRVQFssJQsoCgKEUS6dSdV83lPcfO6V+ofKU+l0+Luju5dvQeXz/R7z5afW
j5jo+gR5G/0S83RRw+J6vnefs2XTw7X6f5r6nl03fI/U/G854nred6PbGPL27Oe+zDv8/H0ePo4O
31fLu/n9nz63b/PvDfp7fJ211a9eZ5fqeb6Xpdvl547nl/C/qX5V14fb8/y/0M39FzO/nvjfTbNZ
+U1fWcGb8x718RN3V5PuRv5fTlz5OHbq5Tye/Ttw7NvJ0VzY6ePc9XV5+m3s5dmacurr49Z3Ofvj
y30Xo3Xzvo3rmvmvl/078/7Y6Po8c89tXs+Ht8mvXYbPPqVgfO7/AGvB9fL6v84vnejPvZeL77PP
r3ac65+vdydM+l589LU+b1/TeZXj49erU0rLm2ValVLiIrNlp08nVznb39HR5+nJ6Gvn5+h28XTO
vVeXPjw3pc15fp82p8r974/f3n1X579R8rz1Lrz6cstmrZLnuwyzdjC5ZSC687WGQVJGcwxrbNMN
00Y10Tmxs6dOrVpt6fN9E4eT0d2836HxPtOHf5Hn7fPvPOY2y568jLGCrDCbMqxyo17JlFMc3LFK
uDVWOGO/Ucuzg3n1cOL1V9DDPTw1nxYdvS+F1c23r5fUz0ehnrs58sLdfRs0nF7vleolxca3Ts15
6ac+jPz3GXXlPJyx7Zmbyt59Hi9jm0x9v1dHn68Xmej5HXLm9XPrz4On1dWpw9vHy16+Xi09vm8f
yT35wfS4vkcP1mnOvi9f1XjdXmbWveOnLj3szaXPds8X1ZdmPq3hvyOT0eXrnycPd8/U4meNkodP
2/l+36fMwzdM8fP6hfLnqDg6byp3PMHpvL3nbMcyCosRLACKIogqLCUSAsQLAQKICa9o48O6r523
r1RNMgMib7gbnJLOycuSdCWtOHSl5cewc+2azfjzwunftl8/R7aXw8/T0RxusujY2Vlt186dmrjH
VlyZJ0GVY7sMLne04rvaLc7phayUSWWJSJRh4/tsdflNX1/Nw7fMPXnPflZelhLw5dGkyvPjL2Zc
Ns37OOHqdnz7ePrHy/sduHoen4uneOrP5v6Hn22I78Jy9bN8f6Pzdvl9Onx+nzOff1PNwkzYKAyy
+g6cvncfpvM1PMb8OXXWyksAAAA9fyPXj7HRv0e3ylEoAFAACgKAFEKACgACyhKLC0QoALKAKFEt
soKRQBFgmUIolAAC2UqWKlUBYLKAAKCpYoWgAS4la+ZO55Og92fOaj6ifK7U+j0+J0L18+3oPL0/
Q7z5efVD5jd9CPG6u8ujdlYsUiiUAWgFJSAOfwvpfK49OLdqy8fq1/UfK/U8934z7T8+Y1ej5/qa
YfafKTn6Prvzn3efr59mjZ87rPqdXg78voN3h9XDfr7PO6uG+rdx+Zqd/Rz4ep25+T7G9X8++8+L
3w6PovkPv+euJ7fnxs7/AAerWvb8/gajg9fKPM9bz+Fn3efj0zO7Vt3ni7PR4MTmy7+Czf0+T0ad
nn9G/E8vZv31o6taNuGnGTfsjnfZvD19Omr4P7X4Xvj7TR17sdcMePga6Ghmb+Dt3p53X3dnG/J+
H+ifP9+fy/reZv8ARj2dXqeHw3t7uXDN7PJ9rqPJ9Hg87tn0vB97q3n5jX39G8+I9bls5JYsWEyx
qOzk6F5/S4/q+fXmz7uXz+vnmVanX1OfHHRvmMarjkZYIeDl1b/Tz+087k5vP38HPTn34bMtGR0b
eTLN67xo7Jx6q9DHzMNT1cfKwr2MPIx09bT5sr0MONqdGOiVuxxyrCbso1e35fqcrwZbd6b/AKLx
frOHf875u7yPVw69nAr1s/GuXuXx8s32J5u3LtvNnl0Xnyl25c+Rux1w2zXjWXOx03bMMY5+3ztn
TG76v530+PXLy+nbL5XRonTt28jfeOxvxvPPp5NxOjTos7N/n1N/n6+aZ9ji9LHn1mWGHn28s7Zl
ed0zl6Hznu9cZfR9Hp+P0YeHy7dvO7L39ueWvPXrPn6/Rxrzsunn26ZxSPP6Pd2yfP8AH9fpl+c6
Nnmno+dMddMtmPZXk8/uHPw9/ZWeXT9P52XmfRfOaj3/ABNepn0uvh+nm/luP6bu0+K2eny9+Pv7
93b6fPwdc47PQeXV9Oc/QgADRvHn6vUHlX1MDm6efnPRnmD075++zoAlgAlVFEliAShFgAlgAlEA
A17BxYd44M+sastOqNmMqss9pjnz67Ou8dTrmnbTTvHLemS82e7ErQs3aYjVj17GuDZ14JhdWo24
SZ1Ls3pxu2VocHj8u31OXn7NY36vN4efX6bHyPU3y3Z6XTnvaLW1zxOqc+3UyozKgFY83Wzv5/h+
s83z+nxG3Vw6BDt4vR6c9vnfT6uufmfofL7cX09U5e/Kt2/WPF5fR7fN6/nsE5bQAB03PV62Oft8
JG41bUvBz+ux0+ew+y+p8/b8hn6P8rz9HgtuEziB6/kevH2XP0c/t8qhUoBKCykUAVKAUApCgBRF
hSxFKAAoACgFApKygtlBVCIqySiLACxQUlCiVZQCkKQycvOejfF5I+lfLYn1mPymZ9Jo8npNuno6
l8bV9JtT5Hf9QPmd/vxfI6+2GnbkAALZYSqkoigC3GlEWwtgWUQosoEAXHKHkasdXg9e/wCm+d67
evwfqPnvN0+U9nT5HWfT8XDz5793X5Gjr5NuvVsZzrZm6r0Sax6dGuPX3eFsxr6rv+P7Oeva8rbo
scfo8HXPgfd/AfVbnrcvB2ctcXF9jzanz+/2fO1r0NXL1mzu5fPuPU1+BrT6Hi8zuWeZ9VmngY+5
xYmefnelznNzdksw7ebPW55u3JnZ0c+Ob6uvh1Hp9Hl9k14nx323xnq5ej9DzfVTfzXd7OOdeN07
U6STn5a7pnfHdfHvnTPjYez816+f33B7PNz38Ppyx6856XnXN9zRr38teDp+o4vRjh5PQ19+eru8
Tn1Pb8/k9DWfN0fSaV8LHv5k3bPa6uXXm79d4dcNG3U1j6OjozMscZwXCSmMmpccZXP5frcnbn7H
2fxPq8e3i+H3+L6ePTOW9J0NFNmKkb845Z3Zy+a9bOTyHsF8vb6GWXn59iOfLZlGnNnGEzq69mGZ
w9XL1dM9/v8Ah/U+bv8AF6OvX148WHbNTg0+rlXi4+1jXjZehNuDLfqGejCzu2ebZfVy8jLL1MOC
HX083Vi7OXp5o6cuT1Gurv3eHx1nt0bU5Obv4u++nbq2zBqzTdy79Fz1+Xpt5dXV5Pprw7p6OLuN
Hm73zJj2yycvTOjzen0O/Pi6vp9PPpxY9PDjXblyepp1XT4eufuPl/P3Ptrz781hZbpzy2HJj6eO
Ly7M/POzl5s5u+R6GzW/Ovdi55Z46F27m3LTo6ZynB5/0GrrjwtXRh6Oej09COXp1fXHVv8Ab8vc
0Q9nlAijk0eiPMy9HWmO3g1npzzOlOoWoAIQYc/WPNelDzs+7CNe7j0V6bzczuvJtNsyhBYCpYgC
URRARRBSWIAlCWCUc/P6CXgx9GHNv180vVp1jZcd5q3XTZvnKOtzbDZKs16umGi74as5qjonPF26
Nmw5MuyHm+B9b8p5vV158t5dcde2Y3h9B5f0Xp8ujT2Tv5+HPsZ385x+95fk9nNOrnzej3vmNnXj
9JeG+jz9zj2WdLnxXd4vJp49tms49Qkv0Hg/S+jz5cnreVnp6vLs8JfS0Y6c1juxPd8r2vjM71RY
iwAy+g8T6Ht5sjD1cMnPY3yZWXq0fec7nNnL4/oaPh+jyt9MdecvLRp7GXn+rr9jL29G/R7PMsoU
SgBQALKSgKALKMpvzrQ69BrW2Seh58qy2AAVKSgUShUooBLQUFsoEqykUYqskuk23k5T1Xh6Y+jf
LU+px+U2r7/LxbonP6nQfOz6nM+Vn1Y+b6vbL5nR10wzUigUlUKJQSiLAAAC2UCIsoUJRZDK42Mk
LQAKQAsoavko+y5eHvmvB1dGrx9+n6H5b1+09b4313Dp7HhfV+bw38v5X1vRXidHW3nRh185p0bu
W51cnTrs5sPLw3PWw3dOL5z1ebN5Oi86+rs8HRHH9n8X9lqfSeH18/l69WXi9nfF06MdXbq7dzXJ
34+heHPzdGrLZzaXH0dHX5/X357ufflZ4W7sueeu8PtHNz926Twe3s1Vwc/Ryp6mGe3HSZ48l6Y/
I/U/OduP1mzjz819fbw9nXefD1aDCc2TtLMsZrD0OTPj6c+Gvmfa5u728/Z+U9XLG/jr6HF15Zd3
lbs31M+bRz1q+e28/u4Y5urc58PoWN8Hq82S49Hne1mdGF38enD1aOjFbeeyb9E14ueOGNZzXrs2
Yc3H0no8nBOk29nn9Os5/c/I/RcOvT+f/a/KXPPu6M951Z55Zas7VwzmUSXEa9mNa7tWarnslxuc
jC5ZGubkab1azjzxannelu6NuHd9N6fHr8HdsvPW3YWarnDDHYrRhulYbcNVm/XlnHNp7ZXn6/Um
nka/Z5tzLr0b+OsdvPK9j2PC+44dr8P2efcbNmkmzl3Osnbw9U1q5+7ztbz9DzLOl17peGezfjMd
TDR595+bjekS69zs8jL1dzR6n0/B5+nk43Z1admfX05cnlZ/ObnTyer3J5XJ7eUvpbMsKixcd+Oe
bm87DLt1Y65rbho7m/Me1r554NmzRljjz7uiYOG59fX4u6NvJ0TeeTHr17k7fI9redv1Hxmzj6Pr
PY+Q+r5dPIbMPtfIiyxKIsASKHN0DzHpk8/pz5F7Xlj03J02ZKWBEsEokyhq5u4eXfSHBu6NRls4
tZ6E4qnY051mpYEiwLCLLIoiiAEAAIAAQw5+yHFO5LwdOjRnfZzTGb5/O7/J49fc6fmjP1GXyss+
uz+OyufsHyWzU+ow+axl9/k8W8+nZy3o4992dw49NOOXX059y+P7vH08vnY8Ovt8PEl6MuZjfqzy
/XmuPV6fBN7PW8LDpy+px8H0fT5ejx+zx+PbG4uW8rhRbkehq002ce/Qu7Lk23N02S7fX4s47tHk
F7eLd3HkvpuQ8RerWXueP7Hbzer5/wBJ4muvPjhzdfP29XldONfR/R+d6Hl76vivoPhdenHDLHXG
Y2IwzkZe3430OuXpaN+jtgqgDVhL0zVulmWqZ3uTLWIXWYoigUFlbdeUvq44dfn7+fp+j8nWN/if
SfPaxhN+nrkBZQBYKAUAUlXHSdDi5j13h64+hfMWPpdfzua+tp0dRxa/Y6V+V3/TVPker6NZ89u9
qHHt6BKVRLQFAFAUSgsRUososLbKBAVFEUJQAsosQURRiyViomUpUsVMF2OLhT3J4GR7HDq9CXyu
b6eJ+XelfWx08f7r85+7l4fL9fzvN0vteH0enj2/R/CdnD0/WfPZcHk7fU/Oe98bc8Hfh9HM+NfW
4eWtPX5ujvj1tHg9fXPmc30U3n53L2pXB7Xl/S5189t7Ojnrg0696ed1dG44u2a8a9Xs+f6M33/J
y2W7u7xtt6568tt5TLkycc8s7c6rs52vZy8Dfdexj5XqWTHNqa9W+M7MtOy3xfVu/m8vh9fy5vdl
eeNfznveF15/T7dmvl06OfDlZ7sPH65O3X7evevK3Zefm+s8/rzOzbzbPL33eN7Kvjf0f5Xy/Vj6
P4r9E48X5HT2c3TGXmObtnHLP0dTR9Bjt8vTHfp7OWsc9Viat10xNZv0YYxnjrxrPDDn1OnV5/L1
ndyanTKb+04OzsvO6sOnHF4/R8b2uk+1+M9n5jl01b+Hu6cslmLlhsEmymjPZoNk0ZV1aJhrPVfN
1TXs3zOnLpMMs2joGrdzro17tHTPl/Qa9ur6V+o8Dz9fFTDfLZcKZTl5NPVeZvs34ZxEz25ujHoh
zt2NamateG7E5ujBXP7fk7rfT9vVlx6+Do83q6cum6sstmWvIw7OfZWrT3bG/Px9StcPbXKZTVzs
58OOXSUx1OH2OfZqZ/Y+V6HDt5fPpvSXtxz1jD5v6TCz4v6D0dGp2Yecjv5O/PU+e6fX5U29HgbD
1OPLl576Jqt59vP1eTO/Z6PzHWz7GfFx+fXp8nmNzbz7J1zpwz5d5347+uzz+3cOHz/qPn64+vk9
jefOx+s5efTR9ZzZY15WOrr+p8/DradZ7Xkj1Xl9lnQFASxEogqSjRzegk8zL0cF17uTQek8zYdz
DOxFIBKqASiS00aO6RwzvHDt36DocOB6M87YdrVssLLAVLEAgCwAgBguac504cmrO5hj5nLt7GHJ
nE8n2PJ49cEmblITK4UyuNluee3O9DfJcE11uw0Z3Pf7nje37PH5fl/Tc2nzEyx8noyz3YY6Ya+j
VZhbjc9vufMfW9OXhvX8nl35uXr0amtZGLIziEAdfIXu59OxdYZ36Bb7vg5ox386gXLAMpknZ2ef
6WsbN3N627813493Xhj18+vPT0PU+ay49va+V17l03musdMmUs2at1z2el5/oeryejp3aM9aLMPD
9rx+HXmjHPbPo5dl19Lo4fd53zsuXTqfRaPC2setlwuvP08NXTc6L2w5d2r0JeLu2TGuL2vK6caz
1c8X2vnvY4Nc8OXr5OuI0N533zJHqPBzPcfO5y+y8bYdSbV8/L1dqfN9ft0+a7/XHi93aXk6clig
pCirKShFElWRRKAsJSrKCgBUFEURQBQtBQCwlEUShKAAlUQAvNxHrPnNR9No8TrrPR6XVHg7fbp5
vdtspr5TueHhXvvn8U9z5v0eqPzv2vJ9Lz9+jn9HnzZ6fFu9HDry8bX5voer4/Po5PoPD2ce/P2d
Gn0eW/Onu9XLfk9nfhx03eN5/We7r19vdx8/0voM/E9Xq9PLXfp08+NdfL2Y18py/ecm8/GY/S+J
c8+rr40b5jL27/I2S+hjq6c7mzOmhcdOffefWOy6O6518vby6b+3ixX1Z5XbrG3HdjrGG7m0F5eT
z+Ovd8bP0dvjvZ39W57evyvW53i0+9jZxO4eP2dSMdPRbfA37s8TXu4vSzvdv5dHDr2/Ke902deP
yOnvNnl9ev1Z83d1bpnH0PO2+fXpY6c+et+/jzxd90ZS7pqxM8McNTPDRxbno8fBr6zo53Tuc2z0
d+XF07XOzXuyiJnm6tl2nmcXq59sev06/Y8/f86yrv59m3XtzrblquWxx9FaOTLLtq9GW68deHTj
LzcvZlb5voaLLbswkvdxdnLV5Ovhlz0bfH649f1/M78dfp/kPrPkuWtfOnbhdvPv3OFv0a3PR4vQ
c+bT1cy6uzi7Z16rz58M5sMyy6DTrx2dI0Z+yebu6vWx0uzZ87HiavX870cdu/y+iX0M+fdyu268
pduenPN6NvHvzd/Jhz1nxY7+uZneQ7fL3925fofQ8vz9PR+Rz9brz07stmk154y4zLyl5/My9zrj
X0dOacm/dzZvTq8jnPb5uQmOjp1Lw9Hma+mfRmeznrLy+vCzDp3WVeTql0ed6PVvPhX6ro535j3X
LL3eV3/M2bMGr0c+nqfS519E8Lm59vsvD4ev2+K8vU78fHvr6U0dXFy16/Hjvjgvrq8zp3aDoy8z
GX1HldVnUlEsosCwSiadw83X6yPL6ujmOl5lPScPVZnQhagCUgQQAatlOHV6SPMvpQ5N+Gk7Z50P
SefvrpS2IpANO7y89OHZfZ568pOLPXoy1dmXn+d73ic+mMsxoRAqATKJFDbqsuzdzSa6sMurOuDf
ssuHs+F9D6vG19k9PD5Pn+i8zzenDfw9vm78m30tOenHq6Ws8PZp0753Auct/Ml6NeW7PTlm/XrO
MsuYGQGeFIzwUEA6OffpWACssteUnb7nje9vn5/L6fua6fEevwelcd3N0fcp+Xafr/l8ejyfS871
cZmN56z4uzZHm7eft6cfV9Hz/Q9Xl9HR0c+etFPP9Gcunic/tcXD08HTjLbl6flyfR3z/Tzvyse3
o574Nfr82L53N7XJc5/V/G9+nds8Gd/P9t5Xz2rD1svk/Sm/T5ctW8zP06nzmXuvT5/Ez9hXBn2w
wuayygBZYC0WAFlAKUFlFAACwLCUAAAFgtlKlFlgVZQigACpSpSnkR2afmMZv67q+J6a+wnJ1smP
MnU8vkPf1fPZJ6XPeo8vH6Pavz3X6nMbd3mcx7jwsa9zl4+iMOb1uk8G+9ZfF6+/STbw8tnsXxYe
1zeTqT5ffj5nDt6E27+XTz9vt93fl4H0mjq83u0eP6OrPKfLfTfNb5fYdPz/AE+Lv3cXZ6mb4Xb6
nZp4HRu8jpr1u3y9u9ejjy7rNDonLDq1b+V3eT03U5pp0av1/B5nPc5+R9roa+Q8z7jybnxL6vz9
nRbnGzp8xm+70eF08t+1n5u7lejm37K4PL+i8r08+fLn2dp6njuprTllr1z1b+vtZ83d6Xn8m/0N
O7WsdHVK8Ld7HEztyt1bAxZYJZhwy7/J6+TGXo8fUb9mvXj0dHn/ADvm7x7fkunvnv78fteevzvZ
28W+erT06eO8t3Hmd9492ddF59cdePncm56XHzXpE6+s83r7sMMphlm5MenN05bcY157so052RZR
q8P6HzeuOz7T82+4x15flvq/lNZy6Offc5c+zVZs09XNvOe+bLObouy55rdy6cdsTDV14S4tla1b
9d5ay5OjRnWvfz8nbPt/S/KfWefs+T9Px9Y2tGXbjpxz02atdu50b/Pxjp6PJyr1tWzHG+jn3TNy
3py1hw7cNssc+Sz0sL0Tc+p7tHHfJ87ujPHr7efpz4eL2NXXPmdmjn09nb5HVz12XRli7pq0mzmw
6Nstk15ae/Tx9Ju+y872uHXycPP698urbp2dZYksxxyHH10wuU1E18Uuznw5JfSZbsPE2ezwZvH5
fP0+nnn077y1hnr2w5+2Ls1+j7vPXyXoe+zvR4G/n3Xdyasc+7kxh43Pox93n68uL6OOrKYY7/Qe
Lu4/Z5PU5NHX34aN/VDZl5mB62Pn9pz6fTieXu26q6dnmbTucXWXk6y+VPWiebv6dRsz87E9OeXv
s7ZMlgQCVADVyehF816UOfo5eePTnm52d7DMIoEiwAiwCooiyMdHSPNx9Ox5Wz0dS693NoPR0eLw
46z3fmPb479Pn69uenjdXoZRp8T6PDG/j8fRw59PPnfhZxuvKzivdlLwZehkedl6myzyMva27x4d
9+7z85s98nk5+jlnXkdva9Pm452OvHm25amtvLXPrh1a9nn7cnP36+PfyvO9HzWYyx3yLBs1l7bx
b8dctfTrl5sbOnEEA26tupQQDLLXuNJSXZF15sj7H6D57v1nk3eRh0NvPOfT3vp/hvUnOfO9XNe3
ndejHOPX0demXl19WquMy3y9b0PP9D2eP0dDnx22bvn8833MvL65rsz+e2Z36Pm6O3l11eX2eZzt
16dFndfPW/Uvm+mX6LP5/rxr6bd4vfm+z6vx3fXueH6HxDl6Ht6tLpqnkfYS3h6uT3eKG/edCrAC
gACpYLFAoFBQWUqWWoKACgFJKIogQpZQFAKlipVqCpwp3vA1H0jw+1rvUhNRn8j7XzmdYsGeuzPV
s1Ovv8rNPR393JeW3q8we1o8rqMufu6TxZ9BU8Lo9UvLvw5jvePD2Hkaj0+XmyTagx19PSeXu9jM
8W+xV87uzS/L/I/oPxPLX0c5Hl9W3s8nX7PJ6evZ9H4vp/Fcm/19ebxNHbix9Jswy+Z6hYvRzU9b
i58O2dOLd6WfZyS79HLbM8+TT0apF58M3PZxta3aJrut6ac37Pm+Zrn63jfS4S/OfOfe+Tvn42Xd
4FnXjhkbtnLhL6G3z8831t3k93PWen1M8Xh1+pFc3N2+qatzmuO7Lzui56NszllguvLC5ylacmzf
zZdM5+hdXFt3R5nRpjnns5u+b5Plvd3a9HzXP63m9eF+48P3/Nrl4Pe+Ty/RPn+/TnfyOd19uWnT
28GplOHPo6Nc6l5dvo7+bi6s9nO4smbrw6LWOO+mvfrxy3PKx29hwZ5nRp177Ms+XqzXF26DV9N8
F9B0v1353+j/AC/PXh7tGzry25atlk3tm84Y7sbNezDGxsxzlxbMCRma8OznmsNmGXLemYZRwduP
X0aPvPC9vz9vlvMy0duO9z5b5bMLts4nTDg092nU58urWavW4dE16+3z/Qxu8+zg56dM1aavY5tN
d/0/zX0PHr2/NdnhM9bi0XPqPP7Y1aOrZZ5vN6HP2z5eHs8G5ejyMl79evszctkyxcfP39284+j5
n23Lp6HybRNcXoef6vfzZ7ebTdehr07S65prqzac3Pjm2a1atnby35m7fpxN2ri5LOjmz2dJo832
uHpnT27cJdPVn62dZe/y+bw6e7xfPbrMonW6bejGcuXp08nL5ftfLennpzwy9XHd7fP6XPfsbMuP
d523o9vic01mrPsxudfXw6j0OLPccOz09a47eHVZ3ck6jnnbypht15HXOSV2MNiwDm6YebPTJ5m7
s1RdnDieg4Mjsa9tSUQApNewefr9Mebv6+c3Xh1nouLqM1lkKQWBLKCFggANa8nhelzeb0eV7fle
7y6dmF1rZrwzrt1cvnLr7fM6MOrbxznrs6vA+q9Xn0bN70ccMrOmErWEogACBKSLACUMcc01y49n
Njr5vmr4vXrw3mdWO7AwWIAAAABsxx3GkADdp3mh6HnrbiRsx616t+q9c7sdMrdw9Pn89eju59xu
17pLwcXoebL9Ds592Zq0dWm683Zo6+vD0e/g7/X4/F6MOrh7OPn+p9POvm/Y9LytYwyXpzlQw8r2
OPh18XzvS5ePfhdWtOebYa2zE1zLGxZEzy1l7N3mpff+g+Cud/qOP5r72s/U5b8u3Hz5u1duSxZQ
AAKAssUgKoUoELBQtSiygCglRFxo16jqefhHpvLL6l4uwoFhMnH4x9F43z+5Jfb6F4erZzzWnyfa
7I8Tq8DQn0XX8n7bXufOejzHlsZnruy152b+ff6B1d+HjufvbPPp7F8a6z63P5+tOvVq2Vljdhz4
+h1nl32LL4270xzdMFSy2BXHxWezPCHr8Gv0Y8T5P9P/ADXGun0PL9fzenq5fX0+nzZen5vL4PtZ
c/P1a8Hidnm+1MerkvzvSuu2Zc12d3Psyx73do3ZWa9mqrO/zvI10+x5Pm+qva5dXpY56NefQnm3
r5czLVzcusephwbImnu6pvs18+vlejRhj0xs+X+q3any2f0Hy1z1a+jZnWPZ52MvvbvF7OGvWy4+
rz72ed18Xqz6mjhvd6HPz9S8nqauFj2MtGy5z11qJSY45cpt0bNOdNGaXW2YzW/HT0O3H4v1GjWf
jfU933M8eTb0zhvHzPY5Mz477r5T1vRO/wCQ+s8HF8vVls68/D7Ono2m5nx1hlt1mGzajVt3b+d0
sPO078+Po7Y083oYanm8vs561yZNOcbs+jGOfdozl6McdnHfm69+ffP1/leju8/b4PZp2+jhlt1b
Y27sM+mVKx15/P6ndyb+StnsfM52fT7Ofr56mrXM25ZYcN8y83Scfuef3ae1t9vwPN2+d8j3Of1e
bm39e+zysPV5Tn6MMGcccNVnTjz5W7stPsZ1r2Z8XLevZr3W4cc9Peebfj9Tz6+35PH5mHLsmPXl
r0d3U682HTLjPo8Tt5a7eXsz43xdXt+f2x5Xne7z98aOnXtzrLHLgjdn191d3pep8l5/RydPLe6Z
dGrfm39GnornnTjWrPHkhljrzvqxmzlrby6vHxO/gvR1xq242sdvJ3Vpev62N+J9Zh5/Perka93z
cseLvPQ3+b3yXfxZZbunCcZlhccTzfnurn9/DLdp9256+nL3sdfL09GHs5cuXocXfyXr8unXz4+h
nXNt2fMZ17urxeaz6bLy+i59bPhmnZyZ9R5c9Uc118qb9nLuTZrlMurHnOtx7K3rFBAJr2Dg1epI
87PuxMNvLznovPzrta9pJZQECaubuS+bPThzdPPynovN2Hcw2WQEWUCTTv8AJzvPy+f0ueuTf791
PivotW/j36deWvn01atmnNx827DVcd2Jhoy3Wcv0nlbOnL3h7/KBFgBFgBFEWAJFgBAOHuk15/m/
ReRy7eLDyehngTddBejLlHZq0741OjNeF1043XsTg37+qXx27XqYtm2Oa7eo9T5z6Lizvl9W6dTP
mxnXOeeG7Uww6dUs870uPLo7fN9bAxxmufzff4Fx9T5/2pjZpy86b4u/zvV7+fu9Dz/Q9fl8Xbz+
f5Pd9pyfHY2foPzfhM36H0PjbqfoHJ8l6G56fJrZ00b5nXJh24y8GPfhlxujRGOGzGMVxTBbZioN
nXNcPT0SX3fuPzP9F598Of2N3p8Xy7qw78tNyxoAABQeR6/Cz5OjxNfHv6f1fwG9v9Gvhe1145hB
QAYmbi5T2L81xZ19Vx/N4S+5zeXjNdurmxXpw0Jd95qvb3cmeem63h09DL5/PfH6Tx+LZcY+t5e2
dOvV7PgOP0uzd2V5/VvsQHg/M/oHxrGP1PH0O3qaPK0MeNy+v5GO2W3RsXo6OTbqYcnpceGvq48u
evqN3i/TdefP19GW+cylUCxYMdB0vI02e5r+e3HpcW30V8jT9DU8bv6aLOXGuvq+c8LnvT5+3ZjV
9fh6+fT2HFhqfQcHqcPl9PznXrz7cPmfp/k/s+U6Fnh9HM36e+LccvTrT5/r4b6eX28/NrfrY+Wk
6OfDXrfd3+dtxy9Ti09OOfPh7GFeX6G/dOfJhn5sx1zz8rOnnx7zh6OrDHo9Hnw6fJq7dfPqevw9
2jq8r5j9E87pz+Y2ep89c75nnLu7PG2Zv0d8ju47x9XRcXLh7tZo38WXtmr0vIjj9BPK6rejj28Z
lnybJM+jg3NDOJrzydGPTzDp1717LhfNN+zV0bmeGfzmph89de8/Z9ngdmenkef9h8brnh16NWp6
S8/PXXdXRlu269nLVw59+p5vNL7N9vRz6J5O3bxqws6GWerTN9erfjLhrzlYbdG7l05vI9/yd59r
6z89+949vkvO+m+Y6c9ueGyzbv8AP2bnbMdes+X2eB71vRrynPXzmr0PO9PLu+i+b+l565Md2/hv
Xr6eHNz07PN6Z9ju87tx0+w+K+m+Z5a5G3HvxueqaZ3TuJz7uC5mnTOnPry39ON6+rHHlvHjdVt5
NuNndz6+hro9Pu8/h01aMm8THKRruWnU2dOrCubv8z09dNuXD2+TOeM0y8vPr5fVz2Z1Zq7PO37k
+4+f+m8/bh8D1fP6zp1bua52zdw3PZjMdZ79XBzJ3zdo59cunTx8NdXlc+XbGnp07tRz5zU6tXof
Sc9/Oe/z9HPe3xufZrO67NHSaGvK7w8b0fN7d+qcmxnZerDHLZjs28sXk2+AzybMM/Xw3fT+X73L
ev1unl69mra9vzvOz7cEy5dWyzVp9XGXzsOjnX5z175vPp6HJ6vRrHm9Ps8tk18qzq6efZZ0uHI7
Obop5ev2IvjdO/jTuvjI9FO3U17OfWdjk6SgAgAGGcOXn9IedfRwl1buPTZ6LzsjuvP0WNWn5Tn2
+g5/Dw49fXy8ZH0HT8pu1j6Xey9HDovnYnpY8XDnfrfOcG/l19T3OTs7+UNOXm7OPz+nfjp2ceuv
h7+Ca5NGOWZ187bJlt07edurZnNezu8f2Pq/ODcSiAShFIACVCUSASwAa9kX5TR9f53m9PgvY1c9
+Y7NGbqeh1az4r6DbvHzj6jPWfmfR9dvHndP0PZ5fX4HX6H0nl6/nPzH3Pze+mjux5c57tXLJrZh
pus9OfVu68/Mvo6tTVtuUrV15L4mn1+SvL93532s49CYXO9ejPgXh9XV5dz0c+e/eNHua+z0+Seh
5/f1x8xx9nB4PfcduqliLAuzUt9fPg6erddeRnAWUx075JzXdjNcnN6vBiaWSMbkly6+XKbwzyyT
d9f8V7XLv+kbdefThxaunye3DRE9HGhVABYBT5X5v9F+E5XRM8eXob+ar2dXkXU+s9P4PZrP1/H8
3Y+h5OTO3OaZLtmqRsmCLLVwm7Nea9e2Xi2ejvzrl17cLrm19vj3PTdfdvnkxumOjsmpo6+Tmk97
6D4jfcfSaPKHdzaOe3bo380XZp3S5N2hrhXfjejPZLJtwzudmWFsw0b9XHWX0XgepH0m/wAfV34e
9PAtnr8evtPLn0GR852evDm25lNfnp6rwaezwafQPNe3tzryHq/OeL0cKXhq5YWWb7e85Orz+zPT
q5fpfmufp5Ojq83t4fH+3+T+s8+s8c9Pl63jy3+jE0c3J6M9/X5W902+F7XXrXzL6zVb81u9TK9O
f08dfPGzbzaJy9aeNuuM9XqZp5F9pL5mXo4898rpy6cuHD1PPljq3zWrHg7PD12ac8830OfRq6z1
vM79PWfO+J+h/OdefmZ+b3ak2sc67uvwujnfd0cvXx068NmnPw+lxejhN/Hs1NWOe64149PKu3qx
159evK5DOaZnt5+fsy6uzzuvMu9uza1fJdJ6PzWvHpMu/LfieT62G/b6v4/m+2zr89x+p+W3z1br
p1PS6fL7eW/R5M9vO8+7m7u2OHq5OjWMcWTGqXGs8bxy9e3y+42bvL6petgzpMcc728Pbyy4/VfE
/Ta17XwP6X8Llx7NO3fPK2Luurb1x8r9Duxq5Yzlvj4/W39MY54JcOnn6OG8ePdqrVxdm7tnZ9Z8
99l5+3mfL+hwyasLzd+WeOe7TDLPTG/x/X87WeHo6O1MM9mXHphx546bJlw2afd6s871/a8vm8t6
vOzw3mC5Y4efXpTHpxrVtlOHt5t3Xno7XN595+TjOudezZjuTXq9qzjj1Zr63xeHX5+l0ZZd+eXP
s5ddfR8jtxvPj6dHZfPz5buvh12cmvys76ObLf1zMdPnanV3c3uL5n0fR1cOvb8zW86Nue3pjk7K
rGTWvNJzNcT0Jrrxb+rUzlz5455bMNnTxvmfOd/D6uOzPX7Ws7/oOPHHo+s8jg3+zy9A7eeKJjnD
indLDixXZ4n0Web52XZxWaOnWTv1+b0GGj0cl4+7n5Dpmvvs19GjnO5w9Znz9EPJ5foEeT6Lzj1O
Xh3V0Z55WZ48uJ2NW4iiASwCkUko5dPoSX53zvU8zx+3Xp37MXku6VgE6vpviurtw+y5tHpb5+d8
17Pnef18M9PVz3y/T/Ndnbh9Ph43N6OO/wBP5v6Xh25NHr+fN8nH0cEMc2G3Zht5ufrkzZljpMvp
Pk/c9vl9SHr4FgBAACAEspFElJjbCLAQssAIBjlLMFIsqgX1/HZvu+j8lv8AP6tPm/TeJ8/6PHNt
5ufl7sdTj2dNuezdOvty0ZZumdbPjsy18l83Tk5PWz1PBvr8HTOlv23PHfT6e3Ly9/pO3HTtrpyi
zSeh5/oY18v5/oef4Pfu05yzEQCgbOvh6N3r2Yts7jRQtgYbMIZYZxz8fr6sa8u55Y1gziy5bY0e
hn346fbbvO09OG/kPX5SN5oKllAUAHhe9jJ8Hx/UfOcO2iZTPSEXK684ydPTN8vV6Hp6z8zPo+Rn
x3sZV499jA87b2bprh2er1p4fX603jj09fNZ4Xm+983x77tXXruezbx9e7ZFmFyy3iY542aZjlJ1
5aOidNWyy5uvPG58/bMD0devc0t1xli05uevnx5uqcljpy55nXZ2eX9Gvo+nht9Hnysti6vPPUeE
PY87H0E8h78rxvQ6BUqhF2a8863fDfd/H+D0cOzzvR51r2TOstunPrOH0efzNX9P+N8eTXteTt8/
fLq9zw/c82+TX6+fLfh5er0ern5e3StdPJ3zpr056Ljo0aeWPW5fM7NZmHSjH0PN6Zj2Nnl5TXp6
eXCuzGRNueFXK21Jlmef07dR5nL6OeOniduxJ35+d3+Hrv1XKun5Hr4vZy+Y3bNXt8/q9ny30HLf
Vpzx49Me3n15vr9fz/Zy17evk6eesMeidseZerHpNe/k3XrnnhpdN7WY2Z5ehOfn7+jHiz36OmXL
i6fjOucvLdHTOPfsuK2a+jFn0PP7Wp8h7G34zpPrfN+g08evwF9jze3Dj33RXpdHmdfPXX1+ds1M
9OV3wuGzE05XbOk2TJrVr6dSasebozOjRlnbyTqwzrJkxrwe7Lm74/Rvmva0+bv8bu593fhujLOr
li6Y24i4bGWdaM8iTDOGvOZYuhJqa9/ke3t6Pv8AJt8vf5Zn5/bjNPnbPRx9DVoq7M8JDs4/Tmtk
uXHWWjLiL1XTpr6uTbpu+n877Dj04Pl+jlsx8ri9Dvzcf0ngnmbJ6HTPT7OGzx9rNaGzVwpv8t0d
Jr2OW52dXb4lvT09v0vPflep5Pz67d3JvnPd0aOmXm5fT5+m+OdHTLp23hzz7PL5s9TXvzx1Jwp1
zxZ+71W8n2vxTh09Lb43tb59jDzNR6Xz/tZm7Hj4tb9Xm47m5a+mmrXuxtxm3Sepx8Poc5lx9fzm
Xm545+7z7fd0ery6eh7vzWy9u3Rns+j83gvdiky4tZ6Dn6SSiKODH0JZhnx6z0OTdtl87R6+FnAu
iO3LnzMufohp6Wk7eXVlTZ04WZuIdrXsJo6Evja/cxl87u5OM9bRx7029GONnROPKupAFRSQEUcP
lfR+N5vX4OHsc/k9PnujHU146pvl0Yaqnta/P37zp5/c8fnvbu89L6evgHtaeXVXq9Pk+3rPVs0b
M60+P7fj2+Z2bMM52yuWpiqXizvTOvt2YXP0TDP6XiIoCAAiiAAAgSKIQASykoQSKJKJZRLABLC5
4TOpr2uPfU2zOsLkudmzndOeeEdOZFmSTOsmNs18/WzrDKzWCyyUJUWyxHfwd+N/L+f6Hn+D3Xbp
21qEAArdp22+hcNvS1dkYXZmc92a7Jld8c+O/OXTvvr8uvy2fZq5aib7eXp9f2enPy+6X0+ZK6cg
sSgWUACgELdXKd/zPp6Mz4vLo4/P6Lm+luvD+g693TGvd0dO+fHh2c9mO3l5z2s/A1Hv83kjtuOU
a9fTuPJ2e1vPI1e950vy/mtvm9fJ1a+ez05hu3nK6tk1qzy029Tm37xhr36WcO/kzl6GNrLHFZjx
9nmyb+rzdcnq6vO9BrHVv14uqbZhry27pdGzbM6nscnqr6uGzs9Pn8zH2WseN6PQAUBKIoiiLBni
O7yvR1fO9nwvN7vzWsevh5/Ty3lNm/Th3df1O78t5v33x+Z1/NfS/I75/Uexw9nz/Tjjrxy0dHF0
+zDjnPt6/Po33fPj61Tx+zezOK7MZi6tuvHTTyej4fbhu6vmb1z991fBfbctbtXB89Z9n0/DfbNb
c8LGVxi53n1HXypLyurzZvPl1bzlz9XdLwdG7jW83q7fLPkPe7nSdXn/AEnPb8Ft93LWfA1a+jUv
Vx6833Nvh9XPXb2edtxfR4duOWn1PO6uk6MuXt53Ftz3NN3zTHj4vmNzp4Ho9c6u6Xlcui75eb6X
POpxdnLhlz8nvV+dfZ9vwvbH3vwPu/S8+v55q7+Hpy0b2izvy5erGt2fP16zu4PT52U6MWuPo5Nt
m7m34y8F1YpePo4t597HVt5ddlwuLzc3oeV2x9V9L8P9rw7fnc9nxOvLdnhnLljldRsxaluOMbmi
y2XHLKYSXHTv5tzb26fUzv6r5f6T4jlvLl6NXXj4/Hv2evhrx9LjjT1a/VzeH2NO3j115ZcWbj0z
XplxT39Tz+v3Pc5dfmuG4pl871c/fn2+ozxef5rs8vtnL7HwvoMXZjzbfNvZq1eZqbtWzZow0ejZ
wd3l+nph9B614dOnwNLjfH5Pb+Y9XDv6OPoOvp49vHfQ5+e3o0cnL0zv07tmox1yuvi7vpJr5r6n
dt57+b4fV4O3PN5jWNuvq13nlz6urG+jm0dzPzHT6W3revT53qY6Y7eLRe3oPK2617OvV38efn78
5xnN8r6Pm+vjnu1e3vHq+35fs8vRNW7T7/Jx4dzrx07eXUne1bTm0ehDi6s+Q63Dts6BLBXJr7yc
3Tz8x6F5ek5NHpDx+jt4ZdmHLuM9jFNNx6TPo8/Qvdrx7rOToz5bOpxbDoinLq75L5G3u5zn35CX
mzN3Tx5HU5M7OgEWUlHz+72PE8ft2ed0eX5vRyw7eYEd3CXPu87bLNfZx0DN6+Pcuz0uPq1Poc+D
0s606evXNeLy+x50cW/l6+TRpureM+zDpjHu6OneMR9LxiUAWEURYRRFgIhYAJYRRJZRYkABAAIp
AAQIlgShAIBYEoUEIAAAEASju4PQzv5fz/Q8/wCf72eAzw26rAgFZY09jZh6t7ct7NknE70cF7Rw
YdunWeXZczH2+f6ZfJlvfzC6lhcgAAAVr5k7b5uC+ph5+yM8EIwzNuvXsLp25S8Py/1+rN8f6Th2
tdE14b556evJeLR7fVHz3R7avH3elTRltsoAAo8r0Pmsvn7L5/XqxZLp7uS3Hoa903nOc25vDHZB
dHPJ6mGrssa8dGp0OTBejDVsjp1XNnHowu45+vXL5/Q2efeGPRsy0bduyax+r8X6rvztl68QCCgL
AQrm4rPWeNrPZ4OftODH3czw/V358euHh/T6Pnev887NnD05dvXy48+ndlr9N7PL830sevl2fGfV
fM74fbbfJnh79fmel3anh7vX1dLrbPB7dPc5fO9HW+H1OjyK9TP56y+/r0b8ccdfn6Hn06Pf87ef
Kvq4Wc27o6M3g8z7Tjr5b6Ty+y33cuu5vJnj8XX3ez4rGX7XzflNye52ef8ARLObqkaefo55fO7u
3hnTDp148N9WnXMZ2ejyXne/PxfV7Z8vg+p8Hpn53ft07zhhszzcejjwX1dvmb8Xs3+fnm+tn5We
L9FfE16nt+Dw+V0mbL0umcGzPFx3bPWxeT6G+Vueth4GO3u/PXpk083R6sZ5cSXxOzr86vqvk8/o
sb/O930HzXXlN102d+fF189TZkrb0efsuerHOWc9xkmWrKLhsy8u3sz4PUt587lys8z1OSvP++/P
vrdX0Pi/0f8APcVs07d42Vc3A09ee64Rctvnbo7Lp3c948/TzU5tnJ1x7freV9p5vRw/Mex4qXVe
bpz5dfpTpnzp17dYdMy49NeV5jHPHLTHV0b66N/N9Nx69Pmep8jEty3jzdvZnuY+f6vCfM+n2de8
54dmrz71aObDcx6ctNmzl7eHc9LyOn6KXz/stHJx6avPcu5zejrtnT8t9F5/POtz59sdbRy5118G
PVth0Lk1ZatTT6mPr16W/n4uHXp8jRy9ec14brMuzo33n5vL6Pibzt16eianXrzjv1dXNOmfj+h5
uu869+us8sunlMkx4Rzb/A3jzM8M/d58/rvF+h577s/Q5tddUPd8+LKA5dHoxNO7k0HpTTvJp3Dz
noSzDZxaz0HJ1qBo5vQJx9evjPQnF2jm6S+Nj7eMc018xu2OldHJuiY+hp5U9PV5ndWt2rNezn0n
a5ekcnYOHo3cw58tpz5d2iXTloyOrd52Z3OPfZlq3w0fK/X/ABPHvzjz9AAANzPXOuoXkB27vO75
fX6/I+jrHfpymtXkdSa8fR6viTLo09kzn6uj0u3FK93nixAIsoABKIBAAY5RAEBKJKIsRKqASwAl
RAAEsEogJQgsiiLFCCBUBAKCAAJ6Hn+hnfy/n+h5/wA/3AbdWS3ESAqyntfSfNfY3tzbNtmNejPl
59MmvGtjVtsxx69yZdmPP6PPR15KBhoOpwYp6OPndCsM5HPlq0r2Z+Wj1tPLmbuXLE3zn6hhd1cb
1ehPH2etkvmZ+gTnz3FVBQUFlAFlgw8xfW1/McGdfTeT5Gma6OJq59Lv0bee9dxW247k5ujThc+l
jzerHFj7SvD1e9lZ5mj1UeHPdWeE96HhT28Dx+zo50wcNr2Nnh9Nvdta8a6cte4w252ve9Xy/U68
qluDHWbnl89nuYeRtl283b02eRl648/p3lWIpQAsNu7l3eb0eL8p+g/DeXe1zY5vp/ZfnfW9O7yd
unt5eff5e3WfuN2Gfy/ThO/hhq2jCZ6vdNe7k3dLu5HDZjy4dmt8fV6nNnnls592c8nLj1644buq
5uGzz+mNu7Zk14vjfW/D9cfomj5D0F6/H+l8+Ovg+qwxv47g+/V+d/o/l+jZv5dXZGroo147EcGn
0015uvu4ZM8/Lwzj6CeV7HLrybe3y19Hxva9Pd+D0fX/AB+8b8ajnw7NdTdw416mXBcXv4OPHbLo
3dkkuV56nVj7MY8fpcNl06s+s7+fo6NXwPS79knm308rMde7XXFo6dXK+N7XP42dfVcO3sxv8/2/
U/Jd+OO+azu6PN6ca6tWeeWvdOTpju0zG5w07IZeb6HM12OfpTLXsc+mMzkeX28eXfP6N8j7d83b
4rfzb+/Hblr0xh0+b6HTMJnbg7cNYy7PM7cXbpkl5uzk79PX+t8Ho4dvm/O1bO3HV06rqbMtOc1x
9fJdcvRuOrl0mvDo0THn1NvVz/eY3q7NXy3LphMs+nLDLLLSZTKLo36pZtw8/m6vLdXSatuCzR63
natzm9ft+k575MnLm7fF7nXjp14a9a257tOd7Jln478nnjn9Dhp27M8aucuNMNW3ecvV2e7z3ePo
4tsfM3YOfkZ9HsWcOfZdMeTr81nTo9zjPP37sV38vRyumndji1rw3b7vDdNcnN6OOHLPdp5ejldX
ynp+X7OOWePq9Mezv1/Y8e/hbs8Pdwizr5gqLACKOPV6BNO3l0noNW0gNHJ6Urz+3LiTuebvOuWL
r4vRicPbOE9B5/cZA5vP9mL5Hfu8+Tbjz9LW3T0cyaN9yTpy8jZXoc3RmnFt6NNbXHTr13M4r2Yi
8mJ0cvXsPO1+pql59mjXL2fIe95XL0efj6evz9/PdsZ427G513KHXx9PLNBeYDs4y+l9L86l+1y0
dkuidOEvmeB9F4Odcvby+nrj2j6XiEssWoICkohSSiVBKEsEokpIsEoiwlSwWXFZYBFglEUQAJJR
FgBBQCWAEoQCWCUJQEJZT0PP9DG/l/P9Dz/n+8Bs17K1iChlfc1rs929Pbnrwujzd8MM88b15bdi
c+fTzdOW6+S78fT1+PzY37vPw9bRt5Ovn7M/G2y+pz+d1Ls4dvacF696ebt9HfXid3oZWeR091s4
urKy45gWA+ZT6efnevHT9JfA+nZ9W+c7a9dy9FmRUlgt18kd7wPPmvpvJ8bTnfTz6sMb2ascJq4M
Yykss3aN+boq2zdq3xpTNdWHf3R4V9jQede4cOXXDRnlFueqG9zxfX85heWW/gzm9/D63AnT0cfs
dcTbu6Dm37NJ63V5nqdOPNzext1jy8/SS83RYVBABRYKQsFBXBwJ7vP4vWvZ4n1nH8z3fnnredu1
jq7O76Pl6vznV6e30eT5LDsdMfR+t8r9R4PT1a+PLlenHk83efa8/wAno9Gc8PX3bvjdrQuHP6fV
enm+lq85MtXr55z5+/qMc2rbhnGvV0al27OaJ3/C/Z+J0nN9J8z9pW/k6cpfA7e35eX6p8n1zXsa
d+7N3XHO5AksKl1Jq3aow8/vuby8XR5zn9Hp87snXq57u8fX0tXLq6ub5b9L+b68/Ezx16mzXuxl
8/i9ny+2Xra+3nbN+7nrV1Y+tzY9OjpzfJ2Y93qbvPvVccvZ1c1m7Xtw0xmULcMjZdeEePnho82/
L9/V49fdfOX6Wb/OsPpvC68uPPGaz258HZz31a2yMNW7HeNWWPPWWWW5effp507M/O6c3padmdcn
n+x4nbH1v0vw/wB15+/57r9Tx+3G6erZZjo2rrmuy6mG6XFz2SZYY3n1met5nsN/VeH9H8V5+nNq
rvy5rubmncsuWrq15zOedNt0auqzn39n1mOnzXobvncu7kxqbctOvU75qzzWWOUbuXm5UuzZjuZ8
99Cs/D3e6vmfR+ry8t7dXP5+m/GeV1x6Xncm7PovRo9Jjz+3PTOXXccfJvy+D0/G9vHqyykt5cru
b/V3exy21buHbXo6nTlq4d3ZKyYUx08MbevyumTt49PPNb8+ToN3Lu6M71c/bMPPdHB0m/dr8vV9
nPz8+Tfqx8rWebDpy9XHm+q8j3Oe9nsej5++0lnu+aAhSUQAEAlicmj0pZr2cWB6DDOUSmjePNvo
4phs4MD0nJ1mPF3Dg68+M7L53YbFE5eoePfXxXRz44RnnZGqXbW3o4ddnpTl6K0YdpObox5Tscm0
w1dw5ejXznY5ek1cfpJr5vm+t+f4erzdEnDqQjHp5UuBcDJMXVyqCbvQ8n3Jd/qeJx519Nz/AD2G
dej6Hk/QM8vdk9vlI9HEEBUssAASiAiiFICSkSiASwSiSqiokqyAASwAixAIsIABLCKqFIBLAESl
gBSLBAehwehjXy3n+ly/P+jz3v3L5WXs+eaHpfV9M/J+v9B7q/G+pr36zt0zV5vRcWUrPLpZmjRx
dMYbvK5e3D2MPO28fRjjZy66d3V7PXn5HV6OXp8nldPas5OnMY5FkUAAKAoEtBwfn36F+fc2A49w
Ls1Z1suGte3r8a6e7z+Vkndr52d7WrKO3Xu36cE9DZHl32Nmb4eXvdEvhY/YY18Bt2c6ZXXubwzy
xk1UMrE0lWYykxUQUBndaT0uL0vKZwst1ljnrO32uD3NSbpul0deXTrHTsl68FgqCoLACgFa42Xz
NCetwc3XXnT6Eed6HncR7nma/UXh7+rb872fBXt83E9bX6PJy+l43fL6Pm/J+t5H09z4X23516Fv
0HR53s+bp5+3u849PDz+rtvRu9DUaejn8vGPd8Tb6EnB2bMrrXs082r0d/HY2YY5uXNgc9627T0x
s49m2z5b7b4X6Xq+huNxrLXzk4PX2ZWCy68qzQJMpWKyzKTI18/XrzfN5u3VOfm+x5PqzejPXlj3
dGzbeHDLn3cms+bPV7u+fjt/NtuctW/Ca2dXPtzc9/ndOX0e7y9+L16ePi3N2Hfydc9Pp+f6em/Z
qm5MN+Jow3yObHdzxm5+Y6ubx/bzcbow8nTzuzt8jc+2+f8AM+w3fh/P+u8PePOjPWejd5vXnW7d
qyxcubr02ar5/f1zcsdU1z6t23WOTsZ5uzzPT5I4v0X82+0a2/H/AKN+bxu2aN28TPJNY7ODdqbb
ccNunHTWzr4vQ1M/pfmNON/e/BTUmzDLj1numOyXVsa4y5Mency4uj0Th6NnrZ33epPnuPTDydV9
HLomukcfq5zns1cXLfTwN/Sa9uHJZu9DPxdTL0Oj6vnvHp89jW3xt/H1z0aOfe6c87MJZhcczX0l
53Xn5+HZ5vNOmdWnuy6Z2YMM3X6XP9MvXjw451zb+nLtyww3cxbw4S7uXHil6N3N2Zc2ntxl0kpd
WK+hj5szPS1ccOnTrmpq5uzzuucN+jr3nVz9PPU26fTZ9b0+b6PzenPm8nr9/m65XfzSWUWACWAC
WCUkWAHFh3yzHPiwO9hmASoaOT0h53blxHc87cdfF2jzezdyHU87qN4pLDm4/VR5m7q4JZnpySs7
Zhv58V9J5vQnVp2ZW8V7MUmXLidXJt6Dj6M+Y6XHsOPwvsGN/E6vu5nXwU+4wzfi79lrPkJ9Px43
x8P0bG/Gv0Hdh850cO/F9r573PS74+P9/vx7cdmtO3MNZAAIARLAKAiwAgEogRLAUhAsIsAIsILA
EsAEsRLBKEsBTGoSy2QFgqUkWCWFlKAlEUTv4O/G/Pw+w5/B9DwuzZzaz2eZcGfG+i2m8PS+c80+
xz8nsxvZfO65devbhLz8vsdVnj6Pf5rn5me5u78fF2+/t5dvK9De7caN8wKlFlLFIoAKAABQJcfh
fvOaZ/NnqeZw7wZ1llryNmrPWADbW3v+l9TWvk931Nk8rb6Om55cpos6nFoO/Xq27xqw9LqPA+X/
AEn5nGvkdu3l59+nLHOa0CKVcFiAECWAUB7Pje34cwyxyus9W3XL9V7nB6LU1dPVvjhuO3BZaEKI
FBzp0PG569jgdx5mXtZHL06vLj2uPw+w14e9tXzvQyglF6Off4/T8n4/ueL5+333l+XxZ9Geho7+
LyvqvkPcxr532ePDrj6XLn6fH2z3aGbuy1a9S8V9Dbn9PTlw10tGXLVY6+mduvf0+nHmYzVre7n6
NSbrz4uXRjqM56tfnanLu83v7Po9PL2cevT1+ZsmvVvk7Ln1L5+xOy68jHLVsltwlbZrGGvoqc+3
Vry2cG6y8fS1usZYz0ehs09HHz4Q8etH1Hzuv0Z9D5b2NHp5/Odfp/Obx3c23XLydbTuen2fPdXP
X0DyvQ566OvzPU6Selz7+k3zLLUmeFTDZzaperXjnGjz/T0y/FYfR+DX0XNPR5686d3LzvPw+zxZ
fSavkvs935vwf0j5bWfF147emN+7z9ub282zjidNbnK3LeXrx2GvbLi5688ZNXr+F36fefnfXwy8
eXL2duWM7M8a86+hTz734nG69Jevizs6uHPIwnRlm8WHpYHA7ZXDj08+p14Y+hGeWOld323wuXLf
seLNm5z2zTPOZ4c27mjLZcVz0cnv6nNq0+xL5f0/r8PDp38mnh1ne5tnXHJxdHW7te/mmMtWOeLn
qnOz08/C1k32sLh0Jo59mW50c3s+5z3wfQfP6s71b+bf249U5uatuOuzTHPHjrJy8seno81Z1aY1
MDTubpjlZMNmFmF2dC+Tp9HPWfK9XVvOrndcvn4elkeD0e15p2/V/E/S8unT8j9l8f8AQ83n+9x+
l149ff8AD/U6z3yiLAKSiLCBEsABKY5DhnfEwz4sTvYbFxUkA5+geZl6OJjnw609LkdS+d0dPMnQ
87dXWQLBydZfLx9bXHLdWiXq0ZdZy7py2d+/y9p3Xl32Tm7Bybtmg6NXPSXpFckOtr2WAMchy7Nt
l5/nPqOfh35fY4s+nHaOmQsiwlCAAAQQBLAKSiLAQAixEoiwlBjkJKIKSyRLKAEAEsQCVACUIsIs
sSiFIBKWLBYgKAAd/B6HPePFt5vP6s9PoZnmdPsDT5fuNZ83D1bZydculCAtyxRnjKUIsqxQAKQo
BbKALKAoBUAFEKYeJ70T5LD7C518n0/R1fneD7FJ8Jr+/wBK/H/Rbdderp8zXc92rTuNWj0Os83f
6JeDq2kqValHN0zN+Y+Z/RPlePTwt2vHPXKE1lYmpLLMViRRBQiM8N56fiev5Excplelw2Q+39bP
d24xWuUoADgTveFhXp8HT6B5G/1acvReKXsx8HSno8Hf6leF6XaEWWUFUiibdezx+n5rh7uPy9/p
/jPrvkLerx/Q8rvw5OqbTVz+h5lnqZ6t/Lcx15DZtRlljM3Zlqyl2ZYJdufPI7N3mQ9jX5dr0Zw5
HZOTFru4Zsuebq7ujlfD9fdnzvP5/s08Ps9Dn6zXu2c11ty4untnZnrG/d5+Cephx9NmVRLMdcZ8
toaUbObdm3sxllx7ubKTJZ87pNWzzOk5fZ+e9P38fe147efTx/G++vTn+ebN/o2eNo2bo4d/Rprp
3+Psl97p8Tfm/WdHyfZp9Bj4vNZ9Bw3Zpx7OvhNm7y+2N+roxzrVM9eWPL1rOG4bPNvn8n3Ne89X
ufAevvWz5f8AT/Bs+W147OvJv5Yvoacd2LjNlNOnq8/c7nFnDV2515voYaLOrDRpL3cGGs+nr8wn
oa+fYXXupzTsq8WPobk8l6UPOy7ocmzfkaMs8YaMtWp2dHlbjr1bdvPpz7bzmfTz9ORp5l6NNaZ4
7OLWenDbV3zu+r5b8r2vK4uW7yY49vP3a9bU6uWdLphqyz460p5p3cfHs3nVn0zQ01M+bt49To2d
vu89/Nen6Vxv1PnMvC3v1PL6tPTjt5s83LrmXJx69vLwadN2jK6zrzmrTbMc0a+n0ZrxsvpOqX5H
0PRzjVx+v4t1fqPmejM6/J3Y87h5OHD6eW3blq3nDfh9bZ43vbsuXp9LPm4OnHk0+rxfR8Xh+1p3
Sdmvm9GvP9B59eo5OqCyhBKSAAiwASygGOQ4cfQxTHLiwPQYZgAAHNyenE8/sz4l7ufl3Jo292mt
rzdp2JQCKOLz/cS+H6ezzZe/k5vQTn2d3HWWzjyPQnMs6cba5Me0nP0a+aXbqu9NXTr5q7HJ1ChF
gwzHLj1k17NWo6nJuNiygEogEoiwAhbICAiiSkiiSwFIBLACCyAgQsVLAAEQEokoSwsuJZZQEmUQ
AFAAioiqiod3B6GN+no6NHPsFgAAVRKUAVKKFSikoFBKAAChQVKBKoAAALKDSbnnYHqaOEmzFDLD
PqXiz9Onm9e+gS0ACygFAso07rHxHhfofxfPfnZ63LvvmpNbZhDY1q2TAZzElYi9nF6MPP6+O4yy
wN7Onk+s0+rq9vLGriPSw8RZ3+f6HeeB2+mJWMubzfNT3vM5/UXxuv3KcvSEAAoLKBA1HTh52zxe
r4nq7PG59vR5sdDXq+Bu9Hp5/K6voebN4fns8+vPp7/Q6Oe/E8j0vP6ZlvoHnfQ8H1XLp8j9F7M4
7+C9z1+iz4f1fV6j5Do7O+zw9nn+tXJjyemadWjuTXux2Y0ySXb0cGOb6u3xWXubPC6cX1MuDdz1
1cm22aZsx9LHPqx53DTdnoxOjzdN5evp87bLs08q8t3o+R6k7tvDXfdhnvzzx3TLiDy7x+d9fwvX
y7O3Xt06sssvJ079GjPrPX5fM7++fJ4vrtW58Ds+3+Hudmvr44553ysevzNce9u8Hsxr2NXL04t7
+e7dnT5fo9c4TnxOjHbpJpy5c3TwdmvN1TXhrnt5d+dZ/ZfCs9PqPif0XDF/PZ7fk9+XNu18m8+p
p0+njXH075izHDls79HnY7x08+7dqcGz0cpri2SN7LqTe7o5t+Jy4dPNrUJrWXocnRjGnR28Wqlu
tXu07OfPju3TvWy6bZlj16Jy5NnXjc3b5tzOrh9Dkupsw7q5vQ3o269LOuj1+zj49fY+a5OzU058
e7Muv0Ms555dVdXL53Jp6nnY9WnHv3a7NmF4q63Ty6nF6fpe3jXz3tet4XPp6vDo+Vk5/Z8n3+3D
p8n0dfPtp26ue56+fkxTLHLRub9fD2bmeHV6ONeN1fUduN/K+ju4493X4HLd+lw8OXbOe3XsmZr3
582vJhhfDvnennl16tHTLsy+iTX73B2Xp2aPM9H2eLi6d3JrOFy3Jy5YY1hh14S3r8jcbJ0Y2bNv
BsOtCgggBKgAllAAASZQ4cPRjOvZxYne17FQEqoUmjejzcvQxMZy4Iyz6DK+er0GORAJYY+f6Q8L
r9Hjzrr1eL0m/Lfts5Mteo7tvFlZ1zn6LHP0xePPpxhz4xLtDdOTKumUCWWA0b0vHl1RMc+bFepz
dCWVSAlEBBYlEABFiJYAJYWWCWAVFiQEWJYKWCWAIgAQCUSVYQWCpYgAKUBAAEUTv4O/G/U0b9OO
0KRRKCUgFSqAoKFAssAqygoAUSygCgpKspE5jqeWPS08+wwTUm3LR0LrnobTyOrsEyAAsFBZZQCg
BZQUCBSef6I/OeP7X5Tj14sPQ346+RPc0V5T191nhM8ZMSpjb2nH27t6+fyfS+jZ8Vt+t36cH1Hl
benPdz+p0MeJ6HYWUDVynfy+XlY5fb608r0tlmhQIiygAFA0cZ6fDq7Tz93okm3Xs4d/P+Z+21eH
0fEXd6Onl46vM3ju4vN19uf0vX856/Dp2Z+z18tcfm+nvxfOXK71btenrrv2cWGJ6G/x9PKe48To
p6OrDR3/AD9t278PWPD6d/pM/F9fudlnwPZ7HbZ8ju9PFOGzEz169hMrnnTr48M32dvhZ877t8jo
53bsm3c26NtTxe3sx9HPxb7PPrOvXlrz7LcvQl5+lOHLPA4Vjlznk6tXpe/l07Zu828q5eWuzRqn
qxu0Tp64w+h8LGXt7vG68bni/S7Nz4rn+50M/H56e6zg0b91cPVs5l9Dp8HKPpdvgdXLXt6OTaZ4
dGXLXk7b5vp5elLqnRw44defThnM6xbJma/p/nOBfW06s9Z4+T0ufed27j5su3iy6dTz9/fF0Zac
Z12Y4266dmvHGcMOnk1axbdZjjO3m2Zrxyzeuy5YZxno2Zrxrs1roxzwmMuPu41w36ezSa8spnnt
bvRq36uWeDPs5tcr6Xkdku7Zq28tbN3N0XWGM2XWzX2ednO7t87gw9zh8vZa19redO/DkOy9Xn1o
9Tm9VfC6/p+nN8z1t/k4124+T5/TPXlzb40fM/R/Lb5el6nnZS9ejVszrC5TUY6uiuHR9dx6fNfW
6OZPtsfA2cuvR5XbjvOnT1cupouvzt9Z63jbN69XDz/T5YzmN8+J4ufl98Omc/bF7b9HE+08LtdN
fPue7xcnl+7yXLr8fYd3L3hzaeo4c/Q4bN/mz0pfO6nEnp87OybtGZ1OTrUFixAIsAIsoABLAESj
i1+jE17OTSeg07llSgEoiw0cnpDzurbySYS510Z8WMve1bLCqkoc3TI8Se5ozrV1eUTs5d3UcW7f
wL1bODdZ2uW3PRYrTq6xx7t3NLgVOjLmldTXmBYABjo6ZLy7N2Iukm5p3VJViUYqEogEsCxIokoi
wAiyiESiLBKEBFSShLABAAkqyLAUgEoilFiUWLEAAnfwehjfp6d2jn2opQiiLBQlAClCUqUFgAFt
gqC2UAMeY7L5mMejyzccs6+My38tN2rr6Tyd3ojTuFqUqUigBYKlAFQtiWgWUAoKIAAundTg5/XY
34Oj6WR8pPc6z5XV9pqPi9Ho+tz7fI6/02b5/nHZ9L189fEe72+x2x43pdDeFIWcx1Y+VoPQ4ent
TyOz0S45AsooVBUFsQXQbcfLmp0au/dHn9uVVSEcldmnzdSdm7m5uHb1OfRq8ft0eV08++HhPV+e
68r9J8zuX7vd43veH0b7hMtGjo5NXo5vNy9D1+bX3GrezxOHe5rOjz8+1fP2epqb52/lnPZNUc93
peLnN7Po/mbjp6HufKdXO7Ovl9SPB0el7nTPxPnfUbtz5Pdfm+vP6fLxN+b6Ousaz2YzOu3f5LF9
vLxt2L6uPFuTtnJs56ymLc6XNlx1ua8Dd4/ofP8AfGfred7HfGvrwnl3l5vq6fXOHLt06mHXejOO
Xb0U59fflXn8Xs88vq8HmbJr6rwdnRHj/Ofec2nyer6zw048enirTr7c7OPsw5l9Tp+fzzfc8yxn
0teGc1y43PSpinD3c+Fy39FzrimfDvGzRs7dZ5Oq4Z7Zad3NOm/ZpxYy5unRprmy6bte3HOcWeRw
5bstW4Z4SM9crLHPHN3467tt26M11b7lhjE6zLC5Yc/VjKx2a92s8+eHUrDJJz5Y7rrTh1c3Pl17
uHfy16Gfm8eb7Gjys9t3P1Wzm6cdVnSucvH3+b7Nedj9BnL53u5bOetfp/O5tbvB3+T1nt7dNc7q
3aS9Gu+fp4PJq3e3y9mVcet1zHRfQ+lzrwff9DzefT1Pm8525c+3Xp689G7b5x6uni2zW7kYzWjR
33XTz+uSZ23O8I8W+X1zd+Gntzejl78Z/Y+J163rxs9nkC5gOfzvYxs8zv5JHpcfP6Ry9enks7PL
9ey+X6XP55lu7tBg1b7K05m3fq1HUxyIokoiiLACLKAiiKICSk49Hpw5+jl0J6Dn6KABUESwadxf
PvfinEy0nZt8vYeg0bwKSho3I83X62EvHnph3cqnNv36zDZzbE6d3n7LOuc/RVxyhow65HNNulb0
cpOtz7K2LLEsEoijDVvHPntwjJpLtYbLmCksAIogIEECwSiCyKIlIBKICBEoiwSglJKILAIpYtiK
WLEAiiAKV3cPdjXp6d2nn2CqAAAUiwoBQUlUCUABdPOdzkwl6Mceizjy6uUx6ePE9Dky6o86+sOP
pzKCKqxRFAFQUABQAUSrEoqygACyikKEoCqAoRYAkoAastiUuszcPFZ6vHzdhw7/AE9kvJ2BZRAA
VKUApHPwnqcOn0E8/f6JcV8xO3yPkPVzryPvPlfMzv8ASePxfR6c9V9rjs18fT6R5PseVqzdmn2t
+d4fK/a+R5PT83859X42ufh69mPTHu+58T9V5O3q9XhZ+fp6PNvsZ69eXRM92vvcZzq5N/To1vuz
155mePD05mWTexx8/q6czlw6d2dct7MemOD0NGCbNfRxct7c/O3S+tt+a1Y1w8H0/H6uXiavsdln
j9X03Zy6fM+D9v5knj5+r4VnVhNcueTKW7NOEvbl58j0r5ucejOJE08Xodsd3Xyd3n3smU470XzO
/wChnZu1Yzlj2eZ3Sehry5q9DHytLt63Nwas9uzTz86ej1eH2znu93wJnPqe18n0np4bNE18j0/d
+D0x8d0fTeFZz4eh4qZauu6nJu2c9defl4Hs6fJ9iV0tPO7PO15deerry0t9Gzz+rO57nk+hjX1P
559X8ym3LfE1XKM4qILAI17AsIoxx2U5Z2W3gejy6vocn3ng418y03053bObdmZ4XGsejn32at7G
LZlqc3Xq3LjydfPjWnR6HkTl09E9HN8/Z7PTL4/J9H5Nc3n+x6EcPp9Pu8+nhdfJ42dfY/H+Ps78
ve0+Xw6z9D5uPunjenu5ss2jZm79evmxe7k4PLsy7fJ9v0crow7cb1/ZYdPLp6Hk+JdN2nN25zex
ScPRps+dy7vL07e/DDnvo1bk1pzYJ18vReZ4jzemZnt5e+J6s+gzej6bj2bumWerypVzBUACOPsH
jcXveNnWPteFss9Dp2c+s9Tj7V83Z285v8zn9WOa58Vm/qw5zouO8zcnSWoWAlEABC1FkBUWEWIB
Ac2j0FnJ1auQ9CcfYFgCxSRYRRo5u8eNl63MXo8nGPYc/RSVUWDDMec9HDN5sMcTbnnpTDDZmY69
2Bnt1arO9x7DoS1OfoHLj2YzXPlcE6ZzZ2b5jnZJRFglJAYY7UurLPWZteRUyqCyLCKSLFSkiwiq
gIEiwssEsEogRKIUkUhSSiKqVYilECUAJChLBZ3cHfjXq6N2nn2UoAUllJQAAoFmBsc0l68fNyOz
RnuOXX6PKmW3gwX1OXXvjRr9cef1byigApFAAFBQRYFABRLKAKCyy1KACgACkWyqAUBFBFhFVAlc
/Ielx+b0l5/R6zz+roLKAsLBUFQUA5zonj5Wb9ffvjg71lObgPV8vHcaOqa7Ky6K4Pjv0v5TF1fQ
/m/106dvo/I+prHTr9noObfkMWWKc3z/ALvd5/d8/wCd9P8AIcuXj+d7/h9Ma/U8uV9x1+T6PzfT
tadObtuHL62jX07Om92XNoutmnXNb7vQ8f0ueNiscfQujbcZWZRrysJhs0ky08tx6F4vPrv8ebo0
+vr2zcx5/Sak6Nc1jt0vPr1OPTNz6DxGtM+L6by6+T6PodWs+Js4+oTHbm6cN8OXR36tzzvd8X6a
53dXL1eDuyXnfI5fV1/R5Tm6trHL386T0/M34Xpwu1PVwvXwZ8nm97ns5PQw3OGni25Z56O3g2Ho
7fNz4dvY1eW09f1vjPQ6Z9PTy+lnfyE+z49Z+Z4/oefePn563BucHs+VvNnHj23GWDdjrxbu1Lhn
bmX6b5v2cb974jt87Tfr26szGXDcs0at57MeXCtjDLcmeEro2cTLuc7DoY5ZtyxyjPk6ua36DX2a
+fT5/k78OvLzp6XHtlhhh0b+jmzjdGO23LX0JjFswmWedY6N2fJ6c14810+jxbunt5qfXfPYcPHf
qznSbOXdjLjzd0T571OnT1xsvJyV6GnglnXp17DTh0yubDpw1GOXoJr+pz3ce3neZlPVxZ7c7lg1
S7devKLoxyXDi6/HvWb+TLWe7s8r0ebObHFfGx4OmJuy5O2J6mXvLu+j5sttY9XmSy5SiFJLKAAa
9kPh/Z9zGTy/R5NNepxbeswz4s7Onx/aS+T6Gjjsx6OjXNOfDtTp08PZZlux5jrYZgEWAAAgFQCU
kBKEKQGrj9C15/beBPQef2LmsEVIsEsAGvYPO1etJeTq0ch6c4uyyyqgRKXz8fSkurHTpl2Xftue
SzXG2a9po6M8F27ODKztc++yyytc2o4NvVpluzmwOyatllFkUkFAQGGG1LrzuuXY1WzMWRRCkmjO
NgsgEKBEsEsAIEAiwAiiFICrFASiKJLBKSEKgnfw9+depp36eXcKAWUAVrl2OOWduvg6pcMOjcnm
bfQ0Lr6ePQepo5dpjq9LYnldXWWUKC2UAolWUAAoBSAoAABQABZQJaBZQUAFAgUAoUolCpYMeQ7N
fn6LOzj6Os83r7xr2CrBRBRKABRGjhPU4NXfZ52704Y5TTL0Y+Ppj0eOddaduSyXPul4urdojfze
furm6fSqflU+1+R5dfqvd5PT3kLCcKb9XPdZ7OHf3Z34HP7e/wAX0/kvL97keP4/Hs5u2O76z4P6
Lz9fpfP5u7y9OPdv4+s6Gjd010+T6mF6YtebO2atMx19nmZR6083cna+a8zWfufJ8n1c59aeD668
fF0+Yy1bttzt5PQwk5dun0brDLT1zWnDdozPa5PEzr38dO+dOPm+hy1j572bxZ16XqfIevjeGrt9
KdPjuH6Pzd8+TPq8k68cmbr7NPGen2/PdOXt7PG38tdWWjZZ38+GPSbNvG1O5ju1MmHNXZNWuOrX
rxXZzZ55mnDZjM8W5hZuq464bcN7eGuXrjLTltnTp4urk555+b2OCTi09vD2582GfX0xrvRt5dsb
UkUTKWM/pPnPocb5PG38227n38Sacdl9E0XZLcNjBm4ze5bORtjGY7N7125N4bEmN+3h6ebfydXH
i7+rl7K0zLHCTKJjx9t08zV6Xm9dd81Zbm/o49kbqvXM3GXH28fZ599mGzHya167p9E7Oectz3df
jacX3+bxcY9vR5l06NGezU5J2Szn2sTY16bN+GN1LjNxo3+p73PfifQd/j532+Nqx7c9l5NPTHoa
sNcZY8fXK59mtrLRTfJ1477rXyehjy58Ovr13PT47k3mdWXHqYepn7VdXvaMujWPV54LhLAACCoo
iwQRKMOPvh5ee/Udmvj67OfucB6OrDevlbPQ4U28WzYYNmgz9Dj0nTM9hk5N5moiiASiSqgRKIAC
KIspKEsHD3E8/f0cx0OHYdQUEiiAAgrTw+pM3h6c+M7nm9NnSKgRKOPV6Ml5+jRoNvP09B5e7q5D
beRHRp3b107MNVnY4t9m6LUlHPr7Jm8u+6Jelx7bNyXWU5PNx0914HfHenSnO26umEqzS22XTneW
Xu9H5bm4ej09Pe7+fyvSnmy+tOPs1hLLAEssSiLBLAEQAEoAABQFgKlxZSySjFkMFI7+Hvxv0tO/
Tz7E11tcOUdmvi6DXnv2HndHTgTbyGuvVzkunt3J5+fbV1bLSKJQAqCgtgoFJaBQSgAABQAFJQAK
AiwUpFlVZQAUCKBULYWmg3vN5k9Ln5upePf6O2Th7MipRFVAAVKVLA5zox8jO526PT3r5/dklNXI
d3Hwazdz9nRZzbs4ZTs2y8vTcTLDzc0mffksyllKMfhvuvLk+Z+5/LvrM9fpePh29OPN2ejmaN1T
WLKDo5ubl10+Fv7fP6fmfnPp/H15+Ho5tfTP2/d857XzvThzevr1PLz3cvfOHpcnozpwZduNnDwz
53ePtdnyvtYvdwd3mHk4ex73Xn8n73p+XiYapli6u7j2Zxy5dWVujk6euubPpkvDh7HPJq3at2b2
+dt7s3gy7uzevK6+rXub+abWvFepyTF1zLHr97m8j0cdNnh/WeS4fO5eht6Y87lw6bNGZFuWzOte
eSW560dGfEju2+XkvszyMsvZy8fPN9NwZS9s5Nms9d5xvx158t6uX0Nes+f2asO/Pox6uWdcevR0
k4+x53FNvPucnl30PVxbNk57xxadNl55btwxy1ZnadP03zn13n18xw+n52858vVy26dfRh24vO38
2ufpadHbm6ctvm2Yen5m/WN2eeWOmrm1Y9M+nrsm9m/R0Z6bebq4uT7DPx9/Pp4+rbyd+XTOdqdV
0bMMkZc2rv5t3h7fO7+vTtzatct+Uz1Ofl9PzvPvnbbiam/I58rkZZ4852Y8XauOOfLZnnp2ak1d
nTHmvf6s6+Y3/RdWb4fT6vzLXudXyvfc/R+b5bTPTs5rMNHdoZyx26zDDPXqXbj2Z1yb7MXDZpy3
bNfRm68k5TR4mfH6ue/Oa95noafaX0vb059GtZ6vKFkWIAlgi1BAUlhCpFgA5ekcXN3YGvt86nRr
6srK4Oo18/oYHJjOlOazXWxdcvo6eDvMemcp1tO4QEqoCBAIsAAIsoBLACLBr2DgnoRMcuHE9Cad
4gJVRRAJYYc3Yl8vp6ebN35+Xts73PtsyvmeVnf07wPauXP2yzl6cOU7ubDrTzsPUxXl38XJnfs6
vC9GOnPLLWceThyx06+3h4dY995vo75sNmtdXnvI83rmuzh2VYm/RjZ6/pfL5dOX2DwPb9XmzXDe
MpeWbnPlPP39C6b6PLsuGU3wY9ezzd/Q+Z9H5zHbq7PCuuf1WfynR14/RvK9HtxzWbwlEWBcULAA
BLSKWUBBLgW41KlWyjFkJ3+d341u1ejr5d/O6ejCy58/Oehr4dhs1796+bt7qmjcLSpKKAsoAAAK
CgoKLLKBQAKCUAAAKABRFEoKQCqBKLKUQoLAceg9Hj4dww7O087p6qoQoRRFgAllVBXLwJ63Br9B
PN6fQLMmuXY4ORPQ5/P6F1T1Ro3auizn29yXDKwt87mTfq9HpNedTSAspbz8Cep5fl8Vng+963fj
p6I1lQAiwjm5bO3j7/I8Pv4/N9DReXx+v2vF6ctv1vxfucen0G3n2eLtxdnPs9GLnjyb6dPlbZvp
2eT3atPF7+fjvl+j9bypz6eh0+N0TGW3uXPDye1yZx4t9XgTT04+lWHbszu/N6eu5uvyvaxl8fZ6
fimv0OD08uTXc9Y7MdW9cstfVdOfdhNadPRqz0xzm9rX7fjXMy7+Pr892atHZ0nHxej7ep8z8x+h
bLPj/M+35bPlnu5WeDnfZXyL5/qprnJmu5jpjfLjLkUlzsuFzkstgywJm1xds04p0aJwbn0nk93j
4mfp5YmOrPRq43ZjuY4S3VnL2sY7ccpvp9bzfq+Gvg9/bx9M58nXxrs0zVvjxYp6PPfT8v1efTRw
d3BrNS7x6mnfzcO3Hccu/DZ6fk+ty7XPk6899+jfyc32WHznfjfm8nZyejnNbZ2zhKtx2XFjpz5e
zz3n0d/O1smnr9Fz2Sa57dvN0cN7NnJlh16+Xijztn0dl+f3/RdGNfJX7vgPB3e1y51zbte3eN/b
4/HZ7XH5PRL1fOzwuvPLu5fpOnOdnNz8Ovp6/Jxl78eKnVOem3HVKzupZ058SXv2eXI9K+bmei42
b2eHo5/RjLds4t5nq5evbPP+o+Uuvf8AZ/Mfu/V5PTky3iSgsICLAABCoARLASiFJr2w452chy9b
Qejyc3pWc3VhyHdxdWRx5beSzm6+zz5bo29Rp7POS9mrPosOXZW4RBSUkKQCWAEFAJRFgliAJRAc
2ruHN0a+c7HF2FlVAARYATk7JHk+X6vy3n9O7Renj05e7HWfVdnw/uenze5K7csOTtS8XXeNez57
0+qa+Kw7/P8AH6s/ofnM9Y6fqfkPoevLux2Z9efjcn0PzPLt6fg4zl0xyTG8stfZnWPbvz49OPi9
rUviT0eXpz05449M9mnS07dvBmnr4eQPdYen6vFp2bGs5+d9r43g+j8pXk2a/R8/39cvP5/peZrw
J6/Njc9HyNfXj9RfmO7vw9mcW/ry3JlZJliKtzGRYFSxJQSwxUkWFTJRAkR3cPfnfb5/o4cu/DfQ
WaN2RVlQUCIolKolAFECpSwKAC2UWUWJaCpRZQAAAUiwoBSUBZSxFFAAllFlLdXLHfo4NR18vR0H
Dv8ARpq3QtAVAFAAIWXlOnDy7Zlp9XanB3WqauU7+Xm5I7uTH1V87p6NCdXPn2HH1bCwQcnBZ2cX
f2VydhLbLLFxMpw8idnFz77OHt9fcujfUsWApKBr81O/zd3pWcfZUrn6GemXxn1/jeL1eH8x9Ry7
4/M54ztj7Hr+X9z5/o9Ll5vT564uj19HV4zv8frrr8zL09Z5fI+p8+35P0eHo6+f1/Vy38PRp2Za
pI38blhqb83Dp0Yax7E8rTb7bxds1618jGa9t5ea9eXHtzO282rTtce2zbmtzhzbOeXPHiwy7Nmr
Cd9uXP0xs3R5GPwH6F8d35e57HyX1vLezHXzavq8/Be2fo+Tjtnq8OFTfqZ5ujTv9DOvM8n1/Vrx
fB+p6z5fm+s56+U7vpNMfDez9HLPiOj7zzT5Dn+w+O6Z9nb6aXy568PJ9vT3ct/Mcvbz3PRq6uDO
scMse2WfLucpo6/Msd/B12dDG8/R2+94/wBLx3+f9u/T1xl5/f511vmib8/Dj3cPbj0dd0c97/L7
bXn9nL6es69nJsxeO9XN0zt7sebnvr2826ejs59/Nzn1O7571cdPF5ung787ty4d8MevzvQ6Z179
HSuro0yb6suffxunq07utm3dNXVu0XjdWLrjfeDfx39B85ya946+zzvQuWrtlvn+d6vQmG/XNSaM
+bN6fNfOWTPZ63blp354eftM+Ll3PVw585c8cthox9DKXznf0nlZer1zXzmv3eC58vPh09+fq2ej
i+Zx+nxVxd3Jn0y9rP1s67PU0bes5+H0MPV5fgfW+g+ezfb9P5/19Z6oqwJACACFAQIlEURRFEIA
cjr5606OqJt2c0XHPq0s7pxdq8HR0clm7zujtl5ebfzGHTt4j0XLtJ03nre07kAEUESwAEoABLBK
RLABFEADHj7ocfRnzHVODoN4pKIsEo+f+f8At/i/N6tdxvHfVlnjz6YsMN59v3vi/c9Xk9hyOuOp
p33OPJ2YrxfLfY+Z5PX4mzdGdfd08/TGy+lp3jxPOY+b0dbV6vHt5bv510LjvPv93yndrG3py5fP
6OzCdmd+fzd2658HT9Lo1n5+etx7xx42b5benhup1dnl7rOjTl6+NeRye15OdbtTCz6Lt8P2vf4M
obzr0dk59fK4/ocOPf52d+Pl9PHn7r0+fz+nfn14B34KAylxlElliURQlgY0gsCHfwd+N+pp3aef
YKUFlFlAlFJVEsKAAABQAApQUlWWUFChVAILECkoApZZQAqUAFCaTdPP5zt16d8c+7u3HD22gLSF
ABRACwVxcVnrcWv0DzevsSy3WZzi1HTp4dtmd6thydnNmR2ZGrcS1LC8nBXocHT6B5/fRFQTgr0d
HnVNujX0nJv9fOawysABRKEeenfw6vRrz/R2IlWWLCUrl5vT2ef1/PeR9D43GfLcH0PhdOc9/wCb
7pfruji2fO9HTq4d+pxdPbr7TR148/Te3ny2pp+U+y8uctPs/F/Y3WUz48257ddatuNkxyljVp7N
2bo1+nlyvi5ewrzevezrndEXC4TszZ582vn3ZdOfHl08mufPr9LG5z0b8Me29vN14lZvEx8P3uXr
j4n7r88+59fLu87tk6YMNnTn1tOes5a75OuW35/PTemW/r251p7cO/Gt+eUxvdhg1ndztuZ0cO3h
xvux5MpevydjrNWd2rrz6/P5zt9/530uO/mcte7ticnXw6uUy0b48u3Ru3z6vM9jxs29/NtroaOj
Hp9Hu1e7x1+d+np39ec8z1PLusYnXydfLt6Mb59Uu8a+vk3Fw6uGJZemOvXOrl15tJ057Orl6sev
r09Gni9vu8zomvE830vP656vK7+bfm5u3g3deXfo6/Lzv0scLrV6+bZnr0+P7vLyv1/lXzOfXO+d
sY68ufZidmfHhnXVxdDUyy1Wui8WHTPqc/BidefFrjv5/N4rOaO308dtnfw6+bs+t9rG/wA64Puf
K647vofh/U49fodXz3O39bq+Z03Pv9HzVl+iz+W6z6HP5bViZeXr8r08fX9P5fq6Y+n5+b1Mb+U7
+/yrPr/e8b061E9/lYZrObV28yfE9vpeDz39r0fO+90xsWKggEmUIKAixClhSLACBIADVz9uFcm/
VU5+28p28WfUad3PpOrh9Ea3LsPN7O/zTKaeobuDpTo5511Muem+UsCJRFgAFQAEABFgCCAAEFOf
oR5+zs1mV4adrDKgHi+0zr4DZ9H4Pj9e30eXv8vo8/zvc8jU0d3n+h6fN9N8pj1NcHZ2c/Lp7+j5
++nz9kx+kuPjejl0ce/suLfrHrfMMFDB62cmvV5OLPj3x5/Y1L4+Hqad54rt1az7vqfGm/o/G1Jd
vseD9Dmedhw6rNcy6enHkvTosx36B36+bfi7+rk68b1c/Tta8vL2NG84ej5Gjtx+nfM7+3H33hbt
49WeXJr154OpPo2OXbzgtBRKlhCWEFAXESiBJUHfwd+N+ro36efeWywsAKCgWUWWUAoSiAUAJVIo
UKBZZQFlFUBRYlAAESqACVQLiZOLWdnJybzDHu6zzOvpBYURUpQtgLKAVjxHfq8vYmOPq7Di7iU1
8h38vNoTr5sOteDb6nInXqw6zk6NwWVRC3zec7ePs77OD0Jc0VZNHBZ6Xm5U1XX3Jx9XoJpQWUSi
FJWg38nN02cXo70WxLUFgCVU85Mvhst3Pr6G35773xe35vxff83fn+am3Pvy9bf5f0Xk7b/S8/Py
9fS4Za59Wzm7m/dlhctPBzeLt5+n28fY9HzcfL19PTw412XjL25cFy7seQdOOkbpqGxqtdGfGjuv
npfUz8jLL13k5Hr3ycs309fHlZ15cmUvZq1Mt2fKOqcuMvyfucvF9DzfbaNufh9HH1ebr9uPWz5d
95bMc5rPkdPYjDPVyZ13cGnFd3r82252tFXbny7M3LzuzDj05ejRqOrXq3bjHdjz6XZr5+T1O7xt
lvznpXLtnl4PT8fpx7OXp0XPL28fbrO3h6N2d6NW3Xc5dfJ3Z9Hd6WHuc7+d9+Gy5w8z0vN3rXLO
vj37tPNneTo0azr69GyNnLr7LeU2ax0XXy8+mTdp6c9nVydePX26turg+h6fH96b+T8v1fJ7Zx2a
9nTyefvw7bnZ53oebXV2eZ6tunf53qN7vI9fzufT6rx/otHn6/NY68evLbjjslwm8c86Bzzpqcl6
RzOjE59fVjqcE7dmpye50/TcejLyOSa6ePPHtzy4+vDefO5PR8ub9Hy9fpa6zcz54a8seLPDl8be
fS8bXn6eOGUy3nDr7/Zxrx/oOrOdPL+t5fUxdPJlh6uEld+MoQGn4r7vBfivc6vBk+q2eL62pnUA
EsAqASkBRAABLEAhSAcfZK5ssCZ8m7rMLwbl09W/is7NGntjh7HHZu4fQyl4tW3GzDo5dcehu5lX
oms3ufoAEogABKAgQAFQASKJKIogoCSjl1d9jVs59Z2TRvHH2F+c7PWnn7/O+L7ng+b0yG+TLEe/
2/Oe9w74eZOPpjo9Dy+fry26iW+14nZlt5NPtL5fpTnsz2b/AA136sCe/wCp819dJw6/oHPv81p9
K8+nh830mOp8xr+l8fePOsdOOWNtY5wLcq1LjHbt4+znvfe3hw7+v5L0/f4/anL19uWGvemuTn9K
434D3dfDt4er6GY6fO7/AE+zrwlr2eSWiUlEEqxLCSgBKSAxMS3Gl7+Dvxv1tO7Tz7rKiVUWAFSl
SwsLQVKCkmQxqgChKAFAssooAsosqhFCLCggABo5j0OTjG3Tu7Dg6e6rr2KQoAABURbKApycyel5
2zsPP6OwF1rscGpOvVw6Ds5un0JfL9PUM9fXtOXpygWErWbMfKws36fT3HN1RLbNRvnkZp0aejhX
q0avVPK9LrEEtIUFQVIZa/Mysyz7cyUlAsoSiUGvk8Y9Hi2+xc+B6vfc6/OM/s/zvh2+w8Xv+e4d
9/m+/wCD048nteNdZ+1y8Pf4vR6uvy9Z2bNFrZNPPqbsdW7eeH1byS9U9DizdOzvyl8bPt9SvC5v
Y9GPD5/pOSvF7Pp/Hl8Dr9rtT5jl9rvPD5/e5K8zb7Hztz0Z/O3efo8vC9HGu3NyZ11zi64s59su
wzzdTbK0zdDH5f6r5z0cvuenyPS+b338m7Rpp355d8Y54aumOia1m3Bc2447OW1wxxrfOTDU9PDy
9UvrvEmb7Gvyld/Nry3MNe/DU05Z7dT2d+nfjfyPqaumzR4XteRvj1Jss4s9unWb28XXnXE2a94y
9HzvT5+v2JsvK/NdPL0dOeHn93JrXPvs35uLSvbl6vLj28euPmdvn7y9Py9+s7erLm5dOXWnfj6n
Pj0cumHTLn09vP0aeU3fQcns56fI+X6fmds4E7eLbnlzc93Rnl0xzelydObyen5Hober5/b5/P0f
S83f7fn6/nU+/wCbWfi59Xx2eHl7kPFvrZHiPW3L4Wv2+K58/P0+g8XH6XE+W9T3vWzeLypy6zte
H5Pfn9jPG7JevRE3zeZ7nJrpw6/SxmOjbzcfKd3kefh25rZ155Y9fuZvkfQer9BNfN+/ovZq831u
Dv593R5XoJtAikAlghTx/Yh8p6evij6bPxfWszBBQRFVCiWCWAIIWABFEAFTR0I5MOnns6ueb18/
0HnnbybulMcuLZYnVhLlyY9dnLo7dJw7+nz5e5x9NnXzTemWeiL0ECwCoABKIEAAgAVLESiCgJQi
wA0c/ejk6tfKdrj668P5v6H57y+oOVAv0fzfZNd/levxcu2nl6uXrxC5dXL0q9Ll6zZl53nS+/4m
G0y9W92J5v0vhen6OP0PJyaujd5no7OPX5fq+n5eXTxcZ6Wp8zx/V6+W/k/U6b15cmn6Tm1PI393
mGrjyzzu+h53tZ1r4tXIzcsG8793Pva9Ds8W9eP0l8jLrjvy8DVi/UeBceXXp9bDZ7fBFumKiLCS
iSrAlSyktTGZQksTGWApl38Hdjfrad+jn3UQAAsBKoKlloKABYKUiwWUWUACKFKJVCUWCpRYKlgn
Gvbhwak6dV2nLt7814+xSgoAKIALAKMec69PBvTHV6exeXqWWMOQ7ubk1p0cunrOeenqMs9XYvHu
7CTIlWCxwnfzeb1Jy7PWVq2yymHnnp8nF2Jx7tnn16nnb/UzfI9baVZRKIoiwKE0+bZ1c/Z1mvbL
KBQCFAPJPS+e4PZjz/e7MiCpQfGfaYSflP13j+j4/dxz2OPnj4vL1PM9PLu6dOXHpt39O3N5OD1u
LU8fX3/Q2fL/AHU6OPXV7XhbcdenXo57PpODz7zndu8Hfubejytmtd2rmMehxasLz6+3yLnfb1eT
kexy8uK+n8h1Zbz43t49+nZ3ePt5b6tGVlw5fQ7I+Qn1fRc/Ic/3Planz2xrTfOTdnWzz/R12eX6
/n7tTvvJny10zRZdzVTOQWUYMpWK42XLXDffO7rM3FsrZr9bTL52v6DSfNfU+n5Gp6M4NS+5o9T5
vnvz/Q4OiTn4+nDfHm26t2846ejQmzn28+nbo3XGtXpeZ6ufT6vP6mGZ8v0c/Rc8vB38XS9nk+t5
uvPzDtxy9fz+7j18rX2cXbkuPXZ3+V6XNx68RO/HL2PJ9fj15ujl6senr5+jTiY/QY+5np8b5vo+
d0zrm3Lr5M+fR1Gndq3bzlq36peXeyT1ufHk5+j6bp+e7mvV0Ys3yPTbumPS6vM4I97Hw+Ll0+q5
vzrm68/07i+F9RPucvCyzv2dfz+DPueFz5TLt28+2/we7l1vl9fjSMrjG3m69GWfHxebvPdw93L2
54nrHne16fuZ3wd/scu5q2J6OQXDVsWcvD6GVZZ+N7EWWAEUSWUlGPg+/inyvfOSX6bLwvcsUIsA
pFICKIsASAASiAAAmOSuPPo5E27NEMMO/SuzRp6E5u+cB6HLs3HPt189k68+ZeXV6Vjk26Ybenlt
l368jdOfoAEqooQJQgAQFgEEASgQCgIUgIDDl7ZHgfPff/M8e/it+nz9YEbNeS+zpy2+X1cPD6nm
ejhBrm6ebpXLu8/QYgdXL2x6zHVys+g4+z3eXFk7Y1ZWS+/PC9Lzej5nlx6OHft87bMW8vt+Pc6O
zn6umN/FuxzvxcfRWcP0XDx5aJj9J6OHh4fV6u3P5nD3ePzevn9TyNafS+Hq9HWeTHq4ueuj2/Lw
9fk9jg8qTp9Fu870e3mhNRLBLKBECAtgRTFYmMzhhkpO/h7s69bVt1cvQCALKoEWIVUpKsosospK
CygACygQsoKopALKAU0m55mJ182G+NOfR1nHv3liiKSKAFhagyeN5+dfUvmfWX0HHrue3h2dVzx9
G8qzCNl4ec9DRy6zO59Bx9N2GOvs3Lw9uQCAK5/PT1fOw9I8zt7CyoWcPOnocG7pt4uvz9KdnH6H
dHnejaqktAsolAojgTt8u+jXn+nUCqAKSglg1eb5Bt3+v2Jp3CgRRFpKRw/Afp/x/m7c+nxu/j37
Pmuz0t8noc/nzW++dy3PrdXznsY17Pfw5+Pr7/j5bek465+l6M8OfTvw5pXTjzefrXobvE9u2XTs
zi6ZhOWWejWdOLK56tXndS9u/HRZ1bfE46+nny2636N5fo4ueMzmrhstmzq85m9bm389/O+X+i+e
nx+zv59Z5Me/TWq+nyxy5dHceRh39a+Zp9vUeP2/QccfN9n0fUfOeX9Z6i/Pc/tdUeRfb8uaw9Hj
0Sdvp/Lb9Y78OCZ16nna8Zty9V68/Q8/6v5DnvmuGEmrLXu6cePKad46Jz901wa8cunPZ2c/Zz68
nreV6mPT9P4vb89mZ5aNtzza8ufobeXq15/Lbs+nPr5N/NjXZ5Xoaa5PU5tlmrr4t8cGPbz9M9l2
cvPfZdG7Pp6+fp1c86/ey9zPT4zzPU8vrN0c2vNzdvDv7cN2/Xnnd1G87Nkzx0dOHFy7/R7fO+p5
78jfnq9HGY8/FjXZy+X4ms+h5eXR15aPZ29nHpo3Yxrbv8zpTLPdyzWWWvm1rs450XXm6/V5bvR2
XZz567x+K5+p5GGXfnjlj06mn0+v6HF8r2+b37vp49e70cQ3zgAqBMdPRqMPM9bSdU8T2wACCoog
TV43u854fVqwPoHj+uZIEqpQSoSygIsEpIAACCksBADmnVqTR1c1N/L1ZHF1adNmTdlLsx8/0K4e
rbymem9KZ+e7DzdnZ5cvY59lz289pnt1al65KJZQAAEAAgiUJYAIUBKgWACCAswzh8x533PNx6/H
avofO49fN68vQ5ddfVr6vN6OLxvovB7ctA68G3VsM9HVyqCOzk9PN3evq9Hpywq+zzwaiWFS51lu
0aPN3x4vR2c9eBh3XN8XK7NzXw+j5zWerdozVyhu+q8T3vX4krtzil1c3bcdPG2erefXw+P0+Dh6
NGvvYmnb6Hb24M09XmRKssSKrFRARQAAxogRAd3D3Z162ndp5eihBVFIAoiiUABRQAWUUiKABSUA
WgEK4tR38ejYTDq6TlvejDYKBQAAAiUDjOz5DX89z1ryzw59eq8Nl/Rvd/K/0fPXax5fV4+zn4ht
5tmVk077FOxeXZ21ZSBQYGTzNdnVx9/Yeb6GSVZznTj5OZdfo6LM3n75dT1Nxy9NEUSylEAW41am
Bs0+dnZj19dIslUAALFDR84ez4mz6Q8v2KBYgJVIoiwHMfK+b9b62NfE37r5zxerD5v7Duzfz3y/
tfJ9HHyNvZ4Zlo786fRfHuevt9vher4+voZ+dnm9XHt19GWM8r031+Hmx3vDo5uzWunq5d3Pk0bt
OeLTnzJlz8vHvP0nd5e/Gurr4dqaOT2NifMe/wBPDdd18nKa7t3lejG25RnDZjLdvPtxMunmxzrb
6ngZY16HR4WLXr8HP8zrP6Z5359Ln9J6vmN3m6+lfA7u2fY5uNvj6GPLsrNy0bdG+NOzY04tXpa4
8/m9DGTTt27uW9GPQ8vfl2Xbp6Pi/QfBds+tyZ6LnRv5tvTjhxdnHvL0ObszfJybemOhgx0vped6
HP05ef3+P1492/n38tcnPv0dWuxvzdUy18+nPE68b18fXjbk6OayZS7x1YzPl258ZevLZv0bsert
5unVxa/p+f6HPT4rzPW8npOrj2unl8zLG+jyetx9fmc+3Ru4/Q6YzkTfS36PP6vS+h+U+i5dfJx8
rRcdvz01d+M3bO7WNe/K8O2e/lmNduvl2W9W3DdGOOzTpGF3WcnImHkM+n4PM9XLfpvRvHLl6HvS
+T7XsehN6OrU7Z8/rx4e3D0Wzns6BKlgBFVFiactmk0/O+14+de91fO92s+ssIokqoRBDj873fMX
zO3DhT6m+R64IAARVQEWAIIAJRFEllJYFgKYcvbE5c9ZOrVjvXkdPObuPblY36NC9XH3kx5G0u7T
qNer1ec492qJu38m5Zs17ja4+qrQiwLACAikiwEAAIsoCLAsIsQCLBKIsMde4eL4v2fN5PX8xp+q
8fz9PEdreeK9qzf5X2PyU1rZW5e1o92Z3Znv8omoUSVZFgPS568zn7L4fXp2ce7n08Xp7OrOfmvJ
9bz+tunq5NTLdj0yex3a9n0fnlWxRjbSZJLfO9C46eB6HbcWSztyiyyWQyY0sBjYRVhASgKliQIl
hPQ8/vzr19O7Ty9CwUCoVAoAFgKSgsCgsGSWKFKMbYAGvjPR0ceBt1bN6cm30aujeCwW40ySyqA1
xsefyntPD7V72jI23z9Nzu+L9L5jnc8I59QgC+x43bnr97hXblK2deOjPt3rw9ywC2yhp4E9Xk4O
05c/TyMM1lNXAnqefo7a4N+7nO3l09ZxdfbnLr2gSwAlEoAAHFx1uw9HemOZNAAAFEPPO3zuDps4
vX78pQKIWUAAATV59mezpps4ODpXi+hx0Y1nz467PN9n0NnH0fI+R9tw8b+f8/1nmaxy8uracfbv
4q+m6vifc8/T2pM+G2GzHqZck7TfswwutmrDGY3a51yc/Lv8+vP7ez2dY4blvxdk07Iz3aBnt05G
7LTTbebA7XFJetz1d+jHYuGOzCLq2c9zyd+rcmWG/HPo+e8n7vy75/lvqfmdPbP3jxfd8nbnzx09
Z0bcJrl17OPVL3bebBezHkyzrbjht49Mc9XNz126vPxOrS16Zej4P0W5PO9b5GvX0Z6mfO28/V28
+HL6PnV24ZaJW3drNOeGWmz0/N7+Hq+t+V9X5i527dO9ni07MOzUdN82U4enO9OO3V055dM4c69D
izzNOWO/WNrnwx0Nunpz27tG/Hq7efp08Wj6fD389PivN9PzOkxymHp8WnDskx1eb0xeP0vN9Dpj
DPl3137JPN7MftfgfpuPX5Xxvo+rry+b3/f9mdfku/6fbufN39J8fnv4/P2PK1jNz2t/VwTL18eD
rxrZlhojp87yuTvz2683flcdvtx5Ht+h9LOniejlv6M9W3X38+bXss1Y7+Y8X6D5BL9Vv5+i5gUA
SgRLDm+f+pHy/r59Jr6OHos2hUCBEolDydfs+Oc/RdVntvG9gqpYsJYoBKIsQCAAiygIAogAJy9c
OabNTPVOXpXROnWadmEs5u7XrOznnScPVeNXXjoTo5t244eXuyNTVinZjjF279Wk62vYJZQEWAIg
ARRAJRBQCACRYJRAJVICULjjz3896fdu49ODPom86ebvJNutqbNR0xBYWAEUYrLL18kze7wPSw8H
r8vX6/Bnfn9fGzc8cdms4+Z6ezpPM9Po9Ht56r08YollIyLFssqBKTHIYWrMZkMVJAQpiyhJZYSg
gQFgiJe7g7869fRv08vQBUosBQKQAFAmUAFguWNKJbZQnKdePHojp1zKtOfZtTg7cy0AAAChZ5/z
OX0vD48murluGbbhhWeWrGN2Wm16/r/J5r6HyXuefhxMscaAG1cPq+z3duPb23ryikgFx4jvx8jK
t/P6HTHl+jnSklrzuezu4e3M5u3l1G7Rt3y8u/tpKoEUCUCkWKCJq4LO3zunvOLvFolqCgsoi850
cXLwWdHN1+1HN1CiiwUQAoDWbPO0+jqcvfz+JHocPo9JlqdBo5NPfGv0YW8jrMOL0HPvs+V+r8jj
v5jxvtuKZ+E7vT8WzPi6eo5foPnNEv2zwfW8nXHPdN51c70Os09MydcNOjCzXj223fjx72unbp2Y
543bIxm7GZwurONbZic919k153Z1uWubfljiroxrp2cGJ33hsdufmYnqY+YPTx88YeJ7zvz+X9B5
3o4/dbflPo/B6d+ndIxwy1al6eDXp3Y+fszdmvPBWfLp3nu0t9cGH03r1857XRc78rye75259bh7
eWTzuvj6e/n6eHs1Y1zmXTHRo2sb5s8ctt3reX6PD0fS/J+/8lZ054ZyefL0dXHty4NefTnrdefq
827Zx66vN26evO+r5PbE63HjfIk78fX5M8+PRtsnq7ufo08s8n0ur6dv4rzva8bTDXs1evwdGeu8
t6dunb158HrcXXHJdvN0x7F2vJ7vM/RPhvtuXXzevo+Xze3n5NPabtHTwXn7XT4G3n0+o3/Mb+et
/wA39b6Fn5P2tfs83Tj6nh4uPFNnbnMsfQTh9b1/oM78v6Hm7rrmw6cfTxeZ24659Li7Y0bMtFb8
aPin0es8D7T476hOmKsAllggWADg78TDknXU28fQmaxZUQQsAD57r9TwjPLZLPVy8P2pcpRAJZQC
URYkURYJRBQAhYACWDXsHDl1ctzu2c2Uu7R0K06tsTDTvwOpwdi6Z1YJqwm426NHYaMtvMc23s51
mfD1Jsk2m2cu+zMLAghZRAAJRAJYBSWACUSURYiUQUBAiWACURRASglgAllggABFRioSyooSiLTF
lTG5WXGpLcbLEoiiGIxS5qULFxCBYJKlWSWBYgKlxS45Qnfw92detq3aeXolEWUBVlAKCVQCxSTI
Y0CYmxxWXfo0boxnR0r5/T1LJQiiKIoAAOLys6+jx+K87Ovr/mvP2c9eg0t42TUMsaSZSGy4Vdkw
q7LhnLy8HtZ4vz2z6T12vkPuO3d0zRvkNJueXa6+Hp7TyevrFELjyHbr8rdWM7tMm6aot17uw4+j
okoCgoBFSiiyga/Os9LzJ6qef6GRZSVZQBZQQy18OlN3Dp6jz/ouqyxRBQAFEUBjwGzXj4lvfz+d
5/O/U+n8h9ppr3Z+TrPq+Fn7MYbwvJlsrLOIpC4ZprbxdOPPfieJ9B7HHv8Amfkfb+Y5eIvHUym/
TH6H5nr56+mmjLy9O3g059Gnsz6a03Pkt6eX0c9a0WbMSY3oxOTV1WXnx2q0268Xs18kw34YYamU
1Y7z03myXfNWhOueFo65+iTt565MfZ444cmNm7LiwXv4bvTxr62vpnPdqz5dNmWiR0vPz1OnDb3R
5er6b0l+U930tOdY7uLTZ1eX5Xqd8a/o/N289eV5PfxZdvD0cTOl1aOnHoxuOd6csNuscvX5/RTb
njOu72vH9Tj19n4/6f43c9DOMTztunf2YeZ6vk780HXn0duG3h28abNffi2a+6Xt8X2PO575i9+O
fseP7HDtoz5uiejt5+nTynL9Zz/Rzp8Twep5W5hr6dXo8W7DVMsc8b35dvN0cvPps4+7RvHo5Nvm
9fjfoXxvs56dHh6/M6dvRyw7XPROrTwzx+hxNTvw8rij3ODzNnTOO+6t5+h04ejm/GbPR+g6Y871
fodd3vy0vRwE3ixE13Pnrk9F4h7ujbgbGOQxyh5mr0eG59HPzvRUFSxBSLBLAQ1cvfoqassk2Z82
0zIFEUJRNewfP9vb4xsy6uOz19nkelLsAlVFgACQAgAlEWUABACAECc2PXoNfVxrOvDMvLvz5iae
3UmPTwDu5eoaNW7Um7bz61u3doM/P6eo83oy8+XvurOy9GjFOlq22xRAAQIBBQEWACWACKQJAAQC
WWAJYFEBAJSJYoICxZYlgUQEABKAyli0gIoiwShjlDFYCGMylzFEmUILAlEAIqyLBKJLUxWE7+Ht
zr19W7Tx9AUAABVAFABRKvNoO7j0ckd+ry8Zr28vO9ysd0tzbKLKLhtlwTgPQfM+bnf3D8+0Z1+h
cPwsj6Px+RjeUlzqXZuOTr9D2+nP5bH2PF1xyuGc1hlstY1CgtiXPPHJd33n596vPt995XqeZ04R
5t68+3h2dZ5u30aYZVLKEunhPS49fUcPbqws36csSt3XLx9iy2ygAACgpAFYeVXqeXfTPM9PYAlA
AAoJebjs6OVzpv1dfprp3koFSwBBQFMYy5ubqrT1+d4h14+71Rjr3D4Xn+/+GxfW5/kf0y776a5t
Fll3EqygCgA262debwe5s4+r4/wvtuXnj8/y+n8bWdHPq6DXnzY6nv8AZ4vu+Xr6E43De3fmtx1b
ceq6puyzunn4Xs0cs3N2vRj0zv15dZxY+/66/E932Os8LP0eTOuzp8bQn0vkedx7nhdu7v78fquf
xdnPr7PLyaLn0fQ8Gy+7y+Fnx30eP9F6VfAY/W6NY+cz+r62vjt32GuPM6+DwF+45/znj6Y/TOD4
7vl+jcHTG5jDk2dV1M/b8bq5b+cmGw18/To1yz59+rWMsNeNbtfX5ZiZdOfp8Xdx8vV3ez4/qct+
j8n9J8l0npk5zzOjnd27g37r5/M2a/R3jLRjjm7vO9fy9J6vPIw6OXqPLu7Dpz692fHy69cxs9Pd
z9Orlnk+l1/STfxXD6nl7ly58unDgz030+L0k28++XPz9Ws3fy9S49PL38u/jfceH1Y6dnzfr8HL
rsvncOufs8Pj3pno5t3TvPJvx5zs0vTTxuj633rPhMvqmOvhez6nZzunFPpeMLJKMVlkspz/AC/1
/wAtLze3v+ST7y45UBOLtxPK69OjWfYc/RKlACBACCWVyunlRLa3Zc++MoABYAOTrHz/AKU85Nu3
q849bb5vbbtAIWAliAJRAAJYBQCAlgCRYAa+bthx9PPLOtp3Lpx6MEnNn0HnduHIenhzdZw7unkN
XbzYp18m7euvPmpho9LGXmmupvxxtdN5dlm0KCRYARYCUAAlEAlJAQAELUAliCAAAEAlECAsFgCU
QCWBYFsAohUpZZLFWRYQgIItkWElJFhCklgAhQiFAEBATu4u/GvV07tHLvUtALAs1G6+bwzX0E+R
83F+44fjNedfT7Pk2dfT+b5RPV5+JLsxxRYLd/OPV974y6feb/z1rP6V8n5Gtnu9H53Ln29Hn1XO
s8ElwmSyNvvaz89n9t6G8/DdH2/hM+p07dGd56/O+e7efs8vXMayxiWoKkMrhVyuGZtlsuOyZR7v
0vwv6Fnt5u3dp9HlAprNk4IbNPVqG7n3GDEMe7Yc/QCyqssUosqxUgCwVp86vS82+onl+pmUJQAA
JQXHz7O7gksxww6Tk9jdc22VSoiiKIKFInnpv59vLXf4N6Y4Pp9uU1FJKCUfG+L+mfO5d/d+Y/dt
9/TGsCFBbjSpRYKnGbeTL0TV53rXl6OL5/635PjvwvI++8Zz+P6/R8nc2c+yHoe/8jt49Pr75vb5
Ou6c+g6tGrXub9fBh1x15/X58+nmerl57Xuc/hYyejo1c1m3Tv4+t63LnZvy07ZjXyTqY1a9tk4/
S8vqst6dSc07NNed3bcs60Z7NmN7eryt9voZ+bjb6XNzY6jj7Unzun6vGY+Ix+q83rjxujs5t57v
R+Z1Z395s+O9Pzdvre/5Xvx1+f7dHXrPFr6eLfHY053PJcdm89nmer5kuHZyd9mM12d/S9byvd8/
Ti8X6f5Trnvxs5vLxyw9WJ18mc447+nzs3EdeWzPn7+fTDh3ad5Z43WOnDZnx7aNK9+OzZq249nd
p36fO0/R8n0rp8hwep5es54J14eXc8PT4u3dz5ce3L6HndvXns34aMdN3Zx9+e3jfY+Vs59fV/O/
sPmpePfs09OfTjo7ZPP3fSd+p8t7vT9Nq+fh6OPTOcxdOWnl9Dms25+X3rsi2AIgIgU830h5u6b0
5e/j3rtBAc/n+xxWYdnm9dnUM6CySiLACEpjkOWbtaZYYbq2Xm6YKEsAEB5fqDw/U862Td2eOexu
87pOiUsACQAEqFlglEoIUlgBCIWACWDm6VcWe3lTucnUrm6omnPHWaMPT5Tovk95nyd45cZkdWvm
6zl63IdmptPO39PLLjnq2WTp0LOhq2rAgEFBCFAAQISkAIAEFlgFRRAkWAEoQCWAAIlLJVkAAAQZ
XGy2UBLYxLCxLCLBjljYSpLASgipYgEWCWWICwWABGUJ3cXbjXq6durl3rzfDl+p4fkOfn0+o5PA
mdepycyXPAgJAAAAAAAUKAACLllhWfs+F7Lfd9T8t92vmbz0eU0cB6nlat0nd4fgbOPo2c7Xrlkw
LUiZTEZJDJntXT0TGXKXYY7mUt9by+/O/suXhz68+zl5OnfPQ3a7OjVtkuOfNvs13usuNohQVZQW
UqWLZSypY0cdnb5nR6CeZ6WZViWgLAogBpTdx6NtaNzmsy09HqRz9Ms0sospbjYqCggGjlVju84z
p6ujBZ6yyrBQKEqDRv8Ai05Ozv8Ao5oqyAAAWUWeabMsuwWfPy/Q6PlfqlbNiL8f6zz9/C5vtPLx
fgOL3p058OnGy83q8vXNd2Wj0uG+byfovL648z1Onfz173n6svL0xw3OtxyuyzXMp0aufq5umuPp
4nTffp09mOW7boxxz9HieceJ6nL9Z0x2XlyzdmLCzPBrTKYas6z0as8tcy7OfTDZdXDWzXzys8Me
fc655vL0z7fPo6o87T9Hq3jx/P8Ae06n1m7yceHo8vu09CafK9bx98c9s2axwdnH26mOOPRnXLnn
p1mpXb0/a8T6PzdfG8z6H57rnrxYYedhlh6ebbq65y3efj0y8pOnLLt1cWOnZzdnLcxMt46bhOXb
nZY9+G3Zq249nfo36vNdf1Hz/wBe38t5/reTqaJcPX48uL0vPnP0de7DO/P6OfZ24ejwdfFjp6XV
yduO/jfR8+eentfnv2vVjXxnre97PbHj318Omc+bdh14+T6nk+nqbxCwOfowrzunPjufSaOiagJK
JKSCgJ5vp85t4c9tm7Lk65YBhmOGbtGs59nMl6SAAEBCUIObpJzYXZZs0YdcuTm6RLAQAAnje1ie
X6Hlb7OD0u3xJfW6OLZZ0LFBIAAgqAACLKAQIBKSASiLCTJXFh38xvy87sTbhnF5ejLnTZ53dvPP
7+Xjl9bVdlnHerlNm7lhnOnAznL0k5O0cl28su+Y2zoabW2WIVUAlEAlEWIIAAQpJQhQEUkABFEI
WKQEURYJZYBFgAgKoyJYFFiY5YBJZbjkJYJYkFgLAJSJSyZRJKMWUILEsACCkL2cXdjXqeb6PzPn
7+LxdPNw9OAZAAAAAAAAG6tL1eC50jOgtAAvbp1F1iLC9v6d+X/Vu/1nz/F42vJ6XpfLydPp/G8+
XO7j7fMTomqVtxwyDPYastgyz1UyxyzjDZnlLjlclbLojO4+jb63Thv689Dq49Y7OXHtl4OveoIW
ULFBAKFAtmEbXn4J0cnd1HJ1ZSUAFUFAABr4yY5W1i14pp9TdVCAKlWpSlAipwnT5zgs7PP7vZOf
m0eiunvqJQoFAlIeWbui7QUxoAAAJPIsy9DLats1x4vm7fpZfh59/wDE5v2Xy3yv0t19J3sLi/M9
fqzfjz6PX5u3xPjfWefz38b6u71N89mWfgYufk9HJ25a+rmx1Pc9D5LHlv7nL4ru49PqL4vby124
a7Ljz9uPWuTZp6YvVz5Wae7m7WseDo4bevfx7tb6NvLszN27zfQkz0ZTGZr6NeWh0MXDLRqxdmvm
5umerm6vqa+P9D7DmmvgfpPG9Prz+p8zy9mN+28XTufU+dw9nPXpeB7Pz+L5fXo69Z4/O9Lg3x35
Yy51Z7uapv591NG/njKzrvXb6fD9T5+nxuv6b5/pnPDKZedh6LrjjdtmfF9HoHnPSlzzed7RfO2d
sPM6epZ471MdZ18vo6DTs2R37NO3XwY/YfJfTOnj+X38Gpz4Z6/Z49+Wjs5TnunLrz17MtsuXB28
ep3eh5npcfT5P0PJ7Tfu5c+PTG0ejilIB5+nu4LPVadykolHPx9udmvZwdZvJKgIiBQFgcHRnxm1
s12dU17ZYsHN067ODpmg7NnH1lABJYQVFiAY8/Vgacduo6uedBXN0iKAQAGvxPf0HJ3/AD3qp5/p
9PiL6nTyLOoKlJFEWAACWCWUBAQAIlgAllARRp4/SxNO7hyOxYatXVEY6N55vR28a9c8z0jRj160
5OvlyOrnu817NOB0WWuLHvwjRnz5y3fyyzsattgUlEAlEspAAkBCkFAQAJKCBAARRAAJYJVkAICC
sjHKpRZYogMdeWFkKMlAEuKRVRRFGLKCAlEIWQgDHKGKyyWVEod3B343v+Z+q8Xy+n5vT1cXLvAy
AAAAFoAQ9Ty9jX6Ht5PT64mrbevDwvF+4ZfmnP8AqPLnX5s+48jOvAz69GdbOL1PLzoKGZ07tezU
17OTouZjsxGWrHOt3H26znz14x058uzU6MtO2sq2JjsyS245RcstssasjVv2azP6/g3dc7+bq6NY
4uyiKWBFgWUAABax4U9DRx7k1T0slx2Sy2xFsqxYAUFSho4036enVWWnq0G7j2ejHP0CxRAgpKAL
bKXDm4U3c/N6Z5vu79S5eLs9hNW8lAAqCpRjr8gy9hkUFQUElEsow1+dZPXyS1MR5mr2TLLGy5Y2
nwXF+k/F5v0nP8F+kXWXVjlrnFS+b8f+i6fD6vi591o56+L8r77k64+K4Pv8N4/Osvr/ADdZ8S93
HZqxXUwmeRn3edrxr6Pd8qxr7TT4uXLf0XPx9GNdeN57eTdn6VvF2Z5ZunJrs2tc1rblpvOdmjHR
wbOXHj6zfydn1m5859Lt8vn06eXm16y2Y6u0093F17mG7jHZnxVnpy5eePs/jvV8jh059+OxObn3
Y746WG3eNnNv051jOract7c865vp/Bs33+Bpy657rjlxsVUWCWWDGspKiNGnRMcoJQSixAlENnre
Nra9Hiy6+s8LD0eXp58ejDl5qz19+PTnrvPrjryy6c9nZzdXH0cHu8nqt/Q83Vr64rVt78RCoJ8n
9X+bL9x6P5f9cfRUQDHDbwVy9/z/ABTX2Oz5z1LnuIAkFChKTk68TmuvbZj0846BLBXNyepxps1c
/ebZzdRFhAJZSUkWAppu3lNW7bxHZqzyrJz9MAAJYAcnkfReebd/heweX6e7wz0uzmx1nrSzQCUi
CgkAlVFglhFECJRABUWAADj7EeR6mHnHrTDOnP0E59+POb+H0KvL1c3NHo8fXbOOdGg6pzdFaOjL
nN7VtHNl4vPrzeRjv8/f1ve+bz6cfpctGXfjtGolgABFiJRCkAIBQEWAJFgAlEAlglEUkJRYAQFy
mUqkoipYjDLVZJjUuWOa5VBLiJZYWAEoBEllARRipMSVYiFhJkMbYO7h78a6OPy/mvL6PV8k59Qa
ACAoAIBAL2cn1Lr6vqR34LLvmsFurgl9TV4OEvqeN6GyPiNfVhx68jdpRv0bl37ua2Y3LVZuujA3
65uzrV1bu/L5qbq1pehvzvyNuzn3j0tvJ16xc7mTdMIy1ss726/b9Dpy+T+m7enTXsl3kCUBSLCB
KlBznQ8jCzu4uztjzO3eVZVWIoLcaZJZalEoJ59ndw3oMGHZHNv5aurt3VAUCLARFAFHMnR5vJyl
6/S7TDNwrlpnqEpAFgAAOfTzmPs0RRAAVKQDjx1ph6ooxHjZeqZZgsFs45cuTb4dnifR6frJUoxl
llz1pejzcvPzcvVmekrRm3zPamenj8f0enM+L4/tO/nv8u4/0rxtY+Iv1vnM+Lv3jm5crqam21s6
vP6sb9Tv+Pc9fbZfIdvHf0d8rt5a6mpLuwnOt58fF7c/T+md2enX5HFjzrVm6sssKjy/R8nePc4M
/P6ddnRj0VWm4531vGuX23wP0nz/AD1t19Olnnl275cPT15Lo3VjclRFF26t3WcvHHXHXcbw2sAJ
MZzdM3LXe+N+vm1J6GnHl1n0dOOquxz7+e27j3S75Zx0C3HLSbdW/Wcvo7/sWvhJ7Pj5mOnomsc3
J6mPTnz47tOpr3aN/Xlu38u/Hbn37+/PT6mc+3c079O708EqIDR819X8jrPt+V9NwnoTVtmqgvhe
35SaunT7CcHg/WfM19K5umVLFAlEAgrmw7ORLt1Ky3c+0yA07hx4bdabsteJ1yVRAigEEAKXk6XN
Zy+jfMPR178IzadwKSURRAeXye/46+jl43s2fP8AtbfET0+vVortSygICUkoRYJZUAlgCRYASgAl
lFgAxo8zq6eY6HJ2JJRybtvOb8NXQed0dHKdWPD3HLerQTo5sTo1bqvF8d9b8t5/TxDk+h8fHux2
w+r+I+k7cPXz8bit+ncXZ6vJY1G0UlEAlJAAJYAIWLN8eJ6fz3scu3TY7cRAUkokoAQAsiwLYtlm
kQAQsmFGEyI2SrAJYJYFgBKICwiIVAliCBC1CJYAE7+Dvxr5Hz/Q8/we4ZLjbKQAgEAMuua0dHo7
uXXwO31vd648n6TF6OJdO+e543z2b9Z8x4+nnvdqjGgW3Ed/Ntxx09HyOjfrHm5b96cs97vmvmr9
Brl4fV5PPzv3cfn9c37l8XcmPn+54m+V26djefP0YS49/md++XduzSY8enZnpfrZ63XAdvOspULQ
FAQOauh5Bnv4N/evkdXeMbRFgoUKLApKFuNLZpl3cmnps5uvRkY793nx18O7vXXtgtgqAsEsAAE5
fn09Ty9v0C+V7dJWHkL0dGfSBCgABCF8zX0WYemSgVKEFAIPO1dxp7qIsJ5WXZWW+IoC+aXZnsNf
Hs9SMcxZLBAaLLnmvN66gat3P0FlktQX5j3fg13fR+f4XLv+meXxeWmzo7voMPgvF/QdK/nvB+la
Jj892/QctniuzludeOVs15ZVdvRxas6+i2cHp+btl879Z8x2x7Pb8ft57+svyNy+tnyeJ9a+Tp7r
x9Wp9S+azzr6TH54ns7PCzs9z1Pju1r7r4X0/Mzru1dFxJbLJJDJYJZApn6nP9LN/nV9LzfRy6zH
nbjNHXDLiw7ctvPi1zl2brGzLc769XVli88346vNz+hz65827TqcvU3eT28+vRePfje7DNy209Go
4frvme3d38mU5guYsEtTROidM8+dx6Zy5/S6c9Pe7fH0r7O7g7/Z5ghLBo3q4Onz/VTg7/M9GqJQ
OTZu4U7qL5voc5OgKAgAiWU0bxx0s3aLvMpz9JCk5erSc3RrwsvXhoOmksFRRFgAKTXthz5Spxel
p5jqm7UbLp2gACWCUeJj7XintYeV7R4fq5eRXf1Y6E65jmSyrAgCAllJRFgIAiWAACVSWAEsAGvk
78SZef1G4Jjo6BjdORlwekNG7kkvRo6lnLnhmb/C9PyuPo+XmWPDd+m+e9nO/Pxlut2/k2YvR6/z
/T15epfG9jtynfz+ZvH0Tn2bxsc/VN4tXm8vX60+S7tcPfS9vKAlgBOvl+R83rx9/wCV+qY9BL6v
MgCACAAAMjFkMbZLbhShZLEgsmOcMLaWAlElghYAESgIolgElgCRYSVUlJEJZYO7h78b+R8/0PP8
HuAAAAFXf7u76Ltn5Ls9vo49PF3+tOvPXt0+HrHvcXx3Fjf1fgcDIM0AAFAdPNm1tm7ZnfT0+X1Z
1hz79LWWXOXZrECzp16kfUfMentY+fzwy225YZ46c3p8O3XP0OBnnWX13k/ZdM4j0eQCgBV08iel
j5WR0c3b0r5XV1klslCgASUAWgWWFlBC4cWSTdu4zq0b+hcMOKxh6G2qAAsoAAgDnN/j8PWef7nb
mgF1a/ONXuXNbcbFABUomWoy8XL0zV1gsFSgBKU1mXkZ+pZhtJVhXm5brmddS4qJZ5Bl3ZZDg1ey
ZXGy0xMp4nry56MdmszzekdWYNO/mN+UFLLDms09LoHneij8w1/pvyOdfV9P5z9W16ezRt1jd5Pr
eTjpn1575Pm77Pbjr8B5n6X4mL8Hzfqnkuf5xv8AouatHv8Am9/Lrz+B6Hm7x1ufA7cePA7nFinZ
hy3TqnPrOvLgqdzjL1TQN36N+ffTcevf52TjqS8282Y83bn2adM1Onf5+6XdeTOOq827lvazlvhd
PZy9OfTp3c/PXNrbvV5+TLqyrRhuyzcN+3G9Esu2bRzbGWGolbc2n0MZz8vLqa46OrmyXo2cHZy6
dOrbq4dZ6Pk9fSbdVnOjXZmwyTJz3pnoYZZtxZGvz/Vy0+w4PSy1fivtvnfofX5glAA4Om8tmyb+
OvQlSgOXqxNW7g7knB6HNXQ0b4BRAsEssSww1dHLWG6ak3ZbOc6IS2FcmHZyXOWerYXbzdC1YAJY
UBAA5ukmGjHrOTr5cy3borc17IAQAGndK8Do9Dx497XxekeR6TyD0N+OuzqmvaQCUJYJZQAEAlJA
RRAAIUAgAAk4u6LxdvPyR6cLGvYObpnOdGLrl8rf6nyvl9Xt6PF970efRy+jp1nj4vc6M34zt9/D
y+v5fzvf+e59dt6LrO3Dq5ca2/Q/N/T+3w62fcvyvoeP0+f1eh0eL5HP3/RfPasXGSzfl+u9H4Lp
78Pstfy9mvodfhzO/W5OLdm4Y+jtzfnfofH9Xv5PZsvq4xlw512JdZiwShAAZY0zkSyZDFVlEqXE
QsijGgIFhFECQlCFABAAQEWEIgWSWACWI7uDvxv5Lz/Q8/we4AAAC56y+t9p+b+12v2uvl0b4dXP
4vg4v0Xzmpz2GaAAAX2tPFv1/fvHwWH6HwJ8W9vyca2d3n9PP0rlpz0S4hLcpRKKIZ/VfJfRSfNY
+n5e+G3PVnnvlHp1j6H0nX0469p24E0J0POwPU5+TrXnno1ODr2ChalKIAiiLKABAKlVZYrk5zbr
7ddm3mndLxd1806+Do741baUCyiUCgCLCTV82dvD6XsHN1QVBeDTts1+pUCqspRCykXlM/MvrUzI
SiUAEsBymfNl30pCwXhQy7YKgYzyR62Qx8jD2rMspZaJb4ns/HXPJxfe4Z1ofF6Jr7n2fO9HWRDT
s1b7EC2Fx4cPUSiUUSw8X4H7f4rN+w+m+d93TXhyewmrbq2iUYgz4OyzXgeB9tu8vb4/z/uOPh14
un2+f2efwOz1uiX5/h+s5V83k+oXn8dzfUc2evkbfqOdPD0fV7Lj4+fS7mvj59n4eXgdMy8vVhnz
bk168vRy03plcOrs1OfNltqac9lt0+x5np8OuybtnLr85vuXXG7m6uPDn7ubq9OZlLOmrRcbrsx1
7ZiJnZhzZY3XRdHTmYF3Mubbz5u/Pm6k588288W/DPHPswy0+ft1/bfmX17e/wCb9Hz8zlx48vd4
905Zc7+jgzrvacm+nb53Xz3vY58d+v7/AMV6116XRNXt8vWJQEsJ53o8lnXx7sKy6OXqgFA5W7ls
7Nedl4+rTLOhUsBAJZYBJRy4bc7Ne/g7jR0a8TcBzdMOPOLNvNn0FcnWAoAEoJQlhOXr1ps4d281
bODtrVns5zewzFiAAqc3UjwPQvmnvYauivG9O+Wnfsus6GraAIAUgARYAJYAiUQlFgBFCABLCNew
vlehnwR6DRvsA09E1ef0c/zGnR5PX1el5/L0x9p0fOer7PF1dfNlZ7Pm+j5fj9fD8d9f5fPt5Hoa
r6OOzz+zizeHZ6PRrG7bzc031c+7l4e3Vy59+vN5212VwvX2y+Dr9XjmdHp+ZjvPrdPjejvn7XV5
Pd6fN0yTecnzujn0+oy8jK5+n/PnP5Pd9L6Hk+t7vmBpFglEUJRKCkqZQLFksSFsEUsSTIQgABMc
okLUWCBFiAoiJYRZYBARRJYjt4e/OvkvP9Dz/n+8AAAAB3cOV16vl4ECQAAAW2bPa+q6Z8T6eunN
KsWUc3Sk+d8X7zDL4DV283m92GGVx11ksC5ILMcbNm/iqfXfH/S8Tn4+z6ydHB9l5m3rz9LHm3Xn
zY+hnXkdPcjXssFgySqABQKQKShFGNsEqosRcOQ6uHLuOfdzbDV17Ut5+PaavS2IUUACgFIsFnOd
Hh8Xqp5X0HQWKSLrMvHnpmrtChVgyAEW48Bt5d3eSoZIKAACJ5dbtmXUllSpRORhV7plEA0Y8NmH
s1LPHemZbLBljVpyxyepydtgS6fh/vYnwH2vk/FZ3+q83L27ztokUTh6uOzq6JZVJVgvneh8Ymrx
/uvUPh/Q3fDZ1+he58l9Tbhv0b7kCKICad+o18e7bL1aduqzcUAGs87bwevZtpLFGjyfV8Xnro8D
g7+HXblceG9PJhl6eO3omU6445Y3WOOzFObo09UalzrzPX8r1eOPf9P5HTy78mXmep25bOLs4c5y
6uHu7Jp38bpIdLerk25m3HZyTMR006OfKOjJp541Q66b9GUdC3OOTLVscu3DLHh3yy5vS7Z5pu08
nj9ejv8AXw4nXV8/dt3Gjdbd82PXrc2/z+vLpsvLp7u/572PRz9i6N3TNQVLE1blcHXxd6cnTptd
MM2xKc3TqTDfy9Jqw6eat95+mIFSwSyxKIBy9WuzDVu5zt59mwxvL1EWGPP1c1kmO8YsToYZqAWA
ACABou3ms6PO76V5vpGjblzx0MciACiUnB3xPD9fm4T3NWeS+R6LzU9Eptc/QJRBUAAAlEUQCWIl
GKqgAEokoixAEsOO9fPL0OLrssyi/K+R+n/CeL2NPVp4d+T7L4z6b1+TX0zxV+p7/jPbuNvl/SeZ
w7+Jo79G5x8npedqdGzC6zp4+vh59dl05Y1sSL0dnH9VOfi47tcvP1efvzdnL6Hoanyuv6Tz7fK2
5adTJqlzv38W6Xs49O40dOn6Tr5/R3V7PNARRAAChbLFgiFSkBjSwFixEsEsLASwLEQAqTLESxAW
BEsILEUgEokpMe/h787+R8/0PP8An+43fR3Xy+39D5t8viM/az83p8/p6fGzb52LtxCwAAAAz+g0
8j6z1t/bEtmsCkKS3XGc4FbtHV0HjfH/AKP83y181nzZef2546lzsxvZZwPa+gs+G7/opvny7Nuy
MNnZ389fMYfRej3583QbwCAACiULBQqygFLAACyiUYrzHRxauyzk79fCdGHTvMc8PLl7uDf6Jhtl
lqUWUUAJULYLjxePHVr9LuMMyoohxmzzb69kzWWFQFWUqUTHzDPfn0lEAKEoLAww82zL0NmQEqg5
msdmOwCVxa5c6/VFeVl0WZ9ONlssFlMfPw9QzEqWCWE+a+j/AD259z6lZqUIvn2c/r6txKS1KAa/
I2rPQ3Et07SfK/Ofp3nS8/s/mX6Y1UXIhYpjqbLPN9XyfWMcM8TaJWE+LOjq5vlj6j6j4r7e3Ysk
Aw1TyPP2w87y/W5avF0efrOr0NXTtMc+fPbo12Jebo5GnVybDc2cuM8/qeX6meezXt0a1xdPlenG
Xneh5rGXVz9nW6uft4nSDpW3X1Yl4+rXnOix10s3xt5t8555kvXUsqdOOzDlnk2a8+nHuwznl7cn
dj9tu/Ha/rvkebyvR870fZwxzmuddXTy9GrRc7MbMzm09vF14ejt4e3h0unbwte31c/p9celTpgQ
qDlu7ks2N+ium83TLAShoxzxTfz76ac9SuiWSwIhSWAAHLvuizm9HkyN+no0m2YbCA47v57M9mOs
x63GdpJalIspABEsAOTrnLW/j9DRG+cPccvRlx11pSKIAUw4fQ1J5PtefznsadxfF9K+Ynqa2w2u
XpAqKAIABLAARCwhKqAsEoiwSkiwASjTzd2Jjs49pv5ugvxHF9j4nl9PldWrkzftc/iOzpju9b5v
j5dP0Db5Xp+nzcej0sPF6vL8j1vPa36+vm6Z4OTZrXHLXlLkmRlly7mO3fxdfPU2Ye96PPv2nr4X
Tti/Oed9f4Xl9fhTqy5b0bva9Lt5/J8r7KduXx31t2awG4IoIAKspAAhYhSkUTGiKJKJLjZliEoh
YqWEUklUlhAJYJSSWEWWJRARYJYjv4O7O/m+f6O+P28nqef3XXjen9B4F59/k7eyPH6/aup+dcv0
Pz3IGegAKZehqef6v0nsdccHoLvISVKGvkrv5dXScufcMbeSOvVyba1PRsfIeD+l+Ny6fKdX0vo4
6/Kdn1LWPmO31Nic/VG854xYwztkoqUkAAspCgCgClAFEUAoMDPXzZGrf08Z0efe+zj7mJlycO+X
R6+YpQCiLYW2UFJL5B6ngcf0Mef7lAVUpDzDLV0dyQKsFABSk1YcJe+7hZYWUSwWCoLy6Max9SoA
ELy3XZOnDoXJErzXVc6u0ledtxTLsFqUSicPVwHV1BkllgBwWa/Q09IlSkhq5uX2LMmIySyrKMM/
AOX6ji7xKIUef6HAnl/RqYzKLFxLq2YmfB3+XXo7LI156tyKLPF9jhT0dG8flP1G7169CZSWLrPF
z4NXHr53ocHTx3za9u7pgsz2nLv57rZu5evE59Nx3RtOjj7Ofnjn9PzvRxnr837nix0+G7eXs6Yn
l+r5OufZtwvTd1bMpriHTW3bcMYZ4XU5pu0XefVgzmZ4NTTh08zTfp64lwtzytmucO7Xux8/fzPr
PDy6Pa8mTjrzOvj6vZ5+nj6uTPa54Ol6rMs41Z6NtuWrddY870OfG8PT17PH4d/s/X+K+6zvXMsf
X5w1IocHfypt0LV6NcjapUyhOfo0GPTo2Jp2sKyz5+giyIsoCVABz9ESeb1bqycHecvTec6JYObp
hy79OFmW2w19HPtNkVYsAIEAAmGY5+jUNM7OA7po3nL1OWzqSqABFhz6O/lTm9Li5z1efoHiell5
x6fPntszcfYEqwpFEABFiAJYQUgAAJYAkAWCURZTn6Ec+7Vgu683y/Dv7XznPv5deNu1MzPCx6H3
H539JrPoZ7PG8fp6vD9vztZw09fOnldOz2Os+Ym6azcN+iNfTq6q2YY8+b631Hnel7/AlmyUQHLr
7mdY2tZgEqxFWSiUFIFWWwksRjQBbIuWMIhYVKiCFgCwECAlJJZQCCIEWCUY0skoksKgsCd3F3Y1
2ep4XL5/X08fZ27xxdG4Flho+aj0Pjenn8/SMpOkZejZ53ofS+11x4/s5N4FEuo2OHIz09e44+u2
DRznZy3sXh6ttIolBYKCoipSoFlFgqUAgAFlCUFJQAqVVgpYAOPnNzp2DXo2nP1bxJPJO3g6u+zV
sWVQFAKIWVbZDLR4/mS9HV6XcmGZSwFEmHmDt2byLAAAUiwcmOpJ6VoC0FRFAMTLyOT1zX6AAAOX
PGzFl0CsZc/OneY7bByXmsz9BZYoWFRwJzetp6EWVaJSjVw4erYLLJYPJ7MLOjZYAW40tiXDwJ79
bRFSgHLv5O4WAQfP+r8fZo9P6756Xdt+A+1mvooXOnfo3oC8W/k9AprPN9bz/RsiyWa9nPHncH0f
ncenyPR1aue8te7l30y283Ri8uFm7OzRt5zkx36N118++SY3TcO7i7fNO7xe7n6a8z0uXrzHj+x4
e+XqZYXv1twS4zdOSYx3Virby9GXCNadVRpt5d15JlddW43pNWFwzw9FMfL3w7vG6+87dNnJ5Xo+
d6fo5Tk6dbtpsu7uzwuM6ssW71XHKY1YOjWcsNcw+h9X43u1Pq8uHu68yWhIa9krz+/j32YJsN7H
KWyyIsrnxudm3m3azPG6zohEFAJRFEBpbuWzX07POPQ17Bo3c+8oNWjs5LMrr6zHkz6iXl6lSogo
ACBEBzdIwy5OuvN7Nvmp6UmS8nXOZOqUoCWAHJp9DjTR6fm4S+po3rPF7tvlV7PLl0ocnWtAlgAA
BARYkWVFgAAlECAJRARZRYNO4aPnvpeHHT4jHr0+XvllzU2at+w0yidPF0y9/Rycqevy8/BnXtep
8t2s+txfT/Lrp4um5rl7NVau3j17x94+S5vTx+t875j28vph6OKUQBYSoBSVCURRKEoAJRGUUoks
IEhSTKGKywABLCAQRLBLKSiBIsEogIWoEkCBEondw92delMsOPprm0nfPnPBzfsPD+fc97dRjTKe
lXB6H0/q9efkexW82yhp5zt5dfWcufYJWuXbOOJnh17Dm6QWVVlABSKJQWCxQCoigFJQiiKAACgC
oUUArXznTw7uxNO7Tyy7MercY5TA2cXH2HD7GYxWACwWyikALp8Y9XwMPezrz/eyWBUBUpebDlMf
UyyKIiyoUiwGBlwTdZq9CpSiUAJVgcxs8XL2rNe8lqUAaUGDrC6DLz8/QsZY2W8zlMfTxzBDJiKY
mvlx9AySi45FEt4Onirs3kJYMMvIsevjkJRFEoPP7vlju9/DKWoKBx9nnWdu0lASjVyO6yiXn+R+
2ifBfceH5Tf2ewZatnHZn16tsrh7fPs7N0S2KY6tvn5vZ+e/onz/AB6eJ0XLl0x5ejn6bm/DblxY
5XV3S4plxd3Lm7sc9dl0bubeN/bydHjdPnbPJ9E6unk7OVvhe34e+XrI790F2TG+eYE9IQz2Y83C
bZnr7LcWl3anFcdmrcyRu68MtefP6STx931PxX3e9cHhfRfO83l9/B3e/wA1ywydeW5GunHJnPMs
6a3Z6rjOHVy9G5jnM8Z6fW8b61NHVg7c8xqURJVcrfx2bt7mi9fJ1FJKJU0dHMjfgNO7XtrVt5+k
gAAAEsGGcOazos5unk6i83TiZOfoJMocd24WdGijPXngb2GcsVUABFEWEWJOXr11njp6Dze9wHo4
2nJ146E6QoEBAnFyetzGn0PJ6js5+geP2bPOX1+V12S8nSmUFFIABBEUgIqpKEogEoiwBIAACKqY
5I+X+f8A0f5Dj38rDHLh0xuWJm1i5yGNmSbNOeC9X3Hz/wBJ6vIHScvifSs6+C5/0Pz+fT4tljw6
hHp/W/Bdnbl9lo+Q0t+99B8h9d289lbylgBKEAKShKCksmSoqBFQsTIYqIuKXFBKsiiLCAQEsASC
osEogIEgpBECABIB28XdnfzvFfP8Pv2NaAkGVY9npfX9c+J7mV6c5ZpTonDkZa+zYc3SqmGo6NOj
qjmz6wC0BQAsoBUoLEFAAUCkCFsoAKAqUgAqwIUpOdOjkw7Dm7HGvXxZdsc3XKGrzDp09PaY5EFE
VWMyhKCwVOQ7PN8/ZGvd395hshalBQYl8/X0Jr9FVAqWEqosEvKmfDO6seqJaBZRYgCx5hs4+n06
lIAWUuhiXW6qtnDG7U7yLBovKTtbKCAEocG3A37gAAWcBo9jm6gslS6zkvN61gktlgFg5jzdnnfT
LaqCygcfRxenYEoEXjNHpc/QiUqWHy3o7PT1kjNnDty06UuWjXj01tqygaPF9Ox89wfRfN8eu/Vn
jy3dW7TvptZYJpyyZuMjouerZxaona3k6uNy7/S4PS8O/ovlvqviOl5urk62cPD9vxevD1sc9fb0
Nuuc7zZc7XT0dezXOTLHYaOSzXT0cOfrxz1ps6TLz+jjxvvvH3prR1zhq3aM+f05b4u/Hh19Xqxz
5b9HG+Z2cfX6+WVxa7ZMrykY3rJq38810zZM559urPd6dW1yz5v6D857es+lx7tXbnvpqBAE5unm
sY5ZVt056Y7ILCCUaMWy52c2zWu/R0aDfFIsAAEsEsHN06bNnD16Drc/QactnLZ0pZXN0w5ermxs
z6HIuW/LkTrSygRVQCAliAaLu5rOjTsyl8/u0aq7cMxzdOvUnSFgEomGyHN5/p60x6/H7jq07i+P
27fPPV472XOvPl2myooAiAAAqCEpFlJRFglEACJYWAlCWDHJXyng/pPncOvw19vz/N35Z0aLNmrZ
qpnhUrHss+r9DXs9vjlGksArj8D6uY1+dz6/5LzejEyzce31fe7cNXRZ6OJYqUQpFEWApKLLbEKY
zKBJVlgAAlhAkBCWWAlgBAIIllJYWBAiWAlASWIBAggBO/g786+R8/0fO+f9AEM/Rt1fY9HR25nN
q3z6ufPpObZuoXFbefUdGjdvOXpoqUUhQUUCpQABZQlKlIoigCoAKlilIoBaAAAYGenm6jRv3azP
RzcmddvbzbJd9adY3edzeicfq0AUFSwBFlQFw87zjPHp705vUyLRKCLKqzQZ8E9FMdwoFAKSgTiL
zbPQsw2EqUEoBUox1+abOrPoFliLABpYVlg3mdnlRs6M+iyZYpctM5zHrm4JSwAGrZwjv1biKIAY
mvm1+nWVlzageNv6rN1JZQFJKMPG6ObWej05ZShYMuLr887tsstQUDyfR5k7QoF5enzjq3UYrpTk
7ObtphnzGnv5ukuWGUtlwOLs5+nj14fA2fL4v0GGG/O9WndhrbFjps07NfJjLOty1ZcfK9uO3TqX
h9Dzrx9fKbvJvk5fU0erHF18/R596vH9by+/D19E3Z9Dzuri1uJe3To6fP8AR48pzdPnEsduj0fO
34xv2Xk589GJ37O7g25nWzueWrl3ab5/SWeXv5/paur0Z2c3p+XxvndXN0erjlY33zymPKLje82a
st3MmLU13KmzHOYz5ntT37NXq6Ortz1bebp1JZQBjkOHp1EmzX01p3cnXAqyWGGvfz2TdMDDo5+g
5unTsMpRCkAgARRydWOqzz/Vw4E9KUvN04YVuEadfVx2bdnL1nPv15Grp0jcFEEoSiLEgIDm6XLZ
1cvUl4uzjys6sMy8/RrwTeRQEomncTk5OyDr8vedmncPJ39PCepz6e41bNStiwBIsVKQCChACwAE
BLAEAiiCkoiiSs3kw9Ld5PT8a9LCTwPN+tys+QfV+Tp5X0nz/wBbvn6w9PIAIiyksHmeml+L+j9G
ZhW5AJRFgAlCUsoVBbLLCFgiFWESZKxoFkSWWSVZCCUQIlgICUUSCQBYJYRZYQJliWAlhZcUssHd
w92d/KcH0/zXg9mm5em33/SaOnvz0Xsusa9gDUbcOXcanZUwzFqC2CgoigqUWFqUAAWUgLYLcbFl
EWUVEoLKKAKKQCwXXy1McuvITn1q0afjs2+fjOPXp9v5zZjr+h7J6fbgHTnQWwUFEAE82u3xugmr
Ls7TXmqrKLBSRWPn1v58vRGSyyggVKLLEmPFVbOxIFhQACUHNq0JPSzyWiKACabhRci54eaX07lA
DW5R0Y9NERQKEXQabq9CiyKBKJ5/TqN+6DK42W6N3lps9EIFWUQHLv8AOsw36vUoqEoIObby95bE
tBUHF18XoACUa+LHvM1hOTq4bN3RhsJw9Go6M6hYW6d3Gm3bzfPY37nz2rl5b7WnPPRq2arqRdXn
38Hby1hZlucmkvTvx09fLE8vu4b5/az19vm68fi/X/MenOfTz9HG8/nd3F34Z+lxdmPV5uHRz9tw
m9Xu5PQ48+fj9Pzag67Z4duc7/M9Hm5c+WL36rB6CY+flzy4d/P6qTx9dX1/xO7rf0T4fs8/jvg6
NGz1+XPJnrvpx04tdzLDeMt2nLMmWN6TKssM1uZo+w+D+2569LDbzd+evox19uO+WLKosRzYdfJq
b8JjF35cp1KWS4jXsia9d21jjME6ebp0ruYZiAlEUQAiOfoVjw7+g0b/ADPSsaOhLrz5eqmGcjlx
2Y3PVy571nM6jDPj7CBQFgSxEsIsLjkOffrxs3ef6EObp4Ok3a9hdG/TE3kUCBHH2Dhwypl18ds6
9O1L5m3o5D0dXP2mrZhjW1YAIIBAAQUAQAACAIAlgABBUWCWIWAEqAUVEWCUQtYqIoiwSiBCwiiK
UUKiKWLABjYmK2oUioxqUgkBJlLIsIsEsBEELEoCLEgAIACLCLLIEEUEIHdw9+d93n+pjw7+d35q
WVbdWo6NM3xz78wFSgBQALKVKLLFgUBSgLKJYFACwWywKRRALBUouPkTXsz4jw87/SPO+Bud/Y34
/bnr937P5v8AZTp6PJq5evm9jzev5By8XDLHj2xWmO16819v1TLtxit8wFlFlKmMZ83Fvrm34bzl
9PYIAC2AomOPml68+gWZSrKQAAF0TmJ1Z7kCVKqUAEa0z86bq1elUoFuNigmuaqbctQmvSa/YWIs
Gu8lNmHalSywFSlSrjxZ5Js3CgFiMcvOMu7XtABzHP28voCVLCkoDkTVk1Vt7JRYEocnX552bYLZ
ZVgvP08Bv6JQBLzHL6XL1AhzSyzpl1mq6eo2XFGSVZxdGFnR53qY8t/Jeb9L43LfNtlvSY6t2d6s
c+XV09PNk316erzOecR16X0vO9Lhz4NPTo6cPW+k+a2+Pt9n+edPi+nHp7NWzi5OTp1ejh2689d9
GzzfT45rnNnXp1XZp58c9GzaeZMp17Z9+u8uOO3Ts1POnVy67Xbq7ZNzC8+PJh18e+Hq2Xy99WnL
t9GJr6+TF4tezV25d+M58ejXE777ro6M88+Tp4jvmN1nPPGyZ7dGFz2+v8Z6Un11+c+j68+nR0c+
s9CVZQCHJ181kl2Vt5rvjKc/QqWEUmjPPnrHLdpNmXP0HL1aqmwLACACWIFaMtvIZ6+vzz0GraTR
0aTa1bScvXhXP08+KdOno5zfy78Ta0bwpYBLEiwAAaNysM+feY8ffqTHdxdBt0bxo3aMjbLAolFn
B34p5/RjpN3Vq1V1687Hn5dGEdGGPPy7bt009eW8akUkAlgFJRFglEAACJYoIlEWCVUWAElIBCkA
AICkBKgFJQQFgAlEUShRKsCWBBUJJVRYAJYWIWIgDGywQLCLAQBJKqLESwlQASiASyiESiShLESi
d3D34362rdz8e+TlyrPHPaunfKFBBbAUSgAWFAsFBRFAsqgAFEUCFAsFsRklByHVq+H8fHT7fxPn
2N79CZ1UFiJUGXbwJro0RcbLrs1nlN+d459XoZ1wfoGWz1eajXMBZQBdPmnTq6uROvj3ejLhsVZM
iYrBKqVC6NXOY+jlsAhZS2FsolCcs02O65y0QKQpFgOasuDPtTX1SrALKLLDXeay9F5jLlw7DV6B
LUowvISO2xSUABZRz7eQvbp3EsosFuOs146+4oAMfJesZ2WVKACw1cF6rJr1eiALAUcW7m7qoi2W
VYMNOO5N6FEL53bxp30Vp3caTp5+qpy9XGZ9HP0FSlssaLq310827V5u/wCY+t63Dy1Mejl1vi6u
Tp1u+f18iw229Pnenwcs6i9d9eeWvnzy4ezmnn9Lbq6OfTn5d/m+jPfU8m+HXnj6+HRLOnpZ69nK
eb369+da4d8zZrsatXdjxuWqzrmWN3b5vo8vG6+/XbMbL1OPq5M+X1rcvF6OTHr+o6vmNf1Xy/Nx
ad+Pr4dXN0Y578Q7dL38HZzxhoyx3ejbydtxlry0x0y288fe8H12PU36+rcyat+8c3Rz9ACgJRyw
sdXNvjn6tUNoWWEc+9WOm7zm6NEOnl6cDOatoBAACIFMMxz5a99nB6HJTqVLydN57OhZLy5dHHZt
3cnUc/Q5TLbnxnahQIEiiLACKJzdWNVo3mjT28ybc+TpMufoGrZz7DZAsADXo69FcXoaNEu/oc1z
193nely6fF+L6Pn+P3avuPhPpvT4/X3zT6OO8KliJSwIBKVFgAlEAlEWAIgBSWACWAICgRSQpApC
ICy1FhFEURURZUtQKSwoECSZCUEsICVBCpZUkyhASWWARYRSRYQlJQipIAGNsAIKiiLEiwgEpMQO
/h7s79Pi9zz/ADerG1vAoAKSgClEoLKAALBQLLFKAooKBAUlGLKIBUq2yw+X9j4TF4EnLvUCgBAg
AACzJdnqeV9znrwfV7Mu3nDfMolgpoNvn3tODr4tho9PcKItlUBKMVxR52O6zD0KVYLFFIWVVmJl
w69tmv0MkSiiixFAYedZtwy7jHMUCWCguDlMum+eZ8z07JuM0CxxC7N1MyIogCwHMY2dVVUACDiz
tbd0RQt4ery06e6iWUpZQHHu47M9vR55u65ZZMoAjk6/Lrr6LBYKgyTTLz9/P0CgBybOX0LFllx5
clmPVMjmwyxs35WS240uvPnJ16OiXweH3fL49PLz8ju5b26d3Lrpp3aei70aenmlnXy9iTVlsznz
unm725idM68WWPP3Y7PqfJ2+C1fV/O+nG+159ebcM/Zw3yzfplmWW7n36eOcYemrBtp5ppJ6astT
br28I054aLL1uPH2cefN7GeOfh9HL938Ph0v6B8Ns1c3JMtPr8vRljOno58Orlm76HJ05zz6uvl3
Xdw9aTVZXbg2Tl53bv8AVmPQ7+fbuYdPL1deejZlzHSFEANGW7kTLHKV08nZzRvY5KBJSaN15quv
r5U6pp3LzdE0m+URRFgAlElWY8/VpNnF2aDZt5Osa9g07efdZdexLz4XOzfrx3Lz79Oac3Zr1nQC
LABLBKAANeO3VZtmOa8s6uROm83SOfoGGfPuMgRYJYaHRyGrv59Uue/PnrwPI+y+M8Ptvv8Ahex2
4/RPK6fR58+hz2dFlOn8/wDvvg/L7fd9fxvZ9HkizWQApLABLAACBEoiiCosAIUiiLCywASwixEt
IBLAsBRAAKCCwEsCUFMVgICUmUIoksREAsiiAiwAQSCoCKSBUsQAQEAAJLLBAlSAd3B3516bbq4e
kLCwAoUBYKABYKAAUAUKiXJKVBUFAsoLEUSWUBeXp4Zn4fh6uPzenAVUqkolgCAACizOa6v0v5X7
DpupenkAELjxQnR0+Ydnk9HrGjoCgWWAWoLGky87H0bNXVlDGhKAoshlZpi8d3065ZaIgoQyQXTq
0JN+3eWEoULElUxvGO55CbOd7RjkLlYiycRdttZbFiUEtIsBia9SWbdxLQAOfdw1e7VuikKcBo9X
m6wJRRZRLyGnq19VaNen0EUliwJbOTPj9QCAIqpy7uY69kstSjTu4k29GGapdZpZ42dOrbxxnhs2
VZliIF592g2dOjplx8/0Pk/L3w1eb6Obq5O/z99MOzRutnD6Hny788sLGzVkmnds0c0p2vPu0dPL
z9/2PxDy+j1/lcNnp590PPfK3c/R7fPtlmvU246eLT2+Z3LGWPbLLHdhpx5md9uG7TvmHS3LLh4O
7Sz0xHU4u3hz5vay15eHvOngnono8/VzZcfP083bj0LOvfZwdunndzZrZy4+vRq4dnH3xz6+jn06
c9VzzTbsmdvuau7tz37cubeenm6dRta9ixQBOXq0pMduqtufN1RzdHN1EWLAjm6cS6G+uPr5Nxt5
erErn6EBUogAIUhbOXpw1mrPPVZ1MM5Zy9WJlNO8nJ16rNPXo1nXy9UGjHqNWzl6SgQIolBAEArn
3Zc6b8ci8l36U6GjoJzdIxvP0FlgAlHHn08Revl2Rt3b/F8/o4vN6NWprxXWPpej43ZX1e/wtfD0
d/yWPtTT3Pj/AGO/D3m7h6ceia+NfRnyfpax7IqVAUgBCwQBLAUgpKJQiiASwAlCBIoiiAASwAUA
WBLLAFELBIsIsoACLiSWIFkABAJYJSIgmUqLAUgIEiiASiShEIqzFRAgE7uLuxv1tW7Ty9Cwlsqg
CkBUoAKSgIVKCgACgssUKUShUossARRiqo5vk866/kc9fHoSwCgULAgACi3PDbN/XfV+H7nTQdPG
Y+edfBt9A1OLUavU6aLBQUCxFAPPNvJs9GscyEogoAsBxm3Rj3E3EtAssFhFwq8ONTDuy2qhAUAs
Ql5KnbPBN+W30QIAHFU3XBM+iWVZQChUsRx7cSdOrcJRFA5jDLX21RCsTVwafbMrEqwVKKGrjdNm
7R0eab+zHKVLCSicfb5Nnb0gsRUqpcU5s+bvqkMrjZcefLKzoErm6NBN/P1Jjy5bjT0aN9SUYqNE
LOnKc+dbvmujk49fL7efdw3OHs5unSs8NMubfnzasLOomeWeiubGy9rydnD6HHz9GzUx0nHqvXPZ
I82/H7vP9P1efBlb6svN6ePO23TemvT1Vz43Dd50uI7dezPi9Dly05Y7t518GzU6bO/ze6Qs6ZcP
peZnze3Zn4vRp09P1XSfLY/oHw3O8PJ2cPr8/WO3du07uUwkdFjZGjp12MuXr5tNuyb85y7vI8uY
/StvH6HPpp13H3eXqEvN08/QgKgWWHLbus5+vj2mzT0csdUsIBKObox1009fAnewzXRuvOnRBUsA
ARFJRXN0yxy5bjT0cuRvBoy2c9nQJeTZu5bM9/J1GvHfzHRydMM3N0gCURYRRAQlWBo34YJu17Bz
XLE6Gjcrm6SS8/QsKkAlHFt2850eV0Z+fv5WHRqjk0dOjUz4t/Ty6+I9nsl+b9r0vK6c/p+Xo06n
Fq4NW87cMMLnO4bE7fofiZN/cOPs68gsiwAikgEoAiwUIsAIsoCAAixAAJQSwAAAQKFSiWUkyhAi
AIFlICCQEWACWWJYJYCJYEWAVFEWCUIAgBFxQiwFiklCFJ28Xbjfr6tmrl3oRYLYWgAAAoAAAKAA
BYLZYWC2FoFgtiKAB4fN8fjfXw7sefTnZZs6m3WEs0sCwoIAsqqsrPDY19h9R8p9N0u3g4/S6eTi
9PPzTu8q+wc3UCgBUoWFgXHHzDZs29YEoIUSZSsaCY8JlMu5JkS0Fsq2BTlNnG2pr9ClsAlAAgcV
Xpw8dNfqzvUIAHFZOhqHVSrLCgQUDHLmM8L0kUJYAYctyrZuWJUL5fXx12dMRbKqxFBePdy2Z9uK
XnvP31bLCWCWHBt4PZuQlAsFc3TyWXq07gIsvOaO7n6aol0bdFL0TUa7s1HRQgSTLTWtnLN/nehh
nXynzv6N4/Hfh5b9PLptjLe9eu46rbg5NcuPWmXJiuvz/RxdWWG7efN9PyvW58Lp9D6Lh2+Az9jy
PTz6cjhrw+7z+v18u+Tm8/o58LPV2QN3f5fqcOXPxbdXXSLvTu4duM9mO/zeXPXD0dbt02T0sblw
4zzOvk6cPbzw2eT0c3v+Jq639B+K1OV08Hfwery9cynbuufFzdmFm4zxyrZrzwky15qz3a0mWN1T
H275/o649jOzeJt5+k147uY6JYoBCYM+cTPXXXq2pdO3l6rIIiic/TqrZpyzOLt830UujeXG83TY
RLUolEsACUmjPPmq49PJZv2c3TKlhz9OrGzdp3JdOm7bN007l5elzplN/KdUxyBRLAQsQSqShq2j
Xnp2pdHf5XHr1PM8DHX7RzdPp805+mIc/SRYANewcvJ34Z152vp1cenBq69MvPctOs+jv5PrM74/
A9bxbdendoZ0Z477nVq6pvnj18Ln22ef1Th6OX6f5vV15/ob4/3/AE+b0JWsCrHJ12JRFiCkAAgA
BUAlgABFJFEACgiAMjFkXG1GKqxtElGMyJJYEUIJYggBFgJYAgJYiUQEUJYAJVICAlgBJSQICkWB
AlO3i7ca9bVt1cu6ygCwVKtBYAFSgBRCgAACyikCgFCgUCxFx1/Ip5/l5fQ+f0aeLHXc6Ls07zcc
hpZ4Y2EtlLLRGWUuvLOLitWbJ0r6/wBF2dXfhMOTnvPV6PVkCkKAKEsFA04chO7ZsAKIAoAJpx46
u7LrSUVYipSpVqcBnry6Ex65ViyAqVSUgcgauSzD0r1FSygLOUx33msvYqwRQAAVNZhkxNu0llBL
By7uex069xQMcvMNXr6txFCwUSpeZNPVp7Kc3R55v6pZVAB53o+OnZ1VUVGIqoNem06AKDl385t3
Slwy5jdr36o6ObdqXfpm9AVLBzdPLZd/N0jTu1cOvF857fx3PXsa9e6axymrW5jnhs0tGN9eG7Ux
fP6eV0vZxbV6m7h5c+b1/G9l5+j2/mubl6Lrwy78uuMeN8Xs4vV9XPbz7cuHfy5nh6uwpv7MdfDj
OL0/O3rCnXbdp9Dnnb5vbr544iejrklOnfjt8vHi5+jl9HH3std8fWdPBz9p7XPt0pq8z1PK1y9L
XNmu7g6OfWujdx91mG3XncyaegZ4Z2ZYsrnVxetu55y7/Q9DrjDVL0xOnn3l5unFa0b0ixQHP060
2cuzWZdHJ1HPsz5q6BCUQVz79WROTv8APs9CVLpy2c1nQSVZUEUAEqFa9ks05auk5c8ZZ0zHOWc/
QTHLl6qx0dGo1dOGk6dewad3PvOXrc50AQEokogoUiomjeJ8h7vwnk9nRn53Zl0e54TT7zP4v7T1
eWaOjDWc3ynrc+nqLOnICaN40cPbljXk8nfx8umPLtJ15buTn186tu8el5vo+Zz3z9Gzl7cer6Gd
Pbjz/P8A1CdPz3V9n8hx7enfE2Y1joyhu975lrP2fleF2Z17P0/B3+vxpWqCFixSRRCACFAJRKgU
RYIIIWUQC2rLURC1BYJiqosIsJMsRKskESiSwCyVFRUkogEoiiSkSwlgsoksoCLACLBBEogEogsg
J3cXbjXqat2nl3yuNqiCC2UqFWUAtgogUgBQAsApZYAoKlUCgad3w2Z5+7Hp49+rw8ct8mtluSLn
WupLdeyLrXLn0xudm8bsyzvGZ61yxz1WWa7cZ/f+d7fbn2eXPX1x5exQABYKlFlBC8WvI1enRQFQ
AKQF48cKnXnsIAAWBC69Wmpk7jHbLKAssRRKCXzqz5NfoJp349opKIWOWrtmhHWyVSAABCpRzY70
19OrcqwtSFjkQnXWQgutefTp9ayiAAFg1cs6zdbjLzNPfZRLUoMTi38fqCUQGIsS8xh0aeoAINGW
nqMwNGPQaenl6peXr5OlNHTo3iWKIa9c22aejTuMeWfPZ17/AIOjg5b9TTGOk4unj117cctOM8mF
nTr3zVeHPjxTv1ZToy6uHu5uPLi9nyPXcNkxmtYcujfXVqz1cb5Ps+N6vp5YZ619Gvk9PzM9HRz+
kTWb556stvOeYrr229uE5cpt056nDh2cmujZr7TdjMccp5/r+ReHuWZeXvpxz9TrPMz+9+a468by
PX8f1eff38HZn0cONno3e3i35m+beOYx7uHr1bs15MZ5at1mfB6nl8X2Pu/OfQc98WeG/wB/k14S
WdKJefo07bAWUSll5OjR02ef3aMa6eXqxitG8Agpz9Gk3as6cfZwd1jXsS69nN02SyxFigkoAJRj
o6NFb+TpwNPVy5WbxLjp6NNm1hmvJt2aE2bObpLy9WJlqx3mnbo2lAgAAAFhBWPxv2mjnv8ANPT5
9Pl9XZ3eP9nvHwf2vB5G+f6Hy9XT15/KafZ/P+Pf9P5fP+U68/uNPx+cfcbvjNms/YcnxfWfV8HV
mz8t1erj5teft4sc9+L3fnfp9Y4ssuZrZ6PB+h65fP8Ane542t9W2vR58dW+V+fa/ofn/N30+73+
7vl+Zz7HCa8r6nfn24xWrKAEoQCWIIWFIAABUQUlgCQAEqgS0KlgCAIhZFWWCWEFkgkUFhIAElWR
YRQlEBAQJFgAJQEspFglEIgAEBFlkCTu4e7G/V1bdPLvbLVSwAooCyiwVKLKLjYqULAUAAlAUlBY
KFYcPxOb7fjeVs5b7tO3X05zXVSkpC3DO5uuwYzLHO9uzn2c++1hqml1tcc8c+63j+q7Orpzx7ev
PfIAAAAlFgppNnmXvNPYpKCygQKsswTPz8crJ6FqhCUASoXhw22YXb1KssoFApAA8oaHr2a9WXQZ
UUAcyMkMd2O8CWpQAlAGjZprdovTFsFQCGvVMqz3kAPK6YdG0AAAHFv5Dd3YZS3l3cNnT042WyiK
HD2eZZ2dMS1KQEhY5dmqsunHKAHP0cperk6imk1dGGuzbjv5penVv5pemwARcTnXG531F5fP9KZv
znkfW+Hx6cu7K46cnNs19O3ZzbeTnMJZ13vy2Xz8/Oid+t7+Xq54mWvOc+D1fL9TnznL9rpx1+M2
bZ2556Ojl5vO9Hg7vXmQvfby7s+E5um6qg7GzXctWPU4rqs6hNt3m9+jjdPoac0xHebfJ9PzM+f3
LL4++jfqw6P0fwPDx49J43s+L6fJ6OeWm9ufX18nTpbjst7uHu4MYy38+fS9U282cbN3J2bm3Zxc
vLP1/d8T29cfZ6tWfXkZytzXslmjo5032FoFgx09HKmzm7+Cu7HXsjm6dcraIiiA5enn32Yc/Zw1
2iXDDdzWdIllBKIEWFsQSrObo1jU6eazfdG+UDn3tFm/n6C4aLsTa17I1NvNXRy9FDn6ACLAUhSA
goQ8r4L9R/P+PXy8sHLfXOXJc/Z8KW+z4wn1Hp/PeWn0PhdXMvNIQD6z18PJxPWw8/1V1cvoeh34
/PcH1rG/nvI+w+iud3n+z8dy9OzPT0evyhvncbjLNWeywQCoUhSAAAQSAiwSqSiKAgBCksEpIAVY
yEyiLjYEoWKCSWEpSIlgRYQUlEmUSAiwSiSrIBKEsIESiTKEWCWUAlEAAiBRAkWCFklE7uHuxr1d
W3Vy7rLQsCgAKKkoLC0AoBRFASkoALAsFIXwuL5rl1z5d2vl11WLjoy5ujpzss1AIFWSMptmbpwx
yx02Yb+jPThx6edJnfSusPqnt9eWnrN8agqUAAAiqHHGziz9KyZrKAsAFSg542cN31j2kAoqIpDm
rbyZDLdlsIsi2FqUtiKCWeJW3Hd6SYaLmZZ0Si2XnMcpsTFN60QoAJRFBeY178+at20hYKlLx7Bh
0augiwYZ+WX1MM4CgAEvEa+zV1pZdculp7qyVKAMTk28npWBKAlJDGtCZWb1koGrTbZuzJXL0aDp
5tsK09ZNeHTFClg5+jnTdzdGmt3P0cxu5+vi8no+E3+hyJsymvO+TXnO3Xq5unTzzyZYdO97bque
fLp7OZ07Mdmpi43VvE9Lzu/y5+15/kuDPbn9Hh7+vHLk6OTM5uzj7fQxWXrN2ndzk1btFB10sJtV
5s6SenVJV26dvGZac8KDrc/L9Py55/euM8XXHq4dHae1qYS4+N7Hja4+pqyw7993B2acXT1cveuX
Lulzz2Xe+/j7OLGMu3i69TGbcs8ez1eLr7c/Z05XU282/QOjm6Ro3jDLTuFlUBy9WKOe7jm6vN9K
xy9OldzHKEASnP0cydOvYXl6uHuGGcNG/l6bKJUoiiASxApzdOJcMNpzZzXZ1scpWvYNW3Rtpr26
ZMd80V0yxeXqx0psx3ajaxyIUgBAQCksHzn0eMv5bj63k+fsGWUlWBMoi9vHYbdX2PH15/Mjl07/
AFvmsuevo70c/O9XoeHvj6Hn5MO2foPpfP8AQ68/P+Y75vaV38sABzZ7eM7IpBQCWFgAShFgBAkK
JRFgBFVAICUkKTKFoiLCKqUgCASykoiwQRLCKqWAgQRLBLALICUJUEsQBASwCoABASxIAUgIsEWs
ZSTu4u7GvU1btPLupSxFBQCqAAsoAKAKRUoKSgmRcVJK5Df8fx+dx7W57+HfTl05YeVr93O5+ez9
bg6YTDP0cIFS5y6/S9Hg5de/m8LPPX1OLZ62sTo+l6enH4XP6rl59PE+m6NnXnRvFSgFEoBRFEjz
kyz29IsKsFsCyhQOIy573pr6IWiKhaCXHhTO3GsO6oUAWoKBYLjr8Uy7dnaNFwTLfSxUE0VcW5Jp
w7RKlAoAAANeGG6tPVq2wKSgmXIGW8yCytCac9HogAAAxNXNr9CzZTNvLu51y6scioKBx9Xnp175
VkollQFnN08tl6eXqCBjlzCzosol5ejm6q5enmJn0zGXX0adxRKBNNyTXu0dFTl2ZnFxc/mcevre
Z4Hqctbbp23fDt0dO9y4Z5nB28+5rGJvO3TtnnYYp3uWndjMTt5N3nz06McVzxymWjVt5+mMe3l6
oDOkxmmyXGmO3DrMWy8tabsuWjHpnW87om2nbdXNs07Zbgrtb5fqeY83uWZeLtzYbu7rOHZ9f43L
fleN7Pj9/P342ejvcsNmHL26rlNmvb0nFldjfZybsM41dPL1auvDPfjj5Ps9f0HXnzezhlvPPcem
ubdph02F5+jXUzVKAlhy9GnbZy5beKz0FS83Ro30gAIGjfz9CYc3ZwHcFmjo506EFSiUsCEBZSWG
jbeezo5evGOfq5NtbVkrn6MTKad1c+y4pNvN0F1baa89HTnXNt8DzuPf7G8Hd34CXNgAJZUUShq+
T+xR+VY/e/JcOnnupz1zZzO3TZtMPT877Tpy9u13x8x8n+p+fjf509jx+HXb6Pk5y+50/L5Y19Rz
eX3R9R6nz/sd8XM9HnQqKSAhSLCUJYqxYgpKIsIsIEALAQssAEKAgQAlKZS4qIsIAlqKhAgqEQCK
qWBKIRAIACAiyyVCxSSwSxCUQEoiwSyhCwEsSKEsAICCwCd3F2436undq5dgAoC2ItlAUCgFJQAA
yY2LYKFqUOb5eX2vjNPT5++jo2cnLp0bOLbvPud/n/QdOfzM2a/B2jIvn6PT8v0ctO3Z6Xo58v1P
znn8e3Z5n0HmXOXVwaNz0vV+e++3O3zXo758ffSCWUACyiwtEANeHGk7s9hKLFCwWygomPAXO9pK
RYCwUF04YVhsm8w6CJZQAChSwcmHImPp3oGvLSuPUIsso0kxnTY4ncBKBYoAAA5c8rNmidMtgVKD
E1XHA2dAAY+Xv3puyFIKQoHB0c1m/pWWWajXdXYZkloKYHLs0ehSWQBLCUGnGKz2AA5dsNfTq3DR
v5E3NvOuenLdZnz9HIdlM6WCg5ujn3po6NO6uXp5uo4ef0vO4dPnsNznvi2Sb1xdGMuplgM9O7Vh
JnlphkvCY5BN+jXvE2XDcx2458NjDLTz7NXXG7o07sUJUssy59/P7OWzLHlzruvnbOGu1zW3ow14
9Jljsaa9uuy3PVU3OdL1YaMst+WvPjcOzlkv2Gj5eTe3yvY5+3DVN11vRlm1ctO3XmLje9ZsuaXH
Pc5ezk7Fx3c958+70Pj/AHumfovT+e+h6Y59816zhLK6KS3m6edOgSgQtY8/TzJ0eZ6nObbx9hjo
6OazoCgQGOvfzJ0YZl4e3g7bMpUc3To21kiKhQASCoBq2jVt0bzXz9mg25c3bLo+cnyHl9n2Xvfm
n1nbz/Rad17ccebPYZtW0cHf5PLt8Jnt6PL6L938J9d6PP6MrvxiwELKqKIqIoliPluX7JJ8Ho+9
0Yfnm79C4rv439C5u3pzC2LDz/kPv0vhdnoLn819H7ljfg+t0N4C0CAIQUgAIsEoAiiUISkoiwBA
WLESwCksEsASUFFqSKgLBKoQqWEKgSKIBLACEoRAEsEsAItJjlESqhBLBLELABLBCkoiiLAREogA
IBLEdvD3Z362ndq5dkoAihZQlKlAWpQCiBalIAWDLX4PymdfVed8059N+Ovrxvq5dO7Nw7uHo3nm
z3clxv3cUt9bu+e9rx99nn93kSauc9XLLPUFgyuG0w9n1MGuzr+e+11d+UdfPZRFJFVAALBbC3k1
2TX6SqAAAsFBdWHIXqy3lRFSgEXEy48N9TXewmRKACAAVMFz8/HUj0W9UvIl6xaIGka3XThdgtRA
KAAAFwz4y7N3JW3cQAso5cszDdo6ShXPu81NvfjkoIAVEmXBWr0dPSBDl289Z9WGYEtQZce/jTq3
ipYSiQCXnouwylEXE0LLOmWS46ceqy82WJj18++XDDX02ZXGy5FlmGznTPHfzG/DZzVnunPLv+Q+
x8Ty9/hPbyz561abr7M9Ody1VjWzDn3szHbpt2YYk2Xm5c59LDzfS3M5hnGWWOXn6NO7RXA283XH
qZ45cdRRMcpZrmy+vny+f6nk2ZvSyTi7uTcuWUrpt07Ka5S4Zac3POZV0Md6c+Uylzz59fKdc5ss
OrHVt9GWOU5XXtxy5asrN0rr9Lsw3YzOmyd9OjXnhN2Gecdn0XyP1u89eW3VrM5+nl6Y6OXs5i7+
bpGGaXVt5+iwJUsGncrG83SnketwdVbNewadvN0oCgNeyGrbz9CYcfdwHexyHL1aa23DKFFiiLBL
LAEox1b9BuSnJzen4GOnxGvPv8/ee55upfsPT+Vvbj9I26O3DDovMdMysvxXD938R4foef73z3V0
x99l8B+mb5c7m6e/niywAsBSASwCosASKIACAAASwCgEsAAQFgQAAAguNEUYrKAhUgWBIsBKsAAQ
BBVsIASiKIokyGKiCosSAAgMVVAkWCAICkAlgJYlgICpJQlElhFlLKRYJYRSRYRYCklhFGPdxdud
etq26uPdZbEolAAUlAFqUWBQAqWLHMdHxn02HK/nU6NHPrF65rmnZzWavU4O+zLDnw1NmjOmCWyd
GhjfvePv4uHXSs7chUl7fcXwfquvTx66+fr+i6Z1b19PnCxZZVgKILIAazLz8e4w6yUsKCUAAHLN
aYd+WarBRCgs0Gzl2iM91CwCgJSSgOUy5bbMe3HolsvGTsmQC1NZNV6hx57Ey2EogsoSiUSgNZr3
arZh06t0sAKMM+Uz1Z7rMrEuSchq6+fuAUogS2Yy6uXT61lWQNNa5j1JkFWCmk07ufurJLmpYAiW
E59itXTz9AUTm6eZMs9XTSXRLr6scq41yuemXXnWjr1bbJSW2DLn26zo59+mXdy3yJff8zxfJ579
7j8vVz1vmvPjoa9tlwx6zdhr11js17SZ7dRrxz1pj0XyLzz9nwO9Nm6Z8OtsvLTm6ebTRx79HbHs
U4aEAJjk748jVtx7c9fTl0nRw+jZdOWyrrx6ZZyN0dOTJlMW7Ld5YbMWdLdra1edv7XLztXs8i4d
/i+xZo7dO7NxS+Pqoa9O3V2vZs0dNxrwznSJs13XTqdHPXg/d+H9JcevpY9M5aNzrzx03dWjo5N5
sEc++aa3iIKQNG/Tml870vMs9IS8+zPls6goAhrbOdOjDMvB3cnSmQXn6NdTNKAqUQJBQAho34RM
/M9Pmm/z67PM8vo9Hs+f9rWftvlfsvK68/Q3fN/S7xo2Y56zq9Dh6MddHz2jyfP9bktmuWPrcWzj
jH6X5f2+/l+my8z1PR5wQACAiiACksAEogRLAACFqLAshKqLAEABQIsASUIsFgSwiqgECLBLEEqg
iiSiAWVAlWUgBCoLAELASqkoixJKIoksqKSLCKIQLCTKWJYARRFhFEliARYAJZSURYJYJQlElJAT
t4+3OvV1btXHvC2JQBLKLBQALC0FSgAGn4f7f4jlNvJhjx9PPO36/efnfe9y9Mc/N6OvXL850e/4
OElk6ssQm3TnVzlm2d6+PXj1+5vj57r9Xxrj6T0eDV5vR06+v2vV5mUvs89ShUAqwUBq517Z4dzv
0uXy9Gb9Tu+a+hl2JevFYLYFlIuBeDHdZh31KoAVBWPIbNrQZdFygsUEBVCLEJzDUzrDonTA5yb8
doCjWk1ukvPcTPcS1KAQACwUDjvTZeZ0rRABdRq24arNnQSrBhwXtrbSAFgoleR0Y3PZuJpLBybN
dl6ccgAC8mzUbOmUCWkEpEQ0VLNuYohpy1ZWTpllnLv1m/R0cxr6ubrsnL18sdIUCg0dHL1nP5c+
X49Mh5evRxbMOuctd4t57cfJ3R6GGOWt54dOonLv1s55ui3KZVnRxenhJ5mv08bPH9Dfjms5l5+g
Zt4u3zNzRNO/tn10vn0gEDPLHvjm5+zb158m/Zbcc1S0oYGXPnlLw5b8OO8t2E646Euphr3xfL7t
uuPH0ehzRr9Dl5692S89Uvm2Bjx93n7dXd5np7zrmejec9vP0VjNmC4er836q+n9J8j9Z157lbzz
dGjec642dIlnP06LNwlgqAc3Rz2dGncjk6eTsscvVFObpAIBq2jHLRvMePu4k6PltnxPm9f6P6P5
P+i9vP7TDZvMAlEWIllAJYNG+Gr4D6b4Ln2btF5WN+g937n8p39M9X1vyfAv6pz+Px6z9hhz7I+c
8j2vI5fU59dwG7l2Tz6dPd62uDHy+Wz2PU+Mtz+qbPzb7Xry9SGsgACBYBUWCUkAAlgAFSglgBFg
CACrCAIACgIIgBUBFgliJRFgBKlAAALECiAlEURRjSosEUhEsCAAiwiyksRFJKIsEsAQsGOUqLAC
SkiwSwCoAsAiSqSiAgSFSdvF2536+rZr49woqJKqES2ZECggLkILAUlQ1/Ffc8mcfH+hu+i599PW
dsLKgGHxP3PJOf5zOnn59YGrs13n1u/Z6PHtzdmnhPouXV9Jvn8z89+k8+uXzv1/Te+VNZAWUqJb
jfn2dvi/N4cvT7DH6xr5Xl/RPO1w+Kx+t+H57hDZ9v8AC92O33Pr+T6/bjSdeVuNimgy4Z3Jj0ig
LKANeFNXRrg6oi2C2FoEoILp14pjjs2mvJ0FTWTLDoABqGqdY1Zc5l1SqJFBUolAADTs0JndnPbs
2ywAspOXLZWvbp6UCW8W7jTf2QtSlSgDHLyDX7mGZRKjUajfZksChHOYbNXYLKLLCwoDn6Oazdrw
6QqJhlzVW3CtxI5uvk6i8W/Qb90ppYdABFA5x048h81waN/h7wmNZas3fHn58uPTn7PkdOK9l35T
UrGzgyu3lvHpjrnIJAWMTDXjlnWvbjn59pWTxPa8Lrnl9HzvV6571nm2BJRr09nF68de6Zb5gFhU
pNOWo4fQ4tsnY5O6anFTHXnp3XRFXDNHNh14L5ezu1xMzju2OOqRXnej5m19nwfb6Z2c/RjcaMdn
D1x6OXNm3u93wMj73L576EDrzww3c5u5OznrZs5ugojRu5+ioCLCatxNefP0HJvz4K9CEcvVhjW2
WEUQGnbdCb8M9c1+b+V6Pnefu/Qfi/uunL0ujHDrjalJKIoixEKSwA4PzL9c/OOfTyRy3kxyMViA
e36nyXvL7fD1+lnfgcucd+HT1aNdsdezW46unRrcdfVydznw30cLOH3fC9zWfu0vfkAIBQACUQCU
kKQAgAAlEFCFigARBQCUCACWIBBUIAgCWEoRYBUoShKQKSiwIlgqCURRARYJZUWJFEZQkyhJRBSA
IiURYARYJRJVkAlhFiAJRFElCXECgICLESidvF2516urbp5drYMrhVyRCWWWyqAlEUVBklgAoiiW
wWCgFEsXw/jPtfhueblz9XL0vRz1cu3o/Q/F7LNnmT6vtx3fQWduQIBUoBUoBp+A9H5fFiufXL0v
L3av3Xr/AJ77OnR8P6mrlOHd3bOfXy/qcfsO3PMvbjFiDhN3NfQGcspRKAwM+WDbp+Y9TO/a3GsA
KQsGTGrlELxa91jK7Ix13pBiY6p1AFTUXVOsY3jL2yqEAALBQAQ56nVrxNXVq3IEoF1bOU26c9tZ
iJMuCzT6OjrAlUKAajm28frAhUF5riTrwzAIsHFu1WZ9JKQWwW45QC67q3WaejXsgQcueVa+jl6k
cvVyV00l5sub0LKuiWb9ewKIsHN08qdXzf0nwvPXFv17PF6JEicnX5foxzb9npdOWO+ZzdJWSUxl
ElEsoIWBw7dW7G7Tz6A1+F7ngdsa/Z8b29Tqlnn2BKhnx9nJ68duWOW+awWAOYy5M92Zoy5euXT0
8mbrln5vozl3ZYNsoVUogNG/CXXeToxvKy+bVSyzi7ufU8r3fnvc7Ttxykzo5evi647ceffq547d
K7u7ztub9f3/AAns7z9Fz7de8btG4aOjm21nSNGezmreBFSA0bsIbPN9LRW5xdpebo1G1hmQEA07
gxw7c38v5u3z+Pf2PufzP6Ppy+s8r1fmdPodvx/16VGs2BAkFJQA+e+hxPySe/4HDsGWUma47tG8
0eh5/YT7j4TqOzVv6s58/Vs5HbPDdjr06eTrXy7vrPR6+/k/Kt32XxHLtn39fux5P1f5phdfqU/O
+jpj7wb5JRLKQUBFECCAAEWAAEWUAAWQFCAICiFgiKQElVjQlCFIEgEsAAACwpCwBVgQgsQqUgEo
gqMoYqSBUpMVxApFRjlFgRKIBMsQCCyAixAIsABFsBjlEhaSwQAEsR28XbjXqaturn2AoAEoWUJS
pVWCgAtxsVKLKAFgURRy/nX6R+bc3NF5dp18vfNdOHm9Vaer6f6Drz1dRvJYkWFlpiolBZRKPmfi
v1b80znjty5+i6qJ28OeN9jm9Sa1et73rb569lduMWEnm6mt+3PquAVSLZSV4cvscnm77Jz+r5qY
e/dGOnoauPusxpcgCkrSbOTZiZbVWYN6UE0YdRQDWNLrEvGTtmQEqykUQAFSgGGnDrscs6jJLKAN
Jr6MdNXpsgYmrmndZmJVBZQB4/T0m4AENBDdWxLEoJeYxyw6wAABYLLzmc3aDphKl12c+3HGzZtJ
dF1dVl0b/Pjb16ti3l6uVOpSyggabr6LNX599b8p5uuxZ5uslGvLVt746cK3lZagLLAQEABCy4HJ
u0buXTNLxoHP4Ht+H3w+h+f+iTInDYEWVnxdfJ68d2Uy1zAscx06dUjn6N3LnOvh9LQ64XTrXq6O
fKzbv1bXOdfF2VmLYBjljLw9fJ2Z3hTy6WWGvNXznseV2ejPuyzMw07buad2zLrjHm7NK6M9ec1l
u5ss3s+o+N26z9pv+c+h3nVhnjrPRSVy9OuzOY5rFiJYNHRzHQDzvQ5s7N8ll5ujRvqKJQSjR859
J8Dy7edhLls03Uff9X51+jdMeP8AVfNe7c9DyOnV7iMgkVUWACWGn83/AE7y8385vq6/NfOw9TnX
k256NdMOvR02ey+j7O3H839nR5fHt9n8n6vnLzdfDjnWX2vw/wBP14/U56tvbm5+iAHxXm/o7Ovg
/d99cRZamUAEsoACBEAAACLAACUAIogAEsqURKIokoxWUAlEoQECRYAQpFEBYCWFQUBBZYAJYACF
BJRKlJYgglhFlAiWCUQCWCCJRBSAlEUSUkKQAgIWIFlJRAiA7eHtxr1dW7Tz7AAFAFSgCykooBQB
bhTK4oySlCgHlcWdet+bex5PJq36e2dNn1HqdfXno31rFuNW2WUCUCwirEoAGo8j4Ls48W4spvFL
LjM5Lj18tt/Ue384/Remcg5vnvZ+Jxrm+35/YbizfJQAETwufr4U2PjY6fo3qfNfUS4i48/d1edm
+pjhmtlXMs4jPLLTTrywiYTdNZXlnPp1c2zZ05WlgwGqdJTlJ047AFqUCKlIsApYi8u3Gzbq3cxs
2irEVKY8+PVZp26ukCWeft1WdHQSgUFgXTt89NnbKqURRNBU6cM4WCpSceWNmzoJQAEoWBouZM9H
SVLDl6eet3PltMsM+Qy6RJ5u7Oumyy6NmjrRZZZQhhWjp05J8Zy6t3h9AmNY7dePSc/Xz9XfFsqE
oKIUII1RtaNNdunQN1yxjmy2Y897C8NLB5vk+j53p57Pf8T3OdDlpKJZNTZq2Y+vntsyuaCaN+Bx
8m+ZmU467Y56chu09TPNu1ju2at8z0Z43UsWoBjlicXXozm8h5NhkWHi3dy+nP0efP0ZmvHdo3Ot
L35MM9Euplqxvfp6dNTPlzTp9jx98fbas8OuZ1cXz3Pr9Vo/OfVmvsN+nf288ACzXsJq2c/RWHJ2
8h1tG8mjo0G5RFgA+G+4+Pzr5i5Yct6rjlLh9D4GC/Sdvyuy67+Hs4bj9P8Anvj+XU+u+j/Lvfs+
/qdOQAAExyHk/KfoOvE+Dv0fm8Xled9b1anyHT73rdHbTd4vi/0Dx8b+C6tWnj2y1+x9r05fnX3n
pXpylNUQLAAQoAIAQChEAASwWAAACLAUiiAAgEqoAsEpMVhKhQIpCBZSWIBFEWCUAJRjQigAQsoi
wAkyEUSURYQWJYQCWCVUCJRAQEWAECJRAJRBSWEqJYEWEWUWRFVASUkA7eLtxv1dO7Vy7RZYsoAA
oALBQoJYFCgiyqsFsRYH57s2dPm3p8/6/wBPT84+87d3WC2ALKUALRCgAKJcTV8Nh42SZM9Ft1nC
pTC489VJLu/Tfy/73rfouPR8tOeHr+J99nptV044rCUANfibuTLk+b3YZvPU3nL9E/Oel2/U5zdT
CUz5fovFmve1YY2auljZh2MIaKOH4Tk5uPbb6fjXPT9J9z8w/TrMjDv5mh1ErSYbJuKAAFqUAAAS
8qXqmo19OvaQKA0bdKbdGew2WJbq2eeY+hq32BLUFQVOQ0ejydtiyygMcucmOeyzYllFEvMY5TpB
SLBQQBiaOjTLHRLKsGnZq3HP1c/QTly3JlLivlet5nqWReaMevXmZWFohzdPNW/yPY+OxfL2R4vR
YQ0dHN2m7Zjn15l0my6d5ZcStWFb8b5yZ6evnMefu2mJ0LccpGiTLjvIvLSzE8bi6uX1cuz2PL9X
jqLOeksGWF3M8sHr5ZZYZJkUi0w17cY8rm9jxZrKQ1s6OLocc+nm519Pmwkndzzdb1buXo1asEsN
PP14Z3Zlh59ZDFWI4fM9vwfRn6Ds830ourbhrOzPTu787o34WTRl0y4ybTzcOvzc30t+nbm/Tdvg
/Rdc+b+ZfrX5j5/Txezr6M7+26fC9r1+TeLAEsOffNdmzn6Yc3Tx7k2zIurbo3ksAg8j16fmPP8A
Q+Lw68GO3HNuCVt7PP3K6vN3pr19O44enGWfqezz/R78QIqosAEogCwSiLESl+A839P8zGvhvUuv
OvrO/wA30u3EFgAAJQAAgAqASxCiAAlBLAAACUAIsAEsApLAsIsQCLAAlIKRTGiRYAAShALAlEWC
WBQAAIsogElglVLKSWIlEBFgFQIlggARRAQCUkBBSWJFgBCACZSgEsIsQB2cXbjXq6turl3CxYKA
sBSFIUBVlAQFUABQIA8f09yQLbYLYW3GwBQUCwW6ts0CAX5fu+EmeXPLLHXBZYl1y5zDKauX2Xsb
5fmF9S43wd3Rq5d8tOX2FdvoR6PNUtkAlCPPTi+U7fFzGJZu58t2emhjlrPZ+lflv6drfUTOb5uX
QnJu8vy8d/ruhr3xy8b5Hx8dv074rzcOfTGd+PO8O/s7k0fpGjZ6eDRj2awNZMcOoFAAUUhSLCpQ
YmrPX0E53SllSxRF5rMenHSZdEqjRGq4dtlEoAAGHmPUrKywsoBNDIw6cMwBYJohHVKsoACCwOfo
5k6efdrXcBjlzGc386dJrXR1ato4e3zLO7aS3k6eezopFsq1ETRdtYfn31/x/n6boebqQOdu9ON0
vFrPVyaM668fM3ku7dZw+g3xjy9g0bcNZ0JguzVo3mUx0RN2Gzz9FjFYZ6bPE054ern6XpcPd592
WYoGOG2anN0c+/2Y2Z4ZsZJQc6Zed0a8y44aYTRm0y17LGOOTefZzdLls7eTqusqtqAlGvDKS56t
uvjtZeOliJ879H4PadfufO/QazccsB08nX35zl6+bWOH1PK6Zd3V5/fc6+Xu056cHfzdGL0fafEf
U6bvzf7n4Hz+r0cvM9HU+q9DLk9Xm9G/GfaRBrIg59+lN0o5c1s2te1dbZzm+WEUFHB8H+leBh8b
p63C+dj28d6MM9dvRo3aT0vV+m9Lvw+F8j9I+B4+j6H6X4n7btxC5AEoAABLAQBAEKSiKIsEyhFE
UQAACWACWUACRRKLFJARYFgAAlEURYJRBQCWCUkBKCWAEFJRJSJQliggEWAAgWABRKgIACFlhYEF
QIlEWEBFgBJViWACWACWCWAJARZSUQiShFGNBKEBChELB2cfbjfqatmrl1qWllAKFAAAWCgAKAAK
IAFABQACgWUWJbcaVKLLAFxvz54nmzXzuOMLjdmq2yqw7NO4/RfQ+G9nl39r5jL3+vn+b8b9F141
4n0J0yFioWUQE1bSfnHB+ofn94ecM9WWNXbo6Poc9b9jn4u8ep5/lelHNh6nnWa/ezzzqfnHtfDw
HPpdmvZN9nb0epz6eT9h359+DnvV05RYYaHUAAUKKACxFEA5ct1mWu+Bz6e/s5ti7Ub50hjpnRZp
2aekBZwZ5pu2xLQAAOLfxWdXSssULKMcuUzXMzAAmXMZYugoAEogANWenpObo07gBzZbUk19C3m3
6jcsMOWdtgRp2aeiliWoKDn6OfefH+Vq6vF2Y5Tntrzx3NO/n6vRjRluzs8rp7Ymvbqpm4t5tvBg
d+fndhx6+zbXmvURy9Nhp5+nRNbcl8vQIc3Twanjw9XL2uvRu8vQM0Al3OTs4u714uWOTCwTzvRr
Ojn7+GObn9Pljixm9rLTlEtmtezRei42dvBtO3bo3XVli2WGvHLnt34as/PvOy8dEDyPX8/c4/f+
d97pOvXnJNXVx9XbGzHLHpjl4+vRLPS8vtrrmvYmGjq5ee+j2fF3Y1t+K93xb0y0bNdz1/ov5d7W
89n3fz3r2dpN87EGOQ17NO8nP089Ojk7M2c/J2Wbbo3kWAlMcofGT7VnP55y/ofx2NfNbfsPEXjy
+8+T0976D8x/SNN3yn1fHL8P+h/k36NNemrfKLBKqLBYBSAgEogRKqKIAACLABKEoiwAgAqUIoiw
AAgRKIsCwsAogAIBKIKSiLBKSASiLBKIspKEqIsoQBEACVAoAASwAkyhAAJRCUCJRAJYCAJFhBRK
RYAJYJYWCICWUIJSQpCBBZYCUWAADs4+zGvT1btPLqpQFAsFSqAAsFBUsAAWUSlBCgKSygCgAoBK
LUpCwVh8zL6/wvL7nLrwaP0/xenl+DdPPOszwSmz1WvI6O/jzOjr5N+OnqfUfL/V9chvmsFsFAWE
WEKPP71z+Yc/3fw7lgvs477fqd/Ljp0eb7HBrPb4/N7VnL7TDU2eBxfIHX5NnHoEXbq3zp9H978T
93q6Oa9fXzSuOa6+bk9QKuYsAFlFhalAigmvZy2Z8fpfmcunztLj29f9E/KP05r2B387n26028+z
MzWK0beJJrw/Mpr6Ls+L2Y6/sG789+/1nYLzTLzzR6uvdQQAsphrxysx6cM5bAAx1SjogsCgLISq
YZcxnnloTbnSjA09OqmG7HI5unm6hHKc3pcvWkmXOToFBKRbrz503eZ63xudeJ2aOjxd8ZZm3I74
4+zn3dM68/Pzs79XNuWbM9icTvhq2UIACWCSrzXXvzpZfNsmUY+Z6fj7zwGXp5+9sl8nQslSkmWO
W3D3+b6Prxkxzma052bEpr4O3CZ5OH1eRNO7fzpq6NfrW+Pju54vfx1PWy4u1tmtQKUYcvXgvPt5
urlsl8+gHL1a68D2/D9Xvn2JZmac8L0nXlhO3Lh0ehhnfB1a7Z17+LquM+Xq0zWeWrZx6cPgez5F
1jmtalh7X2v5z9jp9S8r1evEEA0bprN2raOfVzerL06sPGzv0bPNs2eppw1npFkURQ4u0kWK/Nv0
ny5fzLt+r5MX1Pe+f+g6Z/NPM7OHj2+s+o/LN+s/rE4+zryKiLAoijFZUUQCURRAJQlJBQAEoRRA
JYARRFEKQAAEWUBAiUQoBFglEWAEWFgBUCEACUQCUShAJZSWIBFgUSgCpUJZSUkAlhBQQIBUCCAE
WEWIFJZCFCkBARYJRAgElgFQJATHLEgsZ45SplCAQAJ28fbnXp6d2rl1lKFBCgAWUqFoBSKgCpQC
gAKIoAKJQFIsKColqUc3T8gzyeHu6uHfT+jcPq9ZZWufB8V+iaXL8uns+fjfP9Vw+lnvl4mhx7Z/
U8H1vbmzl6cSiKIsKlBYiqxWEs+fPovg/rN+c/C/oPn9+erh9JXg33W+fP0Us8f2fnWPN+V6eadM
NW/XjeGWPtYui/R/Nzr2fffn32916ri+Q3x+l+L1fU8um/6HHLv54q5EAAKgoKlWpjGrdl4WdcHw
P3fxfLpo7ej0s7879E8H6nrz2HN1449TQZbhVnMmG3V2HwXyXveDz1evn9HHXR9hx/c9M8/Q1756
9ePclLKAlgwz0GevLYZte3G4s3hhnzplhd60AFsAAE589hdGWZlULzdHOdGjfoOjRv5zblRPJ9Tz
k9MK5ujSm9CrCFiubdhZPzf635TzdevJPN1i41MteHoxsx357xhnSwJSCXAySrKiAILydOjpl4t+
ExrbccuGhjDxPY8Trjn26ujtn3Evk2BKlJdXRNnm9fonTdHQ5spnYsIti67siMcsJcdGWVl17Kuv
V3E8vH1sEbdWxRFyatEvXhnjXn93B3Y3Ivm1KQlHz3VhPRn6Nr2ZmOno59zq26ejtz4+3Dl5b1a8
tes49/nel0zuw2Dm3advHpo8D6f56a04Za9W456zo+p+f+ls9L0fn/kd5+59/wDH/vtZ+iFy079J
t07uKXX6WGBr4+rVLj6XJ6FmnyfQ4D1FWJRCkWEJRRFgB8fzfcpPn/W6VhSxSwAAEWCVZAJRAAAR
RJRKACURRJZYBFEURYAJRACkUSUSVUWIAAgAACAEWUliARRFEAAlgBJRBUWIAAKsVEoCCUQliWFl
EBFEBBZAJRAAQECShCkCwWRYJRCJYFgQEVUBJlExmQwKMpVikgIsEonbxd2d+lr2auXVSwUiiKIo
lBYKFAqWLFABQACoKCpQAolBKAgFoGORPA9HusBasosGjz/YM6/z32vA4d30W36O6mR24AW42XK4
UzYZFEAAJlqrmw7cmYtalAUiiUJ8r9V8Qx89hN2d6McjXP0c+XLp9T4c1Z7dft/M1vpwy7Wfr/a5
Ovv5QuKgpABZQFBLEXT845877rxTHXz+fdOO9/r+t6XXlhlZ1468MOqzXlj4s17/AA/AePN/ovkf
L8eNff8Aqfltr0OV08tdnW92X2e09PmnBnlc7t0s0sFSl1bfi+Pf6y/m/Py7fX+Bx+XrPX6vzOR9
59d+e/oXblrmrPfGdCkWAogLKDA1dGrac/Rp2llho3aOhNO3TvXDHHelC8O/k9ELDRu0b7AgUBeb
fotnx3n8/f4fRZZz1Kac+7R1ennnMSbEUQljBbjhmZOfbGxLYlDG6lx1auedOrLl6sXdlhnwXGw0
+L7Hl9sXo9CazkPPsIY5Y23zO/zfRbs39HSed6enCZ7MsMtcskyAIay4a9XPczw151vxy1x17uTd
1xuuLWaBUJp3YGOeravnd+nLOsh5d0QB5fJ6PlejP0PX5/oYTXsx1m9HL0ejnv0ZYZ15mrp5Genf
hlXVt5enWdNvPz6dPkevx89eFhlhq1hnWXVzYL2ar7esfM+94VX9jcPb15XTuwlPmvB4a/SPJ8f0
us9fk5foa8j0fS+b579Lx/oPg86+xx5urvxgsKIoxWCUQVAEoCApYAQAAgCEVZYAARRACklgUQAE
FgAhZRARRFgWAABAlglVFiCFiiWAAhYCURZSURYAiUQCWCUICVUCRRCkoosQCUQgJZZYAQAEBFlg
EWAEIFgBAiKQVAJRJYJYiWFlgFBEUQEUSVUWBRJlCLBMoOzj7Ma9LXt1c+oWALEUKFWVAUCALLKB
QLKALBQAKCyqEALKAAKABRFEoDUbfA8Pwue+36zyPtp0o68ABQIBVBcaW4jK4jK4ipSxYAUBSLTD
477PnuPym+p5UuWns2c+vldGe3HTXj3edOm36r4z2dvuc9nB18l9rGrZSRRCkoAAAT5v6D4zOs9m
rZnplcS5WSPoPS+V+o1Mubbjvlt+c7vyzN9jxoxtv1IkEWFWD7r7D8X/AELevp+fbx74ZduGcqyg
AGv5r6j5zN+Z4dPDx772r2Tj3+jt59Pa+i4uj1+Xdg3XNsoAlEWCwXRu1G7Vv0RtJnd1dHHvGzO6
Eu5pJ0Y5rNPVqzeXr17NSY5cyZblIoBRpJ5nsfIZvh9nN0+L0EZNezm6TLr58++KmSZXFVjCLhoc
9sdOzN27M9XTG+zLcAx0ZcU16PNydjXD2c/Xi5WXgso5fP6p3592OfPW8ebYElla+D0/P9D0PL6u
Xqld+dzpw03h2NG25zkqMLzTWWrPHlqcno4dGnLO4urq5s8u7HDPvyoLKJhlhLz6tupvm6NGOevq
XDPhgMFg0+F9H4Pael63i+zIxrU19nB0d8btGyM8nLusm1s5jt3ad+2HH3cWdb8sM+W/Kw7cOWPM
4vb5+muXHDf016H6N839R24fBfL/AKj+X46fZ/Y/lP6nqZwudHyn2cy/Pu32/A4vsvd/Ovtr1zmH
zLp5/wBPOrr5sMnLb1SrAIBLBCooiwLABQSiSwBAUCLAEiqioIoAAlIsAIogASALAKSiKIABKIsA
IsIspKSLAABKIBKIogIAKixAIokogIogEAKlWJULFIBAAgpKSASwAgILAIACKJKEoxCJYBUWEURY
SUQqSWmNUxUSZYiBZQBFhZYJRC1JkidnJ2Z16Orbq59SkillEBVlACiKACiAAoKlAFBZQAFogCgA
FCUAqUAA5/gf0L4LnPN1et9Tjvu9Q7cwuSyKAAFoAFAAAEAqDNgXYwpmwsZsaeP+efrX5/eXjcXp
YcPT570MV4+nLmO3r8nfL9d9b+UfqfVsZTXGVCpQAAAADn+K+v8ADjj2a8uffNjlGexZXocHVXrd
LT14fNfAfs/x0z8Tn+i/PZ383gZ2CAAPrfk/udX6TZq7unIqUAAfMS9/zF8udvGjPGOr0+b6edun
s9qdPPqy0/Gr9f2+J7dgXNIVKJRAa+zi65ejy/J+T8vp28evPGvoPqfzz9K9HG8+2duOfPtibfA9
j8t59e/2PlN/n9P2/v8Axv2fp8+OjbhvjtCgAOffoTb+b/a/DcOnXkvm7QBo19ptc+/eeirrMA4e
3hxZr6ZNc2zZ1HLs59tnVdF1NuvXpavJ63mNN+npjHdMuMtMUDye7h9X08nLvwxrYs4aCpMoY83V
juaeToy73l9Pj26nJiTplnv0M7sc+dLJ6NzhzamvN25cPTne1hra156Mrjp6vL3r6Dkzm97Rgb9E
0taYxnb1eDD1GOHt8v1ORF4yLC+f3zbh9Pze/U6JVzz7dOrrPQY798/H1+jjJyVnZs7vP6tZ6ePr
0zpNmrZz3b0fYOfw3B+j46z+U6v0b5a78r7b896HT9M/Of0L5/fP4n9H/N/bxv8ASky6c1lVq2pO
HxvppL8n9B2LLCnnejore17IEBBCgAIoAlABLBKIoiwBIolBAAAlSqIiiCkogEogAAIogQVYAKCI
qoqJLjYBFlJRFJAJYAAAQAAEAlliUQCWBRJYAACFAgAARYIWRRAJQlglEAIBZAACAEmUIoxUkAlg
llARRFEVGKqxmWKShQJQlEUsUQiWQXr4uzOvU1btXLrBQAoAAUAKAApFgWBYKACgBQLBRKAsoABQ
ALBQKDg70nhe6KAAoAWKoAFSihFECAFgBKACwWzzjt+B8rp59dHZ+jbNc/gMvvifnfifr/yMnyPL
Zz77P1L80/T9a9Ab4gRVRQlEUQI4+zw08/VyeFN/R2ZcuuWzXvzbnNaz2fF+ssZfJ/VdOWw8rWX5
X28ONQZoAAG39P8Ag/0vbbsNYAAAx/NP0v42XgZzp24d+5Ezhr7Tf8f9hjnfhPsPPxPd2HTmAqAC
wXXs5jdyfNeJx7fT/O8/n8Onb1+F9Rt2fZcvT6PPq26d2s6d01Z1p/J/t/l+PS79uXHv6n2fz/v+
vzaN/P0b5AoAGtjsT5Lw27yd81nLbXnxba+3DDsw2aN8nXebbrG3HDQbN3Drk27EMuXfy6y7OLsT
n19WnPo13YW3VsTPZhljOWUvK2LLcMsE8j08XbOG3yPazcocqFghFxrHT6PL2nBevG1zb9tmHN1Z
2a2eVnNs3YS8uPZr3x5vR8/vueXTYno6NW9rk2as2enl2dGfTwYuudeXo54ex4/dypr9LlzTR6On
bylHIEDHU59909s+kjLn4+3n3ejt5t/Xlcdks5Md+Ca+vm3XO/DOTXLu058+mz7H436Gz2R1zPL9
WH5Z+j9kZeX6i38iw+n+T5dPd9v5763ePoRpFgliAARYACFiUAAAAsoEQUBFEWACCAAsokWEVQQs
pARVQEAAQWAAAAAlEAWApJYQWRRARYBUUkWCWAAAAEWEWCWUWIlCWAEAlEUQEFCxFEWCURRJRBZJ
lACShKIBKIBLLEoSiShLBKJMoRSSZFxWIBFgAlEmQxZQiwAsiiBBISrLB2cXbnXq6tmvj2CwAAUl
CUBQBZQlABSLCgALBQlABSUUlAAUAAqUAFAgBYKAAAVQFgtgoAAAAEpIoAA1/m36F8hM/L/cfPfp
M65jWQHx+fzPLpzdPT7/AC7dP0uN9PmtxJmwpkxpbjZbFIokylT576L5eY+Q5OjWu/3/AJXZnt9Z
u830OW9XDyZ437WfPzV7v0PJ1+jzT81/RvzRnx92yWadfVhLzNmvHQIbcKvr/p/wf321GuYAA0E+
W5fN3vrurY63KWMdHRx8N9f3/wCcfQc3F9h8f9v15SnTiAAKRRPkfqPgo+X38zn09HgxS5fq35T+
m7nv6b42s+3q38lzfzfr8Ph379vmbc6+i9Px/v7jTsz0ejjnsAAAYmvg9D5PLwO7m6vF6ESXDk6e
btNujV2dNdHNridF4unLV2ed02cuzDLXn7fP7OSts2Yy4dU2zXNzd2eemibMYy269mWms8lMaVC6
tmlPW9Pzevh3+M9XzfS9XnDmEosJMpV13DpMrg1NjBWWOSLlhjZnZjLjc0ameOpjhtus4W56nDv6
Lc6sd0z08/uuDfLl1apdevpi69ue6Zg4KJSIuGapo2PXy7WGfHWvVv06aOjRj0vqzXs3x5l0899O
XPt1npjHecNeWHPpl6fm3lfvWGfq5xYRRFHLy+oucaKEqVZAAQAAglEFJRKpBBLSkQEFAARYAAgE
AAlgAWFIACCFAJSRRACrAFgsACUJQBJZUCAQCURRABZFglEWBYJRJRFEWCWWAIABAAgCAVSxFEUS
ZQhAACCxLACLACLCLACLEAlCLKiiKiKIoxmQxZSoCESoKhUREsoBJUgEsoBLB2cnXjXqa9mrl1pK
oFlABSKAAFAIqUlFAKSUAAFFAAFAAKAUgFAAUCFgoAAAFFFAAFlBRKIsFgKIEUWKSKNewClnk+l+
e4vH0PovP6tX1rL0+aK1kIAEq3EZsUZsaUK+S+txc/yR9z8OmOvZnnr3d2rn5dcOrZljpPsMfT7c
A3j5r5X6r4tjfq1ZGq9XNbhq3SOSzo5ejXjt9hr6j6XDPryBkAB8v9R8zZ8RnlyXXo9nhdrr6mfP
0NTV35cOvjYbenz36r2ZfZ4gsAAoB56fIfJ/ZfJxzMscdAju/TPzn6a65vU8T35ff8r0vG68Pz/E
x1269mGddX6X+ZfqOp0aNjfLMCwANO7SmX5v9j81x6a85fL2ksNfL38fSY5c16279macvZWZt14a
kz4vTzy4sunEwyOk1beXfi4TZjm6t+vOXfhhc3G2QspDWbOayvb6vJ08u3D0+du78e5y55m9qhtu
vIyjGMoWY4540q7irzuOvcrXlQpACVWOOcsuWGfp56de9w3lji0s1bDOYM1082zUZY3jSxShKNG/
g5vRn2ejh9HnMNPVprlyOt6c/O67lz7sYmGeuPUmjf2582vp5s3p1bJx39R63yv1XfEVqRYRYAQW
BKJYIUAhYCUQEoAIULEUEAlWAAAlAEUQEWIAsAACURYJZUoJYAARQASgCWAoBFglhBYQAQAACWAW
AShFggJQlgAlgFkUQEUSURYJRCCgoFKJCWWQEWAEWIJQEAABJRKEURRFGK0wuQkyS4qskBERLKgI
ESqgECLBBEoASiASwdnH2Z16mrdq5dosQKqZRLBUoAsFlAKUCkAUAgKKRQKRRKAACygAFAAsQoAL
KSgAAsFC1KACkoKABRCkUYqQAApUsLOX5HOs/Hx+n4d9v0h6PPUWVBRAAVFJCgLUGVwsZeH7S4/K
en7DwMOLKY+b2dv23wv2nbHoDpxYZfALh5PD0M5Y5N8p0aJLu4ers5d/H7p3zp532nw36Lt7yy8l
CKIonzH1HzTHxWjYs047sG/S7vn92fT7mzzNHHv3ex4P3uufoK6+OLBRQALq2E/N/I+v+W1y5efr
wz05dmvu5dtfrc/Fjp+g9nwPqdcfc/BfX/meuWvbp6cdNWerdnW39L/Nv0bTp6Ofo6cUogAJqmxP
kuSa/L2g49JMsC4Zc250XCameJGvLZawwyzxZkZsWDLHLU0bLJRMqxztxnTuTgx9rfp4ez6Hor5r
q9jZb5u7Zi3u+S+o+Rt83d1epn1/Nzqx6efXj9F4vPrzZa/X3z81v4mOm7sM7l4995b8ufQejOLO
O2+bWfTnn4x6l83NO6cZO1y5xvmmJumNsysRSxruUqogqyVZYoAA5PM9fx+z1fU8T3M5io59W7Pp
OLbdHVr69G43acdkxt6eLs3lydfGu/Ph6+W9v3Hw31Os+kOmZLKShKJLAEEqxSAAgAIoiwSiKoWE
sEsIKBAUQqCpQAqMVVFEWAEUkWCWAlAJRFgACRRFCWFC0kVFFExyGKxIKiiLCUIACKJZSAgsgBCw
AIsAQFSywBLBKICTISqJRGUIBAQCwgIokqyLABKItMVRGRcWQxZDG0QhYEIiSVZFiWAEiIBQQUik
BAIIAlhFVAOzj7Ma9fVs18e8okZDFkrFkMVEUS0SgKSgKRRFAEoCkUFQKRRJlCUoAoCAAFiqlgBZ
QChSiKIBZQCwLYKAWAAAqKSKAAHmen+eSeVp0zj6Oj9H/Mfod6/Qx086wWKABAApBYAAAxy+cTy/
L8b6bj24fL9zy5OT9F9D0OucmOqz4z5Dp5MbvRo+/X0flf0THp5/yXH6z5y459XqePw9fZw4/UV0
/c457wAAAA1bSfF/Lfrnha4/nuTrz05r1avP9DLHFOvq/ofler38Qa5AAAAAavjvtl5/kmn6byk8
Xfn28fRw5/c+3vH55n4Prr6ny/Vxme6acdJv5emPT+w+O9i6+0z+S9zePRlmuYCXz7OXR8r7ONee
5evx97FzZjliNW7LUmvLXTK3KVJQlS5GM6eivMvt9Gp4G33eq357s7NjXH16vOdfdx5fFm/o+Lm8
+b9Lp8bfnrp7vF9iduPd5/Vnp1/O+n4Pp+T3dHlevj0+dtvD0x73m7tfL0c3ufO9W+Xd4/Xw6x9N
o4Lw9fne/wDP+t28nb8763lTftbufZx9fhd/F7Ho8E8b6HwsdcvRw9HPX53Dr19/Bnp9ji5+ri6M
femfnp2cm+Gc9nyefXDLV6escV16tcurK8ud9U4+lz33iwZ9KcVl7r5uVz6M4ZJ0+R2cnTO/2vC9
bM7d/L0YcuG/DpnHDbq6TPLT1azw7NmFzo6+bos6eXfzt7denpmt/s+H14n2Y7YiykCKJKSLFCwC
ALCUJQgAAAAEokoxVUCAJQCoJQosEoBFpiokqoCBICUpASwAALACUQFqCiAEsqSkiwSqiwiiKIAC
AQEqyAiwAiiAASwSrIBKIsAEoAqJbLAEkylRRAQEWCUJRFEUS0RlJYohTG0YrilkxrLBLLEsAShA
RDFYiqRYJZQglECFikCWIllAjs4+zG/W156+PapQCpQCpQAUiiKIoSiUBSKBSLTGgAABUoAAUARR
FEUACgCgKoBRAACkoChKKQAAURQBKCUTg9CXPwHz/wCvfNZz8Bs69OO31P2nyP13ShcABAFAAlEF
gADVtHk+sJhnRRNCHhfnH2/w2L9B+j+b6W1T5VO343x2Oiz6GTo/QJlvFSqAKRQlEUT4n0fjcdPo
L6n0esfPfDfrH5Xy7avqvK/QnTOnbyRRFgKRYAACmPzv0kY+M+t3VZ8z9L+Steb7vhfe4vzPm+vb
nz9nVzcu2/u+eNfWeJzfRbx5P3HmetZ7MN4QRz9CvzDH6b5bhr0c68nolxyJMsVTZsNGPdu08vL2
eqvnt3ua2vP6fW8Z16d+PkN+/wAnPxTp6uHmbs9NfoeL6c353R5nozo03Ueh5Hsarm806pvl9Dl1
zfVo6PKT1dXFWvH6/O7vb8Hb7XznZw+j3+J08Vn0uXlOHt4fQ8X3O/h7PE9zyOfq1ery+lHzuPqc
/bydfn+1o4+3w/R0fQb8vzD3fE3yy5vo8cd/C38/rb8/kY/U/L2bsebfrne7zOjPXbr7/Nzv2ODm
2zo9fwdxs04OnH3fOaeXq5/ovnfSvPR5/fw9PN9BydGHn+l4fq+X9B28PB5v0fzdxfp/nf0Hp4vl
eT7nysXwujHlxrvaNmubVt1dMJsFmWw8zZ183Xns0pd9eGnWdfRydfJ9h1+F7vbCWVFEABFEUSZR
IKSwAFICUAEsAEsAIqookoiiKIolCMkRYAAVBYEmUJKsxWEFCAIKoCUSoAAgLUFEARVSZQiwSkgI
oijFVYrBLALIACLBKEoiwSiABIoiypQiiKiKIKCEogpKIqMWSsWQxtsYMiygpEmUIsqJiZTGWWJZ
CpJRBSWCUQJASZCLCLCUqLAQSxAIBKIKiknZx9mNepr26uXcCywqUqC3EZMaWxFsFsFSgKUAJaRQ
ACAoJKACiygWIKCBRFEUAFCyqpBYFEWCVUUAUQUCkUJRKAEWCyiUAAYM1yLNQBYFgKSgABKEURVg
SgKApj8V9T+e5cfV5/6Fnr76unKfB/e/BJ8ncvpefTT+h3PpgBYLcRkiLcRkxGTEfN/G/q2DPifR
a+Z05vjuTXx7/pPbzdXXhiyJiyGKqgAEoigAAB8j9d+es/MfsH5D+s56/LfKfrX5veHlY+rOfo8/
t2cSdm3y+tcd3n3efu/pfx37W6+tWXkB8f4Hs/Meb0fRbOj2OevD2expXi6uTsnbPRxSd+jp8Xsz
22Xx+6dN/Pp1zXXp2+eer5Xucyb/ADenavJ0c26a5PR5+Nr0/O7PKj155m2zZ1fL9+/PlPPnTzev
5OzVcej5+WxObdl1b5eZO7VrGXJ6Gtrfp6ddz5XVe+Xz9PqcEu7Xvzm+Lv8AN3Z6Z6Ozi1x9HVx9
ee/N7Hj6T2vN3a7n3J8518vZh7Pia+nm+r8THmz13e3897vP0c+u+Zc/WePs8+a2e5877c353Pu1
9PP7Pj+tw8/V5/q8Xr3l4vL7fn74bOH6vjx18Do0/Q9OHzuz3/nLno4Pb6508r1/I256e16vhY46
fQcHL6Hb53n+b9Z52/N4mzXoxe1p265rVzdG3VVuHV0nHq7OWs+vVtw7frvhftd52LOkEAFlBBUL
LBjYBYlEoJYACACWCgABAJRFEUAJYVBYABYJRFgBYEmSsJnExZQiqlIAgpLAEAllJRQKlAgACTKE
WUABJYkllJRFiRVQAEUQEoRRARSQAKlBRJlCMoRRFEBGQxZUwZ2MGcXFYkWUlhQIhccZZljBBZFE
WIBJlCKICCksBEBUsQCAgqUIACSkRSSqgJ2cfbjXq6tmvj2CkoiwKRLAKtllqC2DJEWwZJVqWFgo
AEoiqiiKJQAUAAABSLEWFoChZYUCiLACKJUFACgqUAAAAAAAAUABSAAAAAFJQiiKAIoigUlDH5b6
sz8T9qs0wvw6vltvTz6+/wDa6t3XkCAAAAAAALA4O8n53P0VDPE1sa6bGCNklEoiiKMWQxZQiqlC
Y5xPi/b9kPi/tMGfx/s/SuiPzvs+8WfD7vsh+Tc/6D8hjp5vscPfz7/pDh7u/FLGfmPi/sPkPJ6v
S+5+E97WvY8X1fF5+jXs5Onh9HRv5u2a8b3eZrHJ1dXnzXXrcWevr8/HdctnR8t6e+GXZ8/nvl6X
Jhz3l63kbZcdHPs6rnzui9iedt6Czm7xyumXOjZhqrq1aOuZ49P0rXHwMvU8p1bfLterPMymvSy8
vJv0nmpnr2c9dO3jw2TGe7VLMJu2Ou3T2c+/P5vucfp1weL7/ny9en0senn+Y9fj9bl6OPyvpfEs
z2+r0ZfJ9GPsNeTp+r+XTp17ts35/oeZua6OT1/IZ9vi4NmPRo9vy9GuXv8AhepxL63V8v6nL18H
X5nf38HvtGzw/e59+3UvR3+LOnl+i8men3+X8rz/AFvg646d3Lqy9DHXuvPRtyusscM6w3YZ2X6T
5v07PqIdsopCgCURRFGKwSkiiKJKqSwIKgpCoKlBAAAAAUiwAAASiAELAEMmIygSZKxUYzKIBFlA
JRFgCAoiLBUq0QShRJlCSqiwmOUSCkogIoixEsoCFIsAEoiwAARRSEtIpYUlWJbVxWBjEzxwVlMS
WRVSiXEsguKWQJBUURYJRFgliCCWUIWCJYARQlhFgBFEWCVUlJCApJROzk6869TVs1cu1uNSpVlg
WEqUAUUUAoFIZQVC1BkgtlgACpQACWWhYiiKAAAChZQBQoCwAgAABQBZQACwABSAAAAoAKgWAUSh
LAoAAAAAAAULKEogMPgP0LWz8K9LHj2+uV25wpFgWABRFgWBRFgABQAAVBllrS7LrGxhYyuNLFWL
EFJMoRlCKJKqKIo4fh/rPheW9+7T0ef1ez9V+f8A33p4Za9uG+PyXy/2vn+D6vF3c/Rz+hr6/F6j
Ld5Wi59d5PDef0Xk6uffD3vH15a57ccd158e7ZuueDb0U1YdtXDn9HM4HZhZo3YaGe2+Tpufc1eX
mz06cdlnNj6G6Tze7ek3YaOHT3dvhdi9XLpzs1cXoaszjnbDjdcObDrWcU71ec9CVwuzFefPJWDP
JcMd8ax3c6a358eNz2bvOynTty4avoOFJ0+t86m/s/leWnv+18RZnp6/OzdfS8yZXGXXz4pddrf0
HFw4Oejv0rdXLuw3yw7+TRy9H0vX8z2ef6PsY83Ry9XPl1abrZwb508nn793H6fmY4bNzi28evD0
ctOd57sDWbnhbPuLwd/fEVQApCFQWAlEoASWIJSAlEoShFEoJYCkUSgURRFEWACAgIUggABYtEAS
W1ioSiSxEsoACKIEAgFlAVZYJRZSAgJMlYMokmUqALCAikkoiwlKSiKICKIoioKWKACUEAIQSkgq
SiWwMYXEsAQIsQBKIKhCxEAEApKJLCKIoiwAgCAACKSSiKMVVjUR18XZnXp6tmvn1WC3GlQVBQVK
AW42W2UWWFgoACllEWFtgqWLFAFgAClgqCiKQsoWUKJQUClSxCAsCgAAUAKAIoSwsUigBKAAAAFl
AAAAACiKIolAABYKIACgJhsAAAAAAAAAAAEoAAAAAChKCFuKM2AzuursaxsYDZMbFQAAcPn+8uOD
5n7XTnfwH1PidPH0/W6/P6+3HyODu8/5n3/M8/t5fT8/Xp6rvlo19medcl6OlfPy7t6eXn3YJzbb
ps6dvlc7Pu6/GzufT18exnDm9LanlZ+qy8zPvuby7dsMcsc8WLZVlhMoauPr4PRrC7MLnp2ef6Fz
jjlOWSsaAFEogQDGZ2tWO+Vob1mlutaG+JzTqtcd68dTVlnFmWCujnyppnVdTiw9KV5z0NRw5brq
aMOkcOv0cc68/b1/U3PzvH9xq78Pm9XueVw9XE9Dnx34fS5MJv38/n93n+h9Dz6uvj7NDdlb53P6
OPTz6+jm7O/yvI1fXcbh5O7hxj0brz1j3PoPjvsO/INwUhCxSKIoiiLAQSxEqoogJQShKIsBSAoA
ACwAgEsEogIsosIpAUABAoIqJKqKSSiSiKJUAEyGKqiiKIoWIsACKIBLKktMVIlipcbAICKSKWKI
yhFABAsAhYAAAhYCKktLisEY2WMUsKQEEBUpIBCksEokpJMoARRAJYItJZCWUggAKlEAAQCIllJc
UnZx9mdejr2aufS2CpSpVJRZSwFBYMkS5ILYKlFiLYKlAKlVZQBYKgqWBSLKAAqUqIySigoLC1BY
IlEUAAKApAUAAhYCwLKAAACgAAAAFAmUIoAAAAAAAoAAABYgoUgAAAAAAAACwAUEsAAAFgqCwAAF
QAWC3EZMUZsKuVxGSDxfA9z4vlt9T8/7PPp3eb2auPu4ebT5Pfyevu8XCPYnl7U7MdOTOrT6G6PJ
y9eyeVs9Fm8uzdMpkZqW5sspI11mwzE26eucayMcsGbtywz51V05ePo1d60dutrj9Pj9LXPlhxzU
uaAuOWpGi7m6Y3NXG2ZMbm5Y1Zp2Z49s3LDLzbsCWUWWqxxrbjjYyQTBNzddWWblisY47ZWrDfLO
VvGP1nyv1Xr47Jsno4Tw/f8AKmvmOvZz+f0dnLzdmO3DPV55vk7eWZ6+z0fN7uPt+p1ef38fXp1d
SdNXZx6d8Pa4MPf9HyPjNuOnfD0/sPz/AO53y7x2wlgBAUBYAQCURRCAWAACAAhUFAQLBQAAAQEU
QEUSURRFglUECkKRYJYBUURYRSQAAhUoKRS4rEgolFWIpYsASS40QIWRRCkUQAAhYAAhYBBUhQgL
VsYs4YkokLJLLiCFggBAAQIQAIUBJQBFEBFgliARYJZQEoRRFRCUIAiWCWAVMcok7ePszrv1btXP
pKUVAAFsLUosFAUBFC1KUFSwBQAtIVKLAsqALKqwUEUABFsyAAKlBCxQABZQABZQAAQAAKAABSKJ
QAAKIUgFgoFlliklCUAIqoUJQIAFIoiiLAspSEogoogAAAKAAABKIAAABYAAAAAAAAAAKlODl9kz
hq6Obn08zs5+tr5z5b6PxszDq2bfJ2wztzYshccojXlVjOsNPTx9c9KXjogsuvTLZz7uuefdqzMm
u1njjhGc1bo2ZYZcdXZr26ebydPH3dmezXj1c3reP7G/Pw3HLnyozUpMc9evtNtw1W78NWVmzPDD
czmViVUx2S6mNww5a3Xk25u6YZZtTAuOc6Tk7ZkmWDDNiuucWOebNurLN3MbytgTHLEn1Xyn1Xs4
9cyejhPM9XzM68/Da8vfh5fXw08nvvHp06eHrx21avW1zfm7rqmu3v8An9vL2/T4eb2+f34ev51u
efXv971fJ7+mZer5qwFElEFCFAAAlEBJlCLALEsEsLAAFJYKgoCiKIAAgAAiwASiSiKAAIsBCgAF
MVGLKVARYiURQAQULJmMLakUslggiFSURYFgBFWRYRRAJYAJRFEUSZCMhjagYmUkqsRYxLEsQEss
SwLBLCgiwiiBIsAEAKiyBSSqgIsEokpEoiwBQIEkyhJVRYJYiWAVFg6+PsxfQ1bdeOoIAUQCigLB
bBbBbLCyqsoALAACxQFgUFRGSChaCoFlAipS2UiwqCpQBUKBZQApQAAkSyhSKJVIoAAAAAAWUAAA
ACFgoUEBQQAAAUlCUAAAAAAEoASiKqUAAgAAAsEqgIsAFQsUiwAFIogAABSKAAGjfM3yOrn36z8h
w9Gnh26NmOzw9iwmGbc1M9W5jt17OmdezDZmufbh1zne+c75zXtsi4255TExw2aK2c+He1ybOb0V
5tvH1c87rjeGs9vP06ePydXJ6N+vn43tcfbxex53o78/kZ4Z48lEqWWOSZ+hzzdd3CdXPZOrn648
7p0egYassLy07tObn18ff5tuWzV3HPnv4M668cc5vZlhc3ZpY7mTXZdt5tPPXbhz7TZs1JNmWOXG
0lMM8DX9Z8n9Z7OPaXvxeZ6nly8EY+TvmxqsLmc/H6mOp4PR6XHq7p56b26O3bOvk7t/PN9npfPz
j6/p75/dw9303Z5vpfR/P0b84hUoBJVQFgVBlAAgJLALIokolAACFIoAqCxZYogsihKIoiwAiwii
UIogEoiiAFJQIKQSjFlCLKixAIoVVVYilxUmMyhFlSUSVZiyhFEIJYiVUmQxZDFkMbRKQCkiViqg
SwRiWRYAAlGKxAEsoQLABAgASKIBKIogEsAEoiyookyhFEBAARYQJFlQEWCUklEWVOvk6830dWzX
z6VKVC0piolULAokyEoVMoAqVbAtxpUsEUsoAKSgAsFEVBbBUosqiwsFSkWFSlSgCygFBUqggShY
lApKhRAAUAAABRAKACAFLFEogAKAURRFiKLKAAAICggCUSgAAAAAAURYAVKAAQBYJVAAARQAAAAA
EAAAMM9Nefu13WfiZjn5PR07MM/H1lxtc2WWPoznNWk6br5tzvy1k5rp79c+bDn6rjfx9HMvfq5u
ub17/N6Z109unmXHdq6Kw6dGqXfNmiY6s9W7zay383VXg6durtro75hx927t+c9/r5PKzwy5+fKE
rn6ODs3zs83vMerQm+nl7MdY5Zcs7w344p3cWeevPzZYnLr5KXDt0ZrnzDLO4tZY+j5WfR1a8dhy
+t5ncvn9nJFz19/GTq06Mz07p2+dkTJjZWr635L6z2ce+y9+LzfS86Xy+fo4/J6NfVz7609/HI2b
uLrs34ZWXn5fRx1PF0e9zac/RycbfpXm7c9cPW5fWx7Pa65fR8tY1ipQIsBLKiwAELcaVjQUkyhJ
aYqSKIKhSKCpZLLAACC2CiWUIoSwSrJKIoiwLBKIAAsAAEsAACiAQEsqKJVFtgSXLFBCkEiwiyks
QgQpMhjaIBKIRKxhlIqoBTFlBZDKSCJYBFgKURJlDGZKxUklEFJYJRFgBFEWACAlJFgBFEURYoEC
QEWUlgBAQIhUABCAIllTr5OvN+rh5PUAoAAAAAKEBQAAAAAAAUAAAAKCAAqiAoAAAAAAFAEAAAAA
AAAAUAAAAAAAAAAAAAAAAAAAAAAFBAAAAAAAAAAAAKCAAoQAAAAAAAAAAAAAAAAADEMYWcNMaypj
QJMDSUrLALsK1ZDOvMSBWYYCbuJTMTEGWJGWRkyDRiau7Az0m81y1DMBWs026iywazpc4ZkIFwEC
5gXZiSoWWBs1k2zDCBQZ6gUTLIzaJICdZubBqNYacTGpkUwIuRWQgBCyaSrrJejIXszNZCAAAAAJ
SkCwAKIACkAAAAAAAAAEBQAgKAAAAAQAAAAAAAQFAAAAQShQQFAAQAAQACAAAAAQAAoAIsAAKgAA
AAAAAAEAAEAABYEAAAAAAAAAQAAAAAIAAAACCgAJmR//2gAIAQIAAQUAEaW8aV40rxpXjSvGleNK
8aV40rxJXiSvEleJK8SV40rxpXjSvGleNK8aV40rxJXiSvEleJK8SV4krxJXiSvEleJK8SV4krxJ
XiSvEleJK8SV4krxJXiSvEleNK8aV40rxpXjSvGleNK8aV40rxpXjSvGleNK8aV40rxpXjSvGleN
K8aV40rxpXjSvGleNK8aV40rxpXjSvGleNK8aV40rxpXjSvGleNK8aV40rxpXjSvGleNK8aV40rx
pXjSvGleNK8aV40rxpXjSvGleNK8aV40rxpXjSvGleNK8aV40oxpTizUKRUI8aV40rxpXjSvGleN
K8aV40rxpXjSvGleNK8aV40rxpXjSvGleNK8aV40rxpXjSvGleNK8aV40rxpXjSvGleNK8aV40rx
pXjSvGleNK8aV40rxJXjSvEleJK8SV4krxpXiSvEleJK8SV4krxJXiSvEleJK8SV4krxJXiSvEle
JK8SV4krxJXiSvEleJK8SV4krxJXiSvEleJK8SV4krxJXiSvEleJK8SV4krxJXiSvEleJK8SV4kr
xJXiSvEleJK8SV4krxJXiSvEleJK8SV4krxJXiSvEleJK8SV4krxJXiSvEleJK8SV4krxJXiSvEl
eJK8SV4krxJXiSvEleJK8SV40rxJXiSvGleNK8aV40rxpXjSvGleNK8aV40rxpXjSvGleNK8aV40
rxpXjSvGleNK8aV40rxpXjSvGleNK8aV40rxpXjWvGleNa8a12LXYtdi12LXYtMihV6f3TMtmoVE
3P8A/ET/AKV6f3TMt10U2KuD/wDiJ/0r0/uk9GFjQqM8/wD8Qv8ApXp/dUy2NCloG4//ABA/6V6f
3VKOR0U0h/8AwUTankJqK9vpH/SvT+6mFwdFpTzHqAg/34TapHvUa9xAtozhaR+76J/0r0/uo04s
bUKBpOnpXFMxV0buH98k2qR6AJKL2imawZrmH6J/0r0/tVplFJKCfpJhY6Cka1D0nPa7G5hPP+97
0WpmoKWKoF0ZrBmJNRD7foX/AEr0/tJpVBMy08hbWNrr9HNqKFIbj0ZxpEPu/vS4q9E2otRNW7iq
gCi1qdr6KLlRYfQv+len9oyyW4o37T9G63GoNRt6Ui3U1COf95Gme9FqEjCu69Xomo15UzUxo0Be
o0sPon/SvT+0HbtBNzrer6JKLBgfopJABarCuYoUppXB9E9GHOIUP7vuKMiijLTSE0TqKJq9BgFL
XomjXbcpGB9I/wClen9oSoWHjeijCgKOlzXdV67jQmYUJgaDqfVLAVJLwEV00VrUrXHG5spNyFNI
v91XouooyijKaLmixq9XocAo6XpejNVyajWw+kf9K9P7SIBqwoopoxIa8ApoKZCtA1fS5oORQmNC
UGgwPoE2p2JOl6GvSgb0rWIN+OduYpRy/ucsBRlFGQ0XJq9Xq9X4wNDreiDUcZv9K/6V6f2wVBp4
KKkVegdVq9K5FBweEkAPIW1NAcQN6RrVfhY2BNyguw/uYm1NIaJq9Xq9Xq/qxxXrxLQQD6d/0jp/
bTIGqSEiiLVegaFXq9A0slq8woODRYAO5Y+keVK16RrHgmbSEfd/c0jUxq9Xq/rqtyBYfUP+len9
uPEGp4iKA470SSfSvRF6tyVqRtTTNc1COX9yubAmj9AEJpYSaVAv1T/pXp/bzxCipFHjvVvQJoDg
U0r6TNYaRiy/3LIeZ9W1LCxoQCljUVb6x/0r0/t8gGnj5aX4Lc+O9AcSfcASpla5vSi5A5f3IxsC
aNHUUTxAUsTGljC/0B/0r0/uExqakjABHAQaB4wONWseREo0iXn/AHLI2h1AvxBSaSCljUf0J/0r
0/uMopqSMCrUKJ0B4OtD0DUT1KedRD7f7kJsGNzROo4ACaWFrhQB/Q3/AEr0/uQ00Yoix060Dpb0
+hNmHabqLD+4r13CpHFiaY1fW+qAFlRR/Rn/AEr0/uU1GoLNFRXX39RGsSaVr/3A0gFeU13SGj3U
I2NMCDaiOIi9RC7/ANGf9C9P7mP2srA0VBp4rV00B9QilPcE5f1xmCjzPUcncP6BLJakioADR1pW
5E86Op1BtUIF/wCjP+hen9zSKaUclfRkBpoyvrXKkGl6f1qc6Qfq/oCjuk1bpe1X0OpoawMAf6M/
6F6f3MRehdCyhgrlTemahHemS1W9RB/XJGu1Qfq+vdu0LJYjgkFDQ6mloirVD+v+jP8AoXp/c5AI
BK0bMDdaQX0Zbj1I+Z/rUrWXSBeX1xa1OXYoLUp5auRdutHUDmORo1F+r+jP+hen90OLgcq7QaIK
FWBFSLaj6cX9aZgBI/caFILL9YWAr7jQQV2imQUXK0kqtRYCi7GiTTir8I0NRfr/AKM/6F6f3QaI
a6kiuRDKRRmamdmpb0fSiH9RJtQYEFgK8i0ZlFecV5689GZjRJOsQu/1BIFBgdC1fcaCAcBFdovJ
EVpGBLyCjXUHhFdNE5OP6M/6F6f3SaFc1oEGu0UAKkW46jg68CR2pf6jKftBIruNXq/oQDn9NJIF
AmamckhiCjlyFA4WmUUcig/dXntTEE1egeIaEVfmhuv9Ff8AQvT+6veiCtK19XXtJ41Fjel6f1CZ
uXEKEbGhA1NCQLGok7R9KTUrlmBq+kZs2pNqkmJ1Q2Ljn6QNcjR6x/p/osn6F6f3U1DRltSsDo63
HXiAtoL2Xp/UJ+ulq7GoRsaWA0sarwW+mvRa9Njk0mO5MkZQigbEG4pmCiSUtwtzX0waJqM/b/RZ
P0L0/su/15pdZF5I9qZxb34ALUqXrsFHovT+oTH7tBSMGH1ZarEkADS4rIKkUKiN0ZgoeQseEHl6
sX6f6LJ+hen9lGh/QPeiwFNLeiauauaHOudBTQTkoFtG6Dp/UHN21hNm+pLWrmaCgaMwAeYmu40e
lA0kpUO5Y8Q6+pHGWIH9Gk/QvT+y7Vf692Aoyk1zOl6Jq9L0ApVtRpemjdP6aSBRmUV56aa44FNq
jk7vpywFXJoLbgle5voelL9LHEWpVAH9Gk/QvT+zLfXPKBVydL1YmlhNLGtvEteMUEGo1b+mNIop
pWNEk0AKPbxA0hsQfpCauTQXhbo3UGu6i19B9JFDegAP6RJ+hen90E2p5b0NUjvQUDReIanr/SWY
ANIW4L1fjBtUcgP0RNdxNBfQmhN+037asasaAoD6AAmiAKjh/pUn6F6f3OzABnLGr0FY0IiKGv8A
8uEddPf+kyNdqGhNE1egfQjl9e9FjQX0jUyi9qWEtQgpsciiCOK1CNzRBHGBckhaiit/S5P0L0/u
UsBRlo3aihFLExoRKKtRoanrw++nv/SJGspPOr1er6gejHKRQIPqFq7b0Bb0SQKaQX+9qEQoRqKt
qVBpoVNDHoRIK8a14koKo0eMNTxleEqaAJqKLt/pkn6F6f3GTankNE0Beo0txe+h4vfT3/pEv6W6
3q9X1A9NJCpBBHoFqsTVrehei4FdzGhGTQQD1+4USCGhau00sbNUcIFFQQsQB/pkn6F6f3Gy91P2
qoBJRQBxHU9B04Dr7/U3q49ZhdWHLQC9dooD1YTy4i1czQAHoFgK7712saCAetcCu8VdjXaTQUa2
/qMn6F6f3G7MKszGNO30DQ1HCdR6LSG8bX9ckAZEp7zOScWVnHqGnBBsdAaB9aIWHATXM0BbjuBR
kq7NQjoAD1L0XFdzGrMa7BQAH9Xk/QvT+8R14DqPQd67r0OVeQihLQcHgvQIPDfSS3bLYoWVqwmY
t6skd6IIoreu00Bar6EUD6Ci9EVH+nQmuZoDjLUSa7SaCAeneiwruJr7jXYKAA/rcn6F6f3cdZJA
lNOOyCbv1Oo4LgUZBXkWne9NegbEtQbS5Fd7V3tXlai7Ex1cU0oFMzGhIVBlJKy9tTyr2hr0zEGK
QqQwtf0yaMnM3rvrvNNc6X0NA8QF6taiDZP03q5NAcVxVzRrmaCj0u4V3V9xrtoAf1+T9C9P6E7d
o8ppXB/so0NMsXjVwRjMRLoaBGg6SEihKBRlokmiCatahTW4Qb1eueq3NFjpzq1MhonRerMVINjG
x7Y3sQQaY2CtfhLAUrgmnPKK13IAq+hF6tbXlV+ACgLUTzbkFva3Der1YmrAVeu30O4Cu6vurtoA
D+w5P0L0/oLsFDv3UNI2JGnet/7ImXujtcxEKzyWozMtGUtQY15TSyCzSKR2irWo1ZqKmrVyNdgr
sFdtWoXoAaFRRWrkUO2rCrVamJFN1odb2AaoxypWIPkBUMwIlNeQ0JRTSCiaVrEPegKdOYYU1hV+
6r2oHQihqDqq6GwpAWPBeuZoLV7Veu3iuBXdX3V212j+x5P0L0/oMrXbVXIpW7gxstLIwpXDD+xC
QKafmMjm6EFeYMhILFqRbUL6WNdpsrVa9AaWoLVvS7RRWjy0a9iDY6LQBBUk0hJANKoNFKU8wRRa
idDehStala4bobkkEUQbgAURV9LG9qtoDqTYIpYgWFXq5NW1vXbwEgUWFXau0mu0f2XJ+hen9APQ
m54Eexke40BIKyKR/YTyG7OSCxVmNy9wVvdSO1eokAoG+imu69BKH0Bo3rurnRp1Onv22ZS14q7K
B5FuQU3C12imS1CjV6BoMaVgasKkW69ODpV6burvagSdFGn6ioCi5q2t6uatpeu4Vc1Ymu0f2fJ+
hen9BMQNGE00bD0VcrSyg1f+tXq9M1qVrm9qeQ3vRp1uCLU1+03DpyF7UDQq9KDQHMD0b8F+Ei9d
tqINh0NHr7MAGJIeJh3dKFDnQGp512V2iuwUVtV6ABCsy0sqmmte1X0WMtXgFMpUqY7Hsr3HRjek
jsLakiuZq1XFXNWNdoq39pSfoXp/RCqmvEtGIUUINjxAkUspFCVT/U3e1IeV67hXcKZwKMpoksUJ
FM5NWoi1Gj+km55lZr2U2JpaGgYgg31v6FvROpNMbkU4XsYKaTlUbAg0o5Cr11oaHS1FaAogjQa9
KiYaMt6MIpltS9WawiTW+grnXbVv7Wk/QvT+kEA00QogjjWQgCVqDAj+nE2BFze1Emu6iTpcVyru
sBzrlRIFEk0BTCxTmHUssQ+y9wptXUWoCgPWuK7q7q7qvV6vRNE0VNqFAmxt41ChA16V6U3q9W1v
XXh60U5cAHNJCDLkBRNKWjjDALSL3G4FFqsxoLVv7ak/QOn9KKg0YlNNGwq3GrEUH5eVa71NXFX1
JoH+iytzDWruq+l6Ck12iu2ipqxojXrUgFJ+phUdgV/SppTaiQaFXq4q9d1XruruNXq9c9O6u6u6
r0dOeoNAKT2qasKccxQvf7gY7Co1ue0iloUNCaGp4XWgdL0xtSFSCRXawIvS8qUGwQf29J+hen9N
Kg00QplIPDc20vXca72oSGvJXcb+S1B1NX/oLGwbmb6AaAC9uE0TXtRsR0Lj7hZZE0Vr0KU111Gt
qtVhpyrlVhVqsKsODtq1EUv2jrUlgNHUCi/OMkHu5WFGg1d2gq9XvVuEm9dtWq1EV3BaaQUrNeSX
tCZRDI3cv9vSfoXp/TyAaaKipGl9OVcqtVqseG9xQJFd5pZKBvV/qyQKaWu4mjVqC1YV2ircRFWo
cqNCmveSn5Ov6jSAXsKEYsQKI4b1f0ibV31313V3VzJI+2W9q9kNwqkstg7LyBuNb1egavrerk1b
QmjXWmogXLAUCSZ2FREl16f29J+hen9QMq00ncPRFXrlQ7QbCu2rGrGlNqubo3K4q/07MAGctpc1
1oD0yKNGrcpBzP6Jb9qm6mo1voklqtzA59ooi1L225VarUBVqsTXYa5ir13CiwoNei1CgRQ7asKu
RXeab7qZCNEWxcEHxuKiFlKWIIvyt0oc6tVtOel6B1tTV00brS/qnILYzP3j+35P0Dp/UO0VYVL+
r1b1ermga9waCUBagfpGNh5LUzFia7TVqA9MCiQKYXPby+2zryHNGF0hP2D9KmxUimQmrMKDGrmr
G1tO61XoE1cila1M5ruJomuVMQKDg0XoS15aErUZ+Qla9+6u5u0FzXNSly5C0HNF7KkpsHV6ABDC
lvV6vw3q9X1YURYstxal6zlu7GN5I3uf7dJADyliGcASXq9XH9Ql/V9ANVc1fTnV/oj0YC5oL6xa
1Frnydoue0c6c8k6g2pSxZeQpDy77UZARV6PKg4NGQCu5bCQUhuHcKPKa7ya8jFR3E+4VmCoSexh
SxchEBXirsUAKq0UU0AKLCib07dgV+5u1LsFJdCas6iFiG8wuSK51ca3q+o0vRNE0TXuv6si4MQI
e9ikl6v/AGv3Cu4VcVer1KwsaBFqNE1Z6aRxQlehK9B2oMD/AEk8g5ufoB10V+0gg+qzBQsisAQd
XcCvIbmVqLk13UB6paix0HOuVE0LgNev/kRzsQygWJsRbtLsQXYhGar0Q7IisSVYFY/tSIVYCmsQ
FVaKqaBAHcKLLXkWvKteRaMtGU15DXe1d7ULimctS0tMnfShYwWYkORQkpb25V4TToaLNSmwRwaD
8zYgCxNhrfXsJqQEAGuhnA7QbEHuq9q7zZJAD/aV6MiijMxpU7h4a8Jp1dSoch76XNeVxX5NCVTQ
kWu5aJVqa6t5DSOKdjaOWu8V3Cr/ANEIuGFj9ANW6Qm6+mzBRPKxMBbtgaxJ5PK1Bu6r0ATXbauZ
IHoXoHQsBRer1fmBRIFE0oFdwpqLCiackUCRRQNTPyslXW3eBRZa8q0ZlrzLXmFeWgzFfK1d7UCx
po2A50ASUTmwsdbVYk8kokk0qk0oABNSnQc6VOYkWgRcdxruUVcGigYGIqFbmZiSJbV3XHcRQe5Y
0CDRpb3cC3u1SXaMVim6utxQvULH+zSwFeZK8qU06ijkGmkY1fSBxbQgEP3ISCeDlRRTXjFWcUC4
oyGiymkHOI3rtFdorsFFDVmFXau+u8VcVeiwryLQe/1j/q+gHBGbH0L65Fu2TrCfsNJNZWctSggV
e1E3oUNb8JoGixo0KNchQBIC8yBTMAXc1zNdpNMAASCE6IbrJyBdq7jV9bagE0qHtEYoQi4UDQgE
BQKFEA00YogihSpVgKaMGljJPaoIrma6V2s1WUUn3MIlAKgjyKKMjHQXFd5oSUGFdqUY2BPcD5Wo
SgBXBoG9c6uRQF6mtR6LcqRZsRhemU36VEfu/sK9Xq4ruWu9a7xXfXcauaYuAyyNXjelgc0MejEg
DgE2qNL00JFRyd2jMBUncQfSsDQpXK0JzSyKaeUCg1+Bu0U8qUskhI7iQLUGFdwq/wBU/wCr6AcC
/q9C2l6k7WWUWaMgMxvXt7q9q7r1a9W1vwXAouK776Wq1WrlRNEiuQpnAHlNXNKlhcCmnUGRiyQG
6xxAgizSj7DqEY143GscV6AA1Gl9QOfbRplDUIgNLGuVEsT3Coxc07c/sNBVFWNF7B3ZiKCk12mu
diLaEEUHYUJKDg0yhqMYsylaV7VHMbiQNQblL19lNjPGbwt2sjdy2p0pDYqwI/qt6uK7hXcK7xXd
Xcau1fdQ7qs1dprsrsFdgrtFWFW0MqCvKlB1PBJKEp2LHSEaSrYrLcAVKwsfXXrela1XFSShalZm
pQzGJAaAA0FdorsFdpr7q7jXcKuPoyyivItMQW+kiW7elamQESxpdkYKGDDRTztVyKHOrUdSQK7j
RJq1cqB0vRNFuZN6L2pmue27BACzAU+QBTOzaQnuWI9sn6RTcwVuzxEFIgNXTmsRBoLRHFagKPTr
Vtb1bk73pajsAxHa7ElEsARRoijGbhAKtVqtXaDXaKdbgi1WogigxFCShIKupDRghVeiWBLF6P2m
9Ie5SO04ZJGjCxR7Ef0a9Xq4ruFdwruFd1XNXNXavuqzVY1212Cu0V2iu0Vb18qbsCXNXoE0s7Ck
cMCbA9bVahypXILfcBEKYWZnuCatXt6o1Y8j1LWr3SQqUcNR47V2iuyrEVdq7q7hV+My8xLRmoux
rvaiT9QgsPTl/TIK72QpKLsnMgikHPnVqGhNF6JvVjXSjVqtQBt0otR6F6JIPY1doALBA8/NmLcE
Bs4ivMG76VCaNOCCspY0ALcCHQgCjwA6EE1a1Xo07EUDYO5JpRzYFQWAouBSPcX166Wo9eDtFdop
wb6gkUJTSzCgwNLGlNGSRYCC4BTuGLG6nQqDTraoWJP096vV6uKvV6vVzVzXOvuqxqxrtrtrtFdo
qwq31uXbyI9h30HtQYGlcqWm7gOG9Xq9XotzDUSfW9gNDR6kUepFRSFSjBqA9Qiu0V2191XNd4rv
WpG+31e2u1q5CvsrtFAE12ni9qVSaEZFD0ibANcO5YkcnUUhNXZl7+SBb21uavRFdKvR07aAFMwF
d16Z6YlqWMmgvNpAKec0WJOiqWpYK8C0kZRj2KkLJcdZB9zi4X7WAvoDehRGii2janQdaBtRsaPS
jRBU3pQSUTm1wSOZpWAHcx0A5Uo50QOIgimHGoNxcAXNFRTnkGuYJL0DqwuEJRgwI9K4q9Xq9Xq5
rnXOudWNWq1Wq1WFWHrX0vV6uKuK7hXcK7hXcK7hXcK7hXcK7q7q7jQY1c1c1dqu1Xavvr7q+6gG
tZq7TXaanxu6ihU8wS5Nd1BjcGhqavV9WPI1egfWFA6MeVGrcx0QXqG4PrmQ13HUGj6TXoGlC0O2
rijajGpox2oK1HuAEl65Ue2rGrUBeghoAAenNJYiSyWvTU4vRUBoSSCLFWse+gwOjcqtc9tWrtqw
07jXdamcdoY3swofaGlVQ07GijgcEa2WiCKIuJWcVBJ2urAiQg0RTildgWftqNj3UvMVer0x0JoE
HUHkTQJFE8iaN6s5plUAPSqxDBiO0gleSBb2F6Ucu0VaibAng7b0FpiLGihpUoqKYWNWvSr2gCu6
1MLhAa7T3KTG8cncAdDTUHICSFqvVzVzV/Ruaua51zrnXOrmudc651zrnVjVjVjVjVjVjVjVjVjV
jXbXbXbXaK7RXaK7RXaK7RXaK7RXaKsKtVvpZoQw/GsDjKAYObxlSgoaFqNdtdtG9fcKPca56A+s
OtXpnruvXfXdzZr0CagX6CRbHUUeIAmuxq7DXYaVFooprxpXjWvHRVqPcKub9/OSS9RlQHaibV3G
u0hUHL1JJLV1LHnysRzJqTrD+turUgJIS1chV70LUx5XvV6uK7hRkotcqtzyWnmUU07HSOPupluB
ElGNDTQKaEDXUWAovyqY3aou1k8rAg9wcUaRbmyihS9C2lzqw5IDQpVBphbgtXKiTTqbAUQ6h1ek
U2Ci3YoIpbUXoMa7hRYmrHQ6L0JsL6gUeVOLlqVe0AaMwUPIzVHIUIsR2hqVXDKwNXpjQTkRao2I
M0hRUnvFDlUkiuP6a0oB81edaEqV3rXcKvw3q9Xq9XFXFXFdwruFSZAUtOTQnU0HBMouRV6LUbVe
9X4DqOvqroaYWpq6KBVrVEhZkXtHrsoIYEHVVJrxCuxa8YoIoq3BarCitWNc6ua7quK612CgiipL
A91dwq/NixpSLX9FmsS6imlNXtXdRNDS1SAGkNMbg9EYqQbi4Iq9LRIogWbkGHIRgUSAGmUU0rNU
SdxaFDQhS1raKt6KcLyhSH7qa4JrFswlAVkYmmFKPuMPNVC0PQtbQG1XvpariidWJsvVi4DBu4Xt
oOImgavS9aPTtOlxoxsCblFtSqSfHykYqrdzHtavG9ReRCBYk3HkZWD3peZp+pFSSu1BjTIDHhy8
/wCg3q9XFXFdwrvWu9a71ryLXlWvIKMlMT3JJY/qox3ponFLzPjerSilkJPaa7a7K7K7RXaK7RXa
KsKsKtUsCyU2GaOK60itTyc++u6jzoKa8b0ylaeRVoToaMwoS85GKgZBusjM/rBjXcKcE0b3BvQQ
Erj0iBfopeugBNKLevarVarVzqRSaMb0sbEiIVYUQK7K+4V3EUGFX4XPM6dTarUdGFg/VR3BWLJ7
RsAUNiyAV21yFFhRcmgGJUkAeMFwwBDuEjLEwElVCi+ltEvc0RY0BpJF3EwsAVYF0KiE2eVFCo5F
EUFvR0HADV6vSAURerczVtDqdGNLTuxIZiRV9BrarVaiKtbQV3mla9OaNRqBVh2s6GlCV9lAABpV
o867KYBaJSrJURRSAKKhqjXtpHBonlRJIKGuw0AQkAIcOtd4ruFdwrvFdwruFXFdwruFdwrvFd4r
vrvrvrvNdxruNdxruartV2q7Vdq++vvr76s9drV2tXYa7DXjrxivGK8YrxrXYtdi12LXaKsKtVqk
Nm8YYdjx0kynR4lagZI68ookFvVto+MjU+I4Jx5RSxS3ij7VqSMMJ4QxCWoVY1GaMPMRAD8ZSHVk
oLJa5q4qxrnV+EVyrpRNC9NGWr8ZgY4u0fRuvcCLGoxz+ltVqtwXrlXaKKivuFXNBhTsLMed6NLq
RoxvTjmDYxAla6GM9wvXfavIa7WNFAKJADy2p3LUsrrUcpeu1aINX4OVLbQqDXjFWADCxpwxAiN3
TuBUqUUOl7EHuANqGltL1egKY0DXeVouSFIs2rmwVgdDTGwBvSMVDyNSsb0BpfQDVVuGSjpfQGxY
3pRcy86LE0q30sA0k/JFoA0alZr97V3mu5ailVaA5yIWCF0buBHK4pxVhUpsuIvc9qtXbXaK7BXY
K7BXYK7RXYK7RXYK7BXbXbXbVjVjVjVmr7q51zq5q5q5ruNd1d1d1dwruFdwruFXFXq/oE0v3vo8
StX+SOklVhK1lZiSrEVHODV/pD0mBidlDhInIEIqOKxWEmnQBBezoxYSEV5lq8TUYUoqwLBhR6Wr
nwA11IQAW+ndb6R/XWq1c9OVP0NdoorQFuC1WqQVaoreO9GlIUM96UFgBzZgKLmmvRNG9417iqgA
Cr1yoLclSp0BoG+jPahIaLAiiKIq9TC6433iRe1kcEe9DrRoNzAFharWFMt65jS5oaEXoLbQ05vQ
6B+0M5pTXXQmlve1Wq+iuAGeuVcq5Vyq1KBRARWbkBcgVdCr/bTEMVItfRoiWEApoSK7QoqCXsru
ZTLdqjk7S5Ktan6BjecmsBKtVqtVqtVtLVaraWqxqxqxqxrnXOudc651zrnXOudc6uauauaua58N
qtVqsasas1ffX+SryU7MBGGA7nrueu5qLmpBzZiRY0a51HMwoSE13NV3r76++vvr76++vvr7q++r
PVnqz1Z6s9WegJKs9WanhLAY3IRMKWBaCgaS/oHSiAaMKGvAteMgsou96vRNqvXM1ZqF68bmo4rH
6lkuVW39DlYGmYikLGra8q7atTWsb0OZS4priia52ChQXsCxNG9FrV3XNMtQCw0tQApT90huwtre
u46gGipGp0iNpJY+0IwueimvejQXnYk933NfRTXM0LGrgUDfhY6AUzDtZ70Oi6gaC9uLnV67jSi5
ZiSw5gWAdVLdtE/YgcVGGJ0AvRjsKKk0Y2FMCDDMBVypkXkkgFciHvcdZ/1YQHj/AKZIQzAWHBNb
tJq9d1d1A0v3HxPQVgPvofTSfoHTh7BeSPkbghWahAaVAoZb0qAVYVarGudc651er1er/wBVme1X
rtuQvJunCTQC00X3WZTyau0KetFeRBDAi0hNl5hI6tRQGgttAdBSixbnp5AGJrutXdyL3oMaQDtf
mKto3Qse/sLopAPI01xStcaAUSaApr0OthVxYrYnqOA9D1persppn7tE6UOE8Ja1CxqxojtVugFq
5gO6tQBu0ZpL2oJ9vaaVbU5Og0kv3VBNXQyKAInIDDkeRm/VhEf0x+Swi78E2QEoSu1E17GhRqCK
x+pk/QvTjKKaAA4bVarVY1z0vV65VbS1W4L1fS/9KJsGa5vUdCiL0wsdBoaHIO5oPciS1Kwaiorm
KZQwsRQW5VLG1WoVz0VSSIxRSgDRuCxplDBR9rCx1gaiOXvRpuh6wEsjL2spBVhQJUg3o0tEc6tR
CgewBtYmulXJI1ZudCktTsoPvUZFWoa24DqVuaBsTOBQkJpT3V5BYuhodlfZVlpe0UCK5UTRuasd
L00amnRVFQzA6OgFRNcP+qbmmHfv/pcrWWAWXWV+xCSSnQ0xo9VFKvcwFh9TJ+hen0tqtVq51z0v
V+G1W/o5p5CaNAUo5CibDqdS1hSksO29SNY9/NSLAmrmu6iFahGy0Sbrar0WtQbRDz0Y2CEljGDT
Y4skdqmiB4AbVHJ3VLAS3SjTmwNY7ENIjCkK0aIpTYixA5E6+5IFE2FMK7QNBR6MLGhSBbMVFAg6
KDc8AQ0VI9BmtQBJAvRDKZGIAW4tQGoNiXomrmixruau413Wp5E7u+KrwUssbVZbGI0I2NGBjHjq
6SdwruFXq/8ASJjcqLDXLc6J0tR0HTGXn9VJ+gdPrra8/wCkyKAR1ApelP01NWvRNKqhZLXdO4rG
wrxtQDirGljFnQg9aYstCQGu5TTKxpVKkcjelkoEGiL0idpvV6NTt2gAmiNLUCRUb9wdKN6k/TSm
zMruqdoY0RpESCaB4L0AbEUReuegom1MbmhVl7XKULWAvSi3AtFrHvJ42eutWsAn2s7qOdxoDpbQ
8qvV9AtxZVpzTG50hhaVliUAAAWGt6BFWrtq1WNWNc651zq9XNXq9XNXq9d1dwruFXFXq9Xq/wBE
TSfdLrepW7noGhR0NQrZPqpP0L0/sgsBTSgU7A0nOhSmr0xuTqSKuKFSNYM5YqKvQvV2FCQio5Qa
sDToAaZbUovVgK72FBrjkatSEg63o0yhlSIpToppoDXiIorYoSpBuHSpCANImdkK2NwQaNJQPK+t
6BFcqYkktYW0FSajq6gK3ZfSPg/SDQ663okAM99FFgguSCKYlmUWq9gWJNAmhowoC2ipamNgSTRF
6EQFSILWvWLGUSr1eiaLk1er1G1x6FqsKtVqtVjVjXOuenKuVfbVhVqtVjVjX3V91fdVzVzVzXdV
67q7hXcK7hTOAC5JRWNf5hXlkFfkCmnBXtNdpoC1FjQII7aH6lHL6qT9C9P7DuavV6uKuKdyxNEU
osKDcr8tSaJ5rfvBuZwaKV2mgt6CV2imjU0p7SkzVKWWlLPXSnQ13cwAaFXq9IwBWQFqEhLA6sO4
dxWhY0bU0VGM1ETpkoQ2kDkVIpDJ2hTRoEiozcaGlvpeyq33Naw1YXFjog+50sbLegL0BagKFjQW
mNcqA5aXrlTuL9y0pU1dTR7TTutIUJBWvtNXWrqTdaWxPaBTLXKrgUr3oi9faKutfbT9llWMPH00
vRPBELD6S1dortFdtWNWaudXNd1dwq44rVYUUBBhAIjdaWe1Bg1PCrVJGUo3ruNXNXq9I1wsRYgW
H1Un6F6f2IzqC7hVSQOJmIF6Fq99b1fQ2FOy0oVqsopO2+RKe8G6sKi5C4qwroJOt6Vr1IoBQg6N
GLqQQLcEjlSZn7YRoKvoV7gqsp7hfmKZLjudSOYZAwkxQtGB6RJFeQO4jAuwo6R9BqLg9zGiRa1D
kwuKGjMBRYGhUYuWhUgwlSFNBbUAa6C9q7iNABTG+l6uaaS1FwaBU19gr/GAyiincEjIrtOg5HnR
tQYgmU13XomuVKBdjenLio0ZgI1FdorsWlFuEjQClFh9bau0V2iu2rNX3Vc13V3CrjVkVqMLKRMR
UiLKv40wLQMAIQSsMQK9tCNzUcfb9ZJ+gdP7DdgqtJ5WkYgLMyCSYsqjlQHO2proLmmYUeYAsrvQ
Y12V9wI5120Ber3LsQe7uoKauKjYKWW5DrYcqYWoG9GhoyBqCilQAaW0B0dDcORQ50QDQFtMlbxg
15HFLIzKt7yLyYUFBoCw0N66UKNAc7VbVjzoVCKJBDML9y0rA0FFjxmibAnRRagQKcoa7u0qpNKL
Lc13GgxomiUFXQ0ApoKBVqCGijCmYqVfukAsOC9X5ka2paVv6NYV2iu0V2mrNX3UwvRBVlmYF5AV
SAW/GFKir9dJ+gdP7DyL9sTWaTrXsjVermlHI8qFWoimcLRJLDkrvyUCltQTnYCuVECw0IrtFMFp
RXjuVZkooQFkJoi1MCKDkHlrGnoG1itAWq9EUKZe4fjydxjjSopAaaJmBVlBFXtSuRoD6B0ZaItQ
pLgd1N+rtNIpon0TRFqUXPUllKvar3IUDUGr3I50yqaAtQBJ6Veo+tMtxGjd1qtwAVeieEGg9A3q
/wDSrC5UGgij+gyfoHT+w5kLB7qVbuBFiDSmzUKHQi+lqLU4JKqQXa1daC2oAigKUgVarEaWqxqw
NWqwq3Kr8nSwQk10Mkd6uyFZQaQ3I6egBVqIoaut1kteCE0LmpBRo1Y0KHoGiKa4NCj3AOrGlvZS
b2sOM0Tai5om9DkEBFSMLXNKpOp0AvSg1yv70e4sKU2I0GgrtBoroDR4gaFjQYqQa51zq9XHFer/
ANhSfoHT+wyLjIRVIZlLAEUORFWoNer1cCpJL0SRQJ7XeiL1GKAAo0OVe3TS1X0POvaudXoikfnI
FJQqKvanjuCjVjRMPUBIruFWFWNCpV7X72iVJB2vICWo0GtQF6HpSdaUc2LAMrXF6jtRYVYVbS3C
970ovSjuIFhIAx8QoDQ0aVebBavZbEVbg7xSSWoEHgB5VYUevoXvUZ5Xq4rlVhXbVjXOr1euWlqt
XOudXq/9ck/QOn9iOgYSRm8LWphY0GoGhRNqJpmN1UlWYAczVqRedW5XNXruFA2q405UKJoi1C1G
1cxUi1DMFJUsUQECMCrCgPXuavUsYMjEXhl+1rkgfadB099L6Xq9AUQNHNCk6uzk2YEdRVtL6ctC
TV6NgC6mrIa7eSL2mViB1IJt3VyNdpoK1C9WoWFBwRyrto0e9qVQtKLkN2FJFfUaE8vSjXlVqsKt
XPW4q4qwq1fdXca7hVxrarVY1zq9Xq4/pd7UZhXnFeehkChOhoOpq+sn6B0/sXJQXZSKYEr1CX7g
LAGutOy3ABdmtQXuLFe4C1AEgVcim63o8FtWFwX7aV+7Qi4IsYJSCBy+jIUBu28LWaU9xRSAaHKg
eQAoijwCr0TRa1dwIHVL3kZiQDcEClN9Dw86BpyK7QaCEUqsaaQgdz0pe4auVWFALe1LamIAsaAp
5gKaQsU7jQYCiLCQim7mZAVKtfgPpKASAANbVbgsK7RXbVjV2ruq6mrVzq9Xq9XFX0tVqtXOudXq
4q4/okslyTxB2FCdhSzrTyKUHT+xZ1urDlGVKLyIPYwPK96dgAq91Gwoc6N67C1BCKANrGuw2HMH
QtYqb6ijoVBoKBRFWNSITUQIcdNb+uekwJBFipsZGuqA9xr3DWoNV6voeA0+gqMkUWJLdaQUDRPE
Tcs3MDT7hTMKRUq1AWplvQJFXDUoJPeoq9d1qeawEl6CqaFjQUWYgKIy9KgA8dIotZgaFqYeiBel
iHoW4b1fSwNdortNWarmu6r1er0NL8NhXaKsa51c1eu4Vf6yZ+0GrD0h0/sRjYSvcuDeAmnFm/8A
mp5EgVa5AVVuGrso2pHIJNwGAAIOjDn70VBoC1BLhktRbn1pjel6X51GoAq3ARp7hvVc8utOhJ6V
HIOyxoijRpOmpYDUm1F6BNEUKRiocsaUHQNbiuaJovR5kcgikjuIqwagAANL1cVcGlIFMFJ7RUgY
12kFEtXMkXRpJLUi3JYLUb3oGrcg1q+00Rar+jEvrWq2t6vV9LVaioorXaa+8V3NXeKuDpzq5q9X
4e0V21Y1zq5ruruFX+kZgoLE+oOn9iSkWNqcC0bhXkcgl2Ko9wyg1YLTPcgCmktQBagrCo7lLGrc
wAQRyq1CrgEyqKeZSL3INWvR5VbkLiozy0aRFp81RSZfce5xSzBqDgmgbUDf0ibAtRNq8hueZgYA
yc2QfbVtO4ilYaG1AgAuKZiatoKPRW7VctZb6q1tLc6NdwpjfQCupZSKN7oLAjU0AdCavRY15rle
0VYNRQWlLEpGQAK7BVqBtXfV6FXq/oKtyOQ+htVuC9X1tVjoQtdgrtaruK7zXepq66c6uavVxwkC
u0V21Y1zq5oyKtNkGlyDSyoaDA8XSvJc9zihIKuKmbuPufTHT+xJj91OaS5km6g8lNXFpHrvsRNY
d16jAsedRUygaWNEc+hIrnTd12N6UE0FNWNfbR50WAoPeiTSObMxNSxkkqaAIqKRmSGJVqRLEMaD
g0Ca7jQPoO9yTRFFDeozZpm7hH1o0atpc1c6WrtrtrtoLTC1BlCsxFC9tQTXca7zRJOoFdKjUGmI
AZr0nAdSaeQ3uSEUKFUsQwAaVmKA0CaFq5GiKCimFqBIpWvV9WNh+TGCrgi+sagDS30lqtVuC9Xr
lVhVq51erLXYK7K7a51dqLUGWgwq9Xruq9X4HjVqbHrwvRjcV2uK7mFeRqEjihO4pJgxcqVgItRA
NSXBub31jj76aAiipHGOn9iTJcE01Qg+SU/cp5qLEC9NHTRU0ZFRRk0psAajNmnk7WBoGib6e9q7
RVwKHcaNxRK37q60el6SQNUZuCDfnRS9WCh5gDBODTC5teiL0sxUghqBtQPFK1ho72oNejzFqLL2
KLEirURXbQQ14jQiNeKhEK8a12LVlFdy1KQaJXtYm6C69goxCvEa7GFWNWq1Wq1AV1NltIVJXt07
qFXq+hosAbXKqFCqWLqFEjM9RA1a+oIANFjXdXI0LCjV6J5LMpMkCPSRlUJse8Aqt6AsOK1WrnV/
orVbguavpYVarHS4qwqxo1Za7Aa7TTSsp7rhpgCsoau413V5FoEcBUGpISKItpECXmQCoz2t3MSA
TUvXUAkooVatRjQ1IqKbCrajp/Sr6X+oNSKA0kI7UuCyh6VGv2sSQaN6vRCmjHzKsD3vUbPecA0C
iUsgNXo0L1e1XBrlTX1XoaCk0QLk9gx2UqzAmudTAle2kHZUciuDYAdGW4BZSk4NBhXOrmrjR5bV
39xpiADcm1Wq1RlQHH3RLdewV2iuVFlFeRa8gryGjI1d7V3Gr6HowTta4K91gxFeRq8tB1Ndymu0
GjGKMVeM01La79oo2JHX2q1DnVqNSP2jmSqgUFJq6ovkLUEFWoCgh1NX1vau7Qv2jvYlZnuCDRUG
mdUGNkFz69/obVaudctOdXq40sKsa51euVWqcc/IQLioEBrxijGakupDXpJCD5DXea8gFPMTV76Y
4F5JCzSRFVErrXnaib8GP+q9dwq9Sydg5kgW0Ioih09MmwDA6K1x9Ferir6c6562+qJsG/SesqFl
MIjHIHzMKudLGrV4qKWruNfcaEJJyr9qjkq0AKvautHnSowYVa9WrsuApuVNcrMpNdnKMWU910vZ
iKFiPGBTgWVyCGVkHJrg0y3oISUXtBIFCQUbGmewPOrBaL2oksbUBSxk0IxRAFTdt4mIBdqJNXo6
Cr1cWvVxVxXdozDtMhq9A1er0KteulB2ryGhIKbrZbMDcXOinlzvp3UzDt5tSqFpRc37C7O1KLAU
FrvtQc30Jq+hNqaYAB0uXFhdh4VFCwoOaLWEis1RgpIDcehfjtVq51er/QWq1XtV6dyGE7V+Qa/I
r8haM6005p3Jq9RoWJ7EBkai7GjzpYgaIK0khFKUIkIHBc1fmWJFGu2uddw0xj9wYs9kFG1MxYqO
WoT7R0q9XFdwoyCgwPEbUxK0khNRHlf0r1cVer1zrnVqtVh9dY1bS9EclP2gdxtYZAuFS5jUF5pi
jCWdiglr7RXlWwcU5Fdykdqimeib1ZhXfzBq+ltL62F7UEsT1IpAQhANXuPHegvK1qaNWoxFSjFS
SZaR+2hSIBTOFDyFi7sK8jivMa8hNG5IFBb0IqCAUXAoyE0DzkVeyIHuYW0vV65mu1q7eQjJpkIq
xogih1XrIzE9xJsasaEZNGM0VIFyKvV6vV6PMuFIW1ylqtoGrvAoSLTsSVvYKBQBNMR23LsqgCla
xZ70FNdKvo4audWJpkNvGSREBQFqUOa6G96SmKgCZbqrSyjkPUv6Nq51er+leriiaZwD3inNzwFq
7jVq7RWOB2PGDpfmDoCpRepe/Db0Yu67AgpGGDxKKKmgwFXq9DmXAEY6VIeYOh5UGtQntSzIa7hX
kFMxAZyTIaQc42sqG4q4q4q9XNc651arVb+i3q+ljVqIFjcVFfva1p17gidoVQC7KGbIUUZpWpIH
NBVFAcnd67qWwKyGrqRY1yNWNFmBEouJFNBhXWrUCaBoGjQVu6wryKVFjQUCiOQW1GudEXpkN0cx
kRFgi2DkgML1aitzau2gKVCaWMVyUGWixOpNKwKliKvyNWNdhpVAFtL0aIpxS1GCTKWpAKUcqFqP
WmFwVOg510q9qZ0JUUrA0yURoRVqC3oACgLm/aSe8qtqNGgLmwAJonUGii2LClIAveu0V3IpbKAL
Sd1QqxNEAmLGJaCARevaudXq/o2FWrnV6vwObKq3XvFFjTm9X4iBRUUbiixNYx5EGzxMKdSK8lwG
tQN9CL1cigfUgBLSXBjcgO9j3Xo0BVmun6n/ANY6VIlwDar8gwNN1ZGqzCldhXc1xIWRb3Zr1ftW
Md1BQKsKsP6Df0bGrVyrlV6uasatVqPJiwuqXXIYAr+hL2eNb3hFLIxCNega93oggnnQvRruYUGW
u0nQoDTRcyGB8rAxO5rrpcVcUwJYaAEcBOrXpca9KAokmYEsHQURRWu29CI0EAouoryMaNHlV67q
vpCxBmBVozcGlq3HIOQ6KB2yWtGNBXLQroaJuVFgouXLgElquaDXpWIogNTIRVqC1a1dasO1n7mV
LAC2hFAVeidPKlLL3Ht7QXJq1XAoyi3kdqVSKdRdVIpVAVrgEL248XjT0LVb0bVaudX9O1WNc6aQ
Clc2NFqY1313iu8V3iu41aQ0IZTQxpKONJYYslRQBB21zqSNXo4i2OM4pFAGhF6KmgWv6S8yQDXS
jz0NXq9J+qT/AFg8i4v383RTXYaPIjSxo3q16BAq9j1o3do1sCfr76tQ0vwcqvRJrnVuC9Xq50lF
EG6cknI8gYdrNaM6AkUkjGhfR4w1KigMncRyNdtG9AtdXU12E1a1dqGijCjYEzkUkzBgbgGrg1au
dA6G9Xo86FGl7QFYlZ3JqIqwvY9QvOuwUbCjLTOTpcCu69WYnxGvGaCc2Fihs0wHbGxuaX0Hr2JX
tkJJQctL0DXdR5mTkFFdSe0qxFwlCMmipFByKBpXBox3o2WjS/bUrc0XnoBVtXlC0XZqWOuQpGai
RZnF7MWKMWAAp5KBN4F7mo8zAsjSdtqv9Ber8Nq51er+lIQSKNGrUVFWF4sdWoQRigij0rCpBZfX
telFhR66HS9Rc2k/Qv3BltSqrCw0lQV224rCu0UEtQkZKWUNQN/pb1er8FxXOrV20WoG9MNSL1e1
XGvLS40ueCxq1cqltY9U5CRyzkWE5tCa7TUcPIKKI0C3oqaK1YVau2rVyFKfuAa4kWrEi5FFQaaO
ipBhvRYVcGumg0NHgBAp56NE9gYqVjPPoWkNidCRTPSIzUihdAaOjLerWr7Si3BNDUCgUJK6MbVe
5Au0pe97mM2F6vpau2ibUSWJoAqJGApFuPbu5CpUvQYrSSBqRiKY3KqSXftRFJbpoKvRNXqRiBHE
TQCrViasBTSiiCxVKC0RarVIgFAVEvajGwPJYIwqg0bVaixFd4v3Vy9S2t6vwWq1c6vV+BjYXZqE
Qpl7SdSbnktY8n3eoQCJY17dbVzrnV6uKLir1cVcUGFC5pRaiavTVer1ejpB+uT9CPamlvQcihKa
8l6POit6K120VPCTai1A2ZXse4X4TRkYUshoG/Deiwruruq9Xq9XoNQNXo1bg5UND1GjKTXMUjX4
rHW9XouoMzEDzKabr5SAebt1yzZaie9ClIFWBrsFAWpidLirmudXFXWmJs3epScUDehIK7bimjBr
tZC7lijMGvyoEUTrz1NMbstG1RJ2U4NKQRa4AJo3FGljJIsF4nqFjUq2dDdelA0Ke4FKbiTqz0vM
xgEyWpLXB1HM9vK9MST+kIBcsQO5mI5aAgVe9PzVlIoC9IrWq1lkYO8ahRQ1d7AdzEIBR7rBbEyA
D7mpAKAA0QGxUUSKZhUSBnNMQTErM3B7WAPbc2q9X4OdXq9/QtVq51er08gWvMTXlahLQvV6aRQD
L3UtrVL1OhoAANc0CVZG7l9V4DfxPRRhQhc0Mdq/GpYFFZEY7vFzDCxer0L0DXcavV67r0avV660
FJqGLtqT9FHUUOY1tVqI1I0kFL0S7HilXQEig5oSUZDXcTV6vV6uK7hReu+r1egaB1vV9DparHQG
ripDzQ8xpyq9X4WYKJHWRncml63uGJtHfu6tlH7rXqKE1arUDYd1dwom9WNW0PMCJQbURRQESRkE
FlpZbhSLrIDTIwrrTRA12lC03NJFajVuA9BcU8tKg0VLUzUrXRh2FT3VyNMtCwI4Tyo2q9PSGxmC
1GTc0DQNO1xSuQGNNQFgtgrmlpRy0XkXkNFjQFqVS1PICB+pRYWrsY0RalPNuQYlyiBaverdpdxV
rV+oChR5VJLSozVyFLGTTsFVy70sd6CgAKBR07zRYkUzc4V7VY2B/TBH2pwWoqDVhrbS/B21zq/p
WqX9WschFSuQjfp71qOULRmWzMTV6Io2pjzJvVr1jMQPpGRWD41w8bKbmgbV3UvMBSaMT2L0rE0A
SBGxpcdjSwKKCgaSfo4UPGwq9Xomie6o4nNKgUcRFwUK8FxQ7rWarNXa1eNqETUIRRiW1ChV6Bvw
DgNE2ruNE0TSnmPRk/RFfvkUg0h5EXpEPeo+6bxlxIi1HJ3qdbGuQq9c9L620taioNMgIdCrITUS
kG4NSRi1iAQDTxUt1KS0zqKDA10q4q4qR6jHLtJoAKGcmjS0LMea0GBrrTLSdOEihTHQ3aNSAToD
ROpNG1Cn7grG7IAT2ADt5W5NRNKK6k2FMS1JcjtVQhBLEAO1yAaIZqAC0ASR22MpNBCQRQftpZFa
mlsWLMVQLVyaRVFGQ06AkKBRNqB586J1vUjG0SdzUxucdA0nqW0vw2rnV9L8U8RNd1qDC+hkNgaK
KaCAUBRvYm1BqPPVRYRykSfTMoYHFjNfhx0MSOhCgoKBVqMamgoFeNbgW4ZP0cI63q9XoGiavUrW
oGrGo4y1JCqekwuCLUBelQUAB6BHI0DQ56A2oNer1ehV6JruoMTTVer0TS9Vvb0DzEqhW5OpQAxl
aZqQk1H1kYFlQmkQCraE1z07WuoJrxrTJ2lVJphY6kXq1FQaAAo007KRlFwWtQIYEEUVBox2p+40
rEFiSF7mPZXYKQXrkoJJPSgLU4JCrzIZq+5GBuOtG4q9X05ar0YaREWfkwN1NDS1EUTerUo5yFbo
LlTYg2Aana1O16AJrrR7kPdy5Go1AUkkxrYStem5UATVwK7TZrLTlmVzeoD3RsKYUq2pY6uBQW5L
9ou7EdAKtRoCx7waKighopydgKLXqBO1WNgSRUSdq8HL0b6dKvV+C1W0vV6vwTw0I2B1Ioagix0N
MLFRTMSYYXMn9Fk/Rxd1d1d1d1Xq9AAkRoKCircV6vpfVlBpVtVj6LGwJ0BoG+pNKdL6A0KYA03X
SJRx2q1cqvUkQYMCjOO5FNi5vUduxOSmIChLGgilL0ovXbarA0Uoi1BaVgK7zRa9B6PMWoLevexq
1GrUaeANQgC00hVkdWoGiATeioNFKK8u0iu42VC1MwWgb0eoUCrGiLkKCLMByKkFGB7h1ogig/K9
+AAkstqNIbHIU1EaOim4JFMDY0KS4DsTSjkq3ryGixAJJI56XVQ7WAHO1dxIUgF5hYSEkJz5mlUX
Z+0c2KoBUihhAe2RxYhCTZVokmlWu6vGbiwAtQ6mrWoi+iiixp5SKJJMSdzU12MCd7cd+C/BbgvV
+K2l6vrN+ng99OlXo6WBpjUEN6A/o0n6OLxsaMbDQaCo0txXFE13Veudc9bejeiwrvpm4Sxv5BXe
DQYCu8V3Cu4UDXea770VrtrtFDppY6cqvxZEdxCwuUa5AFItldWKE83W1I5UxyBq7qB0J510q+qC
47SATYBiCHNFgeDtog0VBoQC4ZlbuYUGDG1qBrtBrspY7Uz2q9MwpBcgDQKa7aIFmU0rK1W7aUhq
60y0CRSm+qmjV+fuwVkUkMaCkkiw7aBFnUUBye3bcdyoAKtTC62uelAAB2e/MUvTQ1ZmICrViasL
PLcql6CgVajRNjyYFtAvKSQsY1K0eZHOrV0Jq+vdpIFI7bVAnarGwJqGPsX6G3DehxWq1c6nva9X
q+g4/eFe6RQAP6NJ+jhVSxZ1UO5bVIyaCKtNOBSzgnuFd1XJrnXbXbVuEkAeUUHU1ccJYUXok1fU
6HTutXfXcaua5191WNAWog0pNDpfRel6ufRsaZQQwKPKL6K3K/KaL7+29JDeljUVyFXruo6c7nWN
rFm53rtBrtNcwOAjUk0zOrKwYI9FeQNRtepHvozVGLkD7eV+fAejx0sgY8wVdWFFbVex7hXWhyrr
RQirVETZ1AdTde6wFXomvcDnK1ILkCr0TXca6UFIDP8AcF7h220LWok2VKvQABL9hJLUqWrkKJru
p+a2vUN/HSLUj3qNNLVaiaAo866UzWryGlZTViaKNSISb2p2F4ULt6F/VtoL6X9AqDUkNWOnPhNF
uVmIWMmoIOw/0eT9Gqx3HaA8jdoJ0ApLAl7B5C1CkQk9oq3pS63q5rvNd5ok1eu4V3Ci1XvXa1vT
GoPI0ov6Nq5VerGuVZagO12iW1zUjdryQhjKqIY3J1YNXeorvFwdLU6toOrDlV6vpbQakUKNFQak
ZUMUqk91qIFA0erK1r2CKKFqXiZeTx845O4EEMjqRTLRpSbsKC1cimNwhschTUJuDV6I5c6Cmrcm
QmuwilFE1zNWtQ5F2S6jupVsCKc2AUmgAtBS1AAh5LgLdQoFE1eiauALu1LGq0HtQUmpH7aRTreg
b0ZEB8i13XrnRW9di0FFJY03RejGwLm+OAE9O3qWqx1vQPG0atXgFeA14DRiYUVIpUmY+M9wjvSw
ClRV/pMn6OxzQiajGBSSUSSSrGjGwrtrpQVjSx/YIDdYlFW9QjuBUji8TGvBXhFeEUIloIBRF6aM
AekNedKpNAW4rVyq+t9cwch/pXmQPuyEJd/vjItQ7rlnoydokd2og0hUUpBGl9FBNEfbarmr1fTn
pfW+s8dxUM3NRait6AsT2mmiqzKVa4HTgNBuRsaZftjkNilirgG1qK0oFybm/ImjQp17hGpDMEFd
y1315GoMToTYdzV5GrvodtrWo03MqLC9FuZF9O3tp3VaZy5VOYFqIvRFGhzrtFXqxoAAmUAWuVFq
JruFM5NBXNCMCgAKAJrtrtFXVa747FlINxXcaL3DKKjN0UGwN/Tv6tqtpyrpQPpFQQISKWJRQ/pc
n6AWt2NXiFCJRRUUBbSwqwq30JANdi12LXYteMUEA9BhcAGx9A9BoBSoBw2rlV9eVX4cy/aD/ijA
70HNmJpDcEG/bRJqxq1WFdq0FFc6u1f5KvTzOKaeU0s7ihkiy5ANBgdLmr6X0voWALkBajXua9gD
dle9faRcirg0Uq5FBxXfVxVqNEVarcm7lZJGWmQELJ2mxFEXrpV6TnXaBXcoruuACaKMT4hXagq8
YrvWvIBXkrvFdyV9hoAURRbn9ppu64UmgKVQTcANIbdt6VABVwA01L5GIRRXdQUmgAKMosWchU5q
KZwKZi1KlgABVia7RRkQUZaZ2tHe0tqHKo4gK61yqVJKhxmY25Acq6/VFRXMULGrGr1f0TVtL/0m
T9A6f0h10tVuE9LaIOevKr68vRzF5izRr3ih9tdaUdtFua2sWQVK6WMrEGS4Lqa+w0UWitqIYUe6
uVFFNGEGmhNFCKRipaUBVyOYYEamr0VBM7WFY6WDtyLG8chahXcRXI1Yiu6iitRRlruYUsvMG9C1
zTMAAwau0VyWmTuCOUKtegoaj2LRYmgjGhFV0WjLV2NWo9V51am5lNG5kAEdooE0SKKUnUjQmvuc
9ldliZqLvSo5AVFruJoJc/aFaWj3NUa90uQQAHKi7GggoC9dlqMiCjKSbO1LDXjtVhRc35moorVY
sSb0iCQKLDit6tvVIBrtIoNVqvar8V+C2t/6NJ+gdPqLfQEXpkIrnVjQU8IUmhGaAA4L1ar+hY1y
q9ZCdyWkjHe1KL0EqQdp7zXQxrclb0UFdjghSAOhU9vMAM1eRqMgNXQ12C/awJoqhowijERQkKqm
QwpZFIuLdwNO4UHJpmLFRc27VmYGkjZzHD2VaulEAm7AdwtYVdhV1NeOirgszhVmfuBDBVANqNGu
0NQAFBq7Ca+xaMhrrVqtcqdBzP6Tel5k/axNgouF5FjyAsaA+6mNqYkkECrgUHBpkdyIkWu4CrM1
BRYsoDSkntJoJTEKMYXqT7msBQua7QKWUWf7lEPLxLVqVSaYXprCj1ijrm1E1HH3kC1WB4emg+ov
wEXrttV6sDXOgav6VqtV6v8A0OT9A6f0oxg14xXYblDXa9+xqYEFBb1rVe1Ek62qeDyVLAY6QHtX
mJYw6clo3qOS1d2jJeu0WC2og1aioNFa7SKHcKEgpZCQJeYiDUYWFWYVYGjEpowkUt1FmFX7gUN+
01BH9zG1cmaOMJTE6W59tdlqbpyJ7yp7kNWNu419howg0o7a7jfuFXFGxBalDEFlWixNC1W5irig
QKJF+5bKRRsRfkvIMLi96AtTciLlmonkBTDl1DKaETGvGooFRX3Gu0WNhRkFxdqCUIwF6VenfuKg
JFQWmlArtZyqHhDEUzXpnvUUVqteiajj7iABwWoGuv1t+C1Wq+ltL620vV+G1c6vV/6BJ+gdPWvc
f0Ei/qWrlVzQq5uSVYW0uKJplDAB+8fawrIjse3tpnFRhmqx0AFGr6EGlDX7RXbTwKSMdbCFwUkN
zP200pag612qa7SNO1TXjFeOvHVu1ZWNo+bW5E6EUXFFia60UrtIoMRQAsXYUGQlRcdzV9prxg0y
BK5sVULRYmiopmFkPLuPoXrvNCQmkPMVJ+mP9LdFF6NHnRFKASatQIBMgFBi1JGWATmENHtFEmu4
CixNRwXMtgAtgx7ysdBbDTkQRqz1FFahzpmqKMlgAOEG1MDSkn622l6vwW4QavparVc0DxWrnV6v
9bJ+gdPRvz/o1vStwW1cXCMLd1+CeIXDl6Q069yutiq3oDl9tWFCmtRq+t7UGrkaNhVidCt6MAuE
IFiCGIrvBBQVzFXpWF5SLykllvdSbVcUTcdprsNAEUTfS9NGSQJBQFqsLCR7AmgRTjurkoJ5lqvR
NiRcDQIaEZoItdimjHXjbVb3N+5XIoPek6MTSdG6KKYU/ILJfRlBUoauKa9LYHysT9xqxqwq9Amu
lFy5RAFJGt6UXJApuVM96iitX6qLVDHdgABx2A/oFquavwW4r6kXqxruoEHitXOr6mu4fTyfoHTi
P9RtXKr62Ol+VWq9NyI1tYEAghke4uDesiOuS1GxYc6vRJq9XtVwaEZIdWU2oCibUDzvR50CKIJq
wq1MtFaBYDvUsUOlu4vFcmIg9zihICKFquKJFE1er0aYNaPv0IpgRSSk0rA0WtRbgN72HYgBI0FM
bBbg6EA08YATq5F9FYikYGlNhe5q16m/QjOCHq5sUkalhtQjQV9grur7jXYaCirqKaWrM1Iljp3V
egRRajJYhr0kYB/USRaGM9wAA9TrxX+otXMVccFtOVWq+nSr6EUUFfcKDjQyKK8yUJkNAg6WokgO
4duwW5rQYH6WT9A6UTwXof0+1XHBYaX0tV9WFxGTar6zKCsZLKhtRAIkUqY5AK5HVEBp4+aKBXeB
TMTob3q9qNd1WoXBC3rtI1IFEEUrdtDtYBbEg1c1yqy12iu012NRR7nyUWNB6ZpLpI1BgR5Vu0qi
jMxIuaFkW96C3rsFWFGjcnl2R9e4WQcv0sB3M4uO/kF5DrJ+mPqTYxoCPDTRkGxFBq5UTevZqvVz
X312sa7DXYK7QKLKKMwppWoLI9eEmuxQSyihKoryAm9FrV5L1zpnGkUdh1okARREsAAPWvw2oH6k
gGrEUGq9GRRSurVauYrlVqLWoOp08jX7yKDA0QDUj2JNXq9I/bSyqRcUzABrdwYU7CyEKpmdmEzi
lyAaWRT9DJ+gdP6lauVX4Oul9LVe3BarV22PBashArnmFNxkR3BHNSAKC0OVWvRBJ6aW0tVhRUUR
pyJ6V9xoA0dBRtQlIKfcFZiFc271vdDVkNFK8bX7WqzVz05V9tWWu1K7EpEQmJfuZu4qKvzo3FMe
XuxHbHQH31La0f6aW3fUnKn/AEJ+phYxdNCL06dpBoDkafrVwKMwuZuRkeu5jSKTXiFFVFRxhqlk
7R3saCmu0V2igBp2iuwV2tXaaji7aH3EnksZNR9PoOn9AtUpINKxBSUHS1cxTAMGBBR2FRn7ioNM
CpeVm0bkGY0HIoODQaixNd3LvJpAhBVbMxuOioGpoJFq7ClyGFLkKaDqauK7h6kn6F6f1G/AdOVX
0tV7cNxV+G1XAq9OoYD7XU9p61LEQw7VIkpXvV6VjR6E3OhJFXq9E1c1ejar0GFXptbAGcANgtzn
jtLEjUVrx3rxGuxgLNYM1d7WEjV5TYyGjJ90dnJAqwpyBQI8aczRHJOdP+k/pI5sbrGK7eStcE9z
Ke1pG5FeQa4P3FuSJ+puRi6Akk0xoi4Ze0o1wJEonmbU4YkKK7RQUUEQULCme1RRl6d+2rkkRsa7
RX2V3x15I68i0JENdimiBSm1MLm4qGL7gBbtsfor8I+qmWxq/MGopL13CrijapHUlbXsQ9iAzElj
YhxV67YyrIa7TX3Cg5oODoGIrvNFhQar1G11IBowoakxgB991gktHDY+nJ+gdP6bY1y4L8VtL6Wv
Vqvx8hRN+CaMUrF1RriZO5CKWKkNwKvUchvN9tA3q9XruoyqKOQa/JamnJoTGlkBBnsVcEdwq9Fg
KkksGJJx3KyZq84bW0FC1Flpu012igopowwdbUrBiosKkFI4FG3Yg5Cj0i6NzLCw93JNR6XIoC1M
OS8zRuCBYSfpU/caiPKMm56Ke4inW4Q0AK++mMhr76DtXkag9Fqu1LCzEntDMgPmJpHaxjJIivXi
IoRUy2oA1FE1FaNxRa4xlBcDTrRup+jtegat9RfVlDCRGWpDziakJBmcUs9qM4ZIox2kWKyEM8hb
RgCCjCgxFCSgwoi9WN25UF1NG1MDQD0GcUuSRS5CmvMleSO/nS/qSfoHT+l2q+vLS3DargUdLVyq
/Ha1E8UlmUgpSnncWlHZRJuHsUf7TNQEjseUZDE/dXOigNdi12LXaldiV2LXjWjEK8ZA8TCiJaYO
QocUy3oIbv8A5MeJ+1gQQDV6IvXZQXS9FgKeZDSstdwt5hTeR6VCoY3EfMD9bUD2sKk6LY0xY1Hp
2khTem5m3axNh2kgG4k/Sv6m6w0ejNyj5Bjajyr3Fr9hFWartVzXca7jVzQJrvYU8hYrHehCoqwA
FdAxtXcaSJmpI1XRnAHaxqTHYtBD4xqQDQ5fSXvXSute9dPqCAafFVqWModGiN1QqFdgCb0RXSga
GkcPc0kRUgmg5pXudbUaPUE3jlW11ajCho4yGmxWowutRwEn1JP0Dp9cfT5Vfhtw2q+ttL0TfitV
+PlVhU0d6VbBCDToHUxEERCrCghNdrCmPcQEq6WLIAWQHuF+8EF1sWQVdLqFceBaaJQeyvGaKsK5
1er1ARUikNFJ2kTJcOK7xTTAU85ozSGvI5FpTRguREooKgoV91HuFOSai6dKvcvQPKTotrve6che
9AWB+1oxTrcXLUBy/SzG6p+p7ExVbkouehY3ZugvXO4kNeYUJhXlWhItCRBXlSvKtNIWqNBZRRNq
vQa1F6IZykIGhYCrMXSIilAA4xy/p9tZv1aMSArm40OnWuYpXJOOTYIC0mOQVUGnUALKa7uStYCX
lzo8qRbaBiCs7CvPTZBoF3ZF7R6kn6B0/o9+O/Bar6W0v6Fqvx8+GZSCr3oNdSL123pQLvMsYeUy
V4yQ0bVZiwjN+yuyuxa7Rfx0YgQihSxJHjNyrXBIHkYAtXarAqRSNZnCMexa7ErtWrLVl0+6u168
bV46tGK7lrvBonke40wYsvInovQDle1SUlPfuj6RWvUnMqQQxsIzY1Lant2p+pyC0Vd3JVtTdFXk
x+0Xv2iuwXEamjGoHYaWA0YVNeGjGb9gq1C9GriiwpIjS2AJAq5YxxEhVAHpcwfp+ul/prVKmp6d
ChuKOrdI+mN9yFa5ipFsXBNBbVY3ubBSKvRBaoY+4tjizROpElqYqa7TeMooabmpuPTk/QOn9B7h
XcKvXLW3oWrlV9LVyrn6PIetL3d5RlYfaOZoLcSo4BJv5BYSig165UovTLbS1W1FqC1203KgRXaD
RQU0ZAEjKfsI+y9kr/Ha8Yq6A9615DXe1FmrnXaTQU2KWCXJLAAq3cL9/wD8wLluTWsGI7pKjUks
PuUXVhau4dqikNmc9zMOQYdoHcWFlT9TkFoqAu7A2PMKLAj7j+s93f8AaKMlKjNQ7VDTigwNXNCr
XrkK8gppASsbPSoq0Tau65WPvAjAHAP6Fb6h4QaaJxT9wo9YjqNGPJIO9UQIujIDTQWLIVoMKAFM
dYFsuhjU0cdaYFa7iajhJIHqSfoHT+gM2hpCVYH0bVy1tV/StV/XkS4Znue6Mglat3V3kGSBXDxs
puwqHqoBoKKNqa2qrevc0GIosTXbXaBQq+hUGildhpUuTA1lYh+zkEFdgoqLAqT2irUakcLRkcFT
IxVPuQfcw+1ejnmTyA5SdYyLN+qPoRQvcU9IOVG9wOUn6U/VJzaKlUqbVGPvPIqOchHlb9VnNRxh
ad7Akmioocgz0rE1ema9KjOUiVdO7mq99LEKtQ58Z7gRe3EP6kQDU8VqU1cVegdCjPSL2rwSOEV8
qNqVgQ0xBRrrXVlFhwOgYJCqn1Zf0Dp9Xeu4VfR2tV9WHKIHt476WrlXOuVX9C1X4+XoAVerUY1Y
lVNePtoHl3cu0irq4kxyKBIpJLmr1bQCi1hw3rnVzVzXdV70TRcARZFzJEsgLPGwcGu6u6u6jJan
yLFZJSfHchQK7DUaA0vVv0ofsAvV7in6xgkt+qPoxsCpCg8rdzD7WY2Hae1TcSfpX9Uhu0PUjkH+
yMdpk6AWWQ/fIPuWUqPOxJm+zzGgyNXbRW9WK1ZjSQUOVEgVZmCRXAAGnWri59a39Ukj7x4GB8JN
DHWhAlKoA4WUMHwTf8aRKbHJKwuAIZLRQEH6ST9A6fUE2N+G9Nz4Al66cFuC2nP0rVyo8+K3o2Ar
mdOVX1aNWLL2kAgHtau5lp4kkDRupWU1caWoUVoira3ppFWvPRkY0DJTBmqz27GrsrxikkKCySLL
E0bCZrCSQ13SGioNACgjGu1Fryc/1UoHalgWFLy0HWn6xg3PUGwA7mtyv20i2Eg5fqNqH2ljdV/V
IQWh/UKteRhyY91LzViS0xswUmhGKIFvFTLakcg3VlCgkKFokCu4sVj7gFUHT2q1652/s+3Ff6KT
9A6eper+iWAoyAV3Xbjau1jSpbW2l9beryFX4rUfRJsG7qVgavxEA08dyb2ViAFFE1LGGA7wVJA8
ouHuAaZrU0oFeRa8i0WJrsoKKCtXaaCqaEHIiMUDGaIjrsU0Ay15AVNq7WNCI1/jFeSrs1CO9CIC
pABVx2KPtBvX/wAjTL9t70/WMGvc27YrWpj9wPJzYRcjUtOLKv6pCC0PUNyUG3URir2Dd15gLxoO
1mtXk5ksQsLGhEgq4NXtQckIjOFjAPAOug/p3Sgf6zJ+gdPRvV67qFuC9XFFwK8teU0XJrnQ50qG
rEcbA1G1xauVX+gtXKieK2l/UZOYN/QKg0UII6XBqxFdq3lDCgLUslqEimnkAoKTQjFBDX2Cu4UW
aiGJ7KVb0xuPFehGKaIGmSgXWg615KLyGhEzEYxt4XFWAruoOakYXPb2L+leQtyTmfZTTdYwbV23
oHtZ2sAvKM033MwtXcLAdxYWRf1SEEw/qI+63K9lUdpfrKDU6/ar2HY70IVFXANnNdq0CSzAhUMh
IHLgPOh1/qNqv/WJP0DpreiwoyCjJXkNd7UWNXOgJFBzXkpn5XNEmr6BCaEVNGAEIB7hRN/QB7WB
uPoOXHahV/Stwst6U+i0YYlSp7itXDUKkh7gyEEIaCUIjZpCKtclGssdwqURarCrWomrm3OrUwNd
pNdhuEAruAryWr8mkyFau9GpoUNPDIp5imK2SiPuqPq5q1N1QntHUUy3C3Y03IxiwIvVyKAAEn6V
/VIR3Q/q7TcdLf5G6dTLepQOyIqBdjXaKHMhCyiIW7Re1wEAHGdR/TzVrf1eT9A6FwKMld5ruPrE
UAL/APyq9N0tQ4b8BFR/p9W1cvSv6V+O3Far6Ag0QDUkJuftIawsRUhuvbeksKkmJo3JWa1GUUrA
0t6sDTW0HMluV9CRa9E08naCzMQlBTQiawCChMopZaJuX7aj5gn7mroRzJ6GkLdg6jo55AFWq3c0
ZomudC1pP0r1l7bw/qtyQ2AvdukY+2S9nVTFCRcIzUIvtCi9tRoeIcR+i6/W2rp/VZP0M1lB9IAm
ipHCNTXc1c7ljYdL8V+ACw0v6Nq5fSX4xxgUTqRQN9HjBDxsoElqdwQoJpQBRVTTY6GmxWpomFKS
pEtA30NqAqwogUeVE13ijIK7SSqciUWu96YkxpHevDenhIoWJcr3J0I5DmW6L0bkCaDkrQeu/mz9
1eQ0rla7ubOSO9bIwAdgVHWXtvD+oU/XtABY9qCwlvcIrRwJHbW/Bf0Ov03WuldfqL6X4LaAj+pS
foZSVtb0ojyoqDRjFGM0QRQ1toRRr2Bq41vwqvL0uVX9EelbQmuVvUAonTpqwpTfV1ZSe+yXsGYL
313LXOr0URqOOteFwD5FpWvRlCkz3pJL0TTchzakSu5VokmlW9Koq1C4rup25C3cbFx1PReVdwv3
8ySaCWSragVaghNWtVuEoCVRVIdaP3EG4/8An0MgJpUUpj+McHQA319u4UDf6/r9c7ADuN45r0D9
Sfp5P0eXkTc+ha9InaNb0eY0FXoGiaJruF713GgavV9CwoJcBefoWrl6NvVvobfUFQaEairCzRqw
Md2MdqsQt2BDiu4Gher0yJTY6mmxnFGNlrnX3Go4r1IbHuoOlvIopHuFYUXGjC4Qr3KR3Xsxeib6
xkAv+nVYySUFhGBRANFRYx00fLUdZXIKm4BtSyEBWBLdHBKIoKQLGDqKGoo0PpetdfS6/Uc6B4Jr
3oizJIVEc5JBBq/pMbCGQsP6DJ+jjLAUZGuiu9KgGhcCjIaJJ15ir1eu8V3mixqxNWtQo0DQNE0W
vUcdvQtXKr+pf0T6Q9LpwX9A6lQQVsxjFmjbtAPcrG3cKuK56GNDRUXSmgufxjX4xr8Y0IT2+CSv
DJXjlrxvZVAI5aAE1bUUSTGKSO9di0NWYgqTbQxAl4rA0OrxhqC21FBzV7qigrj+IN7anqf6L1+k
vwGr6z8xRNFgQDSSEVFzHcVPetGRRRyUpZ1J4J5Qax+voX+pk/RxeMdoxzcQIKJCgymixPERV6JW
r1bW1DkaNexNRIb8Nq5Vf07fW9NemttOmt+AcBOhFqDXqwoxqQ0YJ8ZoKwAZu7yA0x5qBbtFdgrs
oLXbRSu1qs9TXoAilQmhEKVAKeK58Is0faEjBqT9Mf6hV67uSm4puZQ3Fd3O+kijtpmCi96Ck0UI
Kxm1q5iltUMUavwew6amh6g/prEAfk80cMCKYJYkGl6FbAE3itaVwoWTtLdzsq2NCc2M5rztReRq
8MlQqytRYA/Xyfo4ke9EgU0gpmJ4VUmhHRjoqRRoihwnQilBNLDQFvpL39K31BNEW1Aoi9AkanXo
NTa/D2ip1IZGcusNq8R7DEa8X3iJqEbdhiPd4/uKdqdaVb0oCi/BNSPYyn7Y/wBV6Y0BYA2LHkBY
dCTyWuhqT9I6yg3j6gAa2FNHVqhjAk9tRQo+qfXtx2+qyi3ai2pHIKPcMvcGBBAIK9CgFBSKkJNH
9fTQmlauRpFSlKCu4UZloygKSxZf0/XSfo4r2q54ljoC3Ayg0wseC1WpTQWlWw+jv9Xar62o6AUT
XTgOnUWIoajhIFcxQIseAgGgqj0HLAzMSAL0osH6L0Jokgk2E1KCak5LF+puRXmaYcl+40wuAb0K
PTu5MPsrlYgBr0WtQar0GFMl9I3uOA0dRoPUHpjlRH11tCAakUq1Y9zUjNGwZZVIKkNV71enPO2t
6IpY5CLstA30N9Ik7m+vk/RVjQQWaQ3HcxAtxKhNKoHHIlwRVqtQFz2qKJWiDcJ9F1q/p25/QAVf
itRN66cHtqBfgJ4PbTtF+E17UBRvV9WnKvKbmNeVHnS0OZYXAPcZusRFpqgHNzSHlTGluDTGwHI0
aB+5/wBHvIxAUm5NiTcnQdRTrYoSCvd3cHt9H19Drr14rfUkAiSMAqSD5AwQdjOisrr2lWIoNcHW
3I1jp3EACioNSQWNJCxowqQiBR9fJ+gIa7TS2NGFTQiUBhY6hSaWMD0njvXa1djmhFQRRVh/VBR0
6cQ4LVfXpwnUVfUUNPahpYasw7iLt0APMU/Igcm5BORmPNCVM3WGu3meRvS8yw5A3HVmFwpuGNdv
Jv0e7p3KFsW6R6E8rWF6NiF6ySyBlJI0Fe/AdD9T19EfSW0lHPViShidT9wVSa7hVxQ6mgOca9q6
+Nb/ANCk/R23q1W0LAUxuaCk0sYH9Dv6RP0416aAcJ9TpxirVfQ6HQ6EgD7GH/yputPzINweZ7ec
36o47VN1g0YXAY0osKY9pQWFE9pTno/6b872DG5a5AFjX/zomhR5MHHCa9+Aeh7ddDoNOnqdeHr9
S63B4SBQAsIe8mCSgGWudIbMZlskjM39Fk/QOlxTOBRkNEk0qk0sYH9O5k2+p6emNQKPoDg9q60N
BRoasvcoh7a916P19gOQ5Ug0kP3xt3CX9UHSjVz3DRrsUa4qU3MZ+2n/AE3qUmyHmOasOV6A0/8A
lTdWdEpWDLwex1Hpn+oSR8QqJbLRRTTQVa2iSKoaZjURJX+hyfo7jVzqsf8AQeVdfU510+tJ4LcJ
trflwX4DwHU6e1e2jA9tmFL0HS1wDfSTqNHP3RD7pP1wdAb07EkqO2M1I1gi2AJV2awjF6/S9N+m
gLqy9pT9NX525Ch1OjQq9RoFXUV78Q4x9bf6WSMW71oEHVRcjgZAadCuiQ/0WT9GgBNItv6EWAoS
Kf6NfS1HgHD016cI4BwDUanQ6zs4oOxReYr2Ujuor3VGbrfnbuZV7Q/6of0huwxi+jjtZPuapBcX
LkU4uPJ9oB7K7gEL91R/pq/303KgOV+R6dpNL0PEdRofQ9uv9QZQwfEFDFYV45BXY1oV+7hIBCxq
v9Gk/QVIoKTQFv6BfQsBRN6Kiom7l4x61+K3pk8J9QUeA8ftQo0NRTsBTuCqDkP1NyBHaWPICwLd
rIOV7GN7hv1RfplNyhBFSE1EeVO1hGe00asPI36adS0ag0v+tm+0L9qm4frXu5qTyFYA3b7ajpXt
r7+j0J0OvSunpA0fQ6/2RJ+gDlbgt9QSBRcV5DRdqExBV70zWGnM1GnavHf1LUT9CfVPoj0D6DlL
yAWUWA/UeZYXVCWNTVf7bkGJTR6pyjROSfaxNhGLlx2tVu55F5K1w5svjst7pSn7ZFFJ/rUfdQNm
XmVq/wBzgWV7BDcDgGnQ6+1Gj6g0H0xNq/IS6sGH9hSfoHT6oyAUXNEk8BAOpNqFyUSw+jt9Z7cf
Th9tbctANBoKOpqSEsZVIVCSrcglNyEXImgoarWSILf2qP8ATUoNFu6gAA4uA5Cxr2g0v2t+p6Is
t677L3E0htGoINScgo5dCBTdFKgJIjaHhPT219/SPH1+i6V1qa/Yg5QkcF/6/J+hL2+pZrD0ewsU
QKPpL8NvpuXoDhHCdToNBwsH7pCe5edP+lP0tzL/AGlzyAAEv6Be6tdaQWWnIAhtfQkeQaTVELLT
dDXZ3LY3jF4yOSm4b7inR+i9G5hUDLHCEo17a+1Dg9gdPbiFCj9UeYZCjR9Qb6uS0i3t/XZP0Dp9
QaN7+jGPqrVfht9AOA8J9G2o4BwsbUJ27nYB0NyRcrypeZcXWP7jU36Be4XtSl6VISzMtqBBEjWH
j+2NrimHe0fI03Q0n6JCtof0V+lkF6HItzpeh6p3EICK9hwDQ0eIeh7cQrp9Ix7iDQJBMqipZi1R
oxpWYUGvRe39ck/QOn1BYCmNz6BqI3Wr/UX+hHAKv6HShz4Bwka+2vtwGrJTJ3MBYin5MBYObCLk
am/RD22f9ApzZe8dsYphcRm1D73o/a7tYItg4sQeV7g13lVubwfoqW1kHJqW5YdSbFHqPIkMnvR4
Pc0OD2H9EPOiLaA06m/jayNamYkixFqvYiX7gb/1mT9A6fTs9qJvQFEegaiFl/oVvXA4Rxe2oo6n
XqeEi4KWpAe56AqQ86PNnADM1hN+kNYs14x1Iuq3JGkosYxZadbhbu1MLjvICqQppVDIQbwn7Abg
juZOjdI+h5V2ghLAhlJo9fbX20PCdCPQFH6F5lUjIJIPEyA0RbTrR5USO7uq5FK16eQXJ5pKVpZF
agb0WAoG4/qkn6B0+mNMbkIaFGiOC+oFyosPXAq/re3pcuAcB4hwD0Bw+2g0yA5VPIFiepOdX5Wu
qt9qCnF1UljNQj7i4CovVjYAFSKNW7zEdJDYJdW0Yjv9iKj/AEyFbQ/pDdojHLoznkvIv+lf0qv3
hFB0HEa9uAdK9vqZE/yEABZGpW7hSSBjq63FtDzqMcgSSLMFUANGrUYitEA0OVBmFMGekXtX+qSf
oHT6UmwLElUAo0dCKNXq+gq1ItvoSb8NqPFb6G3B7cHTT24xwHT2NGjr7GmbtpnBEYFrXoHlX/yF
ObCLkZusTC0vRP1Mtyy3ER5SNyRbBx2tQ+55FuEa4c2Aj+1L9ppTZCSag/TJ+qn6Ke5jTdAbBWJb
sl7xe3Ea9hxHQ8YocR9ORLhgTUQAKNYiRb/aH8nPyGvIKDg0y3DkqAeTN9vSulY/eT0rrTrZli5G
I3VAo/qsn6B0+kZgKJLUoAF+E0RQFWoClW309+P24T9GNBwDiOgo6dTw+9N2EyAAIO0eyj76Y/fT
c2Zfum/UjdplPKP9Wjfa6fc1OLjvPbGpANL9rH7nq1qPVRdCpBgPIL3UhuD0iojkDc1yDh/Q9hp7
amhoPqpF1BIom/D5mVldS6IrDxgt2BQxvUadorp/WpP0Dp9G8lqvc0rX4DVxRN6vQIOiLb+njl6J
PEfSGntqNBUuO7PIGCpcn2W4ZjYFfsVvtQXo1L+pEvUpFox93cBXcKlItF+mjVx3jSUC0Q+2j0PW
P9MnbaDoBahyZzyA7TSWuaA58qHUUeEV0JocA0Oh+pIvTAg8JjNqeINQVloSMtJOe1m7qjQX1t/W
ZP0Dp9Denkomi1q81JKpoG+hNqZyatQBrxivCLqgH9Y6cQ4RR9A6+1ezSEGRtD0YcibmrHuAsO4U
4u3MaJ+qT9VBb0oYFiQpkYi/JZeXkFOQxjYCu4Vej1Q2Uvc4+jih9zMORPJRam6EG4TT309uD24T
Q+jZgoWcmlYEcRANOvbV9Yhc00YNEWNr0Ywa8RNRwkf16T9A6fQEgU8lC5oIKKKVIsahd6LgU3c1
LXKlFv60PoDQr3ocR5V5Y2LgFu2yjmW6JyBcUSS3cajvcqKJOicnb9VL1C8mW9GM0IjSxgV2iigo
otvGKMZplIr27TSq0dCWmlW0TgaXpujdGa1d5oG49tBwCgedDgGh9RjYeR2Mbk8Ey9yFrVA1xxkA
141ow8/FSr2jQqpqwsI1oAD+vyfoHT1iwFFyaYmlSuVCiLiWPtMag0AANLUzKlR9zH+pijwk6D0R
xe1uRr24BTC4/HIaW/d3jtVqNzXbVqKivHQW1OQAaXqOdHQGxV+4Vb0CAaaKhG1+24ZO2mNyDai3
2Kfu60Or2oyIqpIrgaHrR4DR4xoPSk/STYRse7yEPoehX7lutCY0CCP7Ok/QOnqvKBV6JtQ5jnQX
V1BEaWOhIA/KTujhJP8AVD6h4fY6Cvaj10PCXUU0wADsQeZVa7atwmpetDqvLgiPLW2vcLcIqSxX
RSCg6KaHWVTdoe9YY+wV0OntwD6NhcFb1GoDSJcKbipP0gUaY1A4sDf+zpP0Dp6kh5G1KpNCNasK
Itremai1eQilZmMoeRsfECeh3rTSUJgSDf8AqXt64o0OA1JIVJJJT7qK12CgNL8ANM1E3NIOZFl1
jUirVbgPTsNLyGh1kNxolCl6miSQC1kJtwDgGh6g8+Ikl0a49CQkarJQexElO62Bo12igtqDkUHN
KwP9mSfoHT1GW4WPnbUi9HloxpmoLejCWEMLLQRQeA0WY0qvRS9FLaOt6iUhf6qfTPGaka7VEeXB
2sSBwG1Na9RD7pP0ax8xw3oVfgtVqkOsf6jyZTY2BrtFA2Ky3bQUevCa9uH2Y2AJqM8/QkHLgNE1
er0NbckIDf2XJ+gdPomIpm5XJoJSR+g5toGIruFnkBJekUn0u77vrzwngOo9I0NDwipTZDpF0J9G
TppCOcv6dYv0cBNA6e/C6ijohsz9aQ31NK9Dg9uAcbEAWqOwDsQqN3LxsLHgNdhq3O9KLnsFipBM
TWWQihJc/wBkyfoHT6FyRoRcLHQQD0XHK2jAmhGTSwga3F+A0NClKf6t7CjQ4Pac86ApBYW9GbWI
cpumg6oLLreiL0KtxyfpOgp+lLQFEaEUrWAN9RxHikAJoGr9yoLLxstwSQQ2gooLAlS5Bq1R2B18
akhQP7Kk/QOn0BYCme9AXIQD02NDmKtQNjo7WCJbhuCTQOjC9A3HAzBR50oG4+kt6o9EaDT2r21N
SNdqj6+lLrGPtm66L1HAeIcDfpOo5xik66X5kUBV7GpJO2kdieA8bdNL0rGwJBL0rknhZAaMT0Im
pI7GnW4IpY7hVA/s+T9A6euae5ZYjQUD0y1WoV0onRD9rMAFF+FzagLEURQOh5cLgMSikQgj6Unh
P0Rr2o6HUU5spOkS+nJqn6Zf1aJ+peA8Q4D0kFjpEbqKXqOlEVbRqU8me799ypuNfajxMLHhHX0z
HQVwf7Pk/QOn0Fhf0L6k2oDQkirk12CmDAeUikBc8KijXTQ9KNDkadjXcwHfzDFiq2/o44jQ0HB7
TnQCkFl0txsbBjc0KHSQ/dovUcuA1bQcdqlGsJ5nqKU3GrU3SM12ESJHc9Ardw9J1uLGgdexRUos
VN1/tuT9A6fVmup1CgUWa4Nw0KkgADhOhFA2JYGgTpanNtDXjJKRhfVaRVpZA39B9uE1K12pBzHD
bhb9Oi9abm2ifq9tSPSFS9dIj9z/AKqjPC4pTY9TStSC3ETYDhKg0Y68bUVACkEMtxHyH9tyfoHT
6okCiSxC8rHQGmbRBYcYoimaw7b0oFiDoRaj1o0jaEgUDf0mNdopFAbhY2ruNBhqeEn1zoeJzZT1
qJaHpP8Ap0jH3UdU/UeAiraAXo8cw1U2L6IdCw1Ipuo1Q/d7cFgaK2oXJ4L6EXARhXsx+5XJ/tuT
9A6fUnp3Fiq2FdaIo0WqNOXoE2C91yhJCAUOWgq96YaN0AuveAGDOwFh6DOAA96Z6iBtwt1LAUqq
RxXryLf0xwDjmbnpHyHpOft0iHNumsYNxxqwuTxzDlqeaUnWnHNSSBRp+q6qefdQIPD7dACD6Nh/
bkn6B0+qsOC9SNSR2+kPOulAXodPQNqIBBU0kQHAWFdxNdxpi1LGxIFuGRmq8lLzpokNR8l+lka5
pevCSLA8F6YctIqc/bqnQCunC7WqO5LEhlII4BT9NU5oKXqOjUBQOjjhAJoGxo9PIVIYNV7UzXqx
JH9ySfoHT65nFIlvoQaItRajVqUC/oseANzpjar8KtfiZgB2hqY9oDMaUWHoF1BBB1Nd445DZW0j
HPgLEl7gr0160xsNIhyl6aoeTGgL1Yir6XplJaNe0yKSyAhtTXOiKdbHSI8vcUDyNA6Cmphz50po
UOjilNxRUGipUlr1a9KoUe/9xyfoHT62S9kW/r2q2vsTrzpenoMBY1er0NCt6K2HAvXgJsCvctGE
lgiinbtANxwt0aQljaoRy0kJsygVH+nhnbVBahq55RqQWXuK8her0dHOqdJTy1RrEG9KbUb0QRoB
erURahzq1dKvV9QKk6aJzPQ0h5VahoejilRSPEtdlgLmmHJDY6lQaCgaDr/ccn6B0+utb6MsTQRa
7RRS4pPRI5GidA1ird2jG/Co4SvdpaiaVgacdzcUzEUouTGbovaKLAU0qmixNRiy8MrXNIunver6
CrVYak0zUdRIoMp4BQBUd1d9XvqGoW1Ne+hq9OdUNmdSDSdBRoasKjPPQAAUwsVBv/dMn6B0/qDA
kKttZJAKXuYgADjPMUy3o8qJoDupRYN0VRYrRFtVHLgAtrOxqOQKsVyOE9CL0FApTargE1zemjFQ
opHC5srdairqTVqvzrloTV6JoHm51HVj/kk66g1e+nbRcV3XHSu+g4ryCkN6J0vV6uKPQm+oo80p
TQNXom1KdG5hTZ+C5/uuT9A6f1N27QqFiqhR6AoiulMbm16RO3Q8yRagaYCxoczwcgL6zGlW5AsO
AC5K317wKCN3UdEHFMdUNA1caXrvryUpuHax76LVfgA5+FyWU12mrVbUE0z3FAkUSeBWsQVI7jfv
okWrusOBW+2gaU1eiL0eVA3BFMKQ3X+7pP0Dp/UmYCucjAAD0hyqXkL1GBqGGhF6YkValFuHrVra
FwCxuYwPQZb0TagCaBJq/InRTz4PaRtQOXOr2ruFi19VciiSavXWu012GggoBa71B7vukls3cO1J
FZpCq0oVh9tygACg0VF+yu0mu012mrVarnW/HH0970tC2hFLTUwqM/2zf6aT9A6f1E8gedR2HqlW
ciIghQKtTdVF6DUeQN7gX47g0elzfrSLYcQNqbkACaCCk6nlR613clN9QLmU9oYnUNYBuRN9Liud
drV2Gggodly6qXktXeSqsxCghiBdV5yhAQFeOMIryxlmiWyLEwadSRApFEP3ykhICTUrkMCOxGJa
VwpXtKI6sX7VPYCAASVArsvXbXYa7TSC1N1pTS1egb1ajRFLyb+27Vb6KT9A6f1IqDXa1e3rsb1a
2hagL+g5tS/plAtGnpCwo8gouWNh91FCaX7QpB1FTtdtL1Y12k0IzRVQWZFLyBQZftV2KoWuF5t2
lmYCibKGPal+7tbuHSZGJiUgdj3mv2Q37pGbvDsY45GDTOQyN3qs1mlKqI3DjtVTIqmlZGRFVWlj
7mCnxJGyvOrFo7+Ne8POSBAxIYgFheiCKBBpTYg13WKsCDo3UG4/te1W+lk/QOn9hsOSjnR6Kt/R
IBq1Mbt6fOy9GruFfcaK0ABQ1b7j467FAV0sJQaWVu7ubucEsQpLEAFrDu+0FrILsUa7RGhGOxUV
QqhdAavRItRtQAFMitQAsI1BeMPSxhR+P90sfcsURUzhu4KzRRK3dMCHHcIoie6Z2DI5KJM95X7K
jfvrzhWcEssrClkVgRcfcCGB0BtV6BpusZ5f2Zer1er8VvqJP0Dp/YZsQBaibUTc+n2rparenyrt
FchXKmazGSxB0j6EgVKR2iwAIoMLBmpQS3a12hNzCpARQqqFoADS+tjVqvVga7Qa7aKmu01Zqsa5
0DTEARMDTsA3Kw7SWsKvRsa5UqqC6K1KoAWFQzxhwkYWjjsWZbLa2iyG6upHYDX3Cu4Grm16NrIe
f9l8656Wq310n6B0/sUgEBWDfQWq1W9OQ0RYxNdT08vaHPcSrWEbdohJCRgBUCmwvV+CxrtrtrkK
71FeRdRRftPlFeVa8iV3pXctXFWFdtBQKKA1bkEFFK7a7DXbXaasRRoX1RvubqyhqaErRBFc7JMQ
VkRwYwa+4UGB0Bsf7tk/QOn9uWq1Wq3DemUMEurFh2kmkAsbcNq7TXbVhXcooyLRlryMaLNV6vQ6
nqKHWT9XFc13NXewryNXmavM1CY15RXlFCRa71rvWgymvtrtpgAI17iYlNPGV0KhqeAgWIr2SZlo
SK1EA19woMDSG4/uyT9A6f3NJ1cN2xx9xK2q167a5Cu5BXlWjNRmajI1Ek1egCatSLYFRdktovVu
tKeb/q0VS1eOuw06WoqQIxdpIxbgsTR0VTR0ua7jXeag6U3QqDRBGjIppoKII0WRgUfuoior2/uy
T9A6f3PL+iFe1JaMhouxom+hvSi5aMAaKpJWwopzLVe9OwtSfqNd1J1brpEaKm6gjS3IIBXWmi5m
I0YzQQ3H20yghE5kiiAKKgjWDpTfpuRQerA0QRoQDTwA0QRSuVqJ1egAP6ler/T2/ob/AKB0/udl
7gBYSmjoENKliYwSI7MRcCMV2LQUCgvOgtWp1FqT9VOvaYzzPWlFyq2q1WGlqAq1Wq1W07bm1EG3
Yasa7u0awdKbpoDag9ciSuhANNApplZTEbp/dj/oHT+6DU3Sl6tzpVA1tVqtXbqKPUimFin6jRJZ
o0AJ0jHLoB1I1B5Gr1fUi2g6jRkuGWx0g6UelHQC4oOauDRSulMneVUKP7sk/R/dR1HqnqOtDqNR
9EdV4hxD+7j0/9oACAEDAAEFADI1/I9eR68r15XryvXlevK9eV68r15XryvXlevK9eV68r15XryP
XkevI9eR68r15XryvXlevK9eV68r15HryvXkevK9eR68j15HryPXkevI9eR68j15HryPXkevI9eR
68j15HryPXkevI9eR68j15HryPXkevI9eR68j15HryPXkevI9eR68j15HryPXkevI9eR68j15Hry
PXkevI9eR68j15HryPXkevI9eR68j15HryPXkevI9eR68j15HryPXkevI9eR68j15HryPXkevI9e
R68j15HryNXkavI1eRqEjXWxFhRFOzA+R68j15GryPXkevI9eR68j15HryPXkevI9eR68j15HryP
XkevI9eR68j15HryPXkevI9eR68j15HryPXkevI9eR68j15HryPXkevI9eR68j15HryPXkevK9eR
68j15HryPXlevI9eV68r15XryvXlevK9eR68r15XryPXlevK9eV68r15XryvXlevK9eV68r15Hry
vXlevK9eV68r15XryvXlevK9eV68r15XryvXlevK9eV68r15XryvXlevLJXlkryyV5XryyV5ZK8s
leWSvLJXlkryyV5pK8sleWSvNJXmkrzSV5pK80leaSvNJXmkrzSV5pK80leWSvLJXlkryyV5Xryv
XlevK9eV68r15XryvXlevI9eR68j15HryPXkevI9eR68j15HryPXkevI1eRq8j15HryNXkavI1d7
V3tXe1d7V3tXe1d7V3tXe1d7V3tXe1d7V3tXe1d7UrG56/3TC11Okq8j/wDiJerdaP8AdELWbRlu
HQj/APES/qPX+6VNipuKNSj7f/xCv6j1/uqFrijTUwsf/wAEjQ/Sr+o9f7qhPMUaapB/+ClF6VAK
mtf6Rf1N14x/caGxGhphcEeoQR/fgF6RLVI/aCb6KhNSL2n6Ff1N1/uoVGbgnQipBY+lY0qhkZe0
/wB8hSaRLUSBTt3GkS9AWE3X6Ff1N1/tQmliY00RUfSQm4tpapEvR9JB3IosJl5f3uBelSlWmcKG
ctoi3IAAqU3b6Ff1N+r0x/Y6ozDxvSRWOki2b6OGjQ0NOLH0YegqT9P96WOgUmlS1AUx7QzEmlW9
IttGNgTc/Qp+puv9JH9Ijj7jbhde4fRxtY6kVKvpRtZhUxst/wC8gL0qWoAUY1J7bUBQqVrmlSlG
nSpHufok/U3Xjt/ZKqWIFhxPGb2I+iRSTc0Ca66SGijA+gguzJ21MOR/u8A0ImNCGljUUBqde0sy
pahQosBTyE/SJ+puv9oRsAe4VcHgCiitdtFQaMCmjCwoqw9ZIyaAAq+gNdatTC4ZSp44v1s1zIws
xH9Wt/VLUEY0ITQgFCNRQUVarUeA0NSOYWrCpGufpE/U3XjH9lhiK72ryNQkahNSzigwNWq2lhRj
U0YRXhaipHoWvSIFHTS1Hhdbgix4oFomnN2/ucKTQhJoQihGBVtLVbhGl+OSQAfSp+pv1cVv7SVi
KSegQatVtTRFNGDRQjhAvSR21HKr1araddDUiX4gLkCwY2B/uYC9LEKCgVbS1Wq3qvKFrzNRkY/T
p+o/q/tpHK0kwNA1arUatVqIpo714TYqwoAmkS1dODpQ0tqKItUiAjghXnUp+3+5oksFFWq1W9d2
7QTc/UJ+o/q/tu9JKVpZFbgtwECu0ChVr6da6VbhtXMV7kU6kHQUgstTH+5oxdgKH0BYCmmUU8hb
6pP1N+r+3QeaSmgwNDiNWNXrr6V9GFMlColu2kpu39yxLyHq3pplFHIozMaJJ+sT9Tdf6Af6+GIp
JeetqtRFX5D0zqx7SyA1CtgRR5UTc/3IguQOXCBxE08yrTylqv8AXp+puv8AcIlYCOS5HACKIoH0
SeBluASKhNxUrfb/AHLEmg9EsBTzgU0rNV/6Cn6m6/0A/wBhChIwqOS9Xo3NBaNEUDxXonhFSpao
RZalP3f3IBcqLChoeEkCnmUBmJP9DT9Tdat9bf8AsYG1CU0rXF9DyojS+t6J4+oW6kMDTm7f3Fau
01GhBFAUbcUhIVpGP9GT9TdeK39vimaypKDQOnSieQ0J9JxcCmBB/t9YyaEIrxoKULcyICLW5UDx
XqU2T+jJ+puv9zD7lIK0rEUkoOnSiKv6YNEdpkF/64qFj4UqSMqf6BFHenlouTojWpl+7QaGl/To
Rep2Nv6Mn6j1/uaMiiRRS2iyMKVw2h9QgMDaj1/rUI+2p/0/XgVI3bHqvW19RqvQ65CE/wBHT9R6
/wBzCv1BWILIGHSkU3LgUGB9QUwFH+tAXpV7Vqf9H18adxeO44IjR0Gq/pNDSf8AQf6Mn6j1/udW
ILANQJUjtamPbRNK1j6knT+tQpdtJzz+uC3KKqiQkk9dUBAH6aGq9NZx9n9GT9Tdf7oX9RvVyCCG
DKRohvQ9OTp/WVQsVUKNHN2+sCk1ZRRkrvNCQmhEGp4mWlUsVjUHtFo2vVuBeCb/AF/0ZP1Hr/dI
sAwvQNKwNeFaCKtN6cp/qIBJZSCFJrxvQhc1+Oa/HNfj0IUFWA1kNk+oCk0VI0CmvtWmcngU8+4i
kkV6dWAjjOg5NwDTrXSpOaf0ZP1nr/dRqwauYPcauaRrHpw9OB3tTn+ow/qsDQAq1W1PTgnPL6aK
LvowJSIFDqCHjCAsTwiFzQx6MfafBzF7VanFe3GRcOvaf6Kn6z1/ur2oWaipGqm44gKVAQ6lS39R
gW+gPCehYCjKlLIraStc/SgXqNAq6EVKLpqATUcIGri6qbj1PaT9X9FT9Z6/3UNQb0y20U2NW4AK
HMhbCci7df6hj27DV6vXcK71ppwKaZjV9L13G30tqUWK5K006ARuHU0wuLW0VSxSMJwpyND1HFm/
osf6z1/su39AOqNzdL0kZueljqBQFy8oQ/kPRYsx6/1CIWRtD0ZSp+rC1cCixOnOsXuGjCpBZ0Qs
UQKOEj7qHqSfq/oqfrbr/ZQo/wBA9qAJKxVbTtrpRAr7aZxTMdQOZ/qAocgdZhdR0+oC1yALE6Kh
YpjgUFAodaYU8IYoiqDxNe3t6csgUH+jR/rPX+y71b69FJoRCgANLUBVqbqaZjodV6n+mgXoRMa8
NJFY8DjuqROz6cCrAUW1AqGPtHKiOY60dBR+iklC0zEnit/QI/1nr/Zd6v8AXJGWoAAVaulNKtNK
wrzNXkNd51Oo/pioxoRCgAKN6APERenFx9JarCieXAn6l6MKtVtSKPoc65EehJLaiSf6RH+s9f7p
SLW1M4UM5bQ8R6aD+lKpYrEBXboBQWmHGRepEKn6ECrWq/GDaopga7hXfQNXFE0TV+O3oXoG9Szf
0qP9Z6/3OASUQLpajYUZRRJOvtw+2g6f0mNbLRq1AalePrUkRX17UFFFh6QvUTEimmC0cilnB9Dv
UV7cRNgLmpZb/wBLj/Wev9yhSaEJoWWlINNIooysaJJNHUdPQ9v6RGLt7WtVqtwM3oyRX9ULV7UT
f0bUsZIsi15TTSMavqrstLORRnozPXmevM9F2OkUxWgwbhBBokASy939Mj/W3X+5I4qAFGnYGr8u
D20HF7ae39Ii6qeXATTN6ckYaufogGuQ9G1BDXYBRcCi5PrhWpbqRKh0Z1WpJiaDEF5Sw/pkf6z1
/uNGC0jM7E2Ej39IUeEa+31NqsfWQ/cvWr0WtXeaJJ9WZR3cVjQsKv6AUmuy1dyqDIToT6lr12Gg
FFdwFFjV6vXcf6jH+s9f7jjQNQCqJJAfUPoe3oKgsy29cC9QqO3xgDJRV9UGxW1XBo8NvStUp+7g
tXIVfjsTQj5WUUZKLE+paghrtAq6iu+iSav/AFaP9Z6/3Jf1fbgHpqlEWo867KKGipHqJfuj5MoN
ZQXt9VHFqF9berJ+rS1chRPGFrtFXAouTV/StXaa7AAe0V3UST/W4/1t1/vGNC9LCe+aPs4DoeHt
NdjUiV22oigKIq1WvXatdgrxiggAfQRk0FFFASEUBowaiQ3tal51IO6iOfqBDQ5V2mgtDl6nWiab
rVqvxWrlQrpRb0u0121yFd1En+vx/rbr/QgCx8QooR/ZuMbOVs04Hj1OhpADRjNCOgAKvagb0Dai
STqSLEVauWrAUFGt6B1NLcgnm4F3W9WoC5II4QCaII0T9UpIpP1Vy1vr0q1+MG9EC9+G1Wq4q5Ol
6PGATXbX213Cib1f+wo/1t1/oIBZo07dXUDWzW/siM2c1KvcoW9CMGhGBRArx82Q0qm9zV6FXWri
r1e1d9FjQc37hRIok6BiKDV1o3q5pUNHqBqelySb3ktcUy3AUglQQYxXjFFGoKaAorcFSKvSNcFW
FA3FraEcXXhPMOQo4AK5Cr6Wq9uK1dtWWu4Cu4/2PH+tuv8AQDUC2XVkBplIoi5pkU0ykf2KBQjo
xUpLAgV2WIFE6kiu4XIom1E30vRar0a9tL8PcaDUOeg68tWom4c2Mlu4imJFBxRHI3oChQtfloy3
oi1LQ5Dka5Vega6+lI/aL30tVtbGrWq9uACgpqwq4Fdx/suP9Z6/0Buiiy8DLcRxkNoQDTIR/Yao
CAgFDnonNWAtILNXbRFqABplrttRejxWvVtCKHEKFW0FA6g3VlFpet6NWtRYUWrvNI1+AiiBRHbX
caVrEG9Ea3q1C1WHAaYhQxubaHW1XtrY1agQK7jV/wCzo/1nr/QRIRXkFB1OluMqDTRkf1u1WoC9
FbClTlagaGiGgAVlGpq1MRRPInhtwAVarGgKtVqtVqAoG1Xo2uaGq3K2ujr9vXQ0TpagLUXruNBz
QarUTYlQaMbCh0vVtGcAiUigQ1N5AVL3PT26B3vwWqwFXq1WrlV/7Tj/AFnr/RA7CvI1eU0GBHEQ
DRjFGNh/U1W9HrVqtQVjQjoACmANKgGg50BXvQ6wi1MAQOAgGmFhrarVarUQKtpeuZocYOo1QnuW
jYhhox5mgL1yFE69K7qD0TQYHQ63qZTelNq816U91NQFSPfi5UTV/wC1o/1nr/SLkUJTQII4mQGv
GtMLH+njkLXoAVarDSxrnVjpzpSBVqNCr2KkBnP3EWNdNDROhGg4hVhXvaufBerirUBQoEaGuXcp
Pfcl7UVphQF6vajoBeiQK61bTnXIUr8N6eMERxdxRArsQQ9O1hau2rgVer/21H+s9f6UCRQkNB1P
oFQaZSCI3ohhw+9v6LGKIq1W0tVwKvV6Bq40vqtN0HR+dP8Aqo1Y0eAihpYa3q9d1Xq9Xq5q9X1B
okgdxsCaGvIh7kyG1dwNNRNGrUFonlQNCrUTqjV11Ao9wNr0LVcWfmWt3Fv7ej/Wev8ATQxFCSgw
PCKI56WFdorxrRjFBLV2i3ZeirD+hAXKiwtwE8F9BQGvv7Ift5skgoaGiKI1vV6FWq1FWrsNdprs
rsNdhrsrsrsNdhrtrnQFA0eZpdVJI7eT2KgczejpaiatVia7bVejRq9WoLag4oG9crVa9BaNqRA1
NCCHWzf2bb6GP9Z6/wBPFxSyUCDpbhvxEWNEA12CjGKt2/WiOgoFCr0Wq9Xq/EDV6ZrgUaHSOl5q
4+0Uaua7jXOjw3NBjQb0etdtdtWq1chQPNbae55MTyse0NzPKmBJPKiaterWonW1dt67AK7q5mgt
AVe1DS1HpEDT2Abr/b0f6z1/qAic0sLA8dtDVq503cRer1fRxeu0Guy31IBNKoHAT6YNChV6Wl/U
n6jYGibV7uhNG9rcqC3oo1FWqx0HTnQIFdwrlp2Gu00RaguljRDVYiiBXbSgir1Y0xpTcGQXlJLK
3IimU3CGmuKD1erGgNDRq1dKuaWuug0PSIWEoHaf7fi/WRz/AKh3tRY1CeXHz9DlRFAVYU7AEm/0
o69lBQKFXq9E+oBelNheudwefRgbNIv3N+qjQauRoqKsKBF6tVqAFCIUYwSyUqUVtQFA2Pdenida
CUsTM82KYx2qKhhDyZGOirftrHVPLmLGACCHIsCasKhxzI0+OFcoy01wVpgDXi52toXruNC+lqNq
vQpTV7gaHpF22luEdSP7eAJqGIKpCGjFyIt/UYf0/RCnjuOmtvo1JtROl/UtQrsubczSijRBau0d
rjnTVYGgtjYmvGaAvTxOleMkJE7NLitGpJFQIXfIxVVP0iNEMuSqCMk2R1WTJnjMZINJMUebLMi9
9CRgzzPJXcaLGrk6KvcWUAXa0UsiV3m8Jj780RGPx8kjdiRailMt67KCUeVEmr1fQWoCgKGh6QkU
/NSLhkt/a9q7TXY1dpqxq1Qx3NEG9XqFe6gYzSxRk+BDTQpRjQEg/wBJNRqVX6IVbuUgg6X9JVJL
IVJBGoUmvGK8YoIBVqPFb0AKtVrUKFciRavYNyvdXvfHg8tSIVksAcWNC+WiKSxvC6pLkzRtGbGl
lKPPlmRbmkZlMmRJIATRNXJrnVjVjVjVqtVqsKsNALUaNBu2mYtV+VqtRPO5ryVi5CKrlC4QyNNi
tGrR2AuC47gEvXbXOhXSlN67hSnT2ivf2YdpteuwXZP7TAJrsa0MCGF5e0/kUMhajKOC0YKDTlRi
RqOPSxslFGFANdVdQlmVo6kWw7QTNF2jsNWP9F90buH0YNqk668qvxqpJiRQJCO6S5FKgq1tCQKL
E10BPokG1WNdprtoLXIVzNWonRQaC0BzAFGxqOZo65ludC4o3NWNWrtrtNWq1cr2FWFchQbQmiaH
FzNdNCaJJNqXUtXaa51yFc66VFM0by5XkrrQwYxGY70IzdouZitQWmjoAAG1gedCksGNZQ5q1tDU
ij+zVQtRgkrwvS4zmhjLSxqtEUOQyEIOiOUI7ZDftLEE2q1c67mrvNXWvtIC2r7qVWkbJsJWYtQY
iu6u4V9tdq12V2miCKtQUmhG1FQPqzUf6PpCpt6FjrFUXSUHuopchbUaAopQo+iooi47dQNOQq/I
BrBCaxokMmTEire1C9KDTsLsLFeZtVuM0TzvXfRvXMUDVjXaa7K5jUmuwmrEUTVzY63Aq5NHkO4k
3qxNBRrarUa88vYGBGK8XbKULrAztLA8dEUbVyOi3oV0ZTcZKkpQYV1qQfb/AGFarV2mu012muw1
2V2irLSKha8a0XWjOgo5QoTyMYwQvSpJO2klWSpYippEJqLtB4b8NzSsVLKrExLTRsKVC1FbHVe4
0sTU6xgGwBNdhqx+qPSP9H0nt6QWk7g0ZFnUkAW1Iq1qvV/QANdtCr6X0AoCutRoWMuGFjbnXdTM
zEKTS8mfq7cz0Q89SRXcNb3rtFWBogW7TVrUOZ7KHKhzJSrUeVXJpavRvRtVqY6AUAatRtXZQAGl
6vXuDrYVarUCRUOQY2lyPIUTyNk4kap4iKI5r09yOUT09iJE7WvSNTi4II/qlqtVqsa7TXaa7DXZ
XaKstWWx7a7lruFd9d5Fd5ruNdxq50EMhHhkFFWGt6hgaQxxLGKYcpzzF6ibuDwAEtUEZGl/WPS3
MrerUkZaoVVKf7RI5q5NddAxruq4rlftog3+ivQBNeNqRSF+kc2X0r0rkUhcjuBZl7TwnW+tqAFX
oXNW0tQBoLytUeLI6kFQrmz5cjJQWgANG5FxdetHmFq/INcAFq7FFMosDar3oCwC09WBq9HothpY
0q0TYE0OdXAqxNDlTNQFGm5kDmoq9BSaIAok3vVqA0ItRNXoGhoKtVqtVyC08jUGUUzQeEIopMeR
1tRHaVuRl9KFKbh1uP6NarVau012mu01212iu0VZastfbXctd9d9dxrvNdxq9X1vV/Sw4vIXNjai
BZsdTTxlSq9zLZVvV6kbkYg1KOyjI1BbqkPa3SjXv6oQCFSStACgOSqLBu4NECHQrQ471er1yqwr
tqx4ybUI+RjNeI0EWuxasBrfQVz+hUc5CCfTT9SGu0NUkfIHhOtqAq4rrpehRIrrQFLY0RzFjUWW
qo8hdrXIWgOB+hb7elFqFA3HaBQNBVK2uCgsVsBSGrUyqNGar0DQPOncUedKml65mgNGPJSDXaa7
a6V3aXogUKtR5mxorVqtXOrGhw2orVq7mqHMCRF2ZpLFrlankUqdFJFKb1KoH1djVq7TVqtVhVhX
Kririu4V3V3mu813Grmjrf6a1W0weSOoJ7aKXoqRTxhhHCE1NGr6ADQ2AjPezLzsPWlPbjL0NCh0
pj9iNzmjDKwIq/qXq9XFfbVhXaa7WpFvJ6QJ0vV6uK5mvuq+l/Q5AGQUT6QF6IsVQAC9wTTAGjYN
ajer6GhVtLVahQWhajQUmu21AAVhsiNmSRmQmrXoLVtSbVcmrNTcxZiXB0HRa9g4A7hbuBoAUeVA
AUoto9HkLgliKBvQ606dwjjsG5BjyocxRNFqWxF7CiK5Upr/AONqVbk0w0NKLAih0IIoj0b2q5pV
IrmBkQMEI0FA2LqGUgj1LVarVarVbg5Ver1ermrn1raWqxqxqxrtNdjV2tXa1dprtNdhrtNdtdgr
trsFdortFWFALVlr7K+2vto9lErXcKDC8GR2kMGAIICCu2iooirUaPANHIuFKVej6orL5RWo0BQo
U5tEo+6Y9tS29cnksYt2LqQKHpKARRJFFmrnQY0JDQkBN1NfaT2EVc0Ca+2rCgKJUU793qRJRS7X
oClNqBuJRY6dtdp0A1IqwoaeOuw1BHeXIhQRsATegCa7RQI4eXdeiCKahapFuCDdQQBQoigL0RyS
l5iy30Y3Lmgl67TXQg3ruotegxFMwII0uKBJq1EgFTYswtejfQCgLDsXRjaiaJrmaHTtpUprWPS9
WvXaeAm9E1agaLCwYWlyPLHLF2kjQUoooCWTtq2lqtVqPHarDWwqwqwqwrlXKuWnKuVXFXFXFXFc
quKuK7hXeK7qvXdXca7jXea7zXea7zXca7jXcauavV6uavw3q/qRTshGSDS5JJE1xHIGJUUdEALK
OdqtVqvRUGrg1yoj1V65vOUVakTnauy1SfpQBB29xmb7/XjblqaHHcV3Crii9iHIPkau8130CtWU
0ABXbalitT3NIpuAK7BTMvdIbD1ES9dAK53oUtSA2FCibUW0tXOhQFWq1DlS4oZHU2DEU8jvQUmg
o0JodQrGuxquwruoAmiPtLsQzCw0PcD2KaI7SuhNc6Sl6M16vzuTRNqNKaZhoTag/K/IXNWpiKsb
WtQOilTSMKci9c6HVbXLk0GNd4om9M9BhQIuKUWB5AmutdnNVoixNCib1fQC9BbUQCLkG9qWSBoZ
ImQ0orv53vUgFokDM8ah3gDB42Q/00REjw0YGoxPXY1dpq1W1tVqtVqtVq7TXaasa7TUeL3BYAtG
BxXaQIBYs5OpBuOVDnVqtRFA2JXQjl6kK90mQ/dkaLzpLCpSSx5HqJpBGnv64JBRgw1PKvIK7zfy
Gi5Ndxq51vXca7q5VarVbS9d5pnY1F91dldhq3IBFprk+iq3oKTSxigK7atR1U8nFAWIo172I1Io
A0CaVAGDm65UircmgpNAAUxtQc0WOgUUi91eI6AcxQNEk6DSS4Kcw6ilNHp3VYmlNc6N6vzvROt7
8Ia1FidAVswvQHM9FC0pFiLlVvRFqGhNd1E0XFKKK0osy/qphcdrV0q9A0ToTTNau6hzIsKuK7hT
dpF6R+1nKyBo7U3Ic6XpSxqKsKViGyY7p/QLVY1arGrGu012muxq8bV42oRGhEa7DSgdskVwCFoS
WpZUNOSB5IzX+I08YUdwrvrvrvruNdxruNu413G1zV6iyGjC5gr8mMhmSkSwsdL2oOpoulK4pGDU
RVqZb0tFBRFhbS3pYvKRPuYirUhtSkWK2JcivyCKdyx+hh6as3L1762qNgK71ouoozCu41c13CuR
rtvRB4kHIadNL0K9x1X9LG1MAre5F6Ycla9XoAmvGwFgtRyKhmKM57jQINXAom1d4om9BRQANdlA
EVF+uiLGxuoJo/pDWoMKvQN6k/SjMSyggUTQvSo1cwe+iwq9E1fRjXTQ6jQtUZvRsAKNKqgG1lHI
3Wu+9XovRY13GixIoNyLXoUCa7zSvemPJjTG9XNwDRvXOr3oIaCmudC5qxr7qYEi5pWsciZHWSMg
gUedWFXq9dS/6SrV2tXaa7TXaasasasa7TXaa7TXaa7TXYa7K8ZrtFdldldgrtFdortWrLVlqyV9
tfbX2Vda7lruWu8V5K8leSvIa8jV3mu813tXe1FjVzV6TmgmIq8cgeJlrnUcxWisb14zc3Cerer0
mUyhMpCBNGaaWMCWTuNzUUpQxtau69Xq9Ma77UX5nIpAHQOh1+yggNFOC1KSoC9ugFHtusgFfkrZ
nJP0YYqVa4o8l+lvqRwXtXdQauRogUVNKOYFWoi1Gr6X0FLyXqJDq3KhalW9J2088PjvcCgtAAUV
BoparMKFBTftuFUqTzoWqMroyhqEK1YAEWLLahyq9A2q9x3EG1wR2mihszsAZOfXQ1ehVxVr0E5s
pFDVRcyKRpEL0RyogEoopwLAMKkYkjrRNE8ANA6XoGgbFmomlq1E6C9hFYhaVTY9ABVhVqtTKTV6
VrErjvEVIJvRoHSMXOQ1kvVzXca7jXea767zXea7jXea7q7q767qvVxVxXKrirrX21ZasKsK7RVh
XaK7a7DXaa7TXYa7TVjVjVqtpzq3CBcyHtSr0kzLVo5KaNlMS3IUCmUESQkfQXq/AQbQSLLF+kmR
bliKZ6ecAI5ZyVuZB+OIwR4GopMtCRxSkFA9wOlxVlqwoi+hWxZwqoxOl/pkPab0/Nfrzpzpb3FX
Nd1X4b0h5e0l72oUVJrEjj7cgIrmuZq3ATQFWJHaCLFa7u2mcMBe/wBwANKwIppeYlandSpFwtu5
kuxU0lSHtKHuDrYikcWmYEGkJop9rddALAcqDKFL3oNqDamYnSMEC5OgXupVFNa5fkTelW9MQFvV
9QKAq1c650L13mmYmgSSBoTQVgRchO4UGsacGg1fdVzV9HW9AA0h7SMZpY+zQURyiHPMb7eWvKri
riuWvLS9XFcquNeVcq5VyrlpyrlVhVqsKsK5aX1F6vV6vVxVxX2UAlf467UpEU1J2s3aldqV2LQj
FJchABwSQqaMYFdq1ZK+yvsr7K+yvsr7K+2rrV1q6VdKulXSrx0Wiq8ddy0kygnJF2dDTZDGi7HS
L/Yet6BIoSuKGQ9eYEd5tGqmlNwBegt6NhQK1LZT5UqSQvQH1Km1M3L+hRg3AFECraW0BokUDzFE
9oexqJQ7S4yKjC5DGrUBbS1W0vTUaHQqDSoBUnNVA7E7LM3NZDSkV3sauK7iKB5B1q/PzCkBZpV7
S/6Y3uXXkKW15lHaajPIvcMatyFexo0BoLnU0ouTyFGlWxVRR6kfbQPJzXWjqDagRoNeVEgKALDT
tLAXroxINNauuirzMRAPQmgwoc6dL0DcRZDoPFJLTKyMKPSKsy/d/Sr1zpLolzper6QE9wGtqIpi
FBljsSpJ7KPoX0vV/Ti/WevD3G0b2I7SGkjQLk2LMSQ1jI5cg1cVy05f1qNb1ageXCBXZVqub+S9
faRcpTTSOt6DUDyoUatXaKvagCaHW5FA3oVMpUNZVQEkRm3bXbQBB7aKimvQ5UxsA5vHKbN3NVuQ
cAkXHQilANSJ2sKJq3OuVXoc6ItSJ3UydtBtTSctT0VSK7bCvY9aIo8qNW16UOegW9MSCDzjAYta
9ciVUimAt3XJ50q14z22N0jtUrcyDTDnzodKdKBvWNkGOpf8zdD7RnllC6/0uKxfINl4I4S1CNVP
BeppL/VR/rPXjDtRJJ4L6XrrXLQVbQnTlwc+G4/pSrYWo8IF6ApjoeZVAKtzaMUVIq9cjQJFXvV6
HM3oiw6UOdEUzBR+QbR5IJnkUlyCEFBiKLcweWsg0cGxqJrM0gIprKynuDAhho1mHSjqRV6Bq9zy
Ve8Edoq1Wo0BqbmlBselMTVuelqtodDSaB7Bjcxx3oxWPbR5V20q12ORZhVmFI9qEwryrRnQU0qt
VxVrkirkUCTo6UDesWZUOSVdxUf6p/8AX/SLawKS0zXfWJe5hqNXawJufoLejH+s9fpb6mx9Ow1F
6563q4+vVBQGhOg0AvQFqJrrSqWM0Xicm5S1itE6WGgJFdwOl+VxV+d6VitSqHTRR3FhYdxFCbmz
3qNzV9TTraklAVxek/UQAtOKRgakBoamrchpaiKNdaFzVjRJoc6UUeo1PddQSWFtD0A5UelXq+tr
nQ0iEnpRNE2oWIQA19gAlsQ3Or3pTYmSi4NEXCLc9orxijGKCmrNVnoowNyax5kVsh0Ld4DSFWjI
NWP9Ki+1CbnXHUW4Samb6W3FH+s9fr7cHPW44rCrDXnXOudXOlxV6v8ARxkkexNHQV1ocqvR0W4M
kju6k0GtRYUWWiRVxRY3B5ULGu01zoECibhaQ2qSMEkEUDame+hFCohc3oHgde0hqQC7HlRFwrKC
wJUUNGHJKYWOpBNe6dQLUwF6HQihqCbqGpr3o10WjRFW5dtEaDQ0ovXQad33AKSnbZ35rzJosaBr
vNXJopzVr0BQjHaFVac89XcKC5uSaOtqYEVer1cVcVcVyqwqwqwq1WrtrtFdtdtdprtNWNWq30qi
5mPbHqBekXtWhyJrroakN2+qj/Wev9IsKsKtVtOdc9L1er8NhVhVhVq51zrnV9L1ccYBNKhNILUd
DoOg5VeiaFRxdwkFiguQLUdTY12g0y87kUCbUGo1erCu00GNBgKkCsNBqrdpaQNSsRQloSXq9NZh
a1L1BJ1ParA3BBBGhpQbuNVW9NyPYbA2oFquToOh1PRSbjuAJuaFOeWnXQ0RagNAL1bmBoaY0OdK
ABMe2lUkiMKCeffSnRTRNItIgFO3aC1G9C5qxGkrXOlqArtA0tTjtbivV6uavV6Bq4q4q4rlpY1z
rnXOr1eririuVfbXKuVECrVarVau29dprtNdppVuQoAkdRX+E14kNfjmlhIa/Bar0/Q9for+hH+s
9f6pYVYVarVY6c650avpcVeriuRqw0tVq50q20FE34AKIo0KUWHeQGqO1r6XoCrCuYogGigFIFYO
AtdaDURXSlvTJbRhcFLCivaDqv2mwNGheleg4p7aIbjRwKUghwbjUU+nO63v4+bG1Bbnu1W9HVjy
Vr0SwGhNqY30vQGl9LUBejyoCrGiDVjQvSAiiDeS5YAiixFXvQ5kXq5NA0L0AaDyEtG5oq1Wau1g
D3VdrP11HBIbv9Her13Gu6u6u4V9tWWrCu2u00QeG9XruNK5B8twZI2poTViKWZhSsGFWq1W0Ip5
AoP1cf6z1/sQISFXuLJ2mMAm1W4VW9Wo8q5mo4m7skFFJJpr1GtlPUGvcXq+gq1G4CnkwN6DUaAY
UwYhibhjUahh2KDIdbaA2JYEdpt1oNarKRQJFLLeu8UxUhCq09+0ajoxOhoGixq5NCu29dDS0dXN
qEhB8lxegRTEV1q1WGhNDVVFFRVq51zqzGluavai9CiaINIL0/bf2CCrCgKFxSsQWmvXdcmQLRmY
13Grmib8ZNvr7mu413VcVyohasK7K7TVqsdFdloSqwMN6R2jbzxmlkRqLkAs5prijKoqR+4/Vx/r
PX+w0XuJXsVBcFA1KnaTROltAKGh50kbmmJ7ixNAVar1y0vQr2rpV9GBruoodAaR+2jICsiEV0pJ
LUWosb6A1eraKwsVGgJFE3q9RH7q7RVgCSCFPMVehTX0NWoA0RQNA11IrpwP1HIi9WNc6ZjfgJ5j
rQFL00NEE0gYAi4Y0edWo2o9bkUATQVquSBer0GUV5FrkatYdSAKsOEaXq9GmWxv/Rbmu413V3Cv
tr7aU2q6srQqRGhUtkG/5Jszs310f6z1/sOG1SDkvLUiragasaVSagniWN2u4FGjehzoWoirc70K
N6uaFGu6wsCQwYsgFA3oGlNqZ+4PEy1aidBR4Re965VbkDRtQNj5FsGY0y2pZFBLAka9Qw5kXA1t
qKBuL6mxIWh0vV7DhIoDQDmelGugCkEV0FiaPIGjdqANOvbQQhXsAvKuppjyoG1MwsDYA6n0GWuw
1arVzrn/AEjuNg5FF2I+uvV6j/Wev9hxNahYhlCkG+ntqp0JoKxJjKKTQF9L6DowNJZlKkaCwo8q
toRcdnLs5cwUa9OBXWlauRruIqRVsfQFE6BqPW+imxWpHFdKU0NQeR0vy4g3Mar2koQAxFzRa54b
UKUV04GN6QG5FjRIAJvR6XocizE0CbAm9DtCkklYyVI4DXcRQa/o2o8qZA1WvVhparGrDht/Ycf6
z1/sMGxiYkEAixBr2OnK9rUSTSiu4Kzyl6A0auZoCu3RDZqIBpgBV9DStYtJSsO23cWUikJ7WUnR
Wq4qVxoKPolb0Qa7rVcaKbjtDErzVDYaWp3I9KxBGh6KFoMtjTm1DnQ4vdavoaY2HWlJFA3pjar3
0JokUtzTpYjpaiaJvSR3q5AZb1a3ARzq/pWp/wBQFWOlzVzVxVhVqtreuWtv67H+s9f7ERiCripF
vQNwKtRFLRq1YsMbJP2iQDVjSm1G4JtQIq1K9q7hTG5NA1yq96br3EVFKLXuXQ13imPMsToeMcA1
Iqwq1KbKOjJcr0Y/cNG6i9tbVajRau80OYGjdFCimItTWoNQ4bCrUpJNjVzV6bnSrQqwsRVzXdRa
rUGsCedjXOgKAUVcUTej0tcMhUcA9OQ/dV6udLjSwqxoXomrirCu2iDVuC9XGlqtVj/TBAwHgr8e
jjmjA4oqw4I/1nr/AGLEbgMDVxf3NXojQQyKve4HUk2AvXUkCgKsCbclXmSRXca7q7qJ1BtXj7qe
LtoXBQ9yVImlh6o4gTcXs4uI+QkNyNCOZY0GNA8DXFc6SMklbG2jUgUUxFqK3BBoHS2txRFwgNXN
XokUFFyFsFBpgBoCTVja4omgTVr1YABSaAtRsKtQ5lAaAChypDLb1mJA68ZrnXdQariuVWFFTVjV
6NWq1Wqxq2l6vV9LCrV2mrVb+hE1jQ0Tc8JRDRgU02O1JGwc9f7FhYhj1fuuenUe9R8mlykMfM0e
WncBSsO1iLjtFGRaPMG9A3pVBDLaraGhoGIokmiKjcA9wu/6SavrarcA4BqeCM6EXCCxb9I1aLur
xGliNOtqBvQFAA12LXIVfVhc2sNEpoga8QFEC9EWBbRBYAa8jQFFmoOaJo0psEJsasTQ6doACc+2
rmjyok3AuzOEouWIaxaTn9pFGh6TSk/QXIruNdwoFasK7asatRFGrVbhuavVxXKrCu2u0/WwxmRp
GHeRb0j1/sQC5jU0KlApensRzijZzLH2tzvYgd1CmFwhoqSSCKIvSCwPMWsQ1qY3rutQa9XGhN6U
8u4XonnV+O3GOK/MWFA6FfuFe+ikCrjRhQXQGw7xRJPAVBKWFORpcila9Oa7aFqUcjCprxotWoaE
8+0GulElja2lq5muYq3Jbiga7gauKJo8qsCFQmpH7VVGeivbRFe5FcxQN6t6MrerfQcFtb1egTQe
u4V9hrtFdhqxGtqtVuHuNd1XFcqsK7a7TVvpLFiAI4UVYwTz9E9f7DvSA9wvS3NSLcKtBRcrUczR
1JI0jAVegtchp0bofZiQQ3MkUTRvRvQUmgtqCgUKtdiLUVUqL2broEY0IDRi7a7VNNGRVjautEcA
1PALk9n220cXpP0v+rg7jRNAmr87niK3K2u1r6k3q/IcyHFGSixOvQKaB5SHmrc9UJGgFdgoha7K
NzV7UDSKtjIBXUg21C0RRFW05cbGw+ivV9birVbS9Xq+gZq7zXcpqyGuwV2MKIIrlVqsKtVqtwBj
Xca7quK5VYUsLGhjrTY4ponFFSKtVuD3aEoAimmjYaInjEIFr+mev9avrY1arVyqwvpEOVL0fkqU
FJIaIR+6jTtq1qtyCmpORVixs1cqAFAi1xRo0NL0TRDUb0OdWFAUyi4j5DlV9CArSOSFPK1FTVqA
FEaDh614mNRQdtSD7b6MLiIWqT9Po3q9Xq9CrEsoBprA8FqtxMTQuatapEJQdRoOVDpSDloTeiQK
tQS1OQTbTnV6JoG9da8ZNFbVarUK8bURY2q2jsWb6a9Xoa2q1EVzq/B3NXfXdV65GrCgt6KtRU1a
itdtWq3AkpWknBrypQdTV1rtWuxaKIaaBDUkJURqe/LZme9BiKVAzGxochpLL2FJlagQeM9f6lca
XNc6F6tVqsPTiaxpekn6YkLVJA6I1E0GoNQNM1A0DcS2tGLrc3uDVrVYUdDwgk6Ac7c6PVXtRcUb
UQ1BL06WpeVdKFFL1zFWo3q44YV5o3cCbUzXojVQe48xper1er1ermufCtANdQLN291c6uavV+Lo
Be6XFX58iHiXhLUt+32JvTNalPNFABuQeXDbnarEUrkU7Xq1WoobK7LRN2QV2XosBR5/WXq9Cvej
rer1caC9Xq4oVdq7yKBFCFWAXmIbq8JWiorsrxvRBFWq+iuwpJgw0mIEcD91SL3jtQAkXjN+BmCh
mLNQJFCVhUEhkPaTwHr/AEu+nOrVarD6VDdUlN3saikMTT5QdSwq9cqtVzXdVwaFhT2tGTVmJ7as
QSaYiutWNFaYUDotq91INHkaa9wpApWo6HnVirXJY8iDajY0UNEGrCrCudcqRKtaka1MSeFgbqeT
/qq4q4051zq1Wq1W4F7rrazWvarVY1zq9XGl6vQo3pQTXMClcAOSR28pAVBe4Q3pUu1E0TarEkR0
SRRckXotwi9CrKAaFEUQKKCiKBtQDMZ4Aq8Vx9Ver1yq1XrlVqtperiuVWrmKvUHNRGO4g1O5Fd5
pXF1swKU8INdgrsFLESUiVdckmliCRxOGoxq1eJRUQ7U1yDQFdpo0iGVkQAM1hpej19MC5Iq1MLN
9NbS1cvq/e1mHRHClHLPcmjEKsNOVGu+u6uVchReoaY8y1KTdrmjcUGIJkVlo86tp786HW/M9D1J
FewNjcEG9CiL0twxFxzFA1emN6Aoxmg1gq3J5UXJoDgvVzRNRk2l62FW9RVN+wEW47VarUKHdcEW
aw0vQIsq3pkBDQKKC2NE3omrXARbG4om9E1YmrCw4FNFhXberGr0TXea60RQHOAxh5yjEix4raW+
rvQNBb0BalQFfAtfjivx68DUIWpYFpVAoVI4UAM58aUFWhcUZSKBDVJEDRDgx3tqALHoAAaHXuN+
VBb0ayb0EUY92uTUKBQR2odWf7z107TXaaELEFCvEpIIUElLNLfy29Wxq1ctOVXq/wBbcVfQigbK
45HkL3OPck8mlcBUXuUqgola+412G5WlFdpFXJpRVrVcGl5V3JZo+4tGVNzQa1cjparVfkotRIJY
WF+dh2l+VXtRc0spoMCGFwpCh1voTQW9BbUBVhXbVtb1fXtojlGTeQiwPFehc8J6Iooiw1Jq/O1W
q3AncK60WudQ5ryNRN6NXvRNAG4stFwKJvRFBbUTpbQFRRa5QADuFdwFFqvRIrrVqNC5PjJHcERu
Z+ntVvStXaaVaCkgKaUWFXq9C1AVbTuNZDN3xyG9BeRFEWolleQmglqIq2t6vr7ngk7bREFpJbOj
s1KACSe3RzYIbyHrUQvVqFMKAvRx70YXFdpooRSqDQUdqX7W5063lbrpblarVyty09r8v6JbS4q9
XNc6UhhIPtW98GdYzkSh5WJNKDYR12qKLVY2ZaVRf3POigqzCrjS9K1qNzRVDTRC/bYgC5Q0RRFC
lNi3OlWuVWq1AUKK805AGmW9d4v3rbkaBq+t9L1zq4qxNW4LcwoNWsRpeltYuSSaRyKZgavQo0xt
SAU5Nied+ZewB5A82AtfgAuQpAY2o0Dw3rrRNqtcW7QzFtBoSTre1XoKTQPaeZo1ar1YkCI0FtTk
AXr2Mqqks3fpar2q+nP07elfW1W4I1BYvd+2rClHK3BarUL0CaFjQFZQPcCAVdTSMDQSit6ZbUTc
qxFcmoqaI9OQ1EQzSRL3wk3ZbV70LWlv2R/rPWo2setAVzBagwq6mmUUQpUR9rOOSIbW7mk+030v
/Qeyu2iLaXq5rnVq5V3Cr1zqxq1chVxV6JNJzW1F7Ni4zyI36n6hjazGiopSBXWrHQEaA8169q12
m97GgTQarirCnUUGIoi9FSNAOdrURQFtSKApjUZ59tqL0edLGLWKmr0DV6vRNc67eNgKjIKyDmKa
jRY24L0NLm61Ied6vVzV+QaiToBRo8qUKaFgTaiKtV6B0vp0rmCq2Bku2gNE6AVyFdprttRa5AA0
tcBDcILuOQokUx5jmY0LNkSdzcV6HP07+teuVcqSM3ZR3CgKXqVoqa7a7K7augoyxCvyI6GRHX5M
dTTd57hVxUcrR0Ms3GRGakJOoJFBwaIWj6UAvo57igCsxptLVN+iP/YeqL3KyEKjsB3igb0Sa7jX
ctwVp2sSC1BSzM3aqDxo5uQL/WkgUOdWOsYudCt65A1zqxrtqwq4q9XNXOlqsbdo0jPO4sw+6CZ0
gNybf5NSopepuSTY3JoGw7LgRclTuorZiBRQ13c71c13DTsq1qIvTLagCKvoRwWpqhQlnVmJFioo
3ViL10JrnQtViaAHDZqvV6BpukZ5yC4FNrehpagKXRQbqLU5ueC1A2pdOlAMCAbFuV6vRGnSga66
HnUYvU11RALcAFAVYCi1c6YCgDQQ2+0KGFrmrk0ac2FAcpWEcfcCSKtzvV9OenPS+t9OdD0reiBS
Xo0Ne9qDmpcllJyJDRdjpfW/D3GojdieVWPpE2BHD7UKPQaZJskf+wC1RcyZCKudI2Iq99LaGudA
m/caMleJHp4itEfS2NdpqxGt6+40AoNzXcbD7q7eStY6XtR+42NXok1zq1WNWqw4Lir1zqPkw6ML
k48awUgvJV65mrCrcxelQODARTJ2kiwMhsJitCbmfuLE2Pbbsar8+VXtQNA0TY3FWBpltodBQFG1
KimltaS9BdLXpSaccutAcFqsTTdbkUkxpmuaB0Fwx5gUdLVaiDV9BoeiBaPIMLntorwAXoCwr9VI
pNMbHncNYE3oAirA0VtRFCmNqC3KAVJ2kWtqBVhXKi1czXKhQTlyFFia8Zta2nPR2uVFyn6sycyu
RQJFA0FDV47DtNc6voavXIVzq/q3q9Wq1W4EW5JVa8hoN3UNDp1rIS6+orFTDIxbS9XrlXKrCrV2
mrVarUFJWwFE0KtQFDTrS6ZR+2P9duUa9jMiluxaEQoKBper6nULegLUy98TL9tjbhYkAKjDsUhl
I4bUFNdldgrtFWoLQUUVpwoPjvXYACSrEWOliCx5kGkJII0GjrVhw3GtqsK7TZAKEdDoEBLh1X2g
60RSGlYCrKw8YoFVDSG5Y1e9G1MteNqHctIVYECiDRFFKvY0Gq4NAWBosCO/W4oc6uKBFlcgsb0B
R0e9A3pgQb89BRIpXUIbg91DUUtMBdGurizaroeRWgK9mvSCn6EanpQWgLV1o3NAVYKCb6EUBamY
doIOhter80FgX5m5puVXosKWulE1yq9wFuR2qWvYGr3pAbMguTYUzWFLcBpIUgPOrVfS5BNyvce3
lRFWPBarVaudcvRtokRavCorxLRi5cqtSRMSYyosb1H+kamhyogEEdp9QEgrMpHkSu5aMyCjkLX5
FNOxrHfvS/OWO1BK7RRo1arVau2uhFdKIsWcCppO8x/r4AeIHS1AaWqE83H3NYLxQmxvXWrCigNC
IV2AVarVarVagtdujNQFGr2prNVr12V2jQEacq5URoKPQ2q5r7qANWGhIq9c6Ve4hSqhQNLVAQJM
qaN0bksA+29M1BqBNAsKvegyCma5Y6jlRkYjnXOu80DeuRojmedMhBDChQautBadDQuASaAJqGOw
aPmLCjY0FonQkCgKt2t+oMO3QHQ1e1E3q1WIoGgTQoU3SK9SDkNDS9aZeYoV78yVp6vq3RVq2hIF
KlqPRjc1cUKPQCgLUTfQcwFNu4WHIqhNTobKDdY+ZIFdaLUoJKKBTPVySSToDXkNi5IY8wKkNyBc
wxh3yWHfrar0GNuel6vXKrVzq40voR6QNRj7Tz1kjBqMfcBZuw1LF3UsRuoAq2oGl6yF5/SIxVly
vu8iyCiKtR6lgKEiX5UTRoyKKbIWmmY0STpH+vhHGKtVqtSDtM0i9xNzxEUsgagavQqxpiK7lruF
d613rRlWjMaErXuabQiiulraHTpV9b6AVbkdLir1zq3BH+qW1lNxRqIEvKoWnP2x9wUqTTLYih3W
7zfuruY03dV6vRPF23H3CgbjlTterEUrEm4rmKDUedMtKopFSiwFNIKJvQFHQmgNDQJFEXoqRoDT
USToqi1do1A0UgMwuBoRQGtqFHlSWJHIMbUWue6r0NelAUOVPYG5JINhQFheuQo86JtViKC0WC1e
gKViKZ70LCib6MTQjoN200lzel56AURVjRHICmNhSjlkK0URq1XtwWtV666X0vRq1WIq+t661b0I
JRXbcdpsaNKgBI5B2FF2NXvXKheiLVY0dCakS8YN/pkdlIy5BX5j1+W9GdzTMTV6DtXea8jWPDH+
vhFWq1Wq2qJcV7SuI6eZn9I9QQR0DSWoljV6uKvXO/PS1A2IHIirkadaK2ojkvQ1aiNQKtVqFHo1
WoADjHIrdlsVIJNG9ILU3WSgLAmr3oCx5klOfaaBIruSzWoMa7ri9DnwA2oSUXvSqGooAOwELGAQ
L0QRQN6ua7qFhRoUavV6NE0ABqOVE8lYCiAQRY11oqKZKHKgTfQ0NG6jmGFm1vV9TSA0xsDzo8za
lF6UW06UAGFhfmKY8wKY0otQN6J0vcrS2BvUnJhQomi2hNBb12qKduZvauQAagDS3NCM0U5EVapG
uQLlIy1TzM7a2Fc6vraudA6e1q6Ve9Wq1c6vwcqtwjlUE1wXWx0FA0wsdDQ4CaHKpJB2/wBFj/Xw
jr21arVaraOxRPK5osTRPHbS3KrUhILvcaHnxhST7exFC60DqRzq1WojnQ4JCRx3FXrnVjSSFa5M
qmxrFaNRMQZW5t3XrtY0FAKoGPYBX2tXiFOoBarV21arUul69tbmka9Wok0BRuARyuRpegavV6sK
vV+6umlyatXSrkVyNBiDyZSO3QVarUdRV6FN0hNSDVr3CtQPPQ2JUWpzzJrx12i4AGoDGgL1flVh
egvO3K9chRJIAuf013gklQXF1U3q9czoTQFd6he4kyWokmgaBvVqUUooyEV3k6MbA0vINM0Sa2q+
trVfWw4TaraW0twg6Wq2sV7cBH2aihobjSWS39Hj/XwimdVUSA0BR1mk7uK1AcrVauXD78YFBCaE
dAAcBGlmohq51Zqs1fdRvoBper0/XS4051bihfnIvO4skTujHmpHdV6IvVyK7yaEgFCRiDzonlpf
Q8je9DmSortq3LQV3kUzMR5GFCS49u0EFStA1ar1er8rX1NAkV3Gjz1FcxXWmUroDVqYW0HU8gDc
20UkFgCoom1LzN6ZTdSavS3uRemYk2tV+QP3acyVAsSKJ4OQrmauK91TkzAAktQBoLcUBagNCaRO
TsDRuaItR50a7DQW1Kt6HKu++liDe9SNcgXMfJsufzScNqvbg51fSw0BvpfiIrnwX1sKxwDRiFGO
uw1apDwGhoekrWQm/wDR4/18FqAtRUyMiBdXlAoyM9Ljk02Ma7TXZXaBXKr1er8uAcz2UUIrtNWN
W1CE0I6CgcYtXiBrxV4xXYtWWvsq6UxuwK0yDS2knW1WHo3FKxBJDKlCV1Qi9e6t9tFrVdjXaaAF
XokUFvToBVqtV6YclHK1d1XFcqPAKIBpm7Sn3VbkerLQOhFqA0ApunIURQ4Qa5rRsQyldAaIuO2g
LURerdtBgdG6qbqws3bc+1tT0jFMbCrUOVEC9ciVWylu2rlqPLUnTmQFuAAKLUsTMVjVaewoNo/6
qJpUp2o62rlXcBQBY1arWrutVxTG1HnSCnlSOA+gRV7cFtBpz4L1erUdbcSuwMeRei6mu5abtom/
B3WrtIPcBTPzllDf0iP9dia7DRAFOQokktCgL0BoTTgkCPmkSrRNPIFBYmr1f0Yv9mvSib1YULaW
q1Wq1d61ehzo8tBxkc6NEaMbUTf0L1zq1XGkDEovJ5pIvCeir9qyWUEmiADa4Aq4vY0LirmmBsWp
HQ0bVelNWq1W9Bl541+5kBoXpga7b0CRqCNCaueMGgasVIsVZCDQOh6I1M167AaC2pukJqXqNL6X
q/NWApmFieDrSg2NwWIvehzN660SBXQqtFrMvc57Atdxq9NzPIUWJq1E0i3pmAOgo8qsTXZXaBRI
q9XNXpjyFGgLmIAHLIMnpWq/Dzq44r1yqx0Ao8YkYV5jXmFeYUHU0pDV3IKJABkIDSm5cn+kx/r8
i0ZhXczU45J2kAhQJAa7quKLAUZPvM62aZjRPqG4KsGHAKMirXnozGvMa8rUXJoG1CQ9ym1E3Ne3
Ff7qNMObOBRJJ4b1zq1XAq5PBjUi902VB4w3SMjtXkwo2rkK5mgAKFG9G4q5enhowtbsaiCKXqLG
u29FTVtOVWq2goreogi13Ai/OmXk1iL2q965ig1cjVuIVbT3ItXdcFbi9XpjyQWrtuQLaobVIwIF
6sa7a7RRAq9XvVhVhVq5110B5Fr69KvQN6CkgKFppKPOkYKO/n3Ci1X0vXUBCT+kE3oCu0VYVcV3
VeiRV6uaAJqzXAIoWqwrtIKk04szEE2sfStV+G1XtXXS+t6FWqxFAVYjiPADajMCO9qJ/pcf67Cu
5a8tNIzG/MsTV6vQJom/0Nufc1dxruNd5ryGidLaDlqtrhgCPQHUda95JCTw86sK6aWuLDhxusYb
yzNJZjegKYcwRV65VcVeudXNXoVeu+rmu0GgiiigNdhBNdgJ7DcqRrbVWFc6JJNE8h1sKKVZhXI1
Yir6Wq1WOg0vV65WIpWuWS+l6voauasatYkiu8CvIa7nNdsleN6MbV4mrxNXY4r7xRY0GoVzoWq+
hJq3MJRIWmdjV6UV20bCrmrUTXWghv2gUpBE9yQpNAWomr1er0EJrxmgouyil5UxuWaulXNIq9kk
6rV+Z6109S3ERpyq1uG5qwNG+luH3of0+P8AWev1fP1Y3Nwa7hVxVxVxVxVxQIutXom2t6saAGl6
5+jjm6wyCOXIlR2POulHnVqN7/dUaE14xQjoIaswAY2vVwaFtLmu6gwq+ncb3NMAa7wK7hYvWPdi
YeZ7hS6SGlGnSja3aCCCKuKtVyKBFcq7aI1AuWRhV2oAmlenTuogir0LmrCiyijJVnahFQiWggFE
UNG6ro3XrRUV2A0UYUHo2tfQUAFrvoyXrxiu1auK5muQonl7hL0qgU79qxlmINqJFXomu69BGNCO
rKD3UQatar6M1dNJUMSsbniv6lqvbjvVuEEirg0VOlhRB15V1q3Df+kx/rPX6i41t60coNWWrCjY
Vy0AJoVyFd6inctw3q3PjuK51aoWswCue1SCbUWpeddugANAUa7hQbmDzTme29dorxi3jNrMK7uX
cKFXNd1d1dou0QavHahFzj7FDyV20BajXUqKvTE0Dz7uQbl2qW7efOuRqxFd1AigBcqKJKm5YXOn
WgxWib0RRcCrO1CIUqAVavcGr6A2q+g5E6LTHkOtEA0UK0vOgKFHmGvYWUdxNWq4Fe4U0Eq4FFxc
XNTGk5LfQG9GM0v2kyi/kNLSqTTC9EAaM1dKAqZDCGN65ir0RXXTrR6mh6tqv6J4L1e9Wq9cqI9W
39Ej/Wev0YvVwKFc/pAxFd5rvuA1Dt7e+gbqzXHBb0e6rXoADQm1XqOUIID5qewY8qRrHrp0rutp
YirGudBiKZ2B8rChKaDKRyt28iosU5FiKEgq407jXdRq9e96vTtyUV0AksS5PAGIpbUtwvbde1hV
+dhXMUGpudDkDoL3tTMBQVmpYgK6UevdRar1er635DQnlXuTzo0OpoqRQINdwruNG9XArncC9dtg
eVXFCUAk3I0bnJV6WMmvtWiwFX0FA2oMaZzemar2oC5mVoaZi2tud6IvXTTpXtar+taiePpxXq9W
53txX0tVuG+lqt/QI/1nr61uevP64ctL+kTXOrC5NhYEKQysDXSrGgKR2UlSVPMVG1xe9Wq2gJNA
A0et6Y8+RpuwjpQNK5ouwrvBDJXiJrtAooauQe4ac6vV6vV7kUejpYWtQOiY7mvxaGPaiLG9gT3H
mWCg12sKPKuVc67jQN65AFixWOjSmm5epbnqdSaHRuVCudEXoKSbWokAkixIq5Olq5CmewQEtelX
tBe1d1HnpzoX1tTNR5UikmYeMu7MeAgGktdharWoG9W0Nc9L39W1c66+hbgvV6tqatwXrlVqtw30
tVvrY/1nr6PQacv6d3Xq1DS9qNqF6W6mQG3bbgx8rtiI7SRStYjnV6BFDtoKKAApwKPCTXdajIaV
+0eS9eQGr8jXcpJUE9hFdxrkdCKWlHI0XNj0tUZAZchQDlimyAQWNEk0DalcV3rRbuBvcot+2jeg
wAALkACr6AXFrUdL6c6sa51fgP6avQ4QeVr0RQFA2IkUUTehRJrtFcq5Vz0NWpUC07XPPUClS9Ws
b1amaulKpJlvEWZmPGSTpa9X0var69PWsK5+jbhvVr62q1W1Bq4oiiCOK9cqtVtALkgj6eP9Z6+l
zq39Cv6VxXOrDXkaub20512iojemuDRNX5qSDI0LwWNqjauZo2BU3pWIovRc3sTRFq7hRar3Hu16
FXokad5v3G3aDSntpJA1MtFSB3c6JsA1d1WFFatVqtQFWo1bSPtu6pQNtL3orTcgq91dOBehP3G9
WrlTMBXcSS1wj30IoE0aA5aqRpblV6FEDRqBUUXruNc6tXKr1zqxpUNCwpm5aWoLSqKY0edEUWNu
gVSxlPjLuzn0+VcxXI1a1XvperVf1iLir8d9LHgvrfW1W0vQavtNFatQhc0YXowuKII0vQUExjwj
vN7BqIIP0kf6z1oc+AC9Gj/Q76W9G5NWPBz0tXSr3q2vQsAVN6A1x+cmVCsTsKBsVIIZauRS9O0U
72pXpyTXaa6UeVXq9WvRW1WoA0RXWhfQpWOzFWjFEXPMV3cgRXLTnpeririuVdoNdooKtMgqzKaI
JoqKPKj97aWNdtdtLaxv3UxJZ3NX7lJ7VQ8wvMym/dQ5g9ABY3rnqKa1uDlVxVxV6uauaCmhGTQj
qyqDKBXeTRBNFTXby7aAtoOp0Zr10pVLNKxjLMzH1Bpagatrej9EeMEirg0RVqEbUykVerA1zq9B
CQY2XQxqo7L0QRS3vFGLVaiKeMNTRsD2mkQkoWMfYwqNSGkBZo4Y0jaFDTYxpo2X6GM/edDy/o96
A9EsBXM12jS9da6aW0vXbfguKvchuXBfnioZlI7WIsY20tQtSpejElH7WHaFOhWrGgL0BQq1E9ou
SLve7VehSsRRY2Jq1EUwAplF+w1Z6+6u6u8V3Crirjg51zq5ok1IeSrYVe1BxYyXpSSB053on7aj
veT9Qo37KjodD0B5UHBJF6tQNA0eQr2tXShGaEddgrtSiyivJQc08hFKpagoFXq5q5q50vV6vV6Z
710CKWZz42kJLfQcjV7UQDRq1W+lHHEo7bUyAiSIroDRsajYqbg06qamHMNalAcLGFAuKBpVoqDR
U0RS8qYdx7QKdnBDsDjrZTTSFKSeNqsppsZTTY7iijCu00VNvTj/AFnr/Rb1b0r1Y1ar0aFG1+Zo
ADS9EXocuCxNdvDcVYnSKV42aN2jYXFKwI5mhyoyEC7NRUEKOfZamUVYCl7DVlBvQFWFdtWq1Fat
QPPvAKpcMpFL0nHJGuh50WsTJavML+RSftrtWigv2C/YLhBQUEMCulqUGxH3WpjYXuW5UnVR9q9L
WK13c2Fj+lW+5UHMNzK2P6QvND0B5HobKFckKSSNOdFSKAq9BlpnNBmruNEmrGgKkftCKWoAAdwq
5qzV2tXY1dhoo1dxoGjQ5UqljMfGSxJveun0B6VY1erEVyNc6P0g4o2ut+drgrUqWPY1WNLeo1Pa
9wGK9i/cQAAvMFTRFXkVgwpR3UVFFBRQ0aIBoqLjuorytTqQ4JFCeQVFk3J7QDkR3ln7h6cf6z1/
ptxXOra9dba3Fc6toSBV6A4iQK5mgAOCHIfsZCjMOaNYg0WoLzANG9mWkYkXoqGowkV4bUIhXYK8
dBbVauldaYVZhXjN4iqq5uRUi3WE8u4iiTRo9e2gDQBruosa8hFKxauxlpiKsLCu29C92YCmNDrI
Oa8gp/xjoBah+mrA0TelPNuQoWIJuY/9ZoGrXElrLyLfaFbnRonlcUAK5UVFWFW05UXAq3cQprx0
yAFWFu7kXrvruWu4U7i4ahzq1jKxVCSTXSrBh9AKFcjV7URer2q1qGnX6dG7ShU0g5OKcAiEEBob
0sXa0kx7zToGUKFFIaDqR2g0UrtqKTxksCE5sXsbc9FpWFErRRGpsZTTYzCvBJXilIOOwUepH+s9
f6Ver1bW5q1X4b1a9AW0vXM0Bx9aA4SbVGxRnczKw5UvOh0tRahRIAUfdy0vV6uauauauauavV6u
KuK+2hauVA0TS/bIRTItiKK0GK0Ja8gNXNXoAmlRhRL0CSaFhV6A5ydT+lat3KaX/XzFWAr/AOFX
ALC1LyBPcoFz3AEixj/1HoLWHQdVXnJScwjXo0bkXq4rlpara2FJHai9qMrE8yQpoGwOjMBTOToF
pFU0kq1NL3cAJFHn9ELHS1q5GuYrkQK6j6cMRSZJFO4YaLKLOwJZVJodb3JFGiaaYpUcocWoqKZO
WpI0UcrU8T3+9SJ5BQynFDKSyyo9TTi3qR/rPX64eleudWGl9b8PdVtb6W473qwvxWN6xpzG8pBd
gRStYhhXdV6LCrg0OQ+6gHoKxrtaxU37DfsNdpt2tdu5T5DQckdxruruHBILEG4YXrsNFTRQ0I6E
a12LXYt/tFBq7jVzpcVcGlAqXmetHkEo8j/8Odhav/ha1E3P6lc0hsbBaJ5/qCC0Z6Lew6e7Gw6h
BZYzzNX5dleM0UNdhrsooa7DXYaVLU8hvQWlFBhbusS1NIdApNC1pJI+xiSeM86v9AdetXq1ddLa
e/0y/p0QAsyCzDnQ69KI7aJBoipgL95VI8gWLG4kamiAoqRTivGK+2jzpjzoqCGx0NfjV+OtFkjU
kk2q3px/rPX+jXNW1vrzq2t6uTVtL1zNcqPGTXPjFuAiseQNGRVrG9quaNBCajQCvYsL9wADUGFO
xv3PcFgO/l5KZu6lUV3La6mu0V2A1287kEMDTC4W4q5q5rnXPW4ruWu8V3mvvrtauy1Ac+QoEBTz
A6t1vztckWBBuOjf65L2pOQYEFRcuLg1FSfoPRb2HTt+5iDS9WPNf1k8gxrvIoytQdie5aaZKE5r
y15KMjEKhrsq1tOtM4om9AXoISvkCmSQu3pcj9DbXrXSutXtXSuv1EbgjQdeokGgrodGqYnv7qsC
Ijeu5Qe4W/8AjbmCCaXkZ5e2lnN1kRgYgaAZaJ5MrswhFvf04/1nr9PfS3oBSa8bWNc9b1fjvXOr
aX06eheuvoW4kAAZ1YWuaJpSpNWIPdVxVhSqKZQATRNdx4gDRFc6DkUGBrsBr7gbNVmqz1Z67Wrs
N+wW7FoBasK7wKMgusgYsVAHNu4EH9H/AMSbAc1vzQcmpiAL8mNo1N67TdjTC6qLKrcyp7ie0Ibx
nooYKtMbKjC45UTelIsP0gKV5mglM6iiSxWKrWoVYGggrkKDCm5ksBRYnS1g69g8zD1r3+htXSut
dKtV6tbQHQ6dPo72pZqEimo7GlHKUanrQppbFmLHRXIoS3Aa+hJBUUdJ2vJoszrQyGNAgiwFSTC3
qR/rPX6X3t6QS+ga1SqHTW1W4b1YnW9WoVb0L1YX4rcZ5VC9qVVFcmBFXtRW9LIRQINctFF6CgEk
U1rnnVqJtwAgV3Ci1X1DkUGoEMHYrQmW9wQ7AUZGrvNXJqxABrrXaaSOgot2rRPJz9in7m6qOQHM
H7n6sKHST/Xej0NJTnmKB5E84v8AWeiXCr1c9wvzkP2LzUnkgPjUXBKgu5NBbkACr1egKtXPQsBT
OTpbkCFMkgDXvR5cf22PGfoTpyPD10P1AJFRTXF7g8tLErRcCibngQFmWJlPae/suzC1HlRIC3ud
LaJIVp5WYH1Yv1nr9XagpNWOkY7uCIkNNbv4biudW0JArma5aW9C9dp4+foE0BoJGAViKeVZB42t
bncGrFaWQHTpQe57qNzQFE8gutqtVqArtNdtBCa7DRBFICDcFXhtSSFaADAxk12kHQLegldq1eia
VgackF/0r+px9xNqtYp+pv1PYAdJf9ai57gS3W/arc1UXNxdhYxf6jSXCr1PJin+SQ9yxfq7rvH/
AK0PJk7iIQKEX3eOipFXoGgaJppKJJoAmu3tAmCUSdSKHHa9DiOl/pQK5GuldPqo5OxvMCfIKMxt
5DRYniBsfygQsysRIoppENNOhMsvcNbVbhtVvSj/AFnr9DbjBBq3BfRRbUCi/bw3q1AAaX0Fhpb0
CQK5njvQ9Dma5DTnr1qPIdEijMpYWYEirA0HK0GBBShcEUSKUC5W9FTXbVrVe9WNBTbtq1cq5Vyr
lV6vTRhqBZCrBgVFFFoKBQq9dwFXY12crWpjzkvYGjz0PSL9Z/U9rCpRdD9qjrbup2uUNW7Rfmfu
CC0ZpAbKPuc87/Yp5oO2m5MoskYuOQq7UbghiR3UVBqxBvaixNAE0FqUeJmldk096vXK/H147UP7
FtVuIn1I/wBZ6+parG3oBSaEZoRhRxi9ApTSDW9c6sNbk629LmaAA4r+kBcqVpoyotr11VmUo1zK
iqxAJvorWrlRagKvzBtV6tXOltV6vV67hV6LGvJX30e8Vd67iKPa1dpBBruFFq+812V2gUXArvNI
SaP6pT9xFqH6R1U/dEPu/wDk9tJL2k60o+0jmouZOYqOkN4zSXAX9Tr95Yd3QyURcpbtiNFb0bij
QNFhXeaNAXBXmpjV2e/CdSKHoHjI+jNX06UaB4b0edWrn/WI/wBZ6+lbmFpkI4LGrGhGTXiFeIUE
UVyrlTuKVgeIVGRUq9rXrnVhwEa8/RJArmaAtxXrrVvRtQ5URelkK122FG9dOEEimkiaMqRViKuD
VzS2OgGt6vRavuNdpoKLWUVcEsQADY+UCjJQkoOpoqCO1q7KCKKLotHIWhMpq96tXaKQcud5P1Nz
N+bcgOsf6h1e2k5sD9yqtyW5vQ+1VN6tzJ7QhvGaXusOTOQKub2BJbuCdISLRHmzEUWFFtLgVc0e
hUAkLY9eAcqJ5fRnl9Hbg662tp11P9Yj/WeutqCNQQ12CrCu0VbW9GxrtFePn3CwA1LAUZRSS/5J
ge+xpVtxinUPGNb8zrb0hc8ZNq61b0CbVermr8CuUpwCPQjyGRI4zLRXn00D2oG9Xomu65VL0FNd
CWNwWNEWo0atVtVoNag5v3NXbejEtfjijC4oBlpZiKDqRyoXvJyI/SKkpRUB5qLlwAayDStYt9op
OYkNyDauRokkw/6jSX7RfukYdxFqBIiTr0ENrRn7pL35Cu6jezjsKzdhuavRYnjtQ1P0d/praWr3
voP6vao/1nqqk0EAoKBV/WBoG47Sca99COUjh3q/BbgRiDIPu58FvRNcz6F65CgPoCbUDb0SpFKx
WoXRmYC5HO4NKLG9NSrY3q9HQd1EA09r2ogWC87VagOZFqWu2rWq9Eii4q7GjETRS1crLe8vX/4r
XUHkIKUXpwAwrI/UopvuFA9quKFcqN7w/wCo0hNhfvlP+SQXY2svJpT90NqUkPMTZl7AsqqxY2vq
fQP03SrCun0YOnThvrf+rR/r6sRYekHUngOsZBj7BflZVF7c7VarcFtRTG5Nc6sPRvXP0belzOnX
1Sbag8kkKUVF6jk7T5FkaWAqVU0xtTMaDEUJTQlFBgavXcaLc70BVrkKoogGioFG1doNAWq9FqAY
0I1pbBywFeQUGU0wIC3p+o6nkF6t1h6KL0ygG9SDuIXkFtXjFFL128lQA9jXdCTGbJSE2590360t
buuQv3yczBbtDENM7cFvor/QdKtXT6W3AKA4bUVI+st9NH+tmUP3gj0EIDTxhGIBoAiu9xXlFK6k
++lzpegdLVarHitTEXoAD0bmrehej6V6B0sSfUJq2nPVGKF1GgruL0O2uwszJZu2u06h2FeU13rX
I10pSaJJ196LV2k0FAotaiSaC12GitIObdBcL7Dq3Ogpt2Utlpmu3Fer8SuRRY3kViw+0EWP/wAD
zWGwBLBpu88AFEWGgFz2GiLemfoulGhy+ujQkkCnhNH6kcBq9D6GP9axGlUAegnSWTvOlq7TRVhX
UakVagKPIBSa8YoraiNVQmmNj3cvQvXP0b2q9D07fTi41DEUXa4ZgRI1/KviT7mkTtftuCtWtXOr
0HNCWhItAg6s9qVQBfVqS1M4okUvVwbN+ixKqlAW1IJoddSwFd1GSu61d1B70G4US4ZbEgGmiBLA
gL1jP3sSGlZyNWtRGpoAGiAB9J0rpx9dOldPozwXrlwXqK3bSkFJI+4ywKAQRVj6XK+VGI5P6DH+
vjCk14xZuxKeQtoFJoJwcjTLzsa7TQQ3EVdqCj22HKhTi4IvQF6RKlk4zV651YfTH0SD6XXhtwE3
4lYgjI7o1IDsV7mjtGVrtrtNc6FCRqubEUJOXkFeQV5BXeL94rvWu5aVlux5HmOEDkRZqZrUZDRY
mgL0sdFBXYKdK7mFLJcBr6pIVokk69guos7EgyiRhwDoP6L0+sxxzoC1BSCRUkYapyvd2AgIxpIJ
GK7bLaTFK8OPCActizehb6mP9fFIe0+cAGZ65uwjUV2jiU2JWgrGgoFXok6zfawocxbmFtUr9q8N
651YeneuV/WHERx9deut9OupU2q2h4LValciitXNRzMjB6kaKljLrYW7KA5E8+6u6r1er13Cu4Vd
ajK0TeiQKMlqLg0HrvoNei5FL1f9PM0IzcRG4QDS2rRg12EFSbI9zQF66UTarii3O+hJqSRyoPD7
6ij/AEHp9HfhAJP4o7XUqb8lLXsRUn6g1yQCJQQ0KM7mEuY5IYY5slpFNeAXEC14ErtjSvOlZDKR
QRiOO31Ef6+EGmAcdpoIaAA4SbV5K8ppZFalNwDROltWXvjja4U0bXaeiST9BfTma6Vb1Dx9fWAr
rqTalZkr7Wo8PXgUEUSCdb0rsCjd1Mo7S4u7J3o0du4drNEFUoX7hRKeMc2tRNqJJoISGW1WNAG0
VFeS9ZP0x9KAo6AUdAKIrtFKLSVHa0lqL3rvNg5ruNBxV6eQlPfgND1R63WgeO+l/qMVQXZr0yAh
0KsCVKEMpIIPXuo86jUrSADGIvQYAKUNMujmSmR7hGJ8DXWEuw7Fj9/ro/18QNqJ4i1qJvqQDQLr
UT968BYUr2JSzhhTyFtOf0AvQq31g1vqTQFqBvwDS3MMHpgQdDwE2rmD3hqYG44AxFdzH0EtSACr
0W7iFAFdoNdgFWFoqutgbmT9Mf6RoRQ0Op0H+2rV7RoDRjoRUFFMlqRtGFuEUNToaHpn0jyoi9A3
+uvoCRUZ7lFZNgUVZFKtEwIYHQUDQY2vXNnAtQajLGCAGogirm9waBqZ+0AW+vj/AF0qknuakjBD
dqgm/ESBTNfhvpHIUbvFi1XNG4Xuaudd9gWsfoOddKtoPVHGOImgOI3068HvqSL+kGZaax4RXvoH
U0QRqIgVUWEjVEtDQUa9oujClqY/an6BoTQ0NDX3/wD20q3LLaoelqI1dbFTcHoSLfRe/D09M8uE
0CPqQSCshIbmApUyEOEkKsrA0w5EaA0TV6FZDkVegxBjnDAinmFLKwpiWb6+P9bOoqN1NB2WllZa
eRmELFk1JAovw86trao5CteQV5BXkNixq/0R5V1ocvXtxj0xp19Im1Aa9fQNAW1seEijSsbaAfb0
CqWIFqFGhRonlEPtK3pam6J+m+h0FHQUdP8A9tK/aWa9RUNSOTC4W6kjlHGhVgAdDxD0etDiHp9P
QtoD9LFaiQdQo7ywNE3o0ASedHoBV7At3NqXa3q3+lj/AFsftBIpmLGgLmNO3QkCi/DbS/1lvSA+
j68B4TVuAC3B76H1L0HYUBccI0AJqzirfatHQ0KNP+mP9LSGlqal6VflbTpr01P+3WPqNPfR1pbk
dvCOI+h79OA6dfRFdNOnD04Ov0sbWPADyHIMbFpQi+RLcjQFSL3KsFShEj9W31Ef6ze/aaCGggoC
1XtRc8HP63nwgW9G/wBEOfH10v6hPong96tY93cNTqrdpaUmhQo6jST9Ef6WFqXpNQHKra2vra+p
/wBtRKDUgAKfr4PdhQpUdqZSrcHuPrDQN9enpX+jvSPfiPVz3PQdhSy6tF3MsSCpCPJ/Q4/18Be1
E3/oHM10/pQ4L8+AXtpbnwW4BwAW1GnvXvoDY9wNDqOle2s36EHJx9q9Juo6daI5aKLAixpBTCxp
v9tdDe9J+uvahV+Z0SVlLsWOpr2+g6aGr/TEV1q/0iyV2kcDtZV6aq5BVgw6U816At/RI/16Wp2v
/QlUtRiceleuR+rAPGeEc9evD1oDU8BF+IaDWFVNFVDAc/bUC4FTnkOQJvQ6S/r6Ug0cc0FzTi4A
uaYXFP8A7aAuSCKT/ZVuD29+4AnmeIcI9D36cIoej09K/wBOCVKT9xOQteVTXetTm44QxFOzOf6L
H+tZVNXFO1z/AEHtJpRalkN5lCvxmrcVvVv6V+K3B7+gOAcd9DxIt6VbFTQ0IsKA5NyM1CmFqHR+
crUpBFOaQ8qY2CGx05d0v+6kYBpGU0v+2rcuACk7O6btvwHT31HpWuBoNeo9h6HSiL0PQ6f2RH+s
gX4L/UBSaEdBEpRDZsVJFfHYUEbutVqUWqZ+9+O3qGrcXP0jQ4TwE29A104D9AqtSXutWodSOS8z
T1MbstqkItTf7aQ8ybDqQbGmNzQNwxsKkN5KI5il/wB4HOm6qKNHQqCWFjxdRwj0zwG/omhRFA8R
4RA5DKVP9hR/rPX6oR3oKBwrIy0STQNAUVADvfhJt9N78B+o6m3PjvwnQ8ApJAoQ3NhTDktGkOnW
n/2G+v8A+3Rmvoa7uWiGxY3NSf7BQW5IoG0wPOnFAcm625GiCSyMug4Rw+30HSibekR6PSogC8h+
6a/Bb+vx/rksH+pC9xv6AoHtV5C3EPpCfria6cB9AeirCy2slN0Xoxocix5Uf91qYWNKby6DiNDS
X/YKVgKci3/7qU8nNL0boOh5UWILyFqFe+vvw+59E0aHAPVPog2IcOstrEWrppHZIGtf+uxfrPX6
hjYIVKegBWR9sfqdfVvx9K6+saGo9YnUngNW4AL0Yl7VX7UpqWibmr30/wD3AizMC1R/7aAuXXQ1
2/boBc2sal/2Cj1sbn/dSm1HnSmnNDo9MQKZga9+McYN/UOhH0fSjQWyvYkgECFiI4AKmlQGRUNd
lgkZaiQD/Wo/1nr9QqE0qhfRQXOcLScFvpOoHEPXNDUG/D14T0AsNRr76+/AKPfQNgvX3vbXpovO
Q3odTUX670g5EXrpSi5phY0osHGkv6xXbeiOTf7aOnueq0wuWUGmhQJ6nufoBRo/RCib6o3IyqKk
5mNFUMbEuTXUeH7SLf1mP9Z6/TxqGo0TV/QWs0gv9COI+mT655jgPEL31NDUeiK776DgXmXo1H+o
KSALMaxxdwOeknVRYUwuFFzR5jpUwswoGxZhaT/ZQFzqOpNMLgqQtD0BwjQegaHHb0khZlOOACLc
SsRQ06Ut2KhhHzqwNMtJEbAcnjDU0bLRFqCk0RY/R2/oMf6z1+mJpFCqzi/XQa2rtNdtWrpUkhY+
vegOG/oX+jPIcI9K/oe/BCVBYoW7eY0A5Ug5MOQF6ioSdovdj0xzYg2OhNyh0c2Cmx0J+7I/WKPX
neT/AGdKUcmHOiLGjRP2ksR6PvwHrXvxj6KN/wDGCSSimnXtIFy8RQaqbVfRbrUpN+2w/Sb91JIy
15Q1BiKJuSAa71QdxY/1SP8AWev0vUhVVWkJoC2oNqHOgtBKIAprCu69SNb6G3Deh6HX1yeC3P6Y
UNfelF6C2IvdRyGhH3UTYA2EPQg0vVukGiG4c8hV7GmNyaU3DGwqb9Qq1z21L+putN0Xqw0tzYAA
MhQ2vx+/EPSNDgtQ06+ijWKECpibOpYFHs6sY1h+3tSvGaMdgCajDswN2VSzg91H7zOAqiulBxbz
EkygA3Y/1WP9Z6/SAE0oC0zFiABwqaVgKZwB3G9iaaS30F6HD14z61vUJ058B4b89Rp0HD7UA1kJ
upuR0A+6ifupzzfpEPtKmlHN/wBMA5UDYk3NGu7kNENixuam/UKB7WLLabr1pTcGko9B1bqeddoH
onT34D19K30MZuLmu4mg7AE3oKLe2ixgoQwRmK0CyqWZqiQAPIX060R/WY/1nr9Gkd6IsKZbcNqU
AV1ogilWpJAf6WfVA06+v7++p1SZQim9KNFPMmwpTyqT9EXJS6gCpD9sPJb0TQ199DQ0n6ij151N
1HRTzc8l60KarmgpNHpQ4xR4D6Z+iBtQNxwrKCaDWHWioNMtzGpFZDWGpP8AWY/1nr9EkVAURRSi
poixoUqAUAtBQCXa4mNmct9Lf6ccY4Qb8XvwH11AIUClonlTHQHSTmq8ho/6Y/00TV6Dm/cavy7q
7hVwSCKvU/UURc9pFT6sbmieS9W6X5lzQr24/fhFH1L8agsWxwoKkHiBIryCu4VfSZuQoORQNxe1
BqLgD8gD+vR/rPX6AKTSRAUSBXfT91DmKkUWCMaRUAIYG5otYfXgaW4OvqjloefCaA4rc/ROp4gK
8TgLyUdCdL1cVer01X1fmq9KNE8wa7hXdXXW5q9cjV6BsfIKaz0DV9b8qY8rcwoFHrxmiLijxD1A
CT41jEqgVajpAwV7XMy2PGOVG9BmAEjVzZtO5gB1LuaIv9Vf+ix/rPX1gCaAAC9S9dauRQY0huHN
qJvVzar0qlqk7FX+idfqz6l+de+vsaU2Jm5Jax0vV9DyouK7gaGh6HlwFbE6A870NO49+vdVxV7U
GvQ1vzo1zv42ZmQrR0Gg4RxnQ+lFyexYugKGNSmgpDdXUMGhWmUqf7Oj/WevqtG4ocwFJpiAbii1
X0RrFuY18Rs8oUEkn1jQ/oI1HoDrpfi99DXvwAcIRjQjogUKvoxAA6E0TyJApSKXQ03A2gr3vQJr
uFdp77iu4Vej1saJpOA9QaBq4ssva0rhtPbjP0aXDBrVK5ZIntTL2moB/k7VFFatYzqxJFv6bf8A
oMf6z19RD/kLGu4AGRjUi9yxuGGlqRaAoxiu0CgQolnJ4rGgpNeF7LCTTQMo/oXv6h9AcvTOh4UW
+hNq7rUHvQoGmFzfloVBpVtQ0NHrqTRq/CGpjzq9AW0BoamjR0vz+27kE8B4DoOh6DiAVEkA9FOS
6FQSy3rxi0at3K1P0BNSdwYqKIBplt/Zkf6z19Q9yt5AVOsikFWDAUgNAV3AU0gUvLcl2I4CbUvY
KeSwErihL3aRPYTMCfoL/wBQHGKUWFNRIubWvpcVfhGjdF66nqeENoSDwX0XgbQkihzogGmisuo4
bUOP3UXL9ZRb0UPCAKViKSS5LXW1NyIr3kBK/wBlx/rPX6KOMhkTnyWndSvdamNzxR2bQMRTR3pI
StCOmYL6QVe3i6/Uj0DQ+gHPiUXI0flQW/ojV6XrqeC9q5Gu3mQLFCDwDhPSn5Up5CvYrR9E8a3J
brJcmNQzOAG4gbEG41BqwJZihSW4JUrMwWgzFg4skyEuiktGoX+yY/1nr9DGT5KUkU0gNF/Riur6
KwFd4ppzR56djdvABcnlXOlktTr2/XdfqToeD3jGpPpDVuq8B4DagAKU0xNM/I0L+gNDV7Up1YUR
bU+pE3aGNzQ+1muXKMBxBiKElwH0JtUbmiAwUEAG1Tc6FEA6F3o9zf2VH+s9foFQkKlm9mPOw9Em
1IDR5HQGxkUaIt2ke/DZlWN1qSPsNI3bTL2ngVSx8ElEW+lvz4D6F/V969+BRYUaPoih10bqvTQ0
eAm5CqARV+bEXa3D7a+5o9KFA8ga7jXUVFEXp4kA4BxpqORJFyBQVRT2twmi9qErU7s4pGsQRTyM
CSzH+zo/1nr656Y5/wALuTXcxHpBb1er8+tDS10VSSbIvAnOu7uBFROCHQo1Ib0bg6xkqFlYHIsR
9SPo/ehoNTSi5GhNH0hqeq9ND0PClM1qcm4ViOdBrcAPIHVupo0etA1ej0FGkWyBKYWbhHEpvryq
+hvYdPSEhoyR2Bv/AGfH+s9foFHaPQII1t3UToLGrCpJGWkdGIQUxCgkk6k2p+RAtXWjyrv7kq16
JVtIggMiox7ORQAO9/pLesPR9vQ94xqdb1fW2oFDQ0aHTQ0Ry1JN1J7ib0wNA2UMDQA4V1bVupoD
kKFLTdQ14nk5AXaRO0+lzFd9iDfXuc1CAQ4s39tx/rPX6s0AFF9e/mVRgV7SsrimYseAi4F1agef
Z3AC1GwrnV6SzaLei4UPKWHqJGzF4mX+g+/EgsKPQ8fPUanpQ1PQ9NCL0QKvaibAXq1wBYg24OtL
qentTDQURotWvR0YVIbjhAJo2vwWNKSo8q1GWaipFI3a0vNv7VvxR/rPX6oKTSKBTs6srBhVjZBp
IwI43UMFY3Re493bT83BGikEgWFJWQlhSqWJUj0gKLsaZi0dHU0opWFOlvr1FyNGOl9bcQ1bpwHo
3TU3J5XI50BRJBHMaih11GjdKWm0BocDi6kWPBcigwsSoBN+AgHQNajKBV7npRC2BuP7SOg4Y/1n
r9SXCkKAHe4pkKlJAwSQrQW9SsPRAuZkF1dQrSMaNyafoLXBoC9ILVJyoQkkdsQZix9BVJLJakjq
RhxIPtVSaZ3XjtXje3rnjjGremNWodeBksCOC3O1MnLtoqL9ttbae+o60dEpuRI5ihwMpKhAAw4O
tWvVyabkPQJau0f23H+s9fqSL0o7Rr2LeJLmST0rc+F9L0OvIV3/AGlhd793oJcV5DQIp5TqATQQ
V9oq6GlVLGVBRNz76xKl7QUz9tLPIKnN2+lQcqNHhtztwKtMLHRqHXVaZxcgkEcCiiKA5MOAaDgP
U6HqDTNeraLpfS/JnC0RcV0q6EcrUihaYhUNr/3Gn6z1+uVb1K1h9BIpIjcMAKUUGFEhgV5+gvXV
0uKXqTQ5cDKGXhVSx7ytKC57VFSMC3oCNyDwFW0PCvUaGjwAVaj10Vb0V7abmdGpeAHlGvJmAHJq
INWoA0DYd1XFHodRQtVgRqRR0I0IvR0XS1A0at9yHkwsaNzStZo15do7SzO39yJ+o9frUcCQtamY
sfoSCsgBFWrlQsKlAI9BbhhqTbQMVIck8HQDgUXI+2upDdqmRjUad5cANwilRQAzCp2udI7XEhNT
D7uGMakURqOpYV3UatQQ0hNMaGppeEEin50LVcEKAadu2i5pXvTNag5pWBrtNBTQABsDX6S3XS9q
OhB19gTQoU4a9npXY13ItK1ML6kA0CVpmdtD0/uK9J+s9frWUMOf0V6WNSSz191eRla96blHxkmk
IJtqRyZABQFuFunArdr1ahyMyWKHtXijQU3aKDrZ27jQQtSxFSFtUrAtwoOVe5uaI5WoCiQKvRY0
rElTXdyPD2mhw3591chQa1X5k3po6UUTc9VXqrWBIFEA0htTG5JudD0B5UDTaEUORoUw1sSaUmzS
gj+6U/Wev9Q7wrd4I0jj7qYKoZix42JBWlehSii3bTG559zObq9X1kPLgVdVp4y9TWB4bG4NqLE0
VuAhIteuSUHqYsKPCo5jQ9P0qBRAFOBYjmase1VtQNqBo0OBR9voWq2lq7efYKYWCrahRa4vVxRb
h/8AloQCLUBTLoKPMcBRT/dafrPX+ogAaIpYswUMxY+hIpYI/Mc6XkO4Cmfu0BtQa9FaViCBeitq
du5tTe4lJKtfSNCavamJJ4CbBZDfTxkgyqQ1DRz9vCg4LGxJIJJ0K3rtFEWoCrUBxB1q1WNdpqxq
2lqA4yKIIq1W1twleZ09itDlXWiLEGhR6/25b6JP1nr/AFJULVYRqzEn0nQMIe64qRDagL06HRSb
oL0DamkuOByAAwNAkUtyFNllPavCeYuSUe1AXNwtdqE2FxQpr9o4UGpOtqA1IvVqtVq5VcUWplcA
Y8xRMZjFj4fehiYSz4zxpjwvNUqGNzAwRE7i8fafHcdgopauwmu012mu012mrGrVb0DqTTHkDTc6
XRv7Zt9Mn6z/AFK4DA2r9Q9MsBXcqjzKR3tRNghBUm1PFcpzIFgWtxEAgoUpT9yAWAqZ+9uJ4w1R
tcswWjIbSfpDdzaW5ugGpNhHZqGtqtrY1auVXFE0Ype2LEkkTHw/JS4iLPNDFHJkSRNCjHxmZwmN
JO6u0kOTO88kGHlLHHlyhpnzIimBJGpzpI3pfxxHhojS58aBcXFQxNERkT4wWPFxjIjxss0+M8aQ
wNIGWzvAVCx91FAD4+QS9FbUatoaNWqxFA0oo8qbp/bl/ok/Wf6kyhgGeMiZKdw/pkAgKBQUDVE7
SevvYUTb0IxczJ9+KbHIlsPQZL0FtQteQ2VASeVKyimHc33BdCLiBSBparaXFKkjCHGllWDDaQxY
IE0mNEkuUsRjaRfFD5ljgjekjJlkiQtKFCeWPtY3bBljC5ksbOJIuzCKGbOAMWLAnheBBlZGPGY8
SBWTIgETtggpjLM5yYWiZXmdYJpUEomjmkyJZo8bKWKJpFOVkZMTxYUkSRZJjORIsBjwYkc58So3
jcqGtVlNMpFHmKtcFSKU1b+2r1f6WP8AWev9T7Rf11ezEgCiCWLW9Ed6m5JRQA3X0iVBZbqgKkA1
daD0JGFHtYaIOR5UgZ2fFlUvgsolwYli8eP4cdkWKN5FWEOSsRMnhHkdI+6VlRPPF2x5qWOXJ5ZZ
5HaSWSQHuFMKETWCtdgRSlqeV2qLJliBkYs+bK6QZbw1LkGR/wBxXx42QI5MvJR02/tMMskceTky
RCLEKmFhG2ZlxgxYUaGLJgUTzYkTLiQCSsmAQ1+CzRwSRpHNgRuJsKSMhrHtVqKEajU9f7NtVqtx
Xq/06frPX+wyvPnQoWsevokXAMgq9Xq/okXoAgMCaJlNdrGuw2RSVjjphY17rG7jCEgle8lOrsvh
a/hiqQqqeaMLHmoQuc4d8qUySzSSFmZhY27DYAABgKMgBaS9F2uMlwFnILzB6jmiUM8ZZmhKxqhp
0APg+yKJ2fJhdKigcoe4Me9Avew+6gzJQZjT5EjCHIkippndnz3dMfKMLZOX5im4xeOOXumBueRq
bCjdZsSWMhjRVTRQjgbp/Zdq5aXq/wBcn6z1/sUhgVkUj6C9X9SO4pSCJ15jrBieQQRiNA8RY5cQ
Zs5Q0uY5aXIklBLEAG3bauVXWu4UZKL0ZK7jRua7TQFWo1a9dpqxqzV91c9BIRQkYEzOaXJcAykn
8lu1MgCjKpLTwsqPCA3iJIhIjCXkQArGCqoLywEKnIRTPEUzY5ApuAQTPhRvUmJLHXcQSqmihGh6
f3an6z1/sawv9Ler1er8JjUlCUJZXTxsDigdkzOHANBb0AK+24cCjIKMlGSu+izGrNXbXaKsOBLW
YWJPJenHYVYV2iu0V2iu2rVY0QaAIqxqxAu1d1B2vJKyjzyCopw1Ag0krxmLPVirAqSLz4UclTYk
sdBiK7VamQrR6/3Yn6z1/uaM2pJFJkyvFGZe+vJXkrvNXarNXbXYK7Rwk1egdDSHkedMpAHTQmr1
egavR6KTfjJ1tVqsKmFWpequRSvegwNJLIhjzxdGDC9SYkUgysUwsCak7e7+7E/Wev8Ac3aKjADZ
D97x9O0VYcBoNqTperVagNDSUsZFS8gOmhFAUQatpfQNXcKvV9AaJ1vwTaL1tVqDEUr8gwNBmUwZ
7LSOjieFZhk4MqVb+yr/ANDT9Z6/3ODYk3qPppeiaBq/KiTXdV6vpe1HtrpoaUACBxIuQtk16Vfk
CTTA3emNqUm7NSmu6xB1tVjVqI4JtB10tVqDEBZK7hSkqYs91qKZJFy17cj+7E/Wev8Ac5r2QcqN
CiDQF6PKg16DEljSteitWuGNgOYtQo9FN6iRYkyMgyLoOq8ywuFYAXFHtNSCgbFiDSg0TzUkUguC
LEgkFQaBNr8EugoUNDoVoEilloMDULpFHLI0j/2Lf+jp+v8AuoajhFGhoKPCKP6TwDQ6ngPWh6J9
U/3aOv8A/9oACAEBAAEFANw+a7ri5/8APd4r+ebxX883iv55u9fzzd6/nm71/PN3r+ebvX883ev5
5u9fzzd6/nm71/PN3r+ebvX873ev53u9fzveK/ne8V/O93r+dbvX863ej863ev51u9fzvdzX863a
h863ev51u1fzrdq/nO7V/Od2r+cbtX843av5xu1fzjda/nG71/ON2r+cbtX833ah833av5vu1fzf
dq/nG61/ON2r+cbtX833Wv5vu1fzfdq/m+7V/N92r+bbrX833Wv5vutfzbdq/m+61/Nt1r+bbtX8
23av5tutfzbda/mu60Pmu61/Nd2r+a7rX813Wv5rutfzXda/mu61/Nd1r+abrX803Wv5rutfzTda
/mm61/NN1r+abpX803Wv5pulfzTdK/me6V/M90r+Z7pX8z3Sv5nulfzLdK/me6UPmW6V/Mt0r+Zb
pX8y3Sv5lulfzLc6/mW6V/Mt0r+ZbnR+ZbnX8x3Sv5julfzHc6/mO51/Mdzo/Mdzr+Y7nX8x3Ov5
judfzDc6/mO51/Mdzr+Y7nX8x3Ov5hudfzDc6/mG51/Mdzr+YbnX8w3Ov5hudfzDc6/mG51/MNzr
+Ybnb+YbnX8w3Ov5fudfzDcq/mG51/L9zr+X7nX8w3Ov5fudfy/cq/l+5V/L9yr+X7nX8v3Kj8v3
Oh8v3Kv5fudfy/c6/l+5V/Ltyo/Ltyr+XbnX8u3Kv5dudfy7cq/l25V/Ltyr+XblX8u3Kv5duVfy
7cq/l25V/Ldyr+XblX8u3Kj8u3Kv5buVfy7cr/y7cq/l25V/Ltzr+XblX8u3Kv5duVfy3cq/lu5V
/Ldyr+W7lX8t3Kv5buVfy3cq/lu5V/Ldyr+W7lX8t3Gv5buVfy3cq/lu5V/Ldyr+W7lX8s3K38t3
Gv5buNfy3cq/lm5V/LNyr+W7jf8Alm5V/Ldyr+W7lX8t3Kh8s3Kv5ZuNfy3cq/lm5V/LNxr+WbjX
8s3Kv5ZuNH5ZuNfyzcq/lm41/LNyr+WblX8s3Kv5ZuVfyzca/lm5V/LNyr+W7jUny/c1Wb57vUZb
/wDyNvgr/wDpO+V//Sd8of8A+SN7JX//ACHvJrH+bblKP5ZuNfyzca/lm41/LNxr+WbjX8s3Gv5Z
uNfyzca/lm5V/LNxr+WbjX8s3Gv5ZuNfy3ca/lu41/LNyr+WblX8s3Gv5ZuNv5ZuNfyzca/lm5V/
Ldxr+WblX8s3Kv5ZuNfyzcq/lm41/LNyr+WbjX8s3Kv5buVfyzcq/lu41/Ldyr+W7lX8t3Kv5buV
fy3ca/lu5V/Ldyr+W7lX8t3Kv5buVfy3cq/lu5V/Ldyr+W7lX8t3Kv5buVfy3cq/lu5V/Ldyr+W7
jX8t3Kv5buVfy3cq/lu5V/Ldxr+W7lQ+W7lX8t3Kv5buVfy3cq/lu41/Ldyr+W7lX8t3Kv5buNH5
duVfy7cq/l25V/Ltyr+XblX8u3Ov5duVfy7cq/l25V/Ltyr+XblX8u3Kv5dudfy7cq/l251/L9yr
+X7lX8v3Ov5fuVD5fuVfy/cq/l+5V/L9yr+X7nX8v3Ov5fudfy/c6/l+51/L9zr+X7nX8w3Ov5hu
VfzDc6/mG51/MNzv/MNzr+YbnX8w3Ov5hudfzDcq/mG51/MNzr+YbnX8w3Ov5judfzDcq/mG51/M
Nzr+YbnX8x3Ov5judfzHdK/mO53/AJjudfzHdK/mO51/Mdzr+Y7nX8x3Sv5lulfzLc7/AMx3Ov5l
ulfzLc6/mO6V/Mtzr+ZbnX8y3Sv5lulfzLdK/mW6V/Mt0r+ZbnX8z3Sv5lulv5nulfzPdK/mW6V/
Mt0r+Z7pX8z3Sv5nulfzPdK/me6V/M90r+Z7pX8z3Sv5pulfzTdK/me6V/M90r+abpX803Wv5pul
fzTda/mm6X/mm6UPmm6V/NN1r+abrX813Wv5rutfzXdq/mu61/Nd0r+a7rX813Wv5rutfzbdr/zX
dq/m27V/N91r+bbrX823Wv5tutfzfda/m+7V/Nt2r+b7tX833Wv5vu1fzfdq/m+7V/N92r+cbtR+
b7vb+b7tX843av5xu9D5xu1fzfdqHzjdjX843av5xu1fzjdq/nO7V/Od3r+c7vX853ej853ah853
av5zu1fzrd6/nW71/Ot3r+dbvX863e3863ev51u9fzrd6/ne71/Ot3r+dbvX863ev53u9fzvd6/n
e71/O93r+d7vX873ev53u9fzvd6/ne71/O93r+d7vX873ev53u9fzvd6/ne71/O93r+d7vX883ev
55u9fzzd6/ne71/PN3r+ebvX883ev55u9fzzd6/nm71/PN3r+ebvX883iv55vFfzzeK/nm8V/PN4
r+ebxX883iv55vFfz3eK/nm8V/Pd4r+e7xX893iv57vFfz3eK/nu8V/Pd4r+e7xX893iv57vFfz3
eK/nu8V/Pt4r+e7xX8+3iv57vFfz7eK/n28V/Pt4r+fbxX8+3iv59vFfz7eK/n28V/Pt4r+fbxX8
+3iv59vFfz7eK/n281/P94r+f7xX8/3mv5/vNfz/AHmv5/vNfz/ea/n+8V/P95r+f7zX8/3mv5/v
Nfz/AHmv5/vNfz/ea/8A6BvNf/0Dea/n+81//QN5r/8AoG81/wD0Dea//oG81/8A0Dea/wD6BvNf
/wBA3mv/AOgbzX/9A3mv/wCgbzX/APQN5r/+gbzX/wDQN5r/APoO81//AEDea/8A6DvNbZ813XMf
ef8A2PW60NBp7c9LWA05WsKHW1DlQ0Gl65UNLcq9yOY56davpYUNQNCedDhvzFXFXocfveuVDS2t
hwjX2066X58F+DloNLcZ05cJ568vS9qPPX24RpeuVWrnV69jfUmhR0vr14bV76dKvVtLVbQ0LXr2
09tB1o2q3BY68tPfS+vtqdHF1y0sZRY6A0jcsKWxRu5eO+h0voLeh76ctOdc6B9C3ENb1y4bVajX
Ph5a2tx9Kvpy4OVc9fejpyoda9uVHUHW1Xo69eK1AanS9e1qPXT24rVbQaCvYdOI+r7a24OmorrV
tDwDnXSraW0617cV9BRq/LS9HhNzoa569KvXPU0etdDR/u/Yf9u8/wDsfQ2oAai+nKuWg4Lc+te1
zfTlbpXvyrnr1q+o5C9A6XOlq6aWvVuD2tQFW9McA4eVHS2vTgFcq9gOO2vWrUOHlw9Kvz4ueluC
3o8tfcjQa3NctL0Omoo8PWr1bW2vt7c9b0BVvTOttPehR9PqM2M2mU3I1Q2MT2bEk7lvqK96OnSv
bQcHPj97620PGOCx05aX4Oeh0vwc/Q6ele2hoVz1A058N6GltLVztx31tR5Vzr3o6iupNddba+xq
9Wr2tXQ6HpQo1bT358d+H3twX4QKOnXQGrDTlV65VahVzreudXvraueo4LV00twDQ6GuZq1EUKNq
N9BrY10oir8Q/unYf9u8/wDr+rbgBq1e9c6F9OV7Wqxr3tV9DQFWtp7HQdD0FXvXvzoUb3vwCjrf
l7acqFWFHTnQ1HoWoVyodNDr19G+ornXPXnqOPnqfoeprrwe458VtRwCvbhPS9e+prlYcXvw2q2t
qtqeDnwc+AcFtOdW0traspLrkLYsNL0rWKHngykUDccVr1ahr7c66UAaPEKtwWrpwCuelq5cVqOn
PTrQ58B1N6N6F7V0rrr119+p5+p7cNuHnVq97ihfTppz4Dryrnp10tXPgOtqvXPgGnPTpw9eDnXv
oevHfQ31PDz4zRGgvQ6Hp7AaW4Odxc0b6W0Ao1786tVuVcxR58PKrakm19BXWrW4OfDz/uzYf9u8
/wDr+qNDpa+nuK66dNOegFc9bURoa9gfT97VYa3ocFuHpXvbTlXL0LaWq2l9RwjS1EcNuDqeLlQ6
Wq2nPT31HIniHBy9C3qW4etD0ve2vtQ1A0A0twHQ8uI9PaunAatfgNddLc6FWGsi3XLjIMgsToDS
NWLJZ4G7k15+hYaW4+WttBp14rX0Glr68+HnQ0Oo5cIq2nQmjoa6USL1y4La+9qtr7aW4OfBcae4
GttOfD00HBeufFbS3PUjlxGvc206el7ctbehbW1chrbgvYacrnXnpzq2t+evSraC3Aepq1A8z1vX
UWB1FtLacq6aHQ6ijyoc/wC7th/27z/6/rAactBa/I1auelhVuXXhsARp7V1FGiDYXr3q9WOnter
VY6DrV9LVbX35ejy0toatyrnreh14BV9OnFbS9X4OnFfjuOO2h4OYo1zsOM6e1HTrVtBxivbpVtO
d9RqK5Vz9I6GrV04LaDgvQJrnpfhFhRFXq1dOAVbU2IzIriZbUasTQiegvbSSWMO4TIMbIaUGudC
9W4baEejbgHP0LcNqtXQ2150a9tLVY8FzRtpz4PcjgIFXo89ffUDT29jQo6HnwdNL8fPgtQ19+Dl
wW0Op4rUKPOhxc9BR1tVqI4Drz4B06DhFuG3LU8uC3K1daNr68uC2nvVtLaWo6HlRFWoDlXOrG5F
A89BQ620voatQ152o6jg5f1q/wDSth/27z/7Ho21Fe/tpzo2q1qFA6CjpfW2nvzq1XoUQbVer8Ao
9atfiNc+DlRGt9bWr2Nc7aCrjhvxCuXENRrbiGhNDpbW1W0vw20twe/BfS40HANBp76dNbac+AcN
uHpXPS2nOvfX34bV7+x1txW1GnTW2tuAjTlbhnS65MX3CFBQUU4Ao0DSGsCSgeXBbW3AeDpQ4gOd
dKHKhe9DX29G3Bz4vblwG1W05cB662168PSutdNbcJ1tqNeenKhqdeVdDXvpbnwHj51erV00NW0F
cuK2ttPauXB7cF9Rpfitpfg9zrfQmrVeutWo62oVzNWq9X0NHh5jQaEUKPOulWFWGlxXPS1cq9qt
wWrrr7/3HsP+3ef/AGPSNChfX21534b2q3OhXsNL6DQadaFe99Odc6Fc65VblblwDT20tztVtRQ0
NW166HhtqOH2oaX59Tyoae3DarWq9/SHHzrroRw8r24PbnVqtVtDwiuuh1FW0vXvV6FEcfPXnrau
lda6cJr2r3tqOLroaHBz0vwnU0K51Kt0yUs5oG1SdW63pL1hsQ8Zulc6Ghq2vvxGufo2rpVjxC+l
q6aHU6XtoK97HW+lq966cVq56HT3rlVtPa3AaPTTrXOhryoae/OuuvLTnfhOvWrcqA5V19Dpp76X
4Tr78PPXnXWvbroOWltevpcuI1ajy061yNe9ctOWl9OXB1FWtraumlq9zV+R0vxdNOVDS1dNPb26
cJ/rd6v/AEzYf9u8/wDr+j0156HoKNXtXXT306UeG9crV0rnV6PS1WtVqHBzo6e/WrVbXpXKvehp
erVblyrrQvoKtr76W5+9uC+ljQ1traumvvpflpbQDXnVqB15cFtLamjoaFWo6e/XQ24Bz47cI6eh
erDT3tVvR5cdtOle3oGhaudW05aWr3r2qxrlwW4rcBrNjNiDdYyaawpwtwaDWqGQB8Z+5bVb6E6E
a2q1c9Drcae/DbTrodeenvVuDroNOgtpbgOhr20A0tXTS1Wvra9Cjeuug5aDXnVq6VbS+nL0r6W4
Out9eldKvpYVyo11rlR4fa5q1W1PKjQq2g+o61bUVyrlp7i1GhfW+lzXWvbnpY368B68tbHQ1evf
rR5UNbV0q9Wo/wBgn+nbD/t3n/1/T5aCrWq1W52rpoa6a9NOWt+VuQBrppytV6Ne3Hflz066Wrno
KNWq1WtxdNTxew6cjVtRpavbS3AOnrWGgFW1GluAVajwWFWq2lzqbV724iNBwctRrz05Vyrprz16
VaiKPo+/Or8IFWq3DfU6cqtrajVtbaX4BVuK1ZCAo6f5WNNe5W45A2vSJasGYUDejpytpa/CK9+K
1W4behajXOj1q2nIUdfb261y05620vp7UeC1Wo8PSutc+G1D0radKPBbi9+ADQUdLHW1Wq1e1DW2
ltBoaOtqtrY6HgOnTQ8Pvp7cVuG1c7cXOia61bTnXWvY6EVaulcq5UNLcuWljXTQ3rnRGntauunT
Q86tejoDRtVtPY1ejb+tGjoWFBx/S9h/27z/AOvry4b11N7air6c6uLc76DQUOnWulA86vyq+l9O
dAUbHg9xoODnQ166e2nKr0NOdc6vVufD1NuC2nTS2t65111vxCuel+Drr00OntaiNb1fhtwDh9rc
Hv710q+liNSODnXv10N6F+G1Hrp789L8FtLWrr6VqPXgtwijofQtR0617aW4ZgSky2kteioqSu7t
KMDXabQMVkgfuTitQHFz4rfRc+Dp6FuC2g05Va2h0Ovvx31OttDwW1vr7Vzq3CNLUK9rnU6e50tq
a51bS1e3Faj6HK5t6R4eXHy4banh50NOdE1evauml65a2o2rkKIvXWrChRtfU63q9XoGjbToLaHQ
Vz0PXU2099T/AFMkCpJgKaY3jkJZCSP6TsP+3ef/AF/Q5aDgA0voDXShy0Gl66eh1r31tp7V0q+h
05CvaxoDl7869uGw0vrar68qtVq68Q9O3KgNRry4/a2oHAdLcXOgNLWGluO2gB4OevXUV7dKtQo0
OC+nWhz06n20PB768q5a+/Bbi68Ptpb0LaDlRq2tuBxdcpT3WsHJYstlc3KOVKS3COt8Rrg2ocQ6
1bW2h4veud9LUa9tLcXtwW4L0NRp10OtqtwctTp0168ZoVYaW5AVa9Grcfvz0tR4TR0tpbnar620
tpar878Vqty19tPbj9zr78dvQtoOWptwcuEcNhwW58qI0sa5Cjreuuho3oX0HU9atVqtfg6nmdPf
Q6cqNewrrwHn/VCbVNJanck3vUEZJUWH9J2H/bvP/r8Q0PLTlVqArlwe1dasK99PcVfU1fQdeWlq
tXPX3tVtDVqtwGutGh0066ewFWo0KtaiKFWrnwjlRFAUdBXKuXB7G2vLXpqNQODnXKr6WojTnx2q
1e+hvoOL3PPT205aW1A19udGhVuXOra3o6WrlVvQFGuVW4Lejavf2oadRYUOC3p21t6OVDcS9Qlq
nlomgecfMDlWLkMhiYOtWq2lqt6HKjxW9HlpbisKtoOtHS3ALXr25UdRwe3ByrloaHKrV70dOVGu
mlr17e3vVvVtr7aW4veveiNba34LVahR0tpY104+etqtVuC2lra3vR4DyGnPTpxjW2h9S2lqOt+d
Wvra9Wrnrbi9jQFW524BVqvXWrGraDpXKrf1CWQCpHvXUxxXMcYUammcKFmU/wBF2H/bvH/r8duR
FC+vt10FWrrXShQrnQPKudX5Dn6HO1CulDg563okUa9gOVW4Ra3ENedX1t6NuG2t9ffg6ajrpfUV
z4edW1NdavztVuI6DQURVhVraddDwW050L69eE2q1AaA8FuettOdW1PPg9tOlGhXLW3pdfXtQ45w
Skq2eZyqm5NjQFqjPIygVB5GfGAEfp2q1W9GWTxpDuSySAgj0hr01v6XtXtr70dLV7cHtz051ar6
jkbaHS+hr25VbhNDQ8q51bT39UUTwcq9raW9W1H0uvB006a251bi58HtwDg5cXLXnpz1J09tOtGu
lc+C1qvXuNOeh6dK5Var6+wr2151f0Lf0omncASzXosTUYuYUsOCRwoklJMTEshuP6HsP+3ef/X4
rVbX37a51yoctL2rrVtOgGvLUdbcA056nS9e/XWwr25aW0txCuVWq1W15Vb07VbW3B7+/KrVb6S1
XrrxA8q6a3r36cVtbaW47UaGnLUdOO3p20PFbg9r8XKjw24LcAvpy15a245V7lnis0gBorepOVdw
oNSvakeY1gSuR1q3Bbgtw24/aRQy7ihilxtymibFyhMulq6eryrlxWo6215a8uC3EdLV78A5URz0
tqavwjQ8NtOVtb8q9vSvrfX3saOh199Tw215cFqOptoPQI47nS1cvRsOKwrnRq3BfX2rnbT3Nda9
6tRrlbkatpYUeVe+gFWvXTTlR/qbGppOZNyBUEdyosNXcATS3om9Q/qTp/Q9h/27z/6/CK66WGlq
9hejoKAq3L209+Ve5rrw8tLVa/D1oV1o0ele3DyoaHgtVuDlr04L34udzQo1ahwcuC9W068V+G1X
4LcFhrarGhVtOXpW1HGNLcAtw24ev0HvQ9HlrbUaWr34bac/StxZpAplvU2QiBnLFVocyi0itWGC
GXpoa5a2r34LcZrK3NoHgnE0e7kmQKLbWzKy8xxD1LcYo8ft6prlqdDxjS3D1152NW562q1X0vpa
uWo4rUdL6e+nPjt6J5UfV9rehbh5Vavb0DVuVHXnXWrWOnvQ06adKNc697V0r3Ntb1yo2q5OgFdK
FrG1DQ1evc6Wo2o/0q9E1LJydrkC9RREmOMKNWYCpZb0zX0i/UnT+h7D/t3n/wBfiFq9rc9LV1NG
1GrGwArnXSloae9Wq1WvVuVHQa2151ahpbg5VY0a9q5W0tQ5aW0tVtDYV7c/R617aAaihrahrbjt
qRQ1tpejw209tba8q61ara2156WvQ0vpzq3B0q2vXT2toNLVarchRGlqtXvwWq1HhFO6IBnYpKur
jj5cNvo+noWvWZHWVPYhGcmPtq1AqtfkEETTscWaeNsbLSQajX3twjSx0662reY+W1ZRCbpJ3TBi
KjmZG2+ZnTg66HQ0eM6c+O1tRXLQ68jR9e2vOhwG1AV00sK6egeD24RVtOVGx4RXuatVq56ddBqa
FHSwo8Nqvxc9BQHAaHoG/BbQae/voBpzq44varaDW9X1tp1PtauulzwnTlQ0tahzq3ARVrUbejfQ
W/opNE1encASNegt6igJpIwtW1ZrCWUmma+uOtyo5f0PYf8AbvP/AK/AOp6gV10Fc6Ioa9DahfgF
66Vzq9GiDa3BbnzGvP0RbX34efBz0FWrlxWPCOEmr6dNSPTGo1HFfW3DauZoDgtQ66i+oq2tqtVt
bac9efCOD20toNbcPTQaWq9TTxwLl745MuRNKwdlO37i6mGRZVq3oX4OnFbh9/Wy1BTIiRXBJDG5
ZgK6mNLkMqAMzmBnifGl709uHnwW4LcO7R90cE3iqeXyuKVQa2ueNQOYoa2tQo/S+9GuenLg99Dp
arUa5nQjlbitQGltBVzfgtRHCdbUBwdKsLdeDlr106VYaj0Pci/DauevOrcFqNW4bcAt9AeG2h4r
VbS16tXWvextVuQ689bVahqOdE6HqKteiOVW197WrlV9Otc6NAUNLC1e39FNE1eiaZwA73NiTDBe
goXgY2E0t6drnXFH9F2H/bvP/r68tOVChVq61awoWNewq1E0KAoXoUABXv76e2lq6V7ULa9NL6DT
lXLTnwDrpajreuevvXtQ1tparaW0voNL0K96Glq56W4rGra3r206UOegGlqtw24rVbS3FzrlwGvb
S3BajwjpoNOte3pHgtplZcWMubmyZLk6qxU7XmXAsR6d78PXg99RpbTnVuG2h0nUFMof5JFCqzG9
KKU0kRakTtoC9YL8fL0LcO4LeOUWeloG1bZ3M6X7dbaW1tqb/RHh9rCuup051bTnp7amvbQjgtqO
VdDQrpxGraHQ1bht6Htp109tBw2GttOvFbQVbTprbg56nppbS/DblrbU89OdGra9aOntp70dDV9P
c30FtBbXlVqtp00FHnp0oVarG3PU8heveiLCulctDry/oRNFqJ5kmiQKdr11MMJJAtwE2qWSpG4F
UscePtX+ibD/ALd4/wDX4bV0oc6971fS3PpQrnoL2oCr8r17cAoenbWx4b62oDlehp78uHnp7Djt
XPQCrUOWlqOo0Go4RwDloNbehbUae9Wq1Wo0OC2g6VbT2tVqtpy9DqOnpc9BVuMkAZu8iInd8wtH
vmQtS73mOJJXckk8OFL45cWQSR2q1e1cuO2vOh9NyqcHsmT/AC5j0aAoV3Go5pBSTOQj3rD5MOml
jp14LcVuDKQNHmR9ktLps6CwFuAfTdatwW58Y1tR05a+2vt7VzGg0vp72o1bnw30Oh4LaW52r2IO
o0tRo6e1qFHUHS9Dl650A47UOK1W051z0trbQ8HKrcXuatR056c9OWnOjVuLloaHTQ8B56mr0Odd
a5WNWq9AXrl6dq6/UXAouoozIKV1bQsKLUToSKd6JvUMdyosNWa1SSU7UxudYI7lRYf0TYf9u8f+
vrbQaCve50FdK97XrpoDqOmnLX3rpXLgItVtSK6ajrVqHDzt0GgrlRoetYaX0F656dAOfBy0Gnvw
21OgGgudOtCvfgtw209raWq1DkKtz05aXq9Dh9/eumlq9/fjtb0efC7Ki7jukshZiTfTuo865cIJ
B2nLuLggDiHpW4PbUDiPXT20tTC65sRSslrvQ0AqxpD2mOVTUEhV4JA68PPQcFteuvs4uu6w9rkc
xSi52mOycHtparcNh6Fqtp78PtpbXlwcraW05UdTQ5Vfma5Cute2gq1Wo630toeent7cHtbS2luA
0atr1166ex6URXKr6e9e3tx9eD3GhtoKtrfgNcq99fbW3pXr24bV7A1a9dNenBajRuK6jT2tXPQa
8uE0RfU0ehoDS2ttOvoX+mkkC08jGixovUeR2H8skiTuHdRNBqd6Y3pFuYkAXT2ZrU7mma9PwRIW
MUYUf0XYf9u8f+vp71y09q9zXWudc6FA0eVDnQAtY17DrVudCj1F6GvShbgI1NW4OdWq1WrlXKrC
ueltLVbS3BbX20Fc9ParV7V19AcA51ahRq3BbhFWtVq99eegoa2q2nTTrR6+56Wo6W15Vy1Go1t6
FqtoNfex9LddwADMSb6Xq/oQStG+BL5IqFWrnr01PFbW+ntVuHrVtTXLW2u4ITHN+ugjmrMK7qV2
pZJqR2NI5rDnsVbuGlqtoNLcB1trat0xw6MtmtUSlmw18cJ3Qo+PmPkHXpp0q+nP0Rbg5a++luMa
e2tqPFYURRr25W6HT3GpGorrRrnqbHTlR61yq2nTUD0jbX20tr769NByrlr04eVWvryOluLmeG1W
58qtXtpy471fgtxWq2g4Pbi5CutGjoaNe/uRwctORq+lq5aXo2GlhbTnpYCugP1bsAHkLMxotRN9
FPNTy7quKLUW5XrHShoTTOKdqY0afS1JGWMMIX+j7D/t3j/1/bpQFHW1tRz06cHM0L6DoaFdKvbU
cN6FDgGhq2ljx9CNLDh5eoPR5X0NdK60NLen7ajW1Hrryr34LVbS2h0HSuvogcfTXlrblQ56ctbV
avbXnwZ2SkMWRJ3yE8IHobRmG6kEa8tbamrcHP0/fh6cWYvekmLh45bPC1+45N0zlemwo5lkjkhY
SyUstCRRSSSXi3SaMQZSTDX39MUa3FJmikDq16hmETLuGVkCDaZnqKFIltxHrXKuvoe1D0Rw+2vL
S2tqFtPb2Fe9Wo9etW1tVtbaEUOh0Ohtry4LCuQr36+lareryqw1vV/TIroBwe9X1NXNe3CfQ6eh
76Wo3q2lxR4DVqsKOltelcqOovVtbaXFGidLHXnRq16tcX0vwWOt666X+hLKKbIQU2VUmQzAGnYn
gHVaBq9E0xpTcwD7avTvai1yxNEURyYV20kZYwwhR/R9h/27wf8A7ccHKr1zr2oUQatQ0sKtz9zX
Sjah6FqtahV9OtctLVa1WoX1sDVtBpbitpbUCgKtp71bQ8FtTQ6ajlpblQ0HOutWq1DXroBoOIel
0q2l+C2h0GlqtQo0B6fMcHLXrw9K58A6W1tTMFXcXeaR1INtAt6ItpyC0or7aJFjVtInZGxt0ljE
G6RSUksbji56irevauXoTThBn7r207s7a4eVLEzquSs0JjaNXkbxwY6x56k5GRCw22TmpuKPpc+H
La0c9ixVb4aY3kxnxAl1rvTjtqKOoo+oeP2FuH24D0o30FW4BpaiPTvwGgKFvWtodL6HS1DXrRHO
3FagPROlqtqL0NfcD1verUdbjS1dK66dKvfgFGutWq+nKhwc651eh16+hauutuC/Dy4r24Pb1rij
IgpslAGyWNNITV6Y0OdE2ongHVaAo0TTNSH7o3VUfKFCUsGYmga9yQKL1a9LEWMcQUf0jYf9u8f+
uOVdOAVbS1W0I060OnsBVqtp7a24etDS3Kr6nS+lqtwddBXPg56jhtVuR0tXPQaCrVbg5UKtVqNW
0FWFhVq66ctBwDg5Vy0FWtrYgaD0bcNtQK96A0AGltOut9R69joBz0yFZky3THZ5GkY0BzuBTEGr
UaXmUiTsdLEITXgNNFyKmgKViKSWosqRKi3R1qLc4WpJo3HXgtwWtw+/oCrcTMAMnLVRm7k8tWvX
LUWvC8YIlCD8qF2aOeKOZZQ6BnMOLyxkSJoXDLpbht6OcwCSEE9oNBeWKhqFDaJOXqj17aXo8fPT
29D3PoW0tp769asK5aW0N721NCvc9KPKhoNRe9WHDaumltbG9Wr2q3CNOel65a34unANPc104eVd
OL2odPe3F14DwX4OWpFqNDlraumgo9dLVzHFbTrXtyq5tXvYVbT31tVuL31tRIFGVBRyUo5Rpshz
RlY0WNXq+hNdT0DXJ7aOqilFAcj0c6A2rytXdeoybVyFM9GSu+9RIWKIFH9J2L/bvA/+3tajoKAq
51tYDrXTTlXWhegaGnPTnV9RpyoaWA1B4OdW1sNbacqtz9IejarVzPDz0tVgNLVbW2ltPe1WoVYV
y9A8XOr0KtQvrz0tVtPb3vzq2ltL0OIeiOProNBbiGhAIzdq89NsWXcbDlmv2R41yIUR1VqEdw6W
I6xlu0hiUtRgJDRspKA00dFDViKDEUspFLKKSdhUW4zJUW6rSZ0D0JEbhF/THoSSpErb2gkyNyjZ
MrMecgUTqEJo0ORaQkRyNG+Hnx5C5u2gmJDHSmgagmKGNw44LegKOm5zAB2uVuahxZpagxJVqKBh
SqAOE8FtLfUW158FuDlqeO169/bQcQ4D11I5aXo6HQ0K9uACjQ0tztz4DVuXTW1tLcJ1tpyoamra
C+h6cqPoW4eVEcY9Llw2ojT39zahqKNCuutqty56e3Kra+xsdAdDXKj16cHLWwt0NtRQ0LAU08a0
2UKbJc0ZXNdxq9Xq+t9L1e9AVerCmFHVBQoc6enPOraCk5Ami1M2iC5x0sP6VsX+3eP/AF6tXU2G
gF6GnM0NL1zFHpeidb8B69aHShVjarX1tbW9X9G3BYcRrlV9OfAdTry0tXOhVtBRoW0GlqsKtVqG
vLUUKtVqHDbW1W4PbgOp06aX0toetqFDg9uenv1IFvWHHavYacq6aSwJMBtG33O1bfSYGGiybRhy
GT49E1J8da+TtLxK2PkIcMTSmbCkUPCwooRVqZL0yEG1AmlYilkNhJQkpMh1pNwnSot1BqPKhkpp
I0EeRDJpb1bUdJG7V3PPkaVpCTcmrAVfVEq1MgANCrUrMhwtwDVLjq4BZSGpWrFnIKt3D1ZnCJnz
97k1GOe2JZPQ5W9fl6Z9C/HfQ6chXua9/Q9q5XrpoRVtB05aGvbXlXLQ1bQivbUUBp76HTloeHnV
9DwXtpz09/euduC1HTkK58N65a+2vPj5Vy16Vfh5aX4QOLlVqvr70Opr2Ne1q6V7mvY1yt7exo0K
vpejXsK9zV666MwFGdFpskU2Q1jIxomu6r+jauVXFE0Dzo03XRF5WoU1N11XqDRo03Uc6x4iSosP
6VsX+3eP/Wq3IV0oC+g5aHS16Fq61YWNctRoBVufvahoa9q9hpbnYVbW9qHKuetqtXPhPLitVqtV
qtpy0trbTn6J06VY8I6VarcFjXLW1Wq9CvfnQrrQGlrVyrr9CAeG2t9ANOtCveraXq1DgPH0rlVv
R5cNqMaNSoiUyg1NhRuJ8B1qTGKkoRRUGmjFFLUQauaDUHoS15OYlpZmWpcuaQY8jRiDPApMuF6B
U6347a9atU8iBdzOIp4ACaSEiiCKLBaLFjwcwcHcSheOOZSChU0jEHFn7teVctbVaufDuWR2LK3c
1Qr92Ato76+/LjHFy1vr7DiOnPg9uLl6FqI066W4eenKhoeWh4fY1bX30FdRXKuWnKuvD115a+3q
Hh5Vy09r6nTnw8tLaGvax9Pn6F+LlVtOtW4enEaHBbWxo68tDqRara8uDnpfnozqofJUU+QzUXJo
miRRNXo8d6vV6Jruq5oUTyUaHoRogN0ZLfYaCg06C0gUG+qdatRWvGTUWMSY4wg/pew/7d4P/wBs
OdWvVqFW53FcqJNCidDahR0AtXOw4LVyrkNOWgoaEVcaCr1721FrcXOw0t6VtBryoiuenOumtuD2
q3ANRoKtryFHW3DYcYq1WrlVvRNe1Wq1H0OWg0PKufoGrV10txHhGnvpyq3oW0sCJcVHE+Eyl4Tc
rTIKaOilqK20vXdzD0W5KbsrWCvSzBaizZEqDLDgWPFbg66TThK3DcwgZ2kbpqFJqNO2u+pJLEkk
gVbW+uHnNCR4shGRoypqGTtaCTvXhtXvVtZG7U3Gfvdjcjrir3SYy9sdW0tx2q1W0t6NuA6ni6ev
7cXvry199BV9SNb8VuHnpa2nPg5Xq3LXoNPfgv6169zrarcHLgtoBzsBR09r63o6Hpry4L1yq9e9
HS2vSr8R1Nr8F66a9KFG96tXTUc65UeE1autWqSRUD5JIaQmr13UTXdROp0vV6vV+G2gtVgK610q
+jVagQAXJq5ruai7cAF6ijJIgalgoQLQhQUAB/Tdh/27zb92oXrnXtarCrChQFAUatavawNEVyr3
rqB0HOulHlp7+9Cga9vfS1C2g0tr7Wo+hy1OltRbT29G3AdLUdD0q2ltL68/Rtpy061ajzq1CgKt
pahR6+hbS3oWrprbgFH0LcQ4hqdba24uulhoVDVPhq9TYhQtHRjvTRUyWoqaItrek5lXNlY0OdKT
UbFKj3QJUO4RyUsiNxWq1dKyMjtGfubGjdieWqqWoACgSKeSutW1tVtBoRWLlvA0UsWTG6NGfKgG
NuWPHUOXBMKtpbg56WrccjxpPJ3NSDntqd0qCycNuAeny099PfTl6NtLa+9tPercXP6AcZ0HD1r2
1HXlQPLW2nL0TXI1bnpz05HgHBahRqx05cQHonj5a30NtDy0tVqIFWq3AKtqdOlcq6Vzo6dNTz4b
V7X5100tpy1toNDfS2gFSSdokk7z3WruFX4DVxV6vV/TuKvV6FCjoTaiSeMVjQhmVFUWH9NHBsX+
3d//AF+g7uVdKvzJGh687DkCRrzrpXWhXOhpz1vrer0OtxXtQ0FdKGttOVXHCNOulqHB76e3Xh66
cq6aWtw34OX0QGlqteraWoaCgKtVq5V717c9La24Lamrely09tRR4bchx2rloKtxe9jXty4L8A09
2iVxNhBqkgZS0dNHTpTRm5TmRarUi2F7UpNA8jIVDSM1J1EvaMXJkDQ5MbL3LYMpOpsBkZAVdw3N
pDYmidUQtVgo5CnkvQHHbS+hFY+Q8DxZsEySRjuIhao/JA2DleVPQdgq7jkdzuxJFKa2iMni9/Q6
fRCr6e+nSjqeC1DiOnvxX9C+luVDh6V10vRoc9fe9e9EctDQ4LVfn78dtOenKjofRuPQ99L63Gg4
baXoVejwHW3Fyonhvp70db6daGhq9Xo1cV0r25aXN6vrcgDpbUmude8j9qtISbinNBr1er1er630
vRNdeK9FqudRQ0NXsCb+gi3MEfav9S2L/bu//rDTnRPMUDV70NLVblajQ0FEVahx20FdKFcqtVq9
wNBfgtwWq2vXXnoKsaHDbh6UNLcPPS2nTTnpy0tqK60eEaCvfpwCrVarV21bQ8NuC3D78dqtzq2h
oVfW3Hblry4xXuNbcN+P34Ro8CuJsS1PERRjJJip4hTx80iuSlqNLyruNXpibRmwEq90UgYNOqBp
2cwzyLUO5OpizInAYMMvJWJM/cXnYC9E6pGTQ5VcAO/cQNL0OIjW9KpY40IiX9dEKrA3rGfxvE/e
tdOLPnEcc8ncxoCl51tMdk0traulc/Stx2q1WOh1NW9G/wBF7nQ1eraWoa20tpbW4q9G516Ghp78
9Dy0NqPPXnry1vXvwDn6HPj9udjr7elbXpV66635178FtevEeAnj669BXvwGulGjp14BqdDRo10q
/MG9MbVM1w5sWYmu69dCDfW9E1fgvwXq/DbW9CmIFG5q1uK1KKgiJIH9T2L/AG7vz3b2Fe9r10II
A9wa517UK97cHtV+P36UOdGvYcHtyPBahw2qxrnQ19tbcVtLXq2lra8uC2o58HO9dKt6HXW4rrQ0
Aq1AUByAoDU8A0NvStpbhtx2Fc6PIaDQegOO3B04Pfhtxe2g0IBqbGVhLjMtFLFkp4qWDn46kjsb
CgtEUzElmsqm5EpVbsxWltZSoozqtY2ZI53TJd2AvXSjoiVembtDMWNtL1agOMjSxpWKmDKBGTmF
62jJxwMnEDC5U4md2mOQSDhJsN1yLl2uaFQDufBj7YvRtxe9q9uDrwdeC1DiI47a24OVWr2q3FbQ
aW4TwAVbS3LTlXSvfQa29K+nvaw056W1NX0PByrnr04DVqty9LnfgtXLQajT39QaHgPFz09utW0t
Xvr7a9a6i2ljVtOtGr6cquakYKJS12NG1NXUA2q9Xq9Xq/Der6Xq9Xq/DahYUWrrRNqverVbUCgh
YxYppECj+p7F/t3j/wBbpVq9qFe1hoNRQ6W16Ua9uuoOgFW1satXuKA5+9qFcteWnOrcHta2nsOC
2l65a2q2ooirW4xp7GrUb8FtBwHhFW4RVqA09ulCjw2oVarVaumpJ4hparaDSw4euttemg61blar
a34udCgOL2FC+ltOp4RXvxFA1TYgNSY7JTR0Erspog1SY5BbuFObKOZkPOIc2PNeeneBRmVRJMxO
3JePcP8AeNY4wKPOmISiSxA1A9JIi5ECBZEKkVagSpwN1ZTNHFOjxmNsTMaMw5CSi1W1ypRHHmTd
7mhpt0ffNCoWOhwW4PercFuE6W4beia9uH34/bj99Dwcq9tefHblRAq3FbTnp769dAOCxq1Woc/Q
toKN6txW16enz4eWlq99bej0q+h4bcJHOrV04TVq5aEUa5V005aEWFr6W0satzq1CrVarCrC0zWM
o5U1Gr6Xq9Xq+t/RtVqtXKriu6udCia5mu21AVY0I2akxnNJh2pYkX+rbF/t3flu9Dpzq3IVzq1W
rnwWq969tPYCjpara20tXPXnV65aAUete1craW4raW4bcFtLXocAFW0tqNLae9tbcJAoaW9AaChQ
4QOC1Wq3Dz0tVtRVqtparUOCwvwW4OWgHENLcFtLUaFW56e1vU9+DnxNGr1NhCxgdaMVdlqmXk4N
5ATXYQCvNeQvSUXpQWLC9PjPbbR/j3RCs9AElVVaDU72rmSBoRVvSixy9dqxiSa1MxY0DRrpWHnN
EZJoZ07fHWNlMjY2Uso5mrUeVbrlU5udBzO0RXZRYfQHjOltOfB7HX3q2nLW1Wr20tpb1baGramh
pbTlqKtpauutq99Per6nT3GvU2051115W1HWvbTlRo1araWq3oc9L6ctb0b6Wo24baWr21OhHDau
mnP0jXPg6UaNtLacteVG2g0AojQctOWo5acqY2EhotTaN6F+O1WGl6vVzVqtXLS1KhNBbUIXekwj
S40S0I0FWH9Y2L/bvH/rjlRr2JrppajQrrXvXvrfgJtqNOlda9tDXOhXKjpaiKsatVrcBII068PU
DprarcF9BR634bUb6da9jpbUely0twChwjitoOC1c9b8HvXKgKtVqtVqt6Ftbcdteeo5ae+o1v6H
Kx1twdPSeLup4bU8dOlSr2s3itM1iqFq7TXZTMqhbkqe0DJRaOSXXb8vxzblj+eMRuWeExCg1h19
aHHvVwtTT8zdjbiSVkMU6yKS0T4+UQcTOVwKyZOyLNm75CavogudnisnDbW3oda9zVqtw2q1tOen
T0DpbQ6Dhv6J47X4LcHtyogaW9K9cia5X16m2nWreidLURoRxEVYV04uXDbhtytp7cNtRRo8dtTV
tLa24CKtXLS2lqFWq2nSr6Gw0tpyqwq1W5ctTrY10Nda95CAZDR5Fib+xHHY1agBRHo8tOZrtNBK
CVFiM1LjIAIoxQAH9c2L/bu//r8q9uIWq1Guuo06cBq9GjoeC3By0tQ6VzocqOtq56WvQ9C3PS1W
0saGgtYjlahwW9Aa+/HzrrVjperVavahfgFCjwc+E6GiTXWgeK/DbQivc1z4hyq+nTW+vKhVuXPW
/Llr70Dp0oa2q1Wq3qCra24OVNEGqTHNZMVjP9oix2kLRKi+Ik+KwyALoCKdjSqWLt2hjY4m6KsT
GKRp2BprXFW9QAkw4/JnCiWe9W9FWKnHWPJWXGnxjj5JvhZ3cNyktHNcvY1Y0KhF326Psi061z9D
rx2199BVtT009/TtoOO3Kjp7+jauvBYaW5ctfbi9zzq1EDW9jR6jryo86Ontwe/pWvofpTXXS9Xr
rxW05aHW2vLivr72068PWutdNRzq3PT3r26CjarUDR6UbnS1Gr6ctDyEjUw5uRV6NEAUQOFOvKmC
6dxo+iqXo8qAqPHd6jx1Qf1/Yv8Abu//AK1qtVtOmnO9Dg66dNRx206aDgFtegBGltDxWo0NOtCv
bS9ddLVagOK2ooijXveidffXlqK58FqtVuDpQoaDS1WoUODlpbjIvVqFc6F9eeg151Y6mh6/tavY
VbTrVuAVa+gr2+iFW4Rwe2Xjd4mhcyJEI1YFiPtpyzGWMk9hFMDdQFBEIp/GwIsY5iBNKWPqqpYw
wBBJJapZixA4TxY8pjfFaPLiz9ufHfHyGFTZkjrJ1OhrbYDLPCnZHQHBfjt6PPT20Otj6NvQt6R4
zXvXLXrw+5oekBp7209uEXoVYcFrcF6txHS+ltOWl9BwHXpVuEmuugFW9PlR4uXFbQcPPgudbHT2
4jyrlwXr2vwjSVrU5prWPPQ0eVE8Ki1GjR9EA0qcwKjxneo8ZFoCw/sDYv8Abu4P7tyrlQNcqNtD
zoXFe9dCBbTlQ0FdaNdaFcjVqHSvfnVhwW1Glhrbh568tOgr3ocVuEcR6Ghp7aDUa24BQFWq1Wq3
FarajhOnsBVr1bgvpauvCNRpfX31tqdL17ihVvV5X5a8q5aD0Bw358NvSIBEuJ3kbcSXxOxXSzyR
gAi5d1AWMsZUCgC56U47qxsUARR4U4zcF4G9NI2cxQrGJZQBJIzkcBPoAG+25LxyAJkRZGGI5p4u
1ZOtCgL1suIAPStpblVvQPo258Nqt6/L0rDhtz15a24vfU6W5V79a6a9eC/CBqa9jpbiPTpXWumt
tLVbnwGhRq/FajodSeK3DauWvKracqtrbjvR158HKuXFauWvK9tffrXK+h5VK96Y05q3I0wNEVYa
gE0q6G1EURx2NKhqOBmpMMmkgRK5ae39gbF/t3e37tbkK6HnXKuV9RpbQddRQq1dNBXvauel9QdB
0oURQGnO/PTlXPUVbS1W4uWtuehGltBqavRNctBwW0tryq+g1HDy0HBehoCK970DV6IPDy0t6FqN
CudqHoA176e+g9EaDrpbS1Wq2lqtVtLVy4va2lqPKi3ctWq3qW192FxmRhSVeSpR2lUBoKFE63AW
1GowoqfJaQ40scNI8eVFm4Zx30HHHEzlI1QSyBVkkaQ8BPoobMIvt23Lus5E0mWLCX9VCoU7n2+I
Rw+9uHloOn0VuWvtqRr04z6duO3oDh5Vy0tVtTXTjtwe1e9Wo0OluC3Ab251YcVqtwc7eoatXsdb
a8tLcNq68HLQ21trbi5cB4LV14bcPLitpava1WqZu0MTc2o866UaNHUC9KtqNX0JoniVSTHjSMUx
LUqKo/sXYv8Abu4/+2vx20uNLadNBQGpvVtLcFuC2g4Tp1q3ADwda97UOLnf3PAK5UbCr1ej1rnf
S3DYelb0AavwA8F6vV+dDS9X0vxX4eegq/Bbg5+gOCwvrahQq3pAV70NLcI1t9I0atQx4qmwEcyb
f2jxANLECrxgF+RkcsQppYiKx5CtAxzpmYjQPoOGGEuftQSzlaZmc8F/T22VXE0ZhkxOwLmsLv1o
Vt0ffNEvanHarcVqtpb1hwjhPBbhOl/Qtp7eofQtqaPQCve2pr29uL3o1bTlVxwnUW0B056gajS3
p+3oEcF9OdG3AOADgNWrnw8tLcxyo1aiNOtG1cqNqtrfTlV9DRo6HTrTGwmkJNxTEVc0xokVcGu2
9FSKVGalFtGq9Xq/DaoomcwY6oKv/Y+xf7d3/wDVNWq1GhpyrlrbXloBp7aXoc+D31twX5irVaho
evLhGoq2hBsOE+gePlwda51arcIr3F9eeh4vbQcd+VCrjgtoOC3FbnpbgNW1t6Jr2r29C2nt6PP0
bV00t6FvTZQwyISrtH3LkLYyRO7HHKUkNdlqN1MD04WZMvEeBuAc6igJq4USTdlEljwX9SKRopLp
kwQsUbJewY30Uc9miuw9D3q2ltTQq1W+htx2q3o2q3oW4LcfKreoNTw21vpbW1W4OZ0tfgOh0Olj
wc9emltLnjPBc8Ao6dPQ58Hudeul+O2tvQIocFuA11GgAr3tpbSaSys3M11LcqJomjoCbpkOld4e
ulNxKKjhMjftz1DjiJa5UNL/ANi7F/t3cf8A2pq9DQUdLVbnqKsDXsOltBXLg5+r1q3o89RV/Q56
g8NtbcuK1c9Bw24vf1rcNuDnreva2p4h6HtqSfS9/cagcPPW2lq5+j724LcfvoOL31to8QcS4zJU
2OSMfEW08aSTmO1FKK2pWdWje4kRJ48rGaF9YAgLSipJTXX6Pbsko+VGQexJo5YmQik67Mlk1tzt
VqtrY1bhtQ9f24PbnpbU/QW0NW9C1W4jVr10ocHPjPWjraw0PTgtxmuWvtQ1NDhNW4jRq1c6tVuM
DnodTVtSa9tb6e2o0PXQ6c6Ooq2lqsatpbhtreuotR4DfSxpumTJzJovyrsc0YZKKnUoFjoG1K4Y
MLHgghMrJgoojijj0vpbUjQ6+/8AX9i/27v/AOrpajp14OmgHIV72q19PautDX2ocHTS2vPUaW06
a89Bx+9uep4b69dRarVz4rcAHo24R6dtL6X19+AcHtqKtw89PfU6HW3CNOWl+O3BbW1HU8Pvz9UD
jtxkXqZb1nKqJBCoUwmmjNMhuyWpWKMhBqSJMiPKxHgYAkmMRi/PvPDf6AEg4Uy5MDJ4ZZVBEkJU
xDntSdsXBaregatw24LVbW1W4LcHtrbS1W0I4baHjtxW4+WnvajparcQq3FajQ1PENb17aHU6e9u
G3Pjt6XO/ANRqa9uE17dPQ58HQVb0TwWJo9DVtRpavaia71FGeIUcyIU+SzAwTOfw5KXHhFd+Oh/
PaMTbo7q0zsaSJnMiKsZ0sLcOG1nFtLVbU9eX9jbF/t3f/1b6jnXWrVfUjgGptoBVqHCKNe/BfUH
XrrbQcF6vXX6QaX47cVqtQ9Aen0rnofVtwWq3B7an6C3PQ1ajQ4Leh7eoOD29e2sg5Zw+/Fx1aJ4
rU0dNFanj5OoFJKUMclTeCZBiRQ1kjmaH02HknHmnRZ4UJcP3I1lDbZn47ICCOC9W+jPCdTw20H0
BOnuPojXTitqK6cXt7ejfS1+C9ddPbW1Xrrpfh9+A9OfEdb+iat6J4vaulEcd9fbjtp72piq02TE
tflk0Z8g1fJNMj0I4xRfCUHKxADuKLT7k5Bzp2ozzmjJNTTZDAg6xyvGzSJIvBE4WpYLDTCF3GnS
hy0voTpYf2JsX+3d/wD1dRp7dNAKsK6VcWoacqtXTS1W0HThFCjXtR1GgPD7Dj5aXrrpbjvoOunv
xHS/ANBr0Otvoba24bVajQ1t6Nq5erbgtparenb6v29AcPOmF63NO2trmBV41apcYinjYU8DEnFp
saMUrrGWXuosoGVMGahV/ptry6yo+xpualyTDPDANrz3nA0twn1zoeultLVbQ1biHHbj568/RNdK
9+H39A+ly4rULV1Fc9eWtqtaude/vr1rnw+1hXvxmuddNTxWtVteunSr173q1dNLUdffW1W0tXtr
a9W4eunWhV9Ode9tTUs6rTTpX5sC1+bKa7896/EzpKGLNEHXux8PEicDHxkGSqmURqQEAq9GjRo0
RQ5mTGjTH0FEUDaoZihmhVhW3obmrVblbX3OttOn9g7F/t3f/wBXivXtVqtVhqdDqTrbW2lq99Dq
Nemo4ueo4fcGvejVqOnSrcQNWoVz4fbrV6vwDgH03X1hQvwX9a3H7/QXt6VqtVqHpij6B4DWbjmV
PLLh5GDuCTJ1BjVhNi8niZS8dNFSt2VLFHKH2/GaOeB4X+nRijQZSZOPbsaeJlaKytg5ZifHnEq+
nbh5fRW9C3FbgOl/WHTj9+H2o8RGvL1vbW1W4ppViV99VWHyBaxNxjya7/v4Pfh9voTqeLnVtLcV
+C3qmrCsrKVA88krQbfJIEw8aMAAVzNWNSr3LEbNAfFPLyRzd711FE0dTS/qmA/EPWgbULGmWgSK
il7TLGC2HGFiq9X5Ueulufv/AGJsX+3dv/Vo9BXSvfnxDUaGhQ0GgNX5+gRVuYoa24L2r24gNPar
cXWhpz4OfHf1rfQmufBb0r0NBxew4/fg5nX29X3+gtp7/R2ocWTcCWGKSmjlx2wdzDBWVhTwI9TY
pWnjN5YrCNyrIxU5EEeTHPjvC/0+NOYZJbuqt3q6lWxo++tvcqfWPAPVPqn6D31t6Pt6h1vz5620
6+gdPfS1Gr6GrV11NhW7Zt9YJmhkw8lZ4q50a5111PThtwHQVbU1bTpwc7dOD3vXXg99enqe2tq5
Uank7RO7SyYWEsa+2l9DTjtfJHa8z3xybkVflqdR+qTnit1HUrQJFAghloG1RHuEHKOuuvKiLUOA
n+w9i/27vf8AdQOdW1HBbXrr7crcrac9BqOmnXgPPQcFzXWraAcVqGl/WFddRVteeltBVuDrQrlq
BpbT25VbgtpbS1Wq3Dbj9qtwXq2o4L8dqt6Aq1W4/bh9vbW1W4B6dtLVbh9retap07lyCYZI+3IS
eBomwdxKmORZBRUEPhxvWRhsBNEUeKVBQktWTEk8csRjb6fbpO8zwNA0kDPW3wEVAn+ReQ9X24xX
LU0dTr79NTwH0OWvtVqtp7+/v6luHl6fv6pr2r319tb0OWuQ3bFkv3za7PEyppbU6WogcFtLanW/
LUV7kcNtOulqPodNLa8tLaEehe+nK2WxVNuxwz2HDa2k63DASwiS+P7rYkgUdDROo/VYHEf9S9bU
RXMENemWoGs8Yst+C1ctbGxFqtp7cB/rmxf7d2/9W4q1C9W56nnoedAacqtp7DQcFtbaAUNemhoa
cq68PXTnry4La89eXBarUNBpbS3DbUa+/vQGhocFuAcfL6Aae3LityGo4Ry4T6VuG2g4RVtPY62r
3tVuD39McR5aW9FhcbhiBlw8gwTPEs0eRilDh57xNDOkq6WBGXt0U4yduaB42tUjFam7WDCx+mjk
aN4ZI8vHs0cgm8NYB8nFb6D29A8FvQtwHiHpe+o9H34elHS1dKOl6vQPBy1tR4OtWr21PGNNxl7I
WNzrs8zMnLS2ntqatXtQ1Pr8tDpehXShbgI5cqvbTlr7enbV+jxrJBiKFjvodDq63WM2fKUxSUps
SaJ53o0bUdP/AJR88OT9a9aa4rkaIIoMDSsFbGy0eiNDpeuVcq61fnR4L6X19v6zsX+3dv8A1QNL
+nz0twjgHCOde9X4BQ0I4Bpa/F7100twjh56WrpQOo058NvRFEcA4ufo8tRVtPbgOtuEV7ehej0r
3ocI4ufDbUajTnr7W158PPiHHz4Lac+AnhkQMN3xTG207gGE0CyrlYlqxsuXGfGy0mUaSyrEuTMZ
2nkQOSZFmVo2CB1ZSp+mxMx8dps3zNBHLO2CgWP6Q8NteXpe3AdLehb1Dw3o17Uatw9K9uAnjt6J
4vax0NDh3ic93BspHZXsL6X0Gh4LcV9b69OLkNDpeuXB7Xongt6HPU6kcDC4aZY6xclA2ltPbS96
lJV85e+O/NedHkNTr74/PEnFpF60ReiLV1oi2mAFtbTpXtwHrqfRvy/q+xf7d2/9XXpryq1c9eeh
51zt6POhwX4efCPS5cV65UOVDhtQ4LVbgHBbitr7+j7fSW4762q2g19/SPFbhtVvTt9Fb0renatw
xRLG6vjT7bniRJ8dJFysIiopZcWTCz0mWadIknyWmfInIrA218l1wcZUztogZJoWx5ZE7l+lteh2
RrjwNI2F/keGPsT1LanUcdtbenyvp7G/q2+kOh099Lan0vfU8J0PB71005VNIsaZk3mmPBsq2S1G
r69aFdPSI9Y8XtQ9K/ARpy4ud3v25M2SjmZiVZw8Dd0dX06aislORUPEy9rKaPTQ0aOhrCN8XK/3
L11ApgLVgGr8jfT2rpXOvfgtVrf2BsX+3dv/AFdL6CrV1oji60b0DXOhpbgFXvx21GtuIDW2ttR6
NuAaWrpoatoPT58PP0x6I4DVvRGgHFb0OtD6MaHhPXg9uK2nt9Dy9FlBG6bcGqFjDWDnLIskSyDL
wqHlx2GbJLUs3au37c+S8cSRJTL3DddtEi9jRNMlj9GBeuSVFCWpMlAuyByfrx6nPhI9C2tqt6fP
0D0q1W4SKGnOrUOG1EctL6e2nvoeM1u+V2qeBF7m2+ERw6dK5620PBfgNc6GnQDQ+vb0T6Q4crcR
GZspZYoj97KHihI8eprnpapF7liLd+fC0UqmrjQ0aNE6GsA3xsqG5HIg3GrdK28XHSjpbgIq1Ead
atrb+vbF/t3b/wBUV10tQ1IFrcFq56DS1W4rWoaHXnQvXta1W4ufBbX24LaW1tQ5cHLgtz4vbpVt
Rrar6e3H76Dgt9IeC1DiGo9G2lra89RXtoPWHFz19uC3r29C2rKGGZhgCSRkfD3KSAy75gMpaDKW
bFdDhRRNNCIwmsihhu+3UyXUix+hAJrktRQACaYuYm7W2SRmS3q20PqHS2tuC1W4LcB1PX0+enLS
2nXgOtq5cYq/Farc6tQH0t9ZGCJnzGWbg2+HyTovamlteWntrajzrp6l9Rpzq2hPDarctTwiuno2
0eRUVvyMmpseRXTGkcRCER4rBlgmEZBBrpwHTKyliWGQtW4RiTHQc7VeiaJq9E67dc485AZyCVPA
elYTMo7iVtQNe2pNG4rrp717C+hrl9Wf6DsX+3d+e60NOnB7ajXlQ6mr0aGtuY05a++tuAcdtete
3ACDQtVq6fQjrp04L0PXA5aGhQtqNBxW4PbT31tx24jVuO/ojj9/SHF09O30vOpkDR7hD4XjxzOM
HBwi8u3QwLEGnWfHZTh57xGKZJV0tWRCJE3HDMMkyfQhSdFiSFZJWkNJ+vZ3vB6tvVvoeE8HOrcZ
9A8fvqdTz4Lel7cI1PCemh05anU17a20I13PKSKJm7m4Nlh4D0I0tqTz5aW09tLenavb1TVtTqaP
AdbGpZUjEuTMUWDLyRBiZEQGJFUWMkRzMBpqihaCsn7WgyihjmSQdatypnVRLuEUdHJ7zA47ktJH
IDHIGNXomjV+Daz/AItyFpKUWGpo9cCxNuXSuWhFDQ1106cJ06aAf1rYv9u7f+rbQDS2lqGvTTqd
LaWrnwWoVarUKGpq+tuelqtp76CuWoHDaraGh6NqtwW4eelqtXSraWoX0Fe1tLaW9O3oDivxHQc/
VtVqtxCjrbisPRGtuC2tvX9uG3rSt2JNiT7jOMSXFLKCG3CaNduXuiyMYMJ8axx8qTGfGyo510YX
GfjJLHPCY3de0+sFJADOQExg7s7aQAGXbCng9C3q++nXQ8NvQtrbhFHT306/R9eCx4rajXn6J9I8
Bq2jyJGM3eoxUs0krcEa9z4EIihoV70eAV7e1vR969vobVyrpw20vXXjmnjiWXIndIsLImqLCiir
oNDRrKQ2nXvgJZWiy3SsbOSQPPEgyNx5k5M7R7XepvGrKbHFfnucXZIHFd2h4drI7dzHOgbUDfge
sE/dqdLakaE178tT0tVudHp/Wdi/27t/6vByoaW06iuWlqGooVagKIoUNDoNTVqGl+D20Fe1uD24
PfQ686tXTQ3GnLUaWNW0twW4La+/pjjvXKh6g4rUOG3p9eC2gGgq2luX1ltLcHvwjh9z19Aazr3R
4bLBJvWfI74mTcNEGXEzXheKRJVmxlcZOMBUZlgeLdu0NvEdpN4yLRboXl3LF745Vv6ypyRXlZnj
xlZix1woleXCSNYfRtR/ofXS1e1dPQtXXjtVuG3DbiPoHUepcV3oK74670oyRimyYFqfecSIS79O
1T5uRkHi2+IyTovatEcA4udH0TxEaW168PKj6Rq3CbVPkRwrJPLKkW3vLUeHDFXLiFSi6JzoYcc9
TQSQsrFTHIZBHj/5EjSJcrKeVyLBqxZKzIvNj9KvV+LbTZdxYEUBcWIoG9ddHrCNpB0oURXtoQa5
2o0atyGntR/rmxf7d2/9TlqKPoW06cV9Bw+9H07UK9qHF10FHn6J4rel19AcHLW2lvXvxDW1D1Pe
2gGo+p5airejarelb0Trb0uVZW3rOG+POwl2VcSsSLzJkY5FYuZJjvDOkyywJIuZt86tBgZ0xl+P
7w5/i25Gofj2NC0mJG0WdiNBJItj6aqFEUMmQ0syQqSSdQLnAxQ0mKiLF6nP6blxdPQ5fQ24L0ND
w20NdNL+ma9tM/cPxUxt+aR43EiyZEKU25Yoo7rAA27rT7rMafPyXpppWouxoNXea7yanlsCTe/o
bJGC+h9K2vL0zy9S+luDlXLgfJhSv3CC8c0ctGiQKnyY4UaZpo4duDCPFhjq+ntR4eof7JSfHPPj
xZMeVhS47AkHGywGy9yDIj3IaiahcK8FpI8mMxy8ccnix55jJovSiKB0esQ2kU8qFuD3o0TXWueh
06i39d2L/buv/qe3Ba/H7jpztoDqOHlR4Rpar6HQaGh06686HARp76cuO3pircVtLaWq2g4Bwj6G
2o19hXKh6FuVW0travbW2g0t6ItwGrUdR6/Ljt6Rq3r50CSxbfN43nxw6z41qgyHxnxslJlZVahj
opsxoxoaCqtEGt2w/LHNGaIsfQAJoKsYihaYzZAVeAKWO0bMCkkEcmXAoWPW3o24/bi5+n7a24ba
W50eC2vv6Bo8hp7g178Br2tparcqOtvRNb6QACQcfcMgqzkkvVzV70Toa5Crir13WqRrn0dlQCPi
5Uatx2HHyoWrlxjS3oEVy0tU0qxBtxeo88msjKJp5L08pqHJaNhuMZhEwyFx8FCEghj0uOD219zY
U00SVkTwu8w7ocfNhZCI5V3DDTHbRP1e16vY4EtbvACOKNe5p5O46KOWhFA0wvWOCJU/STXUkaex
0sK5VeuuntXTTrR/rexf7d2/9WhVuel+EagV0oajjtoBxc721966a8q9tenoHrXTiFDW3EKFc+G/
BfXnQ15eiOfo24BxHUCuXo34baHhtVvQFW9EfTD0T651kXuUjw5kTB0nx1kXIxStRzSYsmJmJMtW
4HjDru2J45Jkt6ABY3SIQ4/dU+R38KqWOzbHemWNYvCHzY17U+gOp4enAfUtqPQPHy19666kcI0t
z0PFavYa89ffhNb45L0jEHyXAc0DV6JFFuRau+u6u40Wp+voDrtaduOeD216104r6Hjtp7ada5el
7ayyrGmRktM/caiH2zUxNGveOYqcKWKWO+hvRHCa5AZW6pGX3PKanyZ3osxpHKtjOHTITskWaVKe
R5DonW+h51gtZ8ybujIseAAkswQaDr3igb6kV3EV3EHBErCuh0IrlXXTkatXLg9zbT34Bwn+o7F/
t3X/ANUW0HAOPnRo6c9PYaW4PfgtXPX2HPjHF76DS1Wq2hofQDhtw3oUOC/q2rp6g0PFbW1ctBVt
AfUB4OnoW9K/q2+ht6+7REHbpQ0NTQK4ysUilkkxZMLPjnXW1ML1n4yzRZEJjdl7TwohYsyoIoFj
WfIaY8CqWO3IkE8Dq8U4BighkbMQWXrQ9c8NvXHFb0Twe49G1H0COI634banhY2G7Sd89Cg1KaBq
9XFM1E870NXGtqsTXaatrjRmSXHj8cNHXnVvQ9tTw2o68tbegelW4c7Iua9wbLJzDjmRRGm2TFZP
QmnjhXL3F5iefDgSWrc4gDwL1vV+VI5RskkqTfgRCxYhBwq1qBvoRTLSgd2DyBrrob39vYa+1AUa
9+ZFHSwoVb+sbF/t3b/1emhPAOC9dfRFcq68N/Q6a9KHMUNByr3o8Y096Gp66+/BbS1DS1W9C2t+
P2GpHBbU+r7+rbW3pHS1DrpbS1c/Q9qHT0fbW/8ARMyJXi2yRUl5aSxCRcvBNf5MZ8DcUlXrwOtx
u+ERU0dxwJGXrupIo8dZZnlbgjjaRsfBWCN4pe/ac8mspe6HAgkbJAsOG31XK3Bzr2r2+gHSuehH
GfStpbg66ngHFksVjzQTLoDQNd1d1d1Fqvrermu0mo9tyZqi+P5DVBsWJGI8DEip8DEcZ+zvGSjA
rE7tte2+IcF9emhr24bcXPT2tXPS+ltLcNxpyr3vR6U7BVyGDSUo5nmGNNzBFEUawmtOOlDXlplZ
SY6ZOVJO/FiP2vlRibF4B0oHler1jSKaniMT6Ihcs4UcYa1BgdG6L+rCiZVq2t9D0r21IoDX3tQ0
H9Y2L/buv/q6HUac6twcr+41sOK1WFtRp78N+EjS3oi3CNRVtPfgtoOG3qW4hQ0trbgtrarUeO+l
+Ea2q3DfiFW9ICrV2mrVbQ0NbcJHBbg9/wCkugdJI2hzIjePR41cZeCDTxy48m37kHAII0IuMqES
x5mMYZZF7W0jjDUzGQ2jxUd2duCGF5Ww8FMdJGZ3QC1miyJWMmHtUEhm/oNtR9Fbi5cV7aEehbQ+
hbkR6W4z9iTksxFjoDau6u6r13VfWLHllOPskrVDtuLDQAHCedPiY0lJjQR6c9Pera89Odc+E0KP
EdL17nX21sdbaWo8E/b45YmuVNCgWBkRqtTUaNY5tLEbx8HtkZ8EC5GQ878aN2thsHjyo/HPoOC9
Xq5BRlyYmUqyIDTP6QNK1N0X9WIe6GjQ0tzq9GudX51z0N69xevejoDVhVv6tsX+3dv/AFL0envQ
4OfF04BRtoKIocA6aWo8XvwWoVy4rcB06VfQcA9EcY0FcvQGg4Bwj0LaDg6cPv6o0HHaraWoCgKG
h0Iq1W4ff1/fhtxmh6QHrbtA6nbXZ4dWQMMrDDCeF4Hw96SMQZ2NkDSV0irdII545UBoggpCe1Ve
d5GjxVLFjwY2M87YmEkCzyCu5roSpwY4cp8+ILj7PA3d9L7cNvUtxe2lqtpbTlwew9M1z4Bqa68f
XW3HbTckBic3LcKgsRtecVODlrS4WWxh2XMkOPscMdJFFEKvx+/Bbi96Ooo8Fqtrave3D7c6PD04
LaNYB8lrSRT5aw4rKHxIHB2+XvgxI4hmwh1cWJphTUps2DJ3wcG4ZyxIWLH0dumsd3hs2g4Ousbs
j5aoy+qSTpgf6+elxRGnKuldNDpbS19Da9HS1Cjwcv6nsX+3df8A1BqKHH79dLa9eI0deenWvc6W
1tXv6VtbcPLg60OHrqBr14LepbU9dRqeLlwcuA8NvrbUBoKPUjlQq1HX21Gtqt/UNwRnh2qd/JVt
WAYZ2MOzKjJfFz5MN9uzvyo3Yqu4ZUpbFyFddwxwkjQY8YAlzHlnix0JJPBhYL5DY+NDjpPOzPFi
syrgZBo7XOwwduONW6reHZoO0f0g/RDjtwHrwDg5cHvw29A1u8wSG9N04ImCvgTrLBR9T305a9Kt
ccteg4vfT256ChoOutuPrXKhranyPu/z5AhwPHQAUe9ctCoYZUYSQimFMNNpfuiq1WrcNwWEO7O3
pYr9kuW8cuNr7UdQCSFSIMxY+sKwD9mgFqPIactTpz1vXKhQ5UedAE1a1HW1tLf1LYv9u7f+pVuV
q97VzrrQBGnLTrRoUOHlwDiGo4eXojgvwcq5VYV7k2q41GnM0OnuOvqW4rcQtwiuVWq2g16629M+
jy1sNLepahQ0AvSryK2JSrcPtw9PqbaH1rejILoZ2xs2J+9KtrMnem5YzoyYPeuLny4Ym37McSTT
zMheJsjdGnEeEZKyJwi8A51iYZkOPCEWUyyHb9oaMgADXeU7o9nhVIvprcFvQtpbgPqn0/bQ6HQ6
jrrbg9qtR4OWtqI1tW+SaHi23cmxGx8iPIjPDzoagX0PDbQ+h76Hrw8xpaiK9vSB1sanyFioSTM4
2+PyIioL8DMq0ciEU2djLWVNHMxpqYabO5D3r23DcgtMxY+nHyYzEBjc6A1fVVLElIlJJ+h29SI9
Pc8B0Jo31tVqtp1rpxE6H+pbF/t3X/1KBrrQqw19+AaWtrbgHLS2ho0OLp6Y0toRw8tb1yrrwDhF
CrcPvrbgtXvrajparVbgFChoatrb0LVarcXLXnpfQaDg9+WtuG2ttAKtyFLzplFiL0enXjHq9at9
Lb1xwbsFR9vyBNFwTTwwLuW7xSqMyZKjypZSuNlyCb8uNvHkztFhY2MJpWlEkTUV0tQRmOJt7OY4
Fgjwo4syooIoRw72isNqjjSD+uW4zoaPP0vbhtqeHloeQ3qS85o8I6mtmMaw/QX0trbQX1tp1q2t
qNq6V76H0SODnaaVYY/O8oODFKI4Y4hXXSwp3UDK3DsBynkZpgAZGNQveiKYUwoitumWKY5uKKz9
z7h2saII9JERqIAPIVgxxyybnt6JFwxxM5eRUH0OPEZZIkCIL0aFtDYcNudDp0rlXvoeXCeA9OlC
j1/p2xf7d2P/ANoKHpcqGvK+g9Uenyq3oc9LGhry06V762OgvwX0GttbVbS2lqtoNbVbg9regdBQ
0tw+54LDgHoW1tVqtwkcI0XVLCmFxY2NEVarVarcdvqeep+l6VuMSPBtOQitpJLHEMzfUQZObLOx
lJJjYjHmMDRfIZIKn+RySrLlSyMe41isSBGDU+DcptkzmPZHqDbIErGwQBusMRxNgli8XFvSxtJt
youP6N/Qt9N7/RHWx1txGjwDTrw3q2pq2t+KVwke4SiXJNHgEZIq9YRkBxJC8PXg60fXNddenB7n
hJ4Od9LcNr6e5dBT5iyyQ4wdUx4I+G1ZOYsIlyJHqZyxXlRNzY2xwO5qtTimoEglrm4q9N04wiKB
A8oXGlFPCY6JvW3ydsuRkf4D11jiuHkv9EkbO2HiCFSNOunvy0Oh0NdK9rUdAKNGuQOh0OnTg9/6
bsX+3df/AFNBXXj9tR0Ne3D105a2q2o0tQrnXOueoHOvflr7aW4Oug4LVbTn6Y0HDz0I4OelqIq1
WoCrVavb0bejajxD0L+hbhHABoKFKbUr2puddtOCKsatRFMLcI9Ecftb0jR+olRXRRj4udPvWHCu
T8hyXqbNllLS3osdATe4cPEQVirx146xh2t9xJMtowxOMjSNDjxxrW4LG2NsEyd3Fu6xHJxQBDb+
njj9tSOG2p4DwChx29JnRA2bigrmYzEyIK3fc4wpJJ4VYqTZqUXaCKKDGw90xLI6OKHDz168FqvX
vqa56Wrrpz4ParaW1HF0ok2oCnljQ5efzhhErCGEHnR4cmfxrK5dmJsIqk5G3J7dkAo89HF6YcAp
uFVLHsjSmjRQTz51c1fRGKNOxeEm50VAA8hf6KOJpGxMNYVr3Ooq3ojrR09zpz0vytXvoNLcv6ds
f+3df/UBr2t6Hvrbl7Wq1vRtzNXr21HoW09zwWoaX9Ln6dtOeltRwWq3BbQaWq/1Nq6cIocHP0LV
z1toKA4AaB0DGgaFEA0y2GjDlarfWEaW+itx5WZBjJn5yzSNMxJe9FtLUEJoRmkVABGwoLcBTTLW
Kh7hHHTrIKWeUVt3MVasoL4Nklb8q3Fn9jbhED4/r7eqbfQc+DnerctTVtPah6Z1ZlQZG9YMAyfk
WRJUuVPM3caWR1KZ+QKZix4+lYjQltxyT2WrZ5keDg9/V5ac65aDlwnW9e2lq6aWr34HkSMSZoZY
o8qao4xGlG/FfllSl5D1Y2ry1JYhehawhP23o01MKOgGhNzokZagkbFlVKv6G3ToDm4/glpQqhmL
H6GDHeZsbFSBb30tr7Ua9rVajVqNe+nOuunSudAVbUDQ104D/Ttj/wBu6/8AqCutDpQ1GoHH1q1A
UdDpaup4TXtQ9EekaHSjXtqKtz4r8HLht6tuC+g9K/oHQ8I47cuEcHvqKtrbUUDQag1F6PMHlQsa
arUfStxew4xxngsKH0WduMWKmZmyTyM5NE6haVKVK7aWFWrD2/IynT4/IAdkkBO1fcNstR2wgNt8
60+NIK2+ORV0l/17c0n7lQ4Da048m6RiyfRW+j99baW9O3rW0PCPTPKsndcLGGT8kmep87KnOiqz
FNvy3RkZT6QJBiyUlWWBojt2SYJlYOvodat6tuC3I17cfXg9mZVD5vaXgzpJoYPHQOvXhFZDdkSO
XIBvKDQNysfcrIRXYTUa2GhApxamFChTaLASPAGp+1av6eHijOXN2vKwzY/RY2I8zQwJCpvXKjrf
nXuOvtpzrppaj047UdPcaHryrlVr1bU/0vY/9u6/+n7DpQ1tQFW4xwihVuVHQ8FtOevPW9Drry0F
X4LcVqPBbS9e+g4b0PRHB14r8A0HGNLera/EK60Bp71b0xQFHS1X4b13Vc0avV70RofqLVb0/f1m
IUblvKx1kZLSMzXonRedBaUUgrHw552xvjxqXBxocfau1JxoEA4CoNCraH9MVxuw6cDclVRLugFl
1tx8vqRwH6G3qWq1EcI4TwS5EMQyvkOLDWXvOZk0SSdRyqHdFgQbpl900ryv6kGV200QFbXP5IRw
e+pHo89LamrcJ0toNTXPQVapcv7ykuZHBhdgHLhOnLW1ZS90Mbdk1qkQsBEweOPk0INeGjFYauLg
rXbQUmvGi14hZyt+429SOR4ng3U5UGVC8UnaSfGbdPUII0xMJ5TFEsaW5WrlRvXtR4DXvyNdCTpa
r6dTQrkKNGrciaFEA8B4uVv6Vsf+3df/AFK9qFW4fb2rnwDS1X0FctRpbS/Fz4wNfb2B19uAV1oa
dfo76e9cvQt6Q1t6NtBV9euo1tQFWoCrVbS1AVbUX4bUBpfW/Bc1e1Xq9Amu6r1er0aOluIepare
uat6s00cCbju7zGSUmma9HS1A2pWJrGxpJnwNhRRHFHEtSi8cQ8e4IQV4zrkAJuykFOB+SYiq+5f
RW/o/tRq/BbX30FG54RXTilmSJc/f3vJLkTsY3HCCQYseSao8NFrKsKPDarVarcUM7RHbp4w6m40
PoctTrz058B0NW0NtLUbVfS19LGjyrKzI8dEmlmYbdj96qFHqHmN0xWhlgnBpQDXaooMKuDV1FN2
kOB3W0sTQVb+SBS/hBZgTe/0AJFQZoNOkZjcoGYEH0VVmJjWEEljhYBcqqoPc0K5aHgF6vpyrlR4
OXATR19jRr26jS/Hf+lbH/t3X/1Nb1audDj6jTpqOG+l/qffUa8q5WoaWHHYcYq3o2q1W1GlvQGg
4QK50eH3tQGluK2ttRXLQ621FAaX0vV7aXq9Xq/EKvXWrW0Gg06UOI68qHF7V7+vb0M3cIcVc7cJ
chnlJotfW1Ba7Ca27bZcp8Hb4cRNT0zgwzcexit6W6+OLcYW7ouCf/VtQU5ulvX9vpPfh9+G1D6H
30OvXQUeC1ZebBipm50ua8ePGlXADyxrUnZJTIVNAEmLCY0qhATasle4EUeC5q+l+OGZ4X2/dQ4V
gw0vrbgtrbU8FtBR6kGjoRrbnR6CuVSZuPEGzJJ5MfDjKJFFGKNDXnpauVW1trLDHMuXs7x0s0kZ
bIJryNQkYVHKTQmJaVlShCxUX7jLkAvNBGHkZz69uHHyngMkUGUrBkLKB6EGPJM0jRYo+6RsLAoA
Lp0q+nterVzo6+1dPStVjXOva5tRGnPXlVuE9Lf0rY/9u6/+pbgGgoanXob17jnVtRx++luE+qOG
2g15cYrlwjW3AKtwDUcXv6Iqx9M+hbgGl6vr04LHivV/TtwjUj6O31ZIA3DeVjGRkvIzyE62oITS
wE0sFYuEkkuHjQ48XDvCyeXb2Bh9LfDHHk4rs8FW1yiBBsdzLw29MfTE/V8uI1bT3oUaNHXeu6TI
ACAyKKJLVKCpBYlYpHCYLkxwxxVfmSauafmJYiKIq3rI7Idt3S1JKsi+r7nW3Bz4zRtpNkRxVNlZ
eVUGFMW5AVfS2vvXWjyocY6it029XQgg0ATShlKyQqRDJIRgzGpcjKjDyux+gFE8SOyMJ0yFkiaJ
u0NxYmA01ZOVHAqq0jYeAEAAGnOulWtpzo6HnVxpagOensNCNLcJJ09utdNfa1Hlpz1tpejXv/SN
j/27rf8Ac69qvbg68PLgAo8F+Ln6g4LVbQcQ9a3Bz4h6A1HDaraW47eoNLcdvXArl6w4wKtRHr25
+vb0bcFqkljiG6bhJIJZzcsSe29BDQjNLATSY5JxtkypjjbLixDccGKKfDkjkh4d4Ehi2dlMPpb/
AOYDbzKcXg3B+zF2BXKfQW9IfX24OWvvxHhFGraGsqdYIZJWmaaQ0G5iTtj/AMkrQ4qqAoFGieA0
RengQ1JEEB6+qCQdv3N4zj5Ec6kcVvWvXOueltbXqaWOBJM/Iao9snLxwpCoojXpw24uWltLcqky
IFrNxUeVsdlKwZKCDD8oXBhFCGMDxFK7Q4y9njkqfGmga3qBGNRbflShdnybSwPGSPQjyeUmN9p5
0QRrh+Hy5mfcRxtI2HgiIcPXS1WoivYc6Io6GjxWr3FqOljXsRp71blV+R4utHlXP+lbH/t3X/0/
QOnWhxi9tT6A9G+hGl69+Wvt6A4eeoGtvW9hXL1Rw35+hb1BxW4r8d+EUOL34BrblodLfUW4rUBx
yOI0y/kssci7gZxNN3CZeapeljpMe9Y215E9Y+wIKhxMaAabxhxzQ7LJAsOltc8EwbO4DHW2nLS2
u/xSvj7N2/iVbXdWZcT4/wBph47fScuO2nv9AfVPCfQtpv2Tz/8AhOSDGhahEXVFVKFGjRq+p0kc
IJJC59dTY4mY8D4memQnK3oX1tpbXpxXvRvTyRJWTuQMsGE+U0ODjQGjVtPejQo17adaOh4BRIBn
zlRn3FwpDSgbWZUj27xVHCUoADTlpbnyqSOOVX2vDcSbLGKnwZoSI5DUWDPJUeyFhHsmMtDacIUd
pwqfZcZqGyQio9rw4wmPCh0mxo5lysF4iykehBkSQlo4clSkinsa5XtrlaGF5XxMNIFPANOnFbTn
XSuettL100PW3D1octLUatwcjRrppc68v6Tsf+3dP/UoCraihfQniFDjOvXgtVtLUKtoNQKJ15Wo
cFzoBoR6l/QB471bX30HpD0R6/vQ0tQ0A0tR47cN+AejbTlV6Gprr6Vqt6tvTHBmbviYlSfJJnqf
dMmRZ3LmLIkQyTs9DvYw4zNWJseRLWNtWJj0ABw5MSSxYH4+JnCxHBOpaLCPizhzHo71Ej4ewsDi
cG+kjD+PhvxfqD9IdLfQ+/F7UdRXtqxsN1yC+Ziv3rJjlpUgVQ1gF6ij0NGjV6vQtTyBRJKXP0N6
xct8d8HcI5lBB4fbU6X0PoySxxLJuSsExcqeosaKJOE9dfarVy0vqdZc6GM5GdJ4o3klC4eROsOB
EirHGnqdaaMNQgjoKAKNCuuvUexrloyq4zNuuJIWRiONHdGgkEwybU/3CKFpGxMaOGPUa+1W4DQ0
5nXlwXr366Xo9Rr01Ohrpp1oC9AGutG1e/8AS9j/ANu6f+prarV01tercPK/Lh9qOhtfQa20tw24
fflQ4OWvvVuAa+/p+3DbitoKtxD6Ll6HvoBwC2t+C30/PTnVtb0R6luC3rHhtrLNHCm5/IJJKknd
jFe0jkkrRXniwvK+D8dZqgw8bHFctLcBANbouPj5eJMs0NW0tRH2z/4NxibvT0dzVWxPjzOYOD5A
wGLsqOuHrb6G3o+/0vP0LemenB76HSX/AF5//a25vvKgEimFyBz6UaNGrcyK5incKJJCxoUfooZ3
hbb9zWUBgRp7VbQ8PL0J9xPd4svPXGw/Ex51bnVqI4OtWFAac6tparUK9nnhjrI3Ltr8mSSoIJpH
h25xUePBF6B9T31IPGanw4phPjFGItwxxPI2JtgWhHGDLBHKJ9uYNiY7IgUAejbQ6dKvpaunFahp
7e2nTW3Byo305Cr68qPon+hbH/t3X/1NRpz19+dW4hoKPoe3ocqA4Pe3Dy9Xnx24L1fW3qCrcVtb
cFuG2h9G+luO9Xq9XPBf07VbjGnPhFXo0KteiPoPb6EaZu5YuEm571NmMQxCjuPJVtc4u3ZGSIPi
2Y74W242Cl78Y06VvcUJg2PNWeLTlrvCGPIw5DJBxDS1Zau2P8fssmvKvkV/BtcapicNqt9dbi99
ba29Y8VqPCKPTVgCu8RGPLxpfHKpEiMLVarXoirciKIq1EVJIqiSQufpo5Wiba9wEy0atwnXnxEh
RlbnjwoGycwx7TiIVVVX3tpz0Ne3pO8aCbcIkp9wyHdBIZIdtk70wcVK6VfgPTjt6Aq3PhtVqNSZ
cMdHcIO1ZjkCLGypFj21o2ytrapMeWM9j1HiZEhg2eQmHFhgB1NddTQHpGrX1tXXQ0avVtLE8Ptp
fgOnKiDoDp7CvehXX+kbH/t3X/1NANTodPbUcV9eo4uXH79OHpxX4BavfhHrddb1bnp7/wBDt6Qr
3vQNDU+iOK3p9KGoo6H6IcB47VbXcsiWCGdsnIm2r45JMd9hx4IY4wokBJ27bpMqXDxExYfUylDQ
7blPHmjS2grfYx2bTK0mPparcU690OxkLmcHyFkvggjFtx24Lehb6O30vvx++hGttAKtwGvkmPY1
gZNhYEFbG1GibU0iCmyohTZyVJlO4JJKqWLKyn1hR48GRo5oX749DVuG9HW1dKyNxhhppc7PfE21
Y3ACg9fa2ljXPQ104L6+/IVPmRxF91lYzRyvSbfmMY8BbgBRe9e1/Qvw31Io6nqOC1WqTIgjLbji
qq5MuQqxbhMsGJkRiKCOOvFHer0aKg0EjrkNTwW051bgvrbiHHYUdBp7nr1o69aAq9c70eenOrVb
Q9dfaj/R9j/27r/6nFb0BpbQ8ArrRFe9qAq1Hh56W0tVtD6PP6MV7+p76W1HHb0xxW0t6N/6EKvr
arUatVqt9CPStwvGkgTExozmZHghzMmSeYPW3YT5k2JiRYsenTgtrbgIrcWyYszHZmh4N4iR8fYZ
HZOG3A4um3+Rd1twfIOc+GoXH9Dr9T7ejb0CrChzJR11twW9EUdOuo0NDX5EoOKeuObPHNH2kBqb
kWdQZZTaV2J0Ao0L0fp8LGVo8T/Tx34cnMgx0kzcrLrH2dVpI0jXW1e1uAaW1tVuU+TDAJN3uGma
ZI4lmSPaYiseJDDXteva1W0HKrUKtRtrbgtR161biZ1SmzcZa/OMpDbjKYcTJWo8LHShjwLQ6URw
nXlVqPAfR529qtXKiOdq9xXXW1Wrnpahyr2vXvV6PQ0ToK96HSjer17mjVuDlVr/ANH2P/buv/qU
OdDTlr76ihoNRqOevua68Y19vUtrbU6Cuv1QFW9C9cuIaDS/GD6RH1NvoBpavc/QW47a29CSSONc
7fceJc3c8zJKyBa8ju2yYgig9ffIch4dkdvx9bVmqrQbQxGSOC1W4COQ8ce9jppat8uc7Hv4fQtw
nWWaOES7yGP5ErsOQ9T24fb08GJHkmEfZgwRJJMiMk23dqW9G2tqtoRragOH3reMdpsZwVdAbuhW
PHmmWmdiGkenDk5ETV2kaX0F7y4rxxgXJBB+k28/ZjraOjodDQ4J8qCAT5+RltjbWWkjijjFc9La
jS3KrW4ACalkjhD7vjKGyJ8sLHMZ4sLOJx8CCChyq9HQ9eepq3BbmeD347USFp8vHSm3Fe45OY7p
iZjPHtsSsMXGU9KFGr6+1DS1e1W4R0q2nvpbg51blqdL6ngNe5q3K9dKsdLCjzrnrarURaumhq3O
iaI1trYerb6rY/8Abuv/AKlAaGhwjQcHSutXoUaHD1068N65aWoVbQVfgtpy05eiPSGltbcNq5eq
db630HoHgHHf0RxDjHBb1RwigL1bQ+qOEae/qbrimeKbCy4ysGSTibRkZLYnxrHhdUVB6+dAZ4Nm
8WNl8jwSC6Rlo90W3b6OZePeFv267n433SAARenbgkcRpmymWgpU4s0SSghhVvo7aW44IWmfJtgY
23bl+askghjxM4ZMbOqKeZ4x9E69y7xinHycaaOIvm90kWRA4sCJuRDsaaRhUsTGimsEiRGWZ5ni
w4ciEggspB+hK8lW5wEsYx9h9CaaKBZ92llqDbJpGggigXl6fPT2YhRJumGhl3OSZpHlajGckx7V
G1RQRQir+re2lqtxG2hIFSZMEdS7miLJm5bV+NmTEbYpZcWBa6AX19vbTpxmuVdNb1ehoeK/Farc
+WnLU8qNc6tauVWrnR0uKJq1ChVhp70demtuC1X9M/X7H/t3Uf8A2dqHTTnQ1tpbTlwewHFz9K2t
uC3K1e2nIUOvr2+ktVqtVrUBp7Wq1W4BwDXlwmvfT3+ktwW4hry4RoK5VYaW4BqKOhr39O3DbS3r
FVNeNB6duO1MoYZqY+HukEiyxamtwRotwhbui9HeVK50XOPQ9MoSSbygsno24crNXHqTPnyUtE1G
OFiygSY+ZIgj3SUTSbpAtRyxyL6I+hxJ0hXc95yMjD+KxSybkoQL8gVcWZYpI4nXtr34bej09Pft
tmyFIIOkWTNHUWVHOHUIAtd1qmQHW2kcrxORFnxlSjMpU+ui128o1AO3rdwPt9rcMkscS5O7Fqjw
MvJeHFghHGeLpUufixGfdg7GPcJ07seMxQ5UpxdtSJQFXQ9ND1051fhI1FuLnXQGaFafPjJO4zvS
Jk5Crtd1iwoEUKFHDerV7+geD2vXLS9e2ltPfTkPQtryr3Og6W59OEi2ltOtCrVarV7V0o0bVy0t
XLQ6W0tyoiuWt6Olvrtj/wBu6/8Ap1yqxq3COlDi50L8I05mrUaHCNPc39K3B7XPrW9McFtbej04
LVbQeieAcY4hxX4LUBqeO3GNLcNqGgoUTRNH1hwAUOG/Dbgt9N8ihJh2XKOTja9K36PtO3uz43o7
8iiSA3h0b9KKj70OlW9SSRIlyJTPk5WWYVjmkYo/OTt7h290z2q8jhDKgizcsGPcMmsfMSYW45Mm
COhnwEggjS2htwW5XHBnufHNGHj2bPh2/Ig3DFmj+Q/8zLGVjukrhz9U6K65XxzFkOVivBJQBJiQ
IHkmelnkUGWRzHh5Uxkw3iIxiquwJ0R2jYGPcEZWiZ0Kn1VW9RrTmgbHbY/uHCzKq5G8IrR4uZnt
jYUWOfVmyYYayNziSNHz53TbWQPkxRIuPn5hgwgjcN6NW0NdKPAeL3NPkQRmXNgjjfNyCJBuMrLt
zuybZjoyoqC+l+eltBpar6niPqW4OdEWHWra9NLGjp7W0HAetq9+dWvRq1DS3LS9Hr10tpajoevu
a617Wr2r2r2A5CwNW/oOx/7d0H/2ZocA0toNbUNL6DX30663oW1tz4Aav6N/Xtpyrrpbh969tQPQ
5ehbjGo0Gp9bnqOA8QOh9A8FvQ97VbjNqPpW5D6Pnx29e2ubHJLj7E+VDk8G9xBsbY2vj1HDJKZM
WaMcXyEH8fBbuxNH5JhFn3i3qEVJIkSZOS+QwktWRlIail7jA4vkpYTTARf/AARUkglyDGz5LtUU
sjMJLU24SKFzpmA3CZQu5TgJuyV+445E+a7jzq5IHbhBhja3AM87PM25SIuFnz+VtzFT7hO5aXJB
xszKjqDJjm0yn75rgU2Ok8/xzbY8HbtxxY8zE2TYpHgytsiEX1T5Ma1k7hMylJpEXaY6TbUWkwDQ
wyoGHAalTDiSLcY4FyMlWkyWkkNqtqjsjI8W4pLG8Dunb6ircolclBNyvOTbU5V70zKoyt1giCw7
hnnG2+CAWq2nP0XZI1bc8Va/Py5j+3ZTyRYsMEU24wxusOVmyQbZjw1yA05ae1W1NHTnrbhd0QNn
wKZdxBkifLlaPByQUwYFRMaGKrmr+l019qtqT6Ptr04LVbg6VfTpV+O/P306aDpbQ0AaPI24TpbU
0SKvy0PARbUira+w/oWx/wC3dP8A09BwDS19BXXgOgoehb0BqPX58PtXPXp6luMaji56X1txjhOt
qt6F6vV6HpjjtqOvFah09C1HTpqfoLUOG2h4bfVEdy5MC4m7ROjx656d+N8VhxmXJwkCYVlx79wd
bPY8O+rfC2h+7C0l/wBe1L3bvoWUcJIAl3DHjpt3kajn5Zo7nOCm7z1PlyZFS5YSjmyFvxmmqDHm
V1UrU03+KQ8+4hvL2gMgVVSVjeJSWamkQUjK7IpYTCVEWVmAi7wcealhkUxi9IEC08kaCfP+1MiQ
gkMBD96jtJkcDxtRiSll7Ujn7WO4YlpJo3kJF5ZGiyvjnyCF8fd9zMWJtOZFl4OdmxYsH5mOa/Lx
zSSRv9IdcqTsjvChfMw0qITSJKJ4UErTrm7lmxStuOW1fkztTSvePJWwMcg7LVLARXbRFdtWpWZG
gaHcYZoZMSWSO3pAXpEqwALVjxeV2iWPJ289uuVuUWPTNn7g+LtWPCoHaLenNn40LZO5FmTHy8yK
DbxGx8UYyd0x40HmzBBtuNGgVVHBbS3By0PACK92kjBmy4oaXNfvMuRI4wJZYY8C1NDE1AAej7Hg
Gp0PBa9dOL2r209/R61y9P309/fX39+elr1Y62ojS1dNOle2nv79dOtW0Nda6aW/oex/7d0P/wBn
XPXppblV9LcfOhqfQ66DW2lvR9gLVYVbUa+2vTjHDk5MWOk2/TM0O/G+PmwTrVqtVtBVvS9+VW15
+oatQFe4Ho2q2o9frVtbesKHBbQ0forfXWq3ofI8eEJs2SmRiazr3Q/G5WTNIumPdEZlUZ0SKASN
VZVBsTvCB8HYGJwdMjlDsQB3CpmKw95YxZmRFWPuEchLKBk7pFEJ9wlnbvUV3lafvauxKSNLjHcr
kYaucPG7pVxIYkjkJd2Co8wdh9zqD2tKUqXIeU4vijgMgFGGFqVYbBsdS2QLtK7HH8homxQsCWnj
CyA15WFCeQ05Zi8guBIxXtSpJwA2QTWOCqCQEsCQY3JRewHoHEKDmJFvl/HtsxIklijmj2PZjBkZ
mFj5kZP4+TJIgUZLhsHI88Os2fFFLHIki8PO/pZwuu4ZDLLg7VHMqqqruqs2PIcrDaLLw9wTL2yb
HN9VZlMOX3ULWlgBpo2o3FXGiSPG0E8G5RTwS4cjoPQHOkWkWwdufdW0RdxkPdm4aknI3DGgpsjM
3BsfZ4omA7Rpz4+tSyxwo+8RARHOynTaiGWNFUmwl3KGOpTmZkmJgBH1tRtwijrbQ1amYKpz8UGT
cgZY5c2ZUwsl1O24zCPHjhq5q96I0HrWrpXt76W4Odra24TbW3FercrcjpbQ34PajxX510oddLc6
Bo6E0elGuWhrrQ56HlRrlbXnpa/Aa510/oOx/wC3dB/9mKtXv6Z51ajoOIUeC2oq3B7n1jwDS3AK
tQ4OtSyLGm4ZjZEvkWlIJjnkiO3bv30rBhQ0HrW4hpy4uv8AUAKFAc7aAVbQ0dRXXg9tB6A9K1W9
K3oW9LcY2kxfj0uSkulqYArhyPj727y2kkhnQi9Z+a+LuPDmr3Yux50EEMe4YsjVmEDG+OAnJoqG
WSLwSyycmyhd9wkI/wAspgjWmWNKIDCeCd6jwnJjjhiXHzFhYySNSIKkbyGNPHWRM0h7aBFKOVgw
/EgkBZIFBaRj9pLKKEgse812SAqzLRkD1j5ECq7gRlCjPBJ45Ix3qpFcqaSEUjIaHfLUmDKtHJLB
boVsyaNypVBVTWUxWf4zviRrm7okcPx3do5IMzccTFinJnlBbseJGbByGxiu445qKeKdazYyMqJ5
ceSGVJk4/bpU24Hu/dcoy4eW8raZOQuPFFm5GbW8pKg2PdVkSTJx4huO7QhJ5mmccjh7t4lyNugy
keN420BtUTCREmlB7mepCwN4zXZGaMdKHRsbJizYszAbHYi3Gi0i0xpjQuTgRrjYEbh8sfkvWLs6
ikREW1GuXGWUGfco42yMnKyDHtTypBiRwVewFZeWuODkZs7xbWy0iqi8NqF6NddPbVmValzceIPu
pUSZWXK8e35M6Rbbamx4GoKFFzbnwXrqdTzr2o1ajxHS1HQVa451ah0oc+EjQdK9q5cNq9+G3rdK
PAa6aWr3ItXt7nS+h0FjwnoKPBfnqNTwcvqtj/27pb9z0FX4rae3GNRXuNOY168fvp7DS1CrUOLn
wW4raW4DTyxxjdtwjeN70qlj4jbtZaVyDte6oFSRJF9E69aHGKtwjQcFqtbgPCOAevareuOG1Aan
ht6ltfYf0Zl7lkjTD3eNldNc9WTcIbeNsowZz73iKmduH5O4wt3xO6oJN2gjYbtEafdBbcMuQwYm
QUKS+Q4m5wiHO3fEaH42Y1frpukMb48kJNfiLSwItEAAKCJjJfuACZPapdnIEZqXEmCxvkqZ7qnh
7g0UopkYBpUDJIDQy1ussb1btDyJJUbIhfx2SBQALUBVqKA00IrtZaRpVp5pr4OQYEkRpnfEmYtg
ZHf+C4SKEpUs6xrGrhZkj8/KjPGhjdXFGo+YjHaMoXk2Ha8QJNjwyxYPxzGOfLgYcqZ0D4uSXaSo
p+0K6uJJAY9szEhkVldc+EvGcu1Yua0Mn7nDY7k1Hc2FLuUZqOeKSjpkzLDC8+W8JRij40kh2lIY
Xde05O5wRnEx8rKkgxMfHXNxEyIsrEnwZtvTEyWyPj2I6ZeLLjPSqWOK8mMTJg7gmZt82MSLaKxU
l++o8l1PdFIHhjpowKIYVdqDMpws+LITPwHxJGjV1160qUiXpmADG9dvKBFWTN3QzptmC00sUaxJ
qeEkCpM3GRP3PLkZMDLnePa8RGACiuVPl4yVJnvkPBtyzRw48MA0GvvXtVtTyqXMxojLvEAB3HNk
AwtwlaHZ4RUWDixG9tPb24Dry0twHi6Aj0BodLcItRr2tQ4r0evERp7acqPXgvR4CK5V7Wojgtwe
3ty0HQ1auVHS1G+pq1WrnqL0f6Dsf+3dB/8AZ2Ggr34/bW3P34ra8q5aDhsfR5+n09MaZEywxZuY
88pyGryXpSCQi0VIoiokLPtmS0b+1GmYKMzf5kmxvkQJgyY519uO1Wq2o9MW1tQ9IegNPf17cI1F
DhNHjtz1vrz/AKXbX5HBGh2rI/IxNd/XtXBORk4EkOVh5G3bdCkW97Jiz422Td2NnZTeLxRR13hq
EzCpH71xowchYlUoy1lggbf3KIdxljB3bKIlzZJSRXdclbhu1KfKNgJHoQV4Frw0Ee6gR1KyFmla
k507ogzM1HBY3jVWAY3iZloSo6IqXZL0sJuFoLpbQiitG4H6Se1UhmFCZAA4szIonyVoysRtolKM
uOGmiRi8a9yqykURTBrqLVl3B+K7uGGRJ4Ydl3dHmysyCCHJnfKychD2QwTNIqOp5ORC1QT5OPR3
OUpJCGJglWh5hS+Yg/4j0r8gIcbd7VPuS9ryM5RrnImKCKbKJSUVNvWSJcLao5WAAFP+mQHLfIwh
iuM+RJcjN/cIFjDOYFiSSUkxQteDPCrk7XHMjoyNQJFH7qjapGIruJpVuACKI5WIODnpImft0mIz
KJBovVVvXIBzelW5YAVJJyw8V55MLEXHj4LaWNT5UEByd0UHsz82GDa4VqLHhh4J8uKCp9wnyxDt
00jCKNK96voNLcBIFHJgUTbxClPumVKfxdymEezKKhwMaIgAcR4L8Bq1W4Bw+/F7Vz9Y1fQ1bTpq
demp1PAeM3rpp7kCra8tLanoRoa66+5GljQGh69aNGhR50atfjI+s2P/AG7p/wCnpbW2vOudX15X
q2nPg6Vc6j0hwircQ1GvP1b1vmT2o7cFzXcaDmkmKuuWFkwsuPJj03jJ8GMxLGsfOyIDg7+DUU8c
y+ga9voRw2+jv61uEUOC+h+ht/QbelumO2RifG5pbGwDZmOrLkQvW9IzYvxmRZMHfWjA27PgaHfN
zxsTC2XLjSDKylaIZnKKUuCUtJKFKS9mRFOslM6XzEtFtw/xtKBTTpeN3ctZqBUvLO4A75CkIFAV
er0DRvUneTCht2LSisjH81NAI3deYFykRWu8Cu1SImVaR7UtzpbUjUrRivUkJYJjqKktG3bO1KUi
hgyXdp1hheKcFbvdZRG7ZaMEYMLVal5PlIEyMkV8dxoRCCLHZMSXe2RLb/CuHuJlNBWZInkSTwiV
jDlRAvMKMlqgsS47T4x2rD5iSiSNNG7SsyG8TkFojLlFCmeOw5ZYrP2LI02XSRLHHhTtDUE4mNEX
E0n4GXn5eJkiTCEryY8sDcwS8uQFxhGJJwKZmY42ZPjP5cLc0y8GbGagbG4vEhmp/sYSsAJjQbuq
xNFSKwdxsNx2845Nm0Xqn6Wah9x/QJJCTBC0r7bgrBHp7nQsopt1xw0k+dNKNrdzFjxRxgW1NS5U
EKSbpkSiDa55Tj40eOL0aOpoX1aWJRJu+KhfdpZi8e75Aj2YPUO24sNKirV+A3q9XrnoeC+h568q
vXTgvpfnXU6W4eVXrnRtQOlvVGt6PTg5ae9Hi9r621vwXo8HOr6c6BoHmb635Guel6JNc6ubex1N
W161aj9fsf8At3P/ANPgtwc9By0HBYanh6UNRqedW5W9O2g0t6vPX3pum8Td+Rx2NXasLOnxThz+
eKvkE5aTUG1bfukmM2JmR5KcXtoPStw3+n5+kPUFAVarUBR1PpD0rfS29S3E6hlMsG3bxl7lPk1G
Z+6Bg6ySqYNj3Tcopc3bN8yMnFw1y53wMMx79gft2Yo7l8MdKAtSRsaDWqS7y4mI5SSI3zEnjixY
Z2jXBlcjDiSiQqmQgyyB659yiwq1dtdldtWrsFWtoTajKii+Ow3CNSVjdCTK5KlTHc0VJpSwMUwY
CgdbaDloaPOr9tItZEhjUmZ6ZJEqbCkyKxcbwxeVpZCv3TR9yw3GprKs8eT+jYNzy8fcFF1iORi7
o6ns3aWafcGx1aljdEco5P8AicZTqv5IZZchQEdbGTuH2WM8a08kTFWjBMiuV8TUphJ3DFLImCEi
7ozFjxSS1DCkSzuqxrkd1YuW+JUGTDkJW55GIIpYj34235pmm24ywZu3S45x5fE+Tl+TgUlTh7sC
uVtSuhUq3bEwSR4mlkEhoCgDQLCixsxN9tzfIM3EONMy2Ki5B7V6lF7RNJSIXbaNuCqABqxCiXcc
VEG5ZmQYtryneLbcWFr8HWp82OAz5uTmrDtbyyR4uPFpzrpR1tXeoEm6YiNJvDuWbdsik2V3EO2Q
xUIYVOh4ufo216a3rpV9SOAg0a9tLVfi9qPLTrpbS3EeAVzrpwW4DqeDpw8rWGpGtqtwnQjg617+
2htodLWq1tff+h7H/t3P/wBPW3EeE+java1cqvwcuG1X4BXv6VvRHBkN2xZj983EvUVYUOu05Sky
P2puUxmyeHDzZcaTA3GLKT0RoKt6/X07fU2q3AKFWq1WoiiLUdLVara29Aetb6S2luC2uRuMULSb
zN5Du09t4aWSXFJkiUQoEnIaRrp8YZY9yAvTTR4u5DIxnX5LuC5WehBAIonuA7mEyLOskb48oyJR
Qdwc3K8mPhyBccyyGvuFSZKrXdKzgMTHGQAvK1AUBR0tVtLippCtTShqSRjRcRsZu6i5NBzSy8zk
Cg7PUTg0vIVfS/FavdzEjLKrMWVaJJSaZyFme8SSSMJcaCpMrEcMoFWNMLUJSYpxeL47LCrgmh4j
OWYV8oEQznypGAV2r7bzwOXRWjKhGDfj1LkRoPLTHuUR12CvEpooy0pBrt7qRwVyM4qRCzPEGCd6
pUuSJCnYgkmPiimXuyN1k7MHbMx3GPDaXAgevx5Vg/MfGnnwIshZInRuHEzp8VwuDuqZeDPivooB
b9umKmCdSVlFdzCi16VmVvvzIcvHaKolpjcxpepXAU3dtp27yMiKi8qnyoMZZ91Z0igzcyPF2yOI
pDFFwsVUPuWIobOypym0lzBBHjrw3FSbhixtPut3efdJ449olmqHaMdAmNBF6vvXudOvCdLDg9uM
86vxW4hV666e/vRq3Dy0Poc+K9cteunIcQ5V7npbh9tPbhAoaX5+/Kr1bTnr01Nq9hzrl9fsf+3c
/wD0zQ5j24b6+w4vfT29Hlehw3q/AOnBbSw+g3N+3HmU8YoEHTmTHI8RfPynRvuY8uLGypMd8f5I
wrG3XFyACCPW6+ja3Fbitp7+nfgv6QrnoDwnS3Hbit9QfWtVuBRWfIPyF7npXYLlwtJFtcos0ZYx
w9hbpFmzbfnYmTkZOJPh5OO+24MMMO9bVj5mJEAgFOyRnz3CSuXz1BeIhkyonBdJUXGaZYkN4ndS
0axEhRQQCu2rURrehpfXOYBSD3FrMSxq1AizIQlzQJFRyMpgYvS3AuKuNOWttL1ekowiVwkcQaW6
h2YuqTJ2K1GSyRI4lkiVwQIxcEHpexkF41jzpGwopVg3Xy7e2O0s8PyHZ5omJjQrzXsXyI/ZJmxC
0c0Kwl7UYQ4WJrBKAsNXjF3MUSSO2bMuL4VSZAWygRLIxEYYVCpRi0agtFkPj4qY7QbmgVZEfX5B
h+OWLJkx2WbF3JcvBmx2I4UdkbE3WOZM3Z2VSpBqDLlhK5+PIGnxKfKxwJJS5aN0O22TH3L7q/Sq
Lcs3YHcudtwWnkU4+HHk7qiU/wC45scO0RlIoI4F4S6rWXuSxP35+4x422uJAiA6WrkKnzMeAz7z
AAuZuGSuPhZTum0opOLjGgLCr/Q9dDoRRo1eudCuunPS1X4rcYvp7e2ttDr7e3DfjB4BqeE8F+G1
HW2grpRGho8h0r26Ua68XtR0NDp10NHoOA6X+s2P/bun/paWq2go1avfUcXOr1a3B1ocFqtVtLVa
rcteelzR9MaW4zeh00ysb8hczapgZMWVKKEVYigrGiCNQaBoWrkKZuQ6egsjocHfZYaxtzxskAg+
kNbcuG3EeG2ttbcNuC1W1HCBw20tVqFctBpe+tqtVqtwW0tXT+i2qxoKTXYKkaGJMrdZZK2rLmnG
mfmHETJ3HImpTeiyxqMmP8WKRikJePLFXvRrJVUy8Y98G5di4uFnQ+Lfd1gw8OE91CpIS9RRlAUi
FZjI1RKSuTjzdkg7VWOQrFlLFEZMckQwuI0ZKA0J0vV6vper1fTJMKifJmkq1EiwruvTygwoVuJl
J74bwsopQSAldgrtWigrttp91+40G5XFE8hYCUPIzk3zlVcBckdsszdysTIzFgw8kMAlZwoJtylW
hzXacqLEkxp45480FoUYFd/c/tYxWJNlpXLUUMjKQ8b4yEFHR44yjBa7RRUV2120FrIyIogqT5JW
PsRe4VJGGowMtBJAQpu6lUUvKyJCDIoYvGVOPO8bwbmppXVxuGMuTj5MTQyglTi7qQMnbkkVlKng
VTW35WZA2TtUWcmRizYzxx+R4fjU0ir8WitLtG1YrHAjbIzofHNjKFhzudWLGwRZZCxx4C7RLlrX
7ZPMIsLHjjAAGh051LLHCku82o4m4ZLQ7VirGkcca0ekk8Uatu+MKG6ZkjPhblK67LHUWFjxRqio
NCPQtwHTrpbjtrz9Ei1CrUdeddfWFGrcN65UL11rlp7WNWr34bcNufPgvQ1twdeC1tLamjfg66jr
ejXtoRyFda5171flwdSdOVdPq9j/ANu6f+lb1hQr39ufGNLej7+odRoNRwW0twcrPBC4basJjJsm
FJWRsIAysKSBipFY+M8zNt/aGxHFGJhRUim6+nHNJE2F8hMSwbzhTBJopNRwW+gPFbh9vp7Vbivw
2q3BbUa+/wBeKtXKgAasLkgGSSOMZ2ecmQMDWypZOtZGVDjJl5zZUnNypCVlGRaR3EkLGWsuMK8U
vlhXlpuS/Z8UzN1yI9223cMk4WIuRNJjY8ibxt6YG4Ch0uAHLO2REwhwkAjWd4GzhC7pjvRwEdHE
UJhEVxVzV78XLQ6dwtl27CXvzNLcHkxLDTpQcA+QXxZscLjZCMAL6WGlqIq1GhVqteu2itW7SsqP
FLgnHkzE744gfJJ3tUE7GpM+NGB5U4NozyWATzfFceTH25udba8j0yhhmWgy5Ig4WN0YS8/ADkO3
dVjQ6A01ChRsBlZtjj4DFuxQOwUUFWtXWrVIUQGKYFCbSvHGcfI+78lWpogWKG0GdLjum6xyLvcZ
dhpj5c2M18LckycSXHfTDiWXIx9o2+NUjjQEXGThwZCZuw5ENYm55eC8vyKWYrLtxaf5HhJFLM00
uIftzeqKFE8tQQtK227XGsYUKOLJzIcYz7rPkVFtk2VJDtmJCRTSIlS7phxmTeJnJTds1YNm7jBt
2PAwRQbfQ20PFz4LcqOttBXTitVtDpYHT30OnXj56+/Dz0POueh0twHgOltD1rnVvRtoNLjU6m1g
OA89PejeraXFdTpeva3Dz0vyrloTf6rY/wDbudv3LhHWrVarVb0hyq1ctbaDiGnPhtwHgtqOegoa
34rcuO1SY0Mom2LElpNgWNjtrgHbpCJNse2ThNGHW7eqCRUeXkRHG+QZcVYW+wZFI6OLUPUFcqtp
biGo9M8Xv6I476jht6FuAesPQztyEZ2iaaXJ4QL0qFgvhUvKAJdwxIxLvWOBmZ02UGYCoyXrD3KP
Hgl3iZhOzSsUdiE5STolRkdskEcixBUSa8sOCwKx9zNes5bxfCzGu2Mb1+QkGcrIy/J/yTuIoUyd
wixWFZULLBiFY4u6KdsteyZpLBn7adkkrxBSOdX0eRVrzXrzLSsGq5q+lhVq7aZCzDbo6eFVctcs
LUbULXNAEkLzx4XLRhI6SRWFWocqJq9EiiKNDQ0L0wrtNBPtKWiQ9yxyjuNwo/WzxyBUChhyQ2Pf
4sr47vU+RmyXdWyv2zI8kk0O7bblYcqR841bvAEjmR+6OMNAGIK3sKF6tUjpErz5GW+PhxQrVqtR
pmAoOCGyUtFBCKkmPcMpXU9kgRFRDKhoNQmkBa3cEJrKd5EIsbaAkHH3BXXI237WBBx5DHLhTebH
0nyYMcZG5M8+bgxZMDYGQGG25hP7VlAR7bGSmPHBU4vJPJ20ql22jbuQAUUdb1LumPG2RlZWRJFt
HkWHHhxUk3DEjD72CDPu85Gz5ElRbThoEijiH0Ht78Io+lb0ulc6HU8dtPbQcRo2o0elqtR4OXEN
OWttenoHgtXvRo8Boam1hpautC9dK973q+luA9dOVW5GulEVa9X1OpsKNW5fU7H/ALd0/wDS+iPC
NbfSc/Q66jjHHbW3BuOA2UuXtuRCWxplBglAIt6wYiotwyoqxPkeRGcLcoctfTHFb6AfRDgvwA/0
cVu+5fjqrE1sbRK1+bSRqzTQrT7lhpUm9oph3mGQS7riR1Nvc7NPkzzMWYUwnYqjgCJKICgMCv5P
3DuIeaOOmlc0mLKZVlHc3R1MaYsTvHjgw5gqwvkL3RfGM/Mx84uzDIwhjyhQF+SQxvtaG6g0z9gD
yuZEJTAyFjqWKFDkMTlZMoYZGGhx0iLNGGUXFAipZSolldiJiaSQsVmaN0YMoodbVa1GrUQTTJ4j
+MZGeN0IHPuAW9xZRUGLHMI9sC1LCFdABSmi3LuvXdQN6NX07jpc0ZUBXJDOzxqGWBYzbxRdhMOO
WOXMFP8AjlpYUvajQ5SZY5fHxi4+YtiN4hXN3Ycx8lZBt9+6lDxnxhnIftiUFT5O4aCsjMjhRI8j
NaKKOIXq9Cr0edTRK6hpVAn7As4VnZSzEXEzxtBkdx7Aa8amiCtXDiaZYo5Q+QmVjGE34MTOmxma
HE3FZseWB9l3eFEkyIYo8zdu5Y9vnzVggjgivTsiLmbrhwJn5bzy7YhbJkuXyZArMxdtpwDNLGix
rpepJEiR94iIi/Ny5Th4OLLPvEMZfcc/II27cZ6j2XGQpi40Z56n0Dxj0r8FtOXBfgNCw4jfht6p
4rcFjaud9L6W1FW051bQ8XOr6e9dOHlXtoatwdKOgq9GidLUaHKutXNW56Wq1qGp1tVtPbW16v8A
U7H/ALdz/wDS19h6Yr309tPfivr7cqGg4rcXPQegOP21twDiKqaMMTVNiwTLl/HVasna8rHogj18
HOkxJf5HhpDJ8rlJxvlCscfKiyE+qtwW47cdvUvoOEVb6D29vUZuxJgMibwTBU7kD5WTeaeaWk85
AMoLuFAcABgzFudiakjRWXJjuzLdB3DtDFoXhouyMpkciJQwRi88oWl78iVvsUMqNJmSytll0ngk
71pxddqyRhbtBLFPHusoAwMnIc/IcDccqCMMhWnABMSSjs7UxkUzMn+GdCs4Qg4pBw3FyikVzpRz
yFHZJ0Iku2S4rGj8xjRlFqtYgirir1c0XtRlSQJEq1lRoEkYM17UrEV3Gkb7jueQiHJndoMpgIZl
lUk3XtI+1QSoPctNKgozrbzLY5JFNlPRJJUNV5FKqZoS7JHCX7wjFexbsO6kUqPanFnnF4Yvy2mx
IBBBu2HEYcGKUQfI9rLQyjmtiHx3ipC7qBMCFtVquAMrOIOPgsXUBRXWrazTpDTK0zSM3cTajYhC
LkRgdq2jQq8bGxAFSzxqD5pqWGJXJRhuKWj4UkeNoNwgykzNrlgCZkxrbGxDBT7hirX7lnZdbT8d
yMyLddr2/B2uQ3ba+TyPYzymR8HDeeTExkx4tMrMhxVy98DKMlZax5po53xtznlGyqWTb8ND00Ol
+E+n7eh78Fjw21toeethXt00NCulda6egQa6VyPDblqeXpXr2058XPQ8I06cdtL8HO+lufv7m9C9
X161y1NuG+ntw2rrV656X1vV+A/TbH/t3P8A9K2gFH0Dr7cd6vr78VhQq2hvwddbcA4B6QFW9ca2
0eNJBuWxrJU+NLA/0ANq2Tc4Mal3bAKx7hhykEEX9Lrw3+r9umntw24BVuAcF/q9xy444p0CuqlU
blQDM3jKUuQIWkl7mRVsD3Hte5iftMkgJhlkP40NHE7icdrrCyLLIzqCVkEUjUFAEynxJj4sjGWL
HlXmZRcxgqNzxy6be4aLqAoAnHZmbHAkO3TwpLHtiqkLtet7WNN1W9BWDM7CoxK4x0vk5LHtsWyD
GSfyewG9KQKvQNi4D1NEVYtanLNWPGyMjEL3U8iLUWSkrkqoRlerCsmTnE3a6N3LmsxR8aQC1C1z
yKg3aO6VGjO2LiSqPFLQhNeKvBHXgjopFbtio+EAzxLRyozX5IuFdh2G36VeW1d5JBqJe5u1UYcx
7EVMCKFnjwMtMMbdnQZsGWrvBBIrpuzsuACVKWNB+byi5nHkUmpJEjEk82W2NhxwrpNK8defkkvd
V7VPkWVIHepOccrLGO0EgAUO0hLlRe6syn8uNYx+XOEw4UNPHcIWBnRWiIseGGIyyYEONHBuWxxu
Y5svAmwc58zI2/B2HDbL2+KTIyd4nxM/5FvEs2Ga24hRmEiDHheWTbMBceIlRWVvGJjHI3XJzGXb
M2eSLYirRbXhRP2oDwngtwcuE8Hv10twngOpPBfgGvvwnpfjOh9C3CfT58Io1fUi1Wq40I4bcteW
ltDQr3sKPAaPDauhtpfQ100NHhF9Dy066EcBq1+C9H6bY/8Abuf/AKdzwDh9uXp2q3K3og8Xvr7c
FuA0P6Dlbdj5In+MKxydgy4Q8EkZt6167jSyupxd6zIWwt0x8iObdsKEpveIxTOxnpZEbUcNvQ68
FuC9Xq/ocqtVuC3DbS2lv6Dbg3SR0xgiRmeNTGhJoliQCpknSSBYkdiImmliZ6SJaSTtHcxrvBDP
yu5r76Beg7KHW6xSi80yoqkyDJBkiTHXGfHx4sqSXuZnH+ONhjJM804xC0WT5UBWRWO5fbL8d3nt
TIaQw4Uk0czMTXyXblx51qV+0s7AdwYLZMgulyAcnyGyogFiwIqwoWorymUsssAWu6ICOKMUHS3k
FSY8krrhurNiSmo8Z0HhBr8aOhjxUI1FFBWXFCUZedqCm6dsdO0sgOLJbFx2iJkmCq8/d5mrzNQk
NGWi1qL13i1lYdq28S3WQRwRKLEm4xS1fj9hRajiZwEnpGJFGphcQ/oXHSef4xhDCwe9RWMlp7WG
+Y6RbiUKjyGiQwaFmqWdMdVjmzKhSKNT0DaZyHtRyDGXYnIaYQY6xCmF6yIX72CdpFgCbnuqNwgD
SMkWMiiPmpvVjQPNx2kSFxm433VbUAmsaZcZZGyZxt8N5s/bYMlM7FXFeDLngbGmfIjyMKOOTLyJ
ciY9cZrHMF4cCWHEqffcsuMTds8Y2wY6rHiY0Wh4OtW9A+lbj5cXOvaudtLegaNX1OvQam2h0Fe/
CatRHOvf2HTQ62099OdHU8vRtxHW2h0FHjFG9c9OWhtXsNOuhtp1o306DgPU1z4edDmbVarae+pr
3+k2P/buf/pae/Fajy4Rrz1vbQ11099bUNR659K/pXoevbQi9T4GNOMr42jVk7VlY5KEVb1gaxYG
8cp8dflOCu4uKTd3FRb5ODDvxak3hDS7rjELm4z0s0bVcHg3DeJEnQbpCuNOJo9BqPRFddfbU+kP
pB9Ju+QGpP8AIYnVTkDwys58qNzdrrJJ2LjqqUV5FmFDpdmoIBVtbVYirtXhUvLEr1KwiiAkkRo7
ooWFZEdqwAfJlvJM0RAiymCz3jdEQCtzUlPiIRsixrMdYcuN0lT5bK5jQ3HaDQFxIFhAI/IMMZpF
/wCbNdSHjlqMlBLMZCykkI1G9iFqfxFVjHejxAeeGvNHQnWvMlCRDXdRcUciNa/JWpMzsV9znNM8
2RS4kl1xIwDDCCFjFXAHlkrukarTUUJoRxLREdEotBwQZJDS95LIpoCNa+2vtAMwFY790YN2gljV
lniaeCMuBJJZ5kZ7uzg8qYXqI2aRvHkfH95nl3dVuCzpNevkuGkT5POiCo7wDNlGosfud3HjxZAz
9nLtIIIqSNZF7I45VVmeEWkI0954zIkkXiLCrBSGalKBUQmsfvpRy7RXbTLTcw6NRYBZ48ZgmJK4
MRBiw5JGG2yq2LDg4sRYdkWQtZG8iFfyMXMqbZHc3ycORd1jkilljZS6AY0g8qxrMDtuTkLDtWKk
SIqLp14enpW19vS9/ora8q5UeH3Olq51y05ae2ho6c696NX1vwW09tSdbUatqOO54L6ng9/QvpbT
lwcqtpyoivbgOnsK9+enWud6GpvV6NG9uPrrf6DY/wDbuf8A6XCOE6dKvw+1uXsRoNeXDz1HDbX3
tr04rcVqt9B762q1W1tVuAUyI4ydnw8gZXxuRan2/JhJUj1FF2E6JFkymSTS9A0GIoZEoqPMySf3
KWMrvEYr9/dDD8olFRfIVkWTcc9lwJp8KbM+RZ7pt3yIY9Y2+4GQqZmK9KQ3HbU1bQcHXitVuIcZ
1HpWoeoeLNyBjwSO15Z3Rsed2fKa8QPNP1usxqPFlu1kqCUzSMkhESOxtbUdKFWN7V212julQNQc
GnLtQuGDC/eFMTUQQdxxVeLAkV4rqgywJMfbcmaBcGaebFzsRjUKqsW8QRzbfCbqpvTLyLI4mUJN
c2jHdkurd8cbFm+weV6N2IrlY2JHYD5FFCUX8gNPOgrzigZXIx5hXl8dSy0MuAFsi4JBr/BYSxLX
5S0csmlmju2SgZcwmkkZjLP41bMc0Mg0chjXmppWIE3LzsKMzE+UmuRoMVCXsql37XjH5bAtkWOC
7PNiHtXtlNJGimZIzjxKyUDemocpcjmkGTjQRYMhmxT0WUSJuGImbjhGEkrxpQ/IyHiwkR1iAqVo
4YwqgrLag6miFtPkhEEE04FrwKoosbk11piAmbNHLViQBQVSQvMMtQSkGN71y0IvTJU00MQyMppR
HA7omOGWKDGCHJENI0+RXiMjXQByHaTGzJDjK+NIdxa4m/Kjy8HskGDlmotnzpG274bOVSBocS1q
trbiPBb6g+ieA0Bw8+DnXXgvVxRGttbcz1r3J09tOVGhz4L6WOttTw20twijqbcfXhtodPbQdKNq
5114DRtXOulDqatpavYG9c6PI17i5NuVuZr20tpz1NW5EfTbH/t3P/0jXsOHnQ4/YcHU2q1c9APU
txe3CL+gNBwW0HDy+mFPFG4ydkwpxuG0TYjelGLvNIV4r6KzKSSTqCRUeZPHSbvkCk3sGpYsfLWf
HeEiR1rF3vOxqxflqmsXdcPKUSIa5VbS2lqtVqtQ9G3AOK2gHr2+k3XLEk7yPJK8EdoUjSpm7q8b
Exk93etM16dh24zBDO7Osd7aXq1AUKA1ArLMjzdviQSELloMdRPCQ0UquAVUGZHye6SPBESTqkPd
IpMW3ntO3ZEWTiZsiRY+3ZUjjdocnIxFVopVN6tTIDWQrKwL2jZllMjEdy3U95SELSSxyPKBG32G
u6xdpDTSZAAlyFCTSMUjiNCWFSci1FnkrxyCpMZpSMJBQwoiBgxCvwkLDBhpsDHuNvgr9uio7clj
twr8AWO3ivwUNfty3O3x2/Ajr8OIUMNKOEhr8OJQ0eMKMSsAJA08c0dbWo7SYmbMhdZMeGSVsXHA
rKmjRJeyMRWKySdkKsWIHI9JeTeMFVxEym2bczg42RlNJFizxwxdvevyPAx9tS0mQ8ECRRzR9tFr
iS0iJ2qOdXIpsp/JBCsRDo47e2WNPtKkVcWmz442mlyHMkZirsLVJH2DpUbACMI5jjZaiBFA0Kln
hiEm4TTFduncx4UUbdlqMKkLLDEng8y4KtjNJLOCskckcPlv+JM4hxIajHjSXNjRTkZkwim3GaL4
+2fHUcue7Na2WFGRwHQ/V+2ttLcB9K3FYa2rnxnnVuLlrbi6V7anhOvOr6ctD106VfgHQ+hbiNdO
G2ltTy0OhF6sa6A1z099LacqvXt78zpyr3NW0NDT20t9Jsf+3c//AEuC1hXPQcY4Rpz4zQ4vf34r
aDgHpj1x9HLDHMu5fHzeXHlha3oY8Ts0+GwQxyCiCPXx8h4HMUeVjTRlH0SWRDj71kxVF8lkFSfJ
WdDueZ24O8ZUQ3H5C7tt2SZ4OEfR2q1H17cYq3HNkQwCbeW7p87MepxJ44ECBjFGsJckf7O+MY6o
WKxgUVFEqVWO1dgNBaKnSw0FGr0DV68i3YL5pZCygt2siZ2FLgTY7gq2E7C8dnohnMsORBNjxixH
KA+PM+PYohwp4Fnj2wBQwIrfOwbshFA1as23bhrAYYscyzNCFHiqOA+MspqcFSxaQqQKLii9wTcE
glSFry2JdSA7gpkGlylJEitpyNXWrqaAFdimuxRXiW/YKCkV21YGipuQb2erSVdquauaaNGVNvxw
zYqws+IXJSTx+C1SRMaCig8KiRpuzyxskWIJExVkiMsy2jkV9DUo5IbjIBjkxU3FMtDHIm97qsOV
uHyJQmZsqx408F0iDyCS4EzNE8ci0WPkWeVFkkbIEaoK56FFJSUIs+5xKpXKyqxMaC0+OJFmxjJD
HhCISyLciu2kjcCCQlmaONZdyiWvyMzJaDbgaRERTRUUachiiREu+YtS46Opx28YjVaTsSpJo0H5
TGo0mmqHFSNZbJDj7g0B+P8AYcB1JEkM/ia/da3o9PQ9tOXEdOWtq6cHKjp7cPL1zpercPKia9te
Woq969jXtwG1Dgtpava/K/Adbj0jQ05a+2p9G/oe/B14DV7V1r2q1Gr6kcvajbTrR6X05/TbH/t3
PluXoe/CNRqBw+3H1+ptxW9K3FbjtVqPoZGDjZAy/jSmsjZM6FWUqaAJMO3gpKqKQxFGeQhM2ZAN
wkIGehr8nFau7b2pY9tr8XbWo7diGjtKGm2WWm2rLFNg5S00Uq1Yjh2XJrd8fsfiDMKgzpEb8ePJ
TA3DbsOPH3fDyKXIhIjlSUO9iOnoc+C3CBVqtR+uzNz7TJkcy16YkURcXQPJKWKsgUvSx81WulWr
lQFAUBzNEVarC16aRBXmrytV2auw121K/YI+9ihZTI08tAJHAhbtQeONi3fnKxhwZO+D2yP8ebsG
5gCQt2YizHLY3HyXbhi5EZoGma1ZZV0x3UY+CQHOMZqVrSNKAI3U1Iq+O4Fd3OzUAwokgdxosTX3
3KtQuKFjQANPGRUbTJSTk1dTRUEdvNVaiDQANd1qaSvML9wNBGYCM38dq7RRUV2g1MpgUQ8lkJjQ
ySKvalI0VwY2Y+KQEwmmmjNHJZaxYjIY076zfKyY2NNE1lGhFSC4gN1kQPH+XPjw7REJduzo33h/
jKlcjO2bKyaliYPGGjMcLEOizqYJEmfwI/a60H7QslDIYBciMjIzFQePKyajxIoj3XEaJGwkU1ZT
UkSOuZHGlX5K1JKIyFnkKqzmDbTSRpFQNwZFWmz8dQ2dPIjRyT0iyiT8OPyK4USFfFLkSBpctQyS
ZU5TBIKRJE40yzaCPa1yINkwpsbHeZopZMiFFkbvc0fQ5+qdDRq1dPRNW9G3o2q2po1araHT31tx
89TVuRFWr3OnKr8FvQHCRrbXrpbiHBa44TXPQdLcJ1Ghq2h1Ne1HT3IrlparHUDS1q5muVtef0mx
/wC3c/8A0tOnD76jr76ew0HHfhGg0tpbQehah6I9Af0K1ZE+Ki7xLgSSgEm/ZRZm4+dXNdxrvahK
4PnkuubMtDcp6Tcp6O7MobcMd6DbfJX4eI9NtktNgZK1jJPDNmypNA6lW49szWirJjWUgywyDcCt
bduZlPkR3oVegRxCrUOMVbW1W4BrarehbjtwbhuPc1yCbCgzu0kJDSSqCpjYsqmQRqR2oKsLcgOp
tzpQSQtctCwAacCmlZq51caCu4ikflepO4vJk9rYswlV0YAOASKk7jSKBTRCRdvfskrcV7X+NeNs
oKay5Ugyw4ZfkOA2bhRcqFdvdU8KFIvIkWDa+PLHHLPjWnnEkTiRYxEWmX8OBklSSBw0jHvICs70
7TpS5IAXKjr8mKhJEaHjNMY79wpHZ6KstNI1xPJWKZXaSVRTuSIwyBJy1MykAoxEURplBidJrKHs
TzK0lgXlWVu7tLxFaUfcUjenxe9hhyWj2zOlobfMoMMMAi8IkSbHUNOoH5qSLJmJ3Ady3q9P0iNm
XnWRKmNk7bBl/IIo4sbCg+Ox4qlhz3cpLuMiMYlluAoC5OU0ojx+WavkDYuQtEci5v3PWPN4jHmQ
NXapHaRRvSveg9689qz54nS5oISIsiJEjwZpTEIYA+bjKTnSyUBnzAbeCY4sZDE0jOkCLIakma8m
WqK+ZkygYeTIMPEio4rwUa7bt0oVnNaPEy5Uy8HPgkhVEyjlYcLR6ngtx21tofRP0IvqeG+h0Onv
XThtahXO50HEaueC+p4edvY634rURXt6RtVtQOWgr34DodPbg56Gug4La+50tVxQ16UeY4LcuLlX
TS9cqP0Wx/7dz/8ASrlwCrcHLgtoKHBb1OnAfRt6duG3Bz9DlwW1t6d9L1es7LOLDL8j3FjJuu6S
0yTylMCdz+DJAjG5471fgtVqtXMUSdbkUs8i0ufOKXcpKbPVqWbFNKm3sTiYDV+2QtTbVJdtsyBT
YOQKaCZajil74XJp3+9u2opnU4u4Z+Im37zHkg5sVfmxsY5F7Qb8I4hwW5W1tVqt9LbS1bruoABY
pZrwweVpPIpK5LU8E/fCgRSqoSftHU9LUOVXrtoch3VaiKme9GrGgKAtRvXt2k0oNNKqmSdVEzjv
h3NoY8PK/IDL2gEAM9RkO0nbUoEGUDcbkndDszO0gBtukAMeFKssOVG6wdrpKpotaonVBD98OB29
3dCi4+UpTJwTKuTt5JxIpYl8lSyBmx2C00Ku5aNKgmUB5cZl/wAL02BDLWRj5EDlnAEtCS9Y8na8
ZDnOURyLKymCxx3xVdUg8Rm/yIscMUSj7FiUsEsJ8gho8kgfkKQ8wpCXbkA72IidwFmVgpamMvd4
JOyIspTIaM+aQspiZvAHM7PBJ4sqcDAahtvNYyAI3cygICO6rMkk+f2HIxZ0fBzN620zbjm5EWFu
ubtc5+T7hkkNYSCKSXLGJHGXldsVYIwGvRmjiORno0kWBPNQ26NVyMN4x2irEUk0sdfn5FhnSV+W
Ec5pFPPLIRJGBI4alPJXAKvnZC/thcJhY8ZJjSvP5BCJpDFjpETT5EMdHcGkIx8uUYeDCpyCkeSs
kfbPOYISXUXDAfqnj8cwrMF6gjwJpMfHhgiJ7DuOb4IlYMOevtR4CNDpz4Trbh9vS5aWq3rHgvwX
Gl+AVfQ620HGdPb0LVyrpQq3Byrl6B4bcHvy051fgHF1050aFhwEc9Lc9Ouh09tOvBfU1Ya9K66H
6XY/9u53/cqHThGttRwc6FCrcFq66c+O3AKtwHTnxdatw+3HbW2g4OXpX1HBarUNXjSQHbMAlcDD
UDExhQRFr5C9oPc+mdb+jzruYV5Gry15hS5FqXLcUM6UV+dLQzTaCRZJMnlOWJpD9z7lC8Ckd0ab
h5IspomgnmtFJ3jTrxW1Gg1tx24Lern5n4qPn59od7yAc7eXkSzGrIil3ZoQY6Y0pBqbtiSN7gqW
JqxoD7SL1agKtXsOpqWW1daC3LDmRYDmbLYISVS1MptkFgZGNOTQra1u7sAO52K47MIIbCJSWzhd
cWXyRZa98OzuyNGWKZ8bPjbQq9jEmvlUax7hEe5StwscndhxyO0WO5gSUSK2RaODzCmnmZn8QrvS
wKAl27S6tXa4cELFeoJEJkCAyyzzFIkKti48crtC6LiQoJcspSOZAm2Y71dY17jUjS3K9oci0YZm
jgWIrJapGikpoypJlpYZ5agxxGrkK+QzFo3mFCSUV5ZTSmS4Y0yqW8TLRU1HDIzHHkCgq0uNtM2b
NP8AEp1jIkheGMNXYFrIVnfZtvxMbE3aSXe1xtuXEl3t7T7ZHLlLJjRFcHGytxmk+ObhiPkv2RSy
sAsMkr4yQxJkx4hkbKkJ8Ms1RpCEkkDVH3yFiqU+PDLUkTxtcKC6lhE7UIFWm8YpitLEz0MSvxe0
wIqqoS5n8a/mNKmNFLII8VIafJhjDbj3UI83ICbbCoCLGwYGjdTKizNkIsGQrXJZLLLE1NUsglIr
N51t+Nkw5eFLI2MrXrdVV8TFnEaXB4+foW4Lelb0fb07aAeh04DbW2hvVqFDQ8R16UeD2PLS9Cud
HnXSveufFejR4T019+G1ctOmg4b11097cuA6dKtodL110sQQNDVq9uuhr20OtqJ5HTl9JgbfJlNt
uAkeVudv3LqOVCrcAvp78XPj6egOMaDgvwCjXPhv6PL1R9R8jb7NbejbWwrtoi30AYisJv8ALlG8
1A2oOTXx3CxVxyqsuTsMc8m4xTY9bduUuPHjZUeSntz9EUKHLgOl6v8ASbxG3kmLKXkCBUZynKaU
IxWlFqsKPKpGuIUK0BVqtXQAUKJoEGiKBqSaiSSBpzqxNdtdnO3bS9xroNwlL07NSozsm0tWHiLj
AROahju3YqGUdwWRpBK0RTBkKlh3JgN2T7XuMGXFlQyyQYEc8E5518qxjHkowABNHyqYnKy7YC2N
lJcLHlOjySrSTKlGUkyZAJaQuEiyGVMEXkitX3UEApiiLaSY+QRM8tWSZJUaKvMWop3VF2pEuUDU
J7y0coZEYLkzhHx5BIQtqJFSQrIsGOY2khLUIIFruFRQyqrw91bfgNkyN8RYxZWDkY02PGDQhhKS
wyRv8c2uKSOfFxpYtyxTiZmAqkkdoyIEMnxlVXAZuW8rG244zCMSSxlsvKihGzZEu8tJPgbbjyZX
dmZZGXl7eo/CiUpL8dEf4LlFXcstXzcHau5Nw/FWGCLIy8hviO5DDTFgjfKHcVhS7pF2rlQoPOBG
czH7GGXkBkVGEgFf52plACwlmTFRaMLCh3AAm4uAsgkSNSqB8aFTuJBvuE1RbbAGjghjoq1yxo9R
S867I/JlhZRHaIWk7zAgcm9KbPWUQrYe6mebGyoJIfGJjueO2MZ5lSbAmEsB9I+seH2rl9IdBwnU
6W1PKiaHAKOhNA689OZ4zevblpfS1e9Ghw2o8PPS/H14DoNOml+G9Xq+lqOo0uKJvQ1Ol6NGr306
VzNe9tDy4OenvodOle/rYOI+VNH4MGDbM/v3XeMrIi3hN0IKbhC1RzxuARQq2nXg9xqKBoactbfQ
c9RXX6YVy+pPoe+nyRuWgo9OMUfpsX/ZKbyaDrt2XMMfG3KExQZOPkDsSs7a/wAs5G3zYEO37u8U
WHuEOX6AFChVqHBbivQ9G3FNCk8efDNHIIOaj7nvE5JYqtBbUOpqwq/OgKtR6A1ejehV+Uklc7gc
wKAoigbACgCaCi/SpQ5WbCj8bhRW1YrLO4L5C8z5E8eO4M+YO2WSUsqyPFJB4WhmHjy15hf8ed8Y
SM5JPLNlWCb8rH7PkuamUwuVXp7zKBk4MjriQxwQtk5T5BxoYI1zjAphii/GSKNwp7I0xsws6sGS
GVqZGQp2LTCJmkyFQEsxaWUVBLcMPuaGNiMXmFYhMJmdccIJpI46xYZc2X+HuY8jCkw5oVEjCBQm
RE6Ps23nOyhtmD4982lcTIxIFErXWPJi7m+KosYLm3yPsbLjHjZpIzTAOdj3LHhGRnYsCZmQuXk/
mxQTtucZbyA1Hv8Ak7Rlv82kygsuWJ8jLycbI/dMpzj4wkabHmSbHadVz5XD+PKdtuG4Yj7luGWp
27dt32dt0+UZu5xYGRBDkPuszyDDdxt2PiJBnTpDiRIJWaCLtkYwN42cRFpYQ8haPHyyfw2YLgIK
y0hhjPnasfALpIhRgKsaDhgjOwQJCPybVfIkCY8YZXEVLOhoWI7rFZGUh1auxTRiIq7CkZlBDkMx
BRCQxlbIIvTcnArMW4i2uFZttwI44Fstb+8xhyEjIxZ5cSWDMimFtOtc6P05t6dtBpz4ba24Dr1o
6c658FqNr634jXsPSOluLrRoDT24joODpXvpbnbW2gFGveranS+g0vVuegr35cB0ty5W0vfg6cBB
q2h1udOfGfTxNxGJHnbxkZDfH5XOTusCPuUuAjB9rao4smN1mmUjLZaO5WMebC9LLGa5GrGhxXFc
6HHbS/B01HARVtBwmvbhvoOC1W1uPpRwe+g1txHp8jb7tBR9AUfpscfc36tBUeRIo2qHOzag27Mh
aLcMuHITesRpAVdcrDjyIjsmTjDB3ibGmwt4x8xuROtqFcqvV6vXdV+C30k08cCZU75M7SXItjxd
5YpHyAFE2peZ7aPKgt6C2Oh0JpmryKKaRiDQU0Fq1AWFr0oFAakUxAqWdpgsCpRulRj7+6xkmuMB
y8uXJ3TSTgrFFFPJKkqrkwEJjuHizB2ZWLmzYWRDkySwZr5AmxdujkXefj8LwQ9FN9M0lJoMhY8Y
5YtDlAB882h8uTM9kjaR2pUIpHeOrGl7u0wSMHxjfHw5cmU/EsxYpllx5MZI52bDxhFPE5kgxmkd
okWshJEfCJYlV7MjGVn+LxrHKHIHylFeXEjeKQyx9rEGtmzIcTJ70K77mw5E0uSmMZdwhRFlBE+5
5W3yfzmWYNlSTNmyTCl3LMyHTIBGaPNUceW0gmYDJjOQIMQYxWV6eMShIYkpyiiSR52x8ExDtamZ
RTyKKxkly874/taSVJiY8qbg2Lh5ap+5icL3R7cxEISFdzLyRbN8i3GIzb1kblLhsklGZVSeZJDh
Yy58W9fEsL9veCRJcFdy7pMkQ1+Tk5Jx8GKxi5iBI48pC1Rv9ykq6yordmXkA4yoUCLRoEUTVzdG
ZSWk8jSNfzhWE1grKwI5IRTFbAt3yShURUlUEuZeVKbjM/1xyzR5ezTzzQdvPfsiOHbkkMrmMOip
2HFyXvTuqAMGGlrHhPBz4OfpHS3EPT6eidDwW0tRv6o510NcuPnRr30vXv76dNOmttDp71y0PpG/
Fzq2vvodDrbX3o6HlQ4B1q+vWjRvqaOp0tVqtVtDypnUV5krypXkWu9a7hVxVxVxV6SMsJlday8R
4q+O/wC7c/8A0eenjWjCtPjgDLXtaEuWc5UbR7hkIYt2ev3RO2bdMg1FuuQtfvTik3okpmxtFBkS
zTDoODloOnDbgGo4ff68a+/EOK3H04PkZ/yaCj6Ao/TYxtTddBS9fjue80ZW1CGMHM2QZEmfFm4Z
i3nxQYuZDlRtjY8iv8dlSeHLzMPNh3zHllBB9K/oGrVbjyMmLGTK3TJc7buICTb3jpTb7OS27Zdp
p5ch3cCJ3SEsGYwx6dKakQ37a7RVraij1JsHk5s5J7gKuTQWhralFKK6UdHmRTK7SHuIAZhTOqgy
StUs+Qo/InNbdC6Q+Qhux8k4BGMciWVyEdo8ByDuS/btpR5+243QIMXAnjmh3TKhwsSCXvK1ETW5
zwTNi4rZC7Z8YfMGd8cmwov1nGw1Uz46iiEvsWzwTRZezYE8WVG2NkYY7m7iEyQUl+Lqnb3m3yOG
M5UCeJpZ4kRpATFPHE8qRFZHjdvNHj1PnokSTB6bPnwSfniGPOzMrLaZ5fx23fNmEM1lyiJY1beW
EfcizoZki2xI3DMtOe4CGFaFqsK7GrxSGvA9CB6/FvQxFoYsYoQR14lFZEsOOFTIzmggihWRlAkv
XIVkSnt27DGNDtW84mHLvHynGgimlny8naviOBDW+tjSSGCikigkUEUHxkFPzcSaKe8WXmLLLsmd
t7Ye/wC8wY2HiJDFkDMnnrJ2xVG3/Fs7ITN2PM26siSNVlyuf+RposDJkZNuijaJPx6KYsePEUnG
TGceTzGxkalkBPcDVzXebEimBYd3bWJMCO8SNJ2qXkN45mFPaWgtqA5TD7YSTHki8MW4wwL8bzhJ
EwBGbtGPkK8Cxy+U27yaJVlxs9ohJNNKgeWGWCeOdNeep9Hnraunp24Dp7cfOr+oeG1W4Dwe+h09
rVbj50dOupq2hq2h9P396tR0vwGjV+RGvtz0twDU1yrrRoaXo62uTXOjQFWFEXo6ihpbgtr0151h
7RmZcebA8MhidqjxBQw4CDgx0NvFHbWpdrlYNt060cPIFGCdD5XjGRmI8U2a8sPx0/59zz1Xdky4
mpZY2oEHSQ/bmC5+O4yTZOZDEZjgwMf2rFen+PRMuRszwl4QhZO5jC14MZ2OPHDG2du2PC0G548g
Rgwvy70J4QOD30GvOh09XlqOEfQDitVtBQ5H1d5z5MSDJzJ8ltBR9AU30orH/Seuoax2ncThnDz1
yIwQdCoNZmBHlrk7MYMfEydywI8DfYcmRTjS1NsKvPK2fj5a79GrrNGw9bHg8xkwlC2JowTBbcAr
JzYMZZtwlypZJnereSPvySYw1nKrUSSTSO4Qk0V7mXkL1fkq6Xq+hOjyKgMxJ7mJboVue21KugFA
V20FoAWFHo0qCpJmaieQBJ7gis96kfuqGNUhy4JHXC26zyS3oym8OTHEihuxoXQEc/8ATl5ih4MR
icfb/lskCZ2/yblmPJNbLXPy3RfHkpUAULvGKmLNt2bkCXb+1cSazLOieaJwTI6ijiljsOTEcWWW
OJM1osnLORDjvk5UcUayhqj3SXbWk+cYJjkzpcuTMeTx5G7Zs9RSG2SpljC7mwjWUCWOSRI9qIZI
nUMoIGJjK1kFfYaVY67CSFeux6MMhr8Zq/Go4wpsWalxHFLGooIldq2tajzPDk7isbY+C0jEBQDU
lqkNMoAwofNkTZkMInnOVNDtqRLBEMidZJAtqJq4qyGmgQ1uGRNhZMG6QTKL5TzYQkjxP+CcjLly
5MF8SRlYKO6E5U++bThrufy2fOxcba5swpsmIqjCMTr5Cx7mLF5KRHMcjOlbjjCREhkV5olkjcdr
hiCJGFealmU6kA1jOVkaxIjWniQggIY2QCQKDMLjFb7HF0g29cxfjUUEbW5SrJ4t0xMuGdEIA7kJ
lUgsXEbg1NKqrg7gqojq6100OpHFbhP1Xt6NtTwW4R0Iq1HQ6niHADofX6a3rnr7nTnwW4ufBa3A
T6HKrXPO9Xrlr79NOoOh51ajoRbQmj166k1ehRIFGZBRyUrB3DAg2fdc3y5ccgkTW5oOwpZ5lORk
5NQbhJIDkvaUmQy40hp1K18dH+ffnb98jbIpZMtaXcJkMe7MKbdAyzSGRfjckcTzSK82ncRTDuqb
Ciep1hgqORVYZGLeTIi8cRWSV7I2D52XNyMqCLCzWWeKeKQXArkfQHrD6TnxWq3DaraW9G3GNDW/
w5MrMjIdBR9AUfpRUJtFw4QIO0tFMkcaRgCrayRJKk2w4JSDad0xJ8ff8gZUe6YMzzbDizTTYudB
lLvksUiZeO54RxRY/dHFeIbhuceJFsWZHnPNnYkT5IUT0KzNyhxjLu08hMl2WGAOAKtau3mzhajh
eUvLHixzTBCWRhGhJ7SKsaUXq9Xq4ruq5rmadyKagOfvYmgoFEXIW1BbkJQUVa1Che7MEEmQSDzo
EWY0CoLML3eRjh5dwZO0KFpu5g0hSg4lfH7oVeeK2Zn2qGQNHnAgPIGhwT92OFTKTbst1SJwYPj0
uUN5+Oy4Bja4x8xFM4OTPt+0w45x/kzbW2d8yGWWklWV86XFeTdpMyVJQBPJOC0+85jRiS2RjyZC
RbKyOIXQNGXVcDGBSOJaKx12R10oBzXZJXgc1+O1fj2HgvQgBBx1oRwgCKGuxBVhXKjRrKzpIjjZ
P5CkVarURVtDV+dAaM6oJsubKbFwI4AeVE0KlPIi5y5FVfzJEhx8abJeDDjgOfMWMEQjjAtodcjJ
THjmklmkB7TjzwyY2TnhWx8WfMrwxBZsbFRv3ULUks0ku0iNX3HKxgsLARtkp27fH+4bhDjQRJ8j
22BAmJ9+UskMaszAxsY4MXwxzu0bNhwzPJtkaq2PIr421mVnxdugKY8L1NgR9r4cqiLEDiXyQEZI
NRT95iBInScyKwaN+mObS1N5wfj2TkGSIymO9x8nyInnCgKRenUqwbsqbyeWOSTuQXOLkmFgQw9I
etbQDT3+gtwX0twnit6B0FH0LHjtVz6HLh5UdbcHvrbXkaudOmtuHmdTw9a96Iqxq1699eg60dOn
BblqORsKZ0WnyoxTZjU87Me80HJo7pMccyKyYbCjxMOTRskm34zZiQpi/jPh4U8GZhRxybFCq5G/
/wDvJI6HH3mWKot62uakxvjuWH+LY8gl+MbpGG2/d8Y/l5URTdXFJut6TcoTWbnOJMHJjmi3PHDr
dqGJI9N3qcOEmTKx1CYcWQy5i5MUfx74/t+djZ+yZ+1I8uW6jMeMQ7ghjO5Rd0WbFIbiwIr31vxW
4beteuvpW05/SngtXtTqrLvtvyNBR9AUfpo7iDhxZO19snTyxNdeK1x+JjXk2BzkST7rjZY+QwiS
bbtuypMjZ8yPJO+5UGRFumFK3GKxsR56EAWJioG+boVfbcrccaRzkSVhb20axSxzJky+GBomcKXe
iioPyUBOWSWYsO5jUcYIftUyOLhfM6woAFC17dtAVehegtWAo8qLUALOADalWgKI5khQ09qOUGrH
lDije5dUDZJp5iB+ShoM0hYiOu7upInkC4Ks0O2KjN2CpDI4PaKklQM0bzGIxYyZuV5FXIyJKTAy
HMESxrifG8SXCxNkgyMGJTFlbfipNuhBFbmkaiIq8e9yRx7Zgs7puisHwtrPiSOYJLhrOsW14sVD
FgscaEmOCIAxA0IZa8bCipsIgT4Df8da8CivCteFRRQV2iggrImEC4udNJNRHIURyvZZckGmwslm
infFMc0cihgdMmWSNYJ++N0gaWDsiblXtRFDpTEAE81NBqyMmKBFjydwaGCOBDTVamPassqIGyZZ
WI+7HwmlMSKivII0wEM866Wq1Szxx1JkzSVkwSgRbajJOuHYYZVUWKQPukSCbPyJhFhz5Aj25QrR
2pV7KykedMB8uFjLy2nefBu0O64Dxb7uQzXhsokVJI9t2qXLmk+LER5KPBJJ/mVZShGQCvfeo1dg
0YAHfHQbIDCZ1REftnZnQwyrWHjtGsYKQNIxPYwVhegOzKossGXh7wryYuVDkQ9quN92jGlxgtha
1SLcdzGu0NGIMaOJVADKCMFu7H9vQ5/V24LekOO3CKtyFteVjoRVtLaDTnwHQ1bTnxXrpp7adOHp
pyo6WrloRVtOvENb24rHh6aHnVuXt789Dca2FW4D00lkWONtwNxuQpc+KnzogG3BqOY7BpHavuoh
q50qKakRlpcaZoYQ5aOB0kvccNqZLtFMcVZMpneLdMrHORmTZDfH5Xaff/8A3dbkVHl5MRx/ke6w
VD80yhQ+VbZPQm+K5dH49suQJvieQtS7Fu8Z7M/HL50rBJY74ebjJUY2jIrOi26BDKXfZsUHGkhj
lrFkOIMmWXKeTCV6j27HUTbeoMuNN3fg5MbNJlsoyXUY24MEbdV7otwRz3LQZDoPQFWqx4hR4ef9
CvwjVv074b5WgpvQWm6/Sg2x+FDZsKYJLhSrJEOG3BYVPsuHkS5mx54yZt/3DClTO2PJZthlbIj3
LOxszG33CyZrjUa4BVsbIusfbevlu3SSZUsEeOxNXrHnmgkz8jv218vx1FMztLIC3bGlY2F5mnm7
DHPG0LremAjBVWpDGtNIEjXI8jIbUTQ51araM4FMxalvYk2FjXKhQWu21Si9SyMQjdzYzlVM8QLT
M1O5u83LsaQDxR0j99OFFJiySGVY1pc61MzWklUCXNUUi5E5bEfuaGVonfIjqLFlyTFjLGTDapCF
fb8hMjFyJ02veJsiOfMad4sw/LMSDHffptzyoZd3iG8bLve4Usp2rJSVMrLysvG/HjkVwi8uwUFA
q5rnawqwoirUuhNHUiiK9syyq7rjNO0pjXLkighmEkeTmtHXlmyEgx1MUuLMhngDAQQhnRtvMuS8
ziSaWQrG5mZlKw5r0cnPiqLdOcU0coPQdOlOeTMkYbKkapNyyFbvmMkW52qKdJQamniipMvHYTbg
0jfjG804AxMJFUSq58qBs+UsuLCIoRRYKJs+JKMuXkFo4MejuuHHGMzyt2SyMuZjQiZMrKo+ECPA
yJzBt8EFWAoGjY0Y1IMdqMZrsFS7csjwusa5uXLDlY+bHkDJnCx/FJ4XwgxFbvJHk7tFiogz8AAx
7blSmLHaOmYqTKWIkLU6uQJwlCRXol7hkBDwippr1zNQxF5LG0/JgbjNW9Ym1xpLskUbqLht6TIk
wlYMDTCg3ae9TUa9zipyUj24N+Pqatp7Vb6i3re3BfjvQo630seIVaumnSva1dKvodL8XTg6621G
l9TXPXp6dr6HoNLa24CNLVbT2o3Ne99LVyq1CrVe1cqZlWvLHSsgfPymWR5neiaJtQJNGJwI8d5K
Tb2AOCpr8KIiHa456z9tgxEIF1y/8GBnpHisoR1OQKikDi3CeVNOsdTZ/crzEny8xIpX48V8+/2/
feVWFWq1W4VkdTHuWdFUXybdohH8wzRS/I9rmpG+I5NL8f2HIqT4jlLW44mft7bZiy52VhbRDh7d
HYpqOB8aKQpg4yVNgROH2yZ2/a3jp0m7UM4KZ0sKtueQGx9xWVnyoEqPJgkpUJoR0sPfS4EhoYSK
xihjqXJgVGcsevqjgHp3+n5VIbJvJ/5Wgo+gKP0vsSBBxRSMp2HMVowb0PSvT4uNI2V8ZwciWTA+
QYsx+RSQyfg7LI8Me+bc+3fJosmSKaGYa4MQSCRe5Yc/e2zvlEWesSYs8OHE/elzV6O4SviGBGKo
EpYRLWRiFaCSFZ5XhqJ8mUhUiqVkkiV4AY2jtPkmSopFUxZDM4sQLVeiwAM16sTXdZu6uZrtNKnM
RiuQo9ZVHbIO5hH2FppGMEHYHnC12yMe1UVmuLqoWWUhMdlCSFaSVS8cYkeRCq/jwsI1itLJyWWR
REjTt/Gs90mw5MQwLZJO6Oj4zLDPJAN7yMzJK4bQ4mP2GPD2tcvKTZoErbsfsZlNfN2gbNQC8lgu
Ce6JByAq1AVarVaraXIruru0tVqNEgCTKhSsrMK0GTKgjOZDSYqmMxNCyztGn5PdjtO4AbIsZ8oE
SJNTKyxxYqIHhQ0ccGkjCHlYEimTHmp8SXHrHy1mAFE2DNYTFQDJPlNBixwVypoYXo4vjqPOjAy8
xZmjw/ukeDGQmfLZYUMs8qRq2djqYpEeTFByMqfKjgUbjK9Lj5WRUeJDHTPFGN1yYlkyXSSsCS0s
pxw0LGJk26eV4cTHgF+BiAEmjcirCu0GmjACrJ2yQo9InjGWrzxbNl7hgyN8i3aYbbmJk0JgFx1O
dmwxRRR/JcbH/HhwzO8u2rGhCseyy+IOWgUI8mVCEyw1CUMqlZVvJdxJQLGpxdYT3R5Cd8Uxy3Hx
yXIFc6dv8ckU+NMpuCLh47jx/dYofIwYvettyGIq1W4TqfTtrbg68XLW1W09qt6duI1ahyq+vPgO
nPUjQ0KOptRGh1OluL3NcjVqtpYUTp1Ho8tB16V10Oh5V7Vb0DegaJAryJTfarTRrUivEGyArSho
yXmD5UYicrP3Soe8xTXmjd3GKbyRGRvxo6McTR5eIZydsmFHb8gBMGZjFgRrSxRqAe2u46Ym3yZD
ZWVh7bDlzNkuYyKRSzMFVlQMQAA0TRtEfIpGnKjyqWU0Mdnp8VRUkAsyEafG/wDdv/8A73Ber1y9
EMy1DuOdBUuTPkSYGbPh5O7f/wCQNwysePfpkEe/xGot0xJaWRH1kmiiWLMgm4iqEhVAkxo5DJta
SsdrRVbaZzHi7NuJaaHcoRL+ZHFiZahP3pLw7qZAZ2JNyeP21GnTgtwjS/Hb6aT/AF7uf+ToKPoC
j9L7Pyh4o62WV1y4x9g47cQqTBwpXyPjCyZLtu8OZPkbbnSLh5e2vh/JXSTHysfKSsPLREnyY1j2
ieGepIwybzlNBlYklqvUwkKqKQglnApmIQsyAy8pX7mxhfIljRWaLJJWSGGkMc8XaYyCailmSSNy
6lwKMjUSWAZa7xXIkLQjNIt6AAo0ZFFeY1kyeSjIAt2keKEQiXytXd2VJLIHUu6riyEJjRCoY1d5
ryTGICngkRlVhShWMkcpCHtVYpGHY9/jYjbPckj5HEjRZWQuMn58UEceRkZmeiSNU8cLxYiNNi7F
HGV2woHswrcN+x9sy8j5vtS42BFJu+VPGElm/Tt7Axp0Aq1Wq1Wq2lqNE1el0NZGckdePLyqTCxk
p1V1kx8fxRwqlc4xPDEaXslqR0QpkvRnyJAy5QpVLpDKYlfKDLG3chvUoKUGxTQEihZFeo3IOZjF
TjZaSK80S1mZMcMbjJyRigCN/MG75Fp54UVs6WVhgFim3FDkZEMNJiySsAsaPI8URkZ3nEfdiyTQ
iGWSNTFaLFzMSIPuzyV27pk0u0Xp8HCiGRHF5IsdmODtgkSKGOFTR09q61kKZGxscRkHU6WpoVNN
EwooKKKwO3ETLLZRuEmHumN8lwJYt73cZrQEQ1LMoOxbJjyw5uwbdkY6o8ZXFlIdiqtjyd6xpedH
Qq0rIPKXljRVkHLGN4wtgs8mC+y7vK+RBkCeJQe75PAzFQO2rUUBqSMMGDArKqDAlByL319vr769
eG3F14DodTRr258HLQ1flpfQ17e/BbQ6nj5Vy9U8Fq9699PehoRVqLICZEoTxGmnjVZe6FWyV7vK
e+OR5Ji0ndjkseyY1CpSA47Mqogx2jieNirQmOI0zd4uKZmY9xosxNydL6c711050aBNG9Xq5oik
jdzhYkMMm6b1ErZGVLkECu29LGY3iYvKg+2gbEr427Q6laZgtPIXMUVq5WllvRkuD07RXx1f8+//
APvfQio/1MbnQEisbcp4Kw92inXN3mOETZc+TJi90aQ59ikiuOJY71i4TSM+M6uMULWU8Uarg4jv
nLiTK+2O7SbZJFUozYlizNyQje2QR75jsU3DDelmiarX4BqdR6Qq/wBRbgkH2buLZOgo+gKP00vK
PihW42WN33BBZdL+tepdm2+Zo/j2dhzRySST4+PPJLifKMiJsDcsHNlMYQbnF+NFt2XuG6425fFZ
1yc+IQTA9ykgCF0auYLFe3m4EjBUQMxigepcFpHigsryw9jRp3RzTYzu5ZhZTE/e8ZVYhagakLKG
yew48/c0aclAqSQRhpFjX8hakmBovei5oJ3VYMYolWnkc1+MWqyogxVv2FEYkgRveI+Gi1g0tqWZ
mLohWBkWnlEiwxxF5ZkVsntaPAORjzfyXGSP5Bvo3HMsMhBikqnYgEEstJEiUbYu57YfDnv3hMPe
M6at+xDkSTLIZdn3J8Nd3jVciS5Xbn5Jaw4ZcmGINu0Ar92Fotxx5TcEUtE2rIyGkaLHRCzUZeZG
VSNIGJFCUCpIyY2xu3GSEeJysEZnWc41wxjQ00a9pxg+NCM5G523K3aOscjoyOMgKbqrXTJgjWna
GNoMVZR2eJoUZV3KeJII8vIJi28GRsax+5I582SZsfAWFXtfKftj3BisVqWNgfCEd5cSk2qWRl2x
I5BDn0Y841kSRoFgfIbG2uFCsSMIWLRimPMmpMmFKO4JcZ96/MsZgSyf6IpfEe7lerimyoEaNlYX
q9EAgxq1GKxMbinggekjMYz4ZpKwc/MlTNyVgi+O7rjzYGfumLiQwofNfllxl8iD4vmmPK2ibCmk
nEdQZApZVdpFLrJWNykVnL5cJdIs3Dw023eDEu25Es+OCsq5Kpj5etqmUUxAaPsBxM4LJ19C/Bb6
K9Xo1z4bVb0PfS2p1NHX219tL6EV04La8qPKr1bloeE0eC+vPX2NHrbW1WolRXctvyIAWniSvyoO
78pSy5qMDmOQ2VP2CXMJc5lxHmd3407MuI4pcNO38SHtGNF2mCJgQhHZHRY1c1c0eG405VzvV9el
EUMHI7J5YccPuqhk3VDS5+MRHIkgOhvV6NC9YuLJkyvFibdDmbg85YvVzSilW7ZJ8ceKOdqvaiRX
mjWkzYIZJ8xLgySmOHtpEdqXDkkrIR4Zmq/IC6/HgPNv/wD730PshtrbUEih91Ysd3U8yw7sfIMb
QzrINBU0oiQ7iVrYsWbcGbIiwxAHZpZQpyMpFeTKlkoEmm5L1oqKKqwfbsd6/ZcG8+yEsNp3BGmO
7YpG850RTf47JveE1RZOPIquh4BwD0b/AFbi673jkTxxs7MpU+iKP00/6eLFW8fx7E/z+mPRljSV
J/j2GYJtu3LAxEiXEXb/AJXuGHFmfJ8fPw9nIbbsjtWGdpnzsWTlSHxtLMZ5ivcBEABckkIJUY1A
0yFmFSuaRHatxUnGLt2iRmONOyuMh/EkwkkVABmkrC/eKxvuJy4e78xaea7eQGvvchDYRFBZRUs3
NCr0OgdwS9gimuhb7wiimXko7AzgFZEWjlM1dsjD47sWLLBmbRgzJLjTY+SEMi7hE+FiYeWZYXfy
iLbcaNh3WjxmaljVdd5i/wAbMEzxWTfFztzzExMOHFZq27bsPITekBL/AKsSTteBrpcALPCzGpJE
jWSfJySu3xihi4YpsLHIkxyjI0uKb3pTUrlz2qlE9qyMFV8uVj3OxP2oAjUu2yxTW7aMjCu8Os6L
PjQw+OlVRQiMlEvGFlQ07iu/yNmzCRxSqzNEi46vIqLHlITJNNlyJt2OFWNEE+ZBCPJnZdLtMRK7
eI3HMzTxw0Gnz5MbFix1lJs3Nsn/ACTZErSTq3a2N5J8iDHbKlhiSMTsWaRXNDcFx1kbNyDBHjx0
Y0FXD1AkcYiFlJ5SZAWmXImoYsC0CqjyOaI8yeQQGOFSJoXjoZE0oH5qgRSlcVcGeSERxPr70Lij
zooK7WFFVFTQeZMbBkxZJJrLtm5QqDLdNgeE7uDXyudA2NhR1l4WKyYu3ZHe/mhDjlHdMmrVk4qK
+MuDhVt+448mNjZEOQ2VteDNHIhhk1YA0OxHmk+6VAKxJxMlH0BxHTprb1zr04fbX24b8I4LaHXl
wn0OvBe2g5k3qxoi9WqxrlTMqAOjA5WOKfKx1o5+OpbcI7fuSFfzprnKyyyybmwtnuHg3BlXDnLf
hSeRdvhVRhJ2viY7BceJS0ETMoVQEVTc6jW99L0dLcJ1twE2056++FBHCm4blJfInikBANWFioNI
0sdR58q1FPHKG5aMgCQqryvFHg4+XkyzSraggNGEUI6jCo2ZN3yYz2p5iA7N43llJAkakxZnOPgq
rNAISRQzo44pNyeM5c/nYNQBJUgD49bzb/8A+99D7WtHxKbGFbRA8rqWRkLwuVMGUGr2lzoMeszc
5Ji8z1tO7ZuHWw/8krAHrc4mRcsMWVO6u3tom9Il6f8AVwCioajjY7GfCx5xJsMTtLsIVZcPPgK5
G7wQjfMyKot+xSkW7YMtLNC4BDVb1begPQtpamdEEu57fEJPkW1pWV8sQA/KNyJ/k25VLueRlSTw
lAbn0gfph1yOPb4HmG1YqY8FuWluC3BbgHT2twCrVm4WHlCb49Fl52ftO5NmNve5Y+Rtu9RbumVs
mJgNlPEJgQykU8KjDHVzyQSd1iaZ5/Ijs0k7WaQCSLcJTBADKQS1AVAQqtkkhJA74sscsUhi7ZoH
mqHDjjVIgo8YNGMWCLRRFAapXtTTteZS9JF2UqEAyOD3MwhMha6AnxipMkKMSZpGLWMsZnONgRuX
xOUkZNfHcyN8Od0VZtzjys+Rh2z7dLlSriwQCMPJUeKKVVXhyIhNA5bwY7+SHdIPJjyzzbnkNjxR
Ym3S+KTLIld7eeI9rYzgplxmSDEgg8kuZGtGGSRr2B5UXF1Y2AUkIHwoclWAl3GRkUInfchGap5G
levcuZIlniYHIjUWeQRspWwAkkWNW/HZI5JO6N+05SOoaeFq8mHUmUxW4NWtWCgBHTLyYYVgQZLo
scQn3DsMaZuZUeBjxNI47UmjuZo1XKzo4lx8eXLkhjVBUpNyQAWCwwRGV8iFIq20eLGw07ISwRZN
wijIgzMusfb8aIZyTSTY2NKsyEE5UcqyeLLVIrPCokavGqqTerEhpoEpHjkpWKl1DVhk92UORd5G
edSkM0fccRHcFe6B5TQNGZVdnsI8p2lJ1KggAgdiGuxhTRqaOFjd5JAWTKwM2L5JuRhk3D8jciGi
pm8tYmNFiwb/AIkGTgTY0Pa+2uSVsQoqaLyR/t+ROdmwfyKjVYazd0ghhdmZhRNCiKmjBC9oJZex
JzC0brKnBz0voNb6W9U8J4jXKjx+1tb69dTwc9OWnTTrp11terUatp2mrWokCjNDdsnHWvzMWzZ+
Oo/PhZvz0KDNlNSZM3kWTOaNTmsXjzS642T2LgSWfBHcuDAKGDCA2LjMFghU+GG9+QFquaOljwcr
6W4baHgGt9baGhrajoasdLabZiHLys7EZU3T7Gvc31LkUrGkYqcbID1YV3WryhGydwnmoOaBNK9q
SQWDKaymURJdnggUMYUdTjKB+LACsUYqwFGbtP5F2MpppTU8pYpGzAxAEIaINfHf9u//APvfQmm5
JxAXOw/HZM7ByopoZlRrooUrMFqPJJafKyCjZPdTsSedRNMjbZvmdiz7ZviZQlzoZmzsQBSrLRuS
KxxdpOb29K1EA1Lg4sxfa8Jkl2FDT7BOiCDc8Wm3Pd40xd9lsfkGEKxdzxcmjLEKDBuG2vTW2nXg
zd1w8Kl+T4DNFu+3zBciBg2XipWXvu34yZHynNkL73ubn91z6bd9xIkyJpDc8UeS6KVgkB5H1LCr
fQr+rI/VwxRlzsisrRThJFNx9NnvGJBM8bYm6zznP+MQ5VZXxeSLGll3TbI4z2zYcwZajd2wxXSj
kMKaQpUcwkAdixuwR18G6SL3+Q0WJEY7g8Y8bFRWLjys0UcwVI40DB72Ndxv9xABAZrKzFgsctzE
xpYIyw8SAlbXUCRLLHHmtTB40aaSQh5ABHJM8EaIkmzZsyPDJjriIUV1ZA/i80bBBuObuxXb9vyI
6CQx0BNLUeKiUoCi9c+LdIzDlbPIXw96zFxsP4/GizZHl7ph48kyRx5DIGdawZbo/apGzSSSw40G
Gjyi/WsiSLEjx9xmnyibvm4pR4DKyZuPPiywnLkAsEOR4ZI3ZV3HHBS5oE0kzxH8qN6GWq1HDPlF
rAMT3X+2XDi7II/JFjzh6DMtSwxZFSRNE5q1WvWEQY582jhxeLHlcOmLkZLx4sTyfYimXuK43dU6
QxLLkd4gwUBSMKOlXsJGvWQQIkixpMabBxsZGwEmM0cUVNnqtLiZuXUmDFBBDnRMkMoei70ZWFMQ
gLCwk7kG2QBkWKFAe9cj/jRTZEsxrCx+9izOzZcaPAZPFKkwqRgCrRGiIScbJlibJgKl5nSpJ5o3
V5+1MmSN1s566W0NDUorUcemiKgxRmsvbWlbHZ4o8zMlinw/le3nH3f5FFmxRd9NZa2jacbJTL2P
BmhaPtXImnQbNu0EceHu2Nnsyr27okUWcftYm4Dc73qVlWjD3UV7a7hbEzJYBjZC5CW+m9rUeL24
reseLlXLTnVjRstO6IPLDb8zFJbOxRR3LEFHdMcV+6Ka/cJyRl5xBk3EkruzL4NyJ/CzDX7axr9r
iLftmPYbfirS4mMtLjY6lo4moWUAKKJI0Ot9OXo2096GttOdHoLa31HHauenLiNEVfl73oVsWdDh
S5mTu2c287dmwlFJbxmhGaeMrVjQFqvSuQcPIVgxRRk5JdgrGhcV3AV3tQDtXaQJXc1FbuSQELMR
Re48iXaTtr8gMDJ3Mjffe9OLVFjyZEifHJ0xcvAWOgq00C1sGOFk3/8A976H3l48VA8vxzFTH2je
lRd5ayso5kCyWszApIhDVjrd9owtujxd2k27Kxsf8+OXA3UvlLmiSSXFuZoyhAF7+OPrVyBXPS3o
DQasiPUmBiSVNsmJIDsDqMjY8tahfcMNZd03VDj74qoPkOIWhy8eZVycdja9W9HdtwGFBPkSTyVc
ivJJYuxokn1xXfGa7YTXgJBikFcxV65Vc2qx+kjF3yP18ONE8j7bhZWNDhY4VmBWlYMv0u5yq+RI
ssUG1ZcSZIAtmQY2XuMmJiyV8g2fGxciNe4JnSq+JmxECj+lYmUNzjxioaSXtkWNhU3dbLjSISY8
8YW9YiB2aPHIixIBRChQRZUNu0mvE1MipQZ7lpTRR2IZI1eQBVtLIQiDGx2nbKw3FfjNDE0iqQ7G
i9h4DNSYM0rFFhHx2FZ9wYgj5Fjr4kQVLlQ4yvlTZ+eizFQIxSxzvUUCKPS31UMewzKEmkbdt1y4
lwcjNyoII2zhnR+aVHhKz4QFqx5WSoJCajN6yu5lXDRzjdsb7/GwlxcLMkeSKRaLWAZQ0kq0g7my
iRAXjco9zGA8TDtbkaNDnWHjK9O5IaREHczhJQwfLmYL2MpxpGTvmiN63BA0XPTInkjf86WsZppS
mC3csZVZcgNQURqULlIVSKaWOCNhPuDrixwiHtK6SEBW65jNeR5RJEwXCjizcuY7VmiWDFhhAFMB
aXDgeWT7Bj4GRNREiNF2SI/hWTmtIpNKUeONPEuVY4OkPLEVypmix5pO4CjIroQqhBFMpxk7Owqt
3liEaxKyxkICrPFkuYXlxnR1YekQGpsdTTQSAS40MgGEoTOxJ2j27cciTH3DJWGL47nwPt+du+Hj
x506xQYM0TST7NnSYWz5uRt9P8pyCJZXmluHCsQWtS1mIxqPIIrkyuvazfcm3RKkOvPg5a++ttOl
X5VbTrwe/wBCdbVarpdpIkLTwoDmYoU7jiCv3OCz7inc+42RcvKYLkZ7EturMY92Nfi7kQNvyTX7
YTX7bED+24hA2/FFDCxBX42NQSMUOQuaN9fajejR4TwX4va2vvVtDwcq66m1W4bVy0tbi9+lChXX
S/LlXtVqNWFhRtQFfHjjLPm7hAse6RjLijwwhaIUQFoZGM4bERqkxO1SpFAGkJFHIlK3aoJO6igr
tUUrCu6r1LzPQo4FOzNQd0oMDRPIm1BqTmQaYd1fGdhjij3GNfDm2V5MeufbsBBff/8A3voV5tJ+
vi25QZ8FFiws2UZGa/Nlpye2Nq7uT9rlo+0xOEaTNycpcNJpZ9xkxcTbtzfFkiwt0gx2xMp8gZ+I
TSxhXmkDsrrHH1+nkghko7TgsZ9ggkLfH5zTbXnQO2bvEccG8ZiMfkkVY++YkrfuWCDG6SDVjYb3
lvPlfSrukCYlxcdtRRTsGTKAP49GHGKm16x4TPIfjuQyzbPnRU+OUoqR9BELyZH+zWDHknZNoyw2
BG2MIUmehLKBedwgsPoBqzBFydxdwou+dmeNFlkbG2bH8kEuFEzY+cZm+S4W4TwRbZJBgwhZIZMB
brkZOOY8+CRUmaJQZZyqRo7oWfyL3+QgxSpGZsiOUHGgSkRjSwqtFHpl5xch382YW8q00rWDXouA
GNgssSg5Hc2FzmhwhOVi8TyZZDyTGQY/V1DGWAyVFGyhzGiOsUyYEj4k6b3gGP5X8ghylxpiYfC8
tKYYgsM0lR4yR0VuFFhbieeJK/OxaXJgeutb3E8aPlvFHt+5ZGDLJnNnYm8pNNh4EhhaUB5dhcWk
XtdDY4p5RNznW0WJKJIb0swZHldqvzjMUiS4qRr+ZiK69sbh42qXa8gvHGsSeURwsb1fmeZAuQOx
SCTkN4ZJZ8oIuQjwQZLCbISWTIVJUQTwyQjkuc3bj6ZIDmeAwtDLKhxsnJ7WdmrDHfkXuY1vU+4Q
wxQp5ngeJxlYzTLDEIk0lJv7xvEMueSDKlXBkc7NhY+LgbrCkmCKFTzxwpC/mEuCZzAhhXdlZjA9
63SMEEg1EAyxR5ccg5jcpVTHYA6YeSqr2k0e1ajRCOQqUd0EEUquv200SOwjYyp99G1BjYVGySjx
NjSDpxgnT3FcqZVtNJjR1G0bgxKamwIp0xsCTDr8xYZiz5cnxXbxHl2AGdseFuEu9bD+AWAalJFG
zr7VIBIhUxnGk7hMgYK3PbcoMK6fSXo+h0rnVjRoOhoZGOWOZigtnYoL5+OKbdIAn7oLDccg0Mvc
HPl3V2YbszNBuZRcTL7IsCUEbYtNt8PccDFsuFjBUxseOhDCrFEY1er6e2nLTlryvrz9LrQ4OlEe
neueprlXSrVeuuhq1uC1W0uKJAAZGpnRKV0ajNECJYyBkQmvLEVORCKMqBWyolpp1DPOEaORppZj
NjvC8s7tLKlQGeemkyQIUnmVhmWiileLFkniryeaKeSSJoSSzRCsqCyGAio0nQKXNCMS1+MakgCI
eVGoOR7yaJIoXL2vVqc/cASRY12UVNexPKgLlUsKwcNpItsz8abFlnZpMsY0zZGC8JYBq2KIrkb/
AP8AvfQCohdm/VxbDEJc7Pl/E2pSO1xzWwJ5heRDcncCUKHo2UxO7Vh5GSjy7ouTj5hcTY7lZcLM
kyXRfJFm47IQvNr3+qGhAYPhYslT7LhzK3xuPtm2LMjcpuGCV3OeJj8qlWPF+Ulmn33DZMlzJN9K
Y2C6AkGPNyY6TcS9KdslMWxpNHk7PPDTCVSk2ItJnzoRu+RZszbJKzRtRQqLxbBlTRybBuaVJt+d
HTI6n0YP9uR/s0UEn45tSQ4pxYDTYGM9DDdKDZCUJYmoFdOX0Gfk+CHaXbv5VkKFft7RJgxSuVWN
No3P8SIOskeLE0JkA7N5zJMaXHJSYikANTYMElPiZkJh3PsIyIXSMMxJN3PdQ7ijlkKrK1KFuhsD
V0UvkqK7marPQSxCsa71ZnlNd7SPHj4rNOFjfCWbyHygyo6RNKBSyd5W1tl2gbg2R8e25kz4cjbp
8Z3mSTFkih27NM4eRXA2/HEgZaTHkekijShQvVuKbNC0zbhlUNtkNDbohX7dEaTEzFpjuIUwS524
bdsv5WbJtH4eMQk2PDNG8ePLFFk7flxRbnucXjnFQPYQPcI3euViT4cibrKjQxyGR0Mjx7VJMrtP
jSRZIvlYUWdFBMVBWWOvLzHcwysnzE9OdAXode7uNZsCzhB/x2xJGQ4uR3qV7YlW4lYOpAOdEWHs
zdohUmsiLyRpjiNVAWhesP8AX3KtSZksrJifjFxNKceYQMrpINcvKiiLZc8lZIkRdklkkkzppRPt
GRvOBi5+7yZiAUKaNJFTHiiCujqW55SsrBQH3Qj8PEkE0CylaMrOZJo8cSyPK5PKgCai2+dxHiQQ
lmZizdtMI3VLECElea0joUhgeIp2LEu4PdGSWME06+aCNroKGntqK50BQo8hPOFErtkPbtpMvIio
7yqg7rPkFdvkMWFsOTKZ9zh244/yWQxRSRyx/JMiOTGyImxXVr0rdrE3q9OD2klxFIEKyCRZF7Xi
azYmYJvR6V7VbS1Wq1EUSFDSRKpycZVbOwwG3LCWm3XDWhu+PX7rev3LKJOZuJoy7tXZu9jj7qa/
BziP2yVl/aojS7Viik23GQjAxQTiYpLQwvXavaPtFzVyaN9BwHhvpf1SeO1WoiudEGrG3UsyqSQA
ro9NJEh70CrPAxfJx0JyMcAZeKScvHUjMxSpzscU2Zj3GdjGjnIFbMisMxe/80tRzh4/y5KkyZA8
c+Qx/JySHlyhGr5fe7ZXkUZYqJZ2LrndghlKGHL7poo7x47CVsc9q4Z7ZMYMn4qEnFTyTRQStFCk
MjY+MzwIsB8MIH48NjHF2+KKmCNRN6NizyvJVxRdibkG5v3Gr3q5q9Y2QO2RRkpmRTQPBmRSU0Ik
pcBKGKgDYq0uIAWRUWYd1SqQe29IlgbKOVLa/SsSNZZctLSIrV4npENN20VNBSaVaT9dwR7/ABiB
J9ryUnwJ13HJp52lPmKhzEx2V4TJv/8A730MYPofEMbz7p8vn8Hx8chJcBego8mkchSGBErJSEyO
+ekKYc8pkysVPwNwhkeLFmYS7NCiRwQdqZGIGTKxjGttLVbQaircHPivrz45ZUhjyfkz98++7lJU
e9bhGZt13Cau5nJ0vUMrIS3cSAaK/Rp+ptoaXEyYGidUvXbasDaczObG+JIoxtrwMWuejwxSCfZc
CWm+L4l8n4p3VL8Z3aOnwd1gr8jJSlyoyY85VqPcZxTblkmjNjuWg2h6O2bY4bY3NT7fkY9GNhXa
asdMf/bObyUFJr4/sEmTIAFHAVU14VFcxQseIaW43dUTJyPyZdsa+XuWWkMSgOZpVQpKnkk8rFIW
8WPJHNFuL9sCQgR/IPj2KZs9FVlbuUcm9oo7D8ONkfbQzR5GZihdxgkpexaZixWEmQTMrTBe8AgO
AaBioSRlmdRXnF5JxX5ThUYMzMnbAkzyNNGip456eNKDN2QRMwbDlyJBtiQiU9h+J5Kfjv03zcYW
3hn+3J285pg26HES7MFwWekijjB0FDidTKzKhJPKuWjzLEI5Y8g5G2Ye4DCwoMKIqGEkZws7dsf8
fPoEgz5ceTh0ha+O1grG/cGqbGWo8qZJTgSyPAnhSZo9xxp8NWTGl7TukCFmlniWPPyu2SWSUEmi
aJ50itIwQIhNqmlWMSTSzVir4qgzFd8nDZHeCZBExnhjJ7YyCMrDaMy/5HhxWkGHFFJM+LhyDJw/
AL8saZIzabKMEMUCz3AWCKSvw4DUcIhqTKnU/uMCI2TnZVLgqpnssa4UE2NBjY+GmxY0M24Xrd4k
izxQp5ooxNPG6IWjrHMUFSSuSord5b1G5jZc6QiTOnrnf3JoAk4+OmMrMzEyIK7qSYAkt505hj/w
4ZhPGWApbsc7LUrW3ylJyO1sckxxmr1JKkYfInY+aQUme4MciuNRXSp51jEsrzuBYMxFF3kbG2EO
YIcfHjiijV4YUhj+V4cDQ7LjxyN2qIsuEQzT7Xm5KNA8LWrutQW4PKpwY5CA1QP2NKy0Z8ZzjuCU
z8tXhyI5FJUAyxCjl4oP5+Hc7ngg/u+GCu7RNQ3OQgZ+aa/J3Q137yaKby1fjbsw/AzTR2tyP2mK
w2nECja8MAYGGoXDxlpIYkpUVa50b1zo30FjQZSfJGS0sSs8sSFpYVBmhCieAqMnGNflYtzl4tzm
4gJzsQV+biBRnYpr8/Eo7jiV+44Zo7nh0dyxQf3TEr91xaG64xo7vi0d2xxQ3WO37oncd1jI/dBc
7owr9ykFfuMxr9wnNfuGTRzsyhmZ1vy9xJ/L3G3n3GjJuvaJNzJvunb27oT2bmaMe5ECDcxX425N
RxM8kYWeFGDlU2BkFjt83jXbyGOB9x29DEMCLu/Bjudvxuz8DHZhgQLX7dhBfw8Yn8THDDFxQoxo
BTY8BKrGoEEAplRwqqlFELGrBdATXcauauaubc9ede3MVz0vzvV76X4D14ede3By06aCopjG2LFj
5yy/HMWGnxJVMHYkue80c4k3G0ck7mONZXy9ueES4zsIdvkYSr42Ck12iil62/bszc8rL2KfayRj
K6Q4oAnxYz+VtbUz7ExbH2Ngm27PIRsO1urfHcc03x+YEfH8snAh3bCbcU3TKdMbcFrsyUGRlyJT
5M8lfHoZRJv/AP730Km0fEvX/wDx/D3Z/wA/mC7b204+0DkBTj7nAFMxJdTQBRUBJhn8cizq+wbf
mRyrkY5hyvj+NkmIAAMo7cxiZHtw8+G2nWunHb0dxQvjzxlJCb0evCvXQ0fohXxzeoHxt+woZZI8
Uq0aYaH99khWD5FK5G65DA7juIo75u0dL8qyEMfyvGaod+2+WkyseQPuOFG2RueXuWfl7bNDBtmN
tufFJ8b256z9h23FqfaI41h2ncJKOFvMNSSZSV+TCKb8GU/iQNX4E1HEyFpsWVqxlKzDCORNB8Q3
CStv+K4GKwAUcZoAD07cGdc48yGNYZpYn7WlJBAZRNWLdpgpWnZ5jt/5Hla8y4Gb5l+QGU4OMHyG
xG7o25FedFaZkkjRFQHxNU+3Qyk4+VjPHuEgIzcPIE0dgJls2V2r+QWouCwdbRRTzM205xY7XuEZ
OFnvWNtWQzR48MSyuFUgu0brEkc5eTEwMvKrKw8vErFZSW7guUYe+BlQZe5boY8TbsyXJEUUYJmk
qPF5gAUATWTuksT4uWuSgoHjUL3HNjjyUZJRkxGGMZuLeJlZAYexQr0w7ZaFbvjCfG3iHJliOBmi
HaNk/cEyoYMSCJu5I+sT1GeQpvJ2RGfJWJCkdwRBJaaXtXKUWrL+7C7jQUGmUU/WrcwOeJjiBTSr
dDEZpcUx+BJpJplx42rHmk8jOIxjN3E8wJFIbNWKLBiRkzJXlZkkjjhlYkGOSPLSGFsPFMr3RKjm
SVFYGpI2hZc2wl3WFQ0uTktiYGOgmBEZ5nLk7T+75Hkaf/j7ZuW4rkr8qMiyzPNJNLIaxDIQ8Uch
lwSC0bVCOX6m71jSaUyPQPNjzvVzRrbow0zN3GQBEGQFqVQ6r5Q5WK8Ys0DWOfC2LkDcMkCTKnlF
IjyNj4CxF27zD+maR4mjySyFRZFjagxWv8clCJsWQHQVewnnCJLM08igCpHVKix5ss4seNjgy0qO
1S5UGKMf5FkJjNv8m75eDkrDJkfIcKFNr3Bdz3RQLfMIC2RNBJAHAIgktUshSbJi8iQ072GXmSS1
jpCMaOeWMxzyySxrkeQRGyxfaIisYEwQNmKqy7kiibdQom3kjv3kgneSD+8Fe3du3s3XtEW62XH3
alxd1oYm53XD3AFcLMDLg5PecDIMkm3zO77dIzNtbNX7Vy/akt+0QkDaoKTa8daXbMZSdtx+47dj
knbMSv2zDt+24d/27DJ/bsKjt+Ea/BwxX4WGB+Hi2GNjLX4mNRxcYlsbHNDHxwBjY1/x8ejDAa8U
Vu1bKqLQRAzAGiTVzVzVzVzVzVzVzVzVzRJq5tzo8BrrR0Jvwe16J0J4+Vc9L8658I9A39Ll6g1F
EXrprzrnWDlDGkl3KZyS70WN0AMeEEdmMccqFJX/ABY58SWPFRNw3DuBW9WFuVIAa+OZcW3ZXybe
Js2aENJNckdjUxtSRk1KDWPFchuxslJMdsbLMinKyQ/5udEYdzmlC50tp8iR1kgVz44lrZbeff8A
/wB76E/6+JP1f/4+gtH8/wAgHcDzqU2AFAVIeZu1Y+P/AJHi2o4eXjwhjjui4m3yzzb1G22bAizh
9m2986XGzYjN+cO/mIs6T/Nw9Ktrbg5a2Oorrx8qFOoZN2xikrCxPEnWugN/WtVqsasdI4ZJDtm1
bpJPHhIsUmyJIw2RRX7IhEvx7EZZPjsy0+258VSflR02VMtHLhJXIwrrl45WWB5axMJsY5r5+Umy
yz4E7b1t6x5GQ2552JiSZWRFGsUckqRJuvyglxvjvX522yUE2mUriRCvxM0GePcYlw4mmydr2/Hw
IOLnx2oVbi58GVmJAP3A+WaXzs/JIWss+QqjyGOGFUjjndbKWYws2K8UX5EeBCYQygruU+JgTwHs
mccom5UUBoxmlU9ghYgooqXCx5Fm210JkyoaaVjTTsRjYWRkLt+3KATAkbSy3EQ7AStEmmhcqzZS
lmkaoY2d5oZfHAkcQw0jixc2FJoFx0VsfOilO65UmdJi40/j8UMTASyUuOi6moJo2kmSJckQR483
7n4y255Ln8rdKXdMqOot2geo5opRTOEGa7W8bZbxtKJscxyRTQOmRtWPlJKAAFNqt3QaOodMKWKB
pfkGHMyY0GHgj45vuQox5MZkPOE3MTCgaFwWd2qWIzK0U+KMPICTyMWlqcj8IAVazxYiIO8KGGPM
Dt8BaOHHhLSKGZ1ZgakxspJp7phxHsxkYg5gtmdglaOEIuR/gVsnFyJGKZGRlTxww4cMkRV0vkSQ
wu2TLMcPHjhaW3fZVEMYByyI4W3PkIJZnXCiRI4mtjuWWZuQ5tjoJck7HimZ40kGBh4+HjfIMWLw
BgWkUiFsqID9zS7ZUz0kLksbBATW5ZQdgpZotqkI/a8c1lYU2PrBjyTtHFHjoSAJWjmSYBoXyH7Y
n7cSLLmSpkRgv6t4IvoAWOPAuLGB3ExZjSKywDJjy0OJjv2wkkpCGUA2VrFf8iRXZZGyhKvSWRY1
nmad1j7WklC1jYLSktHErJ+QY08CebJyjj4cMFEdw3XbJMeXbtxXJinhjyotrM+0ZCfLH8TTZGZk
TxCRe6x7bVkNdMdyyzIEkkPKMQtNNiLOW21FOLt0UDR5EMcuLlxZA51jYOVlVkYk+M0EMk8rbERG
6MjaHX31tXLj5el19C3Dz4PfiPPQ8teQ4vbQ0K6UdD0NHU624raHXnR1aaFSMnHoSxNQIq3AdLaE
VejQoWvpbQ20tVqNqNcq97VfW+gtRrloSBTTRJTbjAph3qAJGRIZ5KxWMcDuWbBXumlzsM4e4Zkm
U7hVostMRVhYcguX2tFk4+bEbwSY0sTxSSg1HCSWQ2kF2UhVtevHZcnFfHbEnSYrGLNCYXja9MWs
6k08bmtlicTb/wD+99C/JeKP9XweHx7V8wyfP8hjNxIPuTrf7XBY4caeXcIMKPCOJNkPtnx4d0ux
rNkY2HiRVvQyNx3D9pEcuNNixJtLRwvtv3ZG4ZIx8S9zrb0xbW3q7viCWPIjKNR4QbUpBFhZuutj
VqtVjXaa7TXaaCUmPI9R7RnvSfHd0ep/ju4QL+zZwCRY2PO+MshwM7E2wxfITLX77toWHKx51V0a
rVfS9MkbVJtuHJUvx7Bepfiympvi+Qpl2TNiowZ0J/KzUobjkXxIc7dJBsse3Yo3GWF49+ylrcp8
rc6faskUcPISkitSotQ47SNtezw4q/KM8mT4/id2QBYcVtLaW4LehbTN3BIxI5ssapSy3YpU11OO
jTF3hik7u6SQeVVUKAn2YwT8fKiaRTEWj3zYs0bjlR+DJNQm1A8utTydg8zFInkmpwVkkkVaacLW
M35DzbdCyTYbxNj7hLE/7pjyLDMvbIQywrIqq3IyXqOYGmtQCWgsqdq3ZZJMjbt7WGHeflG3xY20
vKVgwSaSPHgq00lRwomh1BRiuMkWR29pmXyyp4Y680tCaUUMmSnx9vyKm2ieKkmzlCid2Zo5JRiw
MqhEWBlSjkEEyy1JMI6iLO5FhfQDnvuLJ5MXaM+TKwMdVfstXy+KOOZTUdwYHNIeVBSwjxt1Zmi3
Z5IIBBTk+MVuMnjw4z31jYbxOL2yIu6seMossrqUJtNDG8cEMcYjFqYA00ffFG48YlQKshlliXkC
TWZCs2M6lGw8poGx8efJYxTucp/x1hgfJCwpGvQdwu8yJTZ08tS40xMUPiHY4YO18poseXJlVZ5G
uSe1Gz2xTg7i+SJtwET7b8swnx963kZoVQJYIiuOzY0oZMdFiIIlLRpF5TUdvFDt7SmGDHx6dqSZ
u4mOaBtqmvHt0SE2VPuNTyLGWnyL48s3dkYgYwRowkiaVYw6In6s3I/In02xA+Sbs+U3gOPMzqxu
QeSkg3uc3K8Ue193czogg7SMjzwyIMxaeRUXJyTM8YtUkljjYiRCTJK1GhcyzxYohwpchlUKNLAj
cMObb58PNjyo54UyY8F/Du2TNhxHN3GJEXLDt1Xt7lSWKBcmcSGZ7jHhWOND3I57VB7xbGYYfgx5
R23y8/F2zbc3f9tysAb3DiDZMzLzoJ9p8mTm7d+OvSvf0OdDWxo1ajb0LV04b+h7aW0tw8qFHitp
zv01PXrR0Gh1ty09uno8uE9JpZ53fFUJ+GxBTISvLOtLnEUu5NQ3JqXcCaGalDLgJE8JoMhojl7W
oUdbGrcBtR5V3KSzKtCWNi0kagTREtKi154yWmRT51DSTdjec98sjITI4fOM7ERsUKEEY81kyMmO
L82dZPy/LjiFmSGBxDPcospCmGN2jAQOkbshgVkxg0EePJG0OIzyeHDyVCS4kuNjo6+KNRNItJAR
RXmyil/T4mIycZ8aTb81J0ZAwKtGVa9Nc14zW0x2k3//AN76AC7S9eLHUtJ8ZhMGz7rIZ9ziYiiL
rawP6SKaQpWLHPlJs+3vkZOLBCqb7L+Nirmfip+4T/kxyNLJ+XgY0O2zpO+346wpvE5yW1PochQ4
raW0tVuC1W1ZA67vtbXdCp7SRw3NXJ0CMRDjTTvH8UmEEuy58ZfBylMe1bjJUHxvdJTD8RW0Pxna
46GybUKTbNujpY0QXOnOmAYLjYyVkbLhZBk+P4qlvjGUV/atwhlmxJ4jHkZKsN2zIBBv+fIsfyTG
qHecGWkz8KRrXFtbA1uMMymNMHLdvjGIx2/COAs+KmTWdtG2wxeXbXIgdqMWYtGaVKmzD2eWQHDW
Vmmz8wpLtuXJXx/EWJLHQerbgJAC5Zm3TTP3FI1XcMwI1xUgvUby1jxSszqTGuEZ6x4kjLY6yPGB
byyeVu4mYyMdjGY6zJIBBkQzJ8qyCqQzZGRjwt3x/pljN65U0n3E/fiS3eeMFp4FlqXAyL4+PJG6
JNLUONHE2Tiwur4BZozlY7RZ4qHIgyTkMqqULPi7bkT5GPsWBBDvOyxQpjhkURNMcrMaPchKrU2A
s5jGPAAkr0saIOVZuU2MqZO8Gm3qIUdxeaMblkwNL4cyZYZFqWDtIhiVud5CyUJIjSicBZVJgmaJ
suFZjHDiu8axQhnChUlmC9JHNxt0jVFCrrHHHCDoKFbpF3wDdcXFxdv3yDLlffmjbfMTfcucVG1Q
G1IayHnasaTujLKKfJssEbO2VIEiSdWICZcWLj/iIzU+TKrSP+QEyktOWmjD5TUmNM9QeIkiiKJt
U8SOVcskP3MGBEk8cCvkZGUcTAXJyY9ux8rIRewZOf8Afj4ZLTXjJIdXyIYqfLyGaHF81RokaSP5
ZDM7RqQCY/Mhf8uONCqt0yGskuztkR4O2Lhx7Bs2NLk5W1YOVFJAYZ5JYQzBmWJMZzNjr44UCrKb
yNSv2ISzFmlVnVexblQzkivJZge8yMIIIIQ9TTwxCQBkjRpsWSKbHYt5IkAZ91kaPg2+UR5LHtcy
XAsAJ8iVwsdvtukzLWRh42WkcccEcfgkZEVKuaZrDKyGdkQKJHN41gxFm3KCUwRyB2mKvh4CwngF
Mquudgz7VkYmXHkxZWKkol3CZMjO2yY46Y/dK+1b14XD0XwcesvOEzQjuOM5J6KrhlXIiDrhRTmO
Jg24SZOHJJJLItlFPDE4T5buO2wfHczNyMjLxhNDLG0b1bi56D0T6duM8+M68teXpGvY0bURavaj
ccFzry05W5a34LVbS1Wq1qIrNeaGFc6ZaTLyEkhyI5o7R0y4LVJj7fY4m2MW2/C7Vgx6/AzgPw9y
FGPLWvMyUucQUzJGH5swczZHeHzSQ2WWVM+guUQIc2y487BsWftXE7lkxVZRiq1SY0TA48LF4o5K
MEJMiJJRjiu6I5Ijpu1quLE3PcbMoYywGkxQ1JF2gxoQu1jJqTaWVZInhP5U8Y/Odq/IU0rR1LKw
ou1BgGxMiPt8bSieCQrBtxL5OMk6xTZGDJH5MhY8Ptp4uRh5upAijZgkShcrFAGRjSYkmBmR5kfi
mBaKRKbOEZTcS52h5S+//wDveuOsYJeT9fFtsZfJBXD2goewpSt2qAGBIuSCWiBGxyeObBxY0bDS
yb/LOcjH2r81twjeMQiRDHKpGy40hTcc3xK8jOeVcvX6+gatxyxrIm9bd4WgsDLjKaeHtr8c9phe
vE9CGQ14SDtXx3DkjXBw0SLHghq2tzXPW3oe+lzRRC0+Jj5C/sG29snxx5GOw5UD5e25SSSxSqiz
RxR4m4bxIkXyLKMifIohUO97fKUzMOSgqDg+USFcGluKiysqKo92yqlzYJEGBjLFjYuPIuBHt23n
cvkpyl2+bcUnffNwxZk+T4bVHmYsighh0q/rbvkui4sojzMjdceBsrdJcg2IVJFhik7w6wPkELjY
yvnm8MkcqSRTQUJsnyjI8gHM95eVCyk1sO5QqGKquOEMe64UGbhSjEjxsRvtl5GM0b00akyi5g5U
JBdjGSfGa7VWnY2kcKip5qGRBDHI6uWwIpEkwZ4qinliKbtjlNtz0TJieOaP5Nmpibdh5HniyUWZ
cTZ8eIo0aUuPLIVgVNWIVYsjzNnzblDIIsaTJbBWFp4BLgY0CoXYIbhkXtvnw5C4pPMHkjstY4Y4
ljUTStWdCvlRQ7ZM0GJMrSrETalg8bDrFjxQyZcqxTwuzChpIokj3HCyZJPju3ZCS7VChLqGG6xJ
BuSmxiYgQvyy4XlxGznqP8qalhWIxZBebMRO1LBY38bzOrMOVTwyTv45ECxJPS4US0kIACkUkMaG
mpzUr9o88fnSdCWzGIgwJJWzZExMbGhaaL/FBFLkzZr4+JFjp2r25OfixouNuGSI8Joz2zSKiqon
DmNseZFtL2qxYiUtkQwSvL2hWPXLdVMG9IZJZfs2Hf8AEgnyd+2+GGWSV5IQDJPKBJ5PuM7yNy7J
G7ch2ArutSkgyBmYyo1R/a+S8qS/kAkyLMWnQFpSR3JBjRxBKxyxXB5Q5soaGJSMaE/5N6B8moNq
j3F1U7jUMEkwebtVRkuBiZD0sEsZ8gvJlRJWLFITVwBl5RJRO0Sy2M+YsKySySnGYJM+ZGE2w4cc
JycYUczDFHPwhR3DDFfuWFX7phiv3XGqTccSQRocXNk3CZTkQzZknYzLtG4Ptxzt33mXH3SLHWVh
YKO5mhENRFVEU3eEcrN5IHkHlFKVK90neDQq9ZhQVgb5kZC4z+SHd42EvFbhtr10OnT1DR+kvpfQ
68q99OdddSDa1chRZbmWIUZ4BRycajm4i1+diCjn4tfuWIR+54wpt0gB/c0IXdQwO4ydp3Oft/cp
wv7llMDuM/jXOzOxszKdVmz/ABh8iQSNKkyzTyK2NNWPkoogw8SRBt8PZ+3YlvxIGIjjUZW3RTgj
MwWxNzhlAblcmjFG1PhQsArRGGRquaveudX0ubXq/F7a3twi1e/Sg3Iha2/Jix2lmgMTiGQz4sZe
TDo4rijFItEPR7qJNLKy1tOeZD3IKaRDXfjkZSQ5AxMqXCmwXjyi8CXWKEEzYarNkREPkkB8h2rp
U8UmHLg7l+Qim9ZEGJKobEgG1ZivPv8A/wC764qEfc3NuL4zF5dy+QzLBss0S9rYhFS47ARBgZoJ
FEcYJXETtgx2hbbMh53jnCjIxY8122ODHx02dY8aT4uryY2xbXik5ixo7F3toOC3BbUfS/Ii7GOJ
QDlBalmLsMkWE8VCeGmzEC40mCsWwq34voA6Wq2nPS+tuC3DJHHMv7dghH+OYLu3x7JjafZ9wifK
xspVkkielaKPHxcze5o4N/zC8fyKMVuOft+4wPgvG/iIoIaK9oSJmMcBWGDZ8nJL7BgyQfxnaAB8
YQTZu05qvuGFMsUOJF4cbM3tBD8ny4DB8nwHXG3HDygCp9TfmlXIBftt9qqOwSL2rJ3GGMKJn/Hg
5kkUFIqMMAY1YrGhkliMTxYgiplksE7F2uCOfdAoFPBlw5mfFly4wx8rL3HH+yeUXSI3ANEDyQp3
NPC0M6uq0GUP5VMsmRBTTSy0sTLIVkakQXK3EfxzcXjycXIxpExsWdMvZUVimTivi/IsyEZEsu6y
QYwgjs0lQ7ewpUVNI3vTIGUkKIcibKkx8h1ny8kRmSXJyBKoVoXyO6LMnLK0uSz5M2GYJZJqx5WS
t2hWHMoUjBoQTfLnfGMGR+SkI++dVm3h2u0MaiWeLyQK6uHkRAwjmjChRoKHKp1EGfk73hYr7bup
Kv8AIMrKiyoM+PJU1GaDsiY7StFM7rB5siRsmHOya2stHkZUfkjjk7aRQxjMfebABrtNPDGIlQK3
aoBosBrblKQo3DcJMiSFCz3ixxgRwNFNkQ46xRLumRkZwglk/KynDY8Ucm6F6XCzMmlxcWADuips
3IchHepZBEqTOVhlVmml8b9wIQ2fAkWETr2ze8sRyni2XMabMgL4+1fFcCLG3nZosCM9wEMgEmVC
fIqPJGFa8D3GVArKsKI3fHXkU0XUF3Pf+Q5pcxA3ng8RlectGArKEqdQGd+6SWbsixoykf4cZkYX
hU2rLhOZAMPJLLtOUwyMDIgGm34glZ2LGOELTWvum4SDJk3PNxlbPedsbOjhP73Q3uIGXeUYLnRA
tuERByoFQuwc9zNipjNGNt25HTD2gsmJs7UuHtBX8Pbe04WAKGHgBlxsAMIsQkR4pBigZJ5srbMv
GyIZE3XCkni2vPzcLGzd4ytzii2/HEW4qj5O34KvWXAWECmsRo1yN0ysaN8aaEBJrmV0sGPeAe4a
ZhWttycOKTAQwQbgyHE99bel04+epqxq1Wog1arE1Y1Y1Y0RpcXZlFXFlljYvNChZ4wqZEEhfJxk
Y5GOFGZitRzsNT+diV+dh0dxwhX7nh027YQpt2wgP3fEr95gp96hAO9ChvUho7zJZt5yQBu2Wzfu
WYX/AHDPkAy83s8u42L5zN488OsGa5GHl0m3ZPa225TINsnCvtmS1ftktHbJjINsIdNtEbjbI1qP
bokobbjAJg46BcHEWlxMdR+Ni0sGOtGKKhHGhJqwFG5qaKJo8DLhhfImxoo1w/yIJcTJwzi7qrUs
iPper1JEsi5m2WqHNycU42bDODavJGTmAKcaYB/fUVer1ejXTi9+DqBcAVbl7317jRQGlW1GrCjE
DTY4IbFpsO9HBesPEaJizE3armiAadFnXbdwk26dchJVZ6ZqdqkY0LsVhY1KkaiWKTCkh3V5ldpi
Jsh1bYsjuff8aUb0Y1rxUITdsawMbCiCPSHSLkvH8KQNunzTNXsjNzawPiIbblapMSXtMTIwZg2K
pyE23KSNos3HBhIvJLH25UvlWXIkZjIxosxrn6Y+gHobzi+WOVHUtXaTX472GPIaXFNYOAJ8j+NY
jmGGOGO9Xq/oX4hwW4bcVzpLBDOrbRtxQ/HsTtx9lyoJItrzlyclJnlyGxSZcfBUvt6Rq23zAYuT
tWG0O4bK5x9wwsmlZGq2ssMMwnwMaWJfjcKK2zbr+TuELROYsUJjy7hjLB8k3NDH8rwWKbpt7hWV
14ndY0nyHy5HYmgtqVWYqnkSKMAo6API0z0oJpFoCr2GJLKzyhJayo5UCSFonYMIZjG+Dk5GRB3l
J/GbfJJZdvmMoeUgmoelMJA0cVg1pVngeJpscKJNvBVIVtD3oRI/cPIw7Sh2IxSbmTcfJUhGKnaR
l5+XjRbfiZTPNteNJUmx5QKZOfiPhfJMSy5CTwLBmvlIyl8yTJxc3Jz8nEy1zc6YwDKWNclfxQI1
qPLSUuMRZIYMNxIsOQOzGWAv3Ue1VQh33LIGRlAcq26cPEsylvMWXkAZkx4cKYvnk8wbRSvPjHOi
xZlDSEpLkwRx5c5eWRkRsrLV4n70FbxCXgyMfKabY8WXHw8GCOHH3/Fin21DyjPOGbtOITDTXnae
TIDRblMqvOzTwypkQ5mHdlx51ZZIo28sklBTGJJxkLt0xUv41MuZDGiSRyI0r9j5ccbBgVzwWw1u
F8YjRj3NjphJj5BRhhTTqifi4rT5EmcMbZWDxxRxKam5nsAruLGWUIouaFwI255v+5TdQkyyJL2R
sLVey4+TBj1BmxzrJlJE2252Nl43yLNhOPOxuaSSQE8l7lBgl71kblJEvbDHHJK6JG0pZ1AZy9hX
YGpUsqedZJJchWEtyZS0ZsxhgRCAa7bCQFVSLsKuUPlc0JmLwt5KzIliyY0aSTtEaQxgn7fPn5/i
TbcD8uXcIYYsja8nGx5YUhmH4uNbLGMoSFC0kcKAYePDBKyeWCKXIkTa5fK+3HHjjxcaRZNvxTX7
bhXlwcWMYeFtU8f7BgMrfGscn+NIGHx1gf4/OqNsOV2PsGZIczEyNvkgxc2YSbbuRXAizCJp89aw
ttztwny9l3zbE/8AsVoJnxOUzFlnkzjSNJG6NmGhPmKBPkhvyswlczKJXMyC0k8k8mFJJBKny+Za
3T5M80jfIZ5HPyDJdv5Bku37/lMzb7mUN7yQib3kAJveSCu95Jb94ymd91zSz7jnGjm55iTL3Exw
ZO6MYp91Mkk26GSefdlLTbykSS7sqxzbq9fkblKz5W4Fjl5yQxZuQGXKymaXMyJJJM3LSM5eTGxn
ymiSfOdw26geLcu5cXc5guJuJeTbNwRm2nOMUe05tDZswg7Jll22LIv+xN412NSRsSXbYcY0NixL
LsWGD+zYlhtGEKGz4IY7Tt5pdq29abasA1+Bg2XAwlo4WG1DGxgBi4yt4YCFiiSjHGastrEVc3ua
ub3NXNc7nQ0aNc666XOnPhvry0uazpOyAL3Nh4cSJzoqrVl7UklK+Xhtj7nFLV1arV0q/LIwIZlm
wpcd42yJhLLi4i/uEE6Y8LifpxcqtzNe2h1Ne3CK50KI1sNSTQtblpyq1di0TXPW3KwasvbW8WPk
z4Ui5CupkJrmabxrRyFWjJK9eGVqaOy5ETYsse5rJHLP3N8fUms/dVTO8uw5Zf4/seSJviUwqbYd
3iqSDIiJZa7ENDHjNfh3JwZhTY0y0UYcH/xHKHiFfG8j8XK3HNkzMqCSxXtZXjd2j/IipZfKfxUk
ST49lxldsyFC4GVBWPFNfDEnbuMkSLNls6k/Q3+lljEibnjeKVxzJsUyAB+TGB+Vz+NwrIPSvwnh
vx34bcJ0517viY0r5W1YmU2TsOO9T7HkCJ8Pe4MQQSR4wGG6QrADBNui0u7bvFQ+RFDH8g26Rl3H
b2a6g86aNGOZtmJmLN8bxwjbDukcfhnRUgwrQNuZTG+SbpAmL8uw3SHdtvnAIYab6zCOIGzKaVSU
B7EwWgkqYrG8mR5SBalBJUcgBQrlSqYpSxLv2MrISJnBZR2rjOksEiozMa+VYuNl7buaRI57iBYS
LYhy4pZJWLxiOCFplVe2R9wzQEgieGMm4jUXDIKyYwRh9+PMvyOFY/km+vvD4yTmPwwpQSecQbci
C3bUkEMy5XxzDlqXa9ywmw90y8HIm3OGaYS7hlxRRQiObbWNZOdkxYWRlRHEljUyZkiY+JHlQ5Us
GKYp4SBJPtsNO2LGyNgMuXnu8Z50BoGZSm4hlTIwKfPw4xk5UuS0blG8gdYZQphnaE5mLGjLjhmh
UIpLGg6IZ8nGmfDlc0DUyiWLbpUjj/d8RpIt6TDg37ec7NxIlsqHnGqvSLAkcEoWaaaWOZP8tQq7
VEJY2EjMksUbBIMQABwrSTYsMgjyInEqySZMcsclp1xpmiaOfIuxadkCiPcJPO7bTkPSbVuDmDao
sas0+J+8XhdvLj7ThqscccS3o0ehsXkIvbtLt5JHNK5Cwkd8gQy5Cv5IDysPHJyqc9sOXh5BrZ8S
aKLb9ifdM+X4pjxxvGBWUfGIgHqGKxlk727ahPaxJIA5N3vQ7EDAAyyFQqSSMkVqayVMDZst1X8p
GQ5LMU/RCZgUsQT2iySRynxFGjlEQu00aCoHVH3bFKPtkRDJzKjtTNneFWaTImhRMLG/AORkDGx1
ap5RGLmRncRrhy4iZHyGLGjgGBld+DiiAJGmJG8MhqKDIiJkQFZFaoJCsm44GM8n4GEwO2Ytft9q
/FzFojdVJyc1KXcIlqdtuzY45JtnyoZUdcoNjpPkM8nxDbVxdv8AkuQmSs+HNGFJqaNlOQuO0T9p
rGlBx4SkwyirBTcXtXeLZRZj8ZbGiynnwQdx3naZRJk7Q7SZezvUmVsrBsjZWDT7Qw7sA0ExgXxk
Bfb5FY7ZOpGzZdNsmfIx2PMel2HLYR7DlErseYCNmyVD7JlXl2PMkVdizSv7JmU2zZ5o7Rnkfte4
lTgbiHkx91ESJufbFJuIBnzYpPztyRBuWerfu+Ya/eZu797cD96Ir97jNDfMew3mAn97w7HeMMUN
3w2r92wbHcsKhuOE1fuODf8AMxLDKxmr8iCvLFZZI2ruWvexqxoirHT34DRtXTQ6W0toQatry1Nc
6tzOh51ajqRW4y3bDxxJKBbQUTUkMUy5e0laiy8jFfHzopxe9c9M1zI08oSvEZC8DIcfLMLw5UMw
vw3061bQjUnW3BbhFW05VYVajV6vVzRJrrrbXGVTkbntjnGzMQOI5ZcKZMlGV5JLA9zDwAPn40VS
buQzZneFxZZxJtbJJFjqo2ONRJuRU7h4o7ZcAir41hKu3z7xIcpcqGUTbbtGUJ/iuBJUvxPcIxNt
m6Yx/IaOkzFt5ccrBjxZMuZtMEJmxPHTC1SC0NqtwAXqDHklaDEMES7dPIrY08JWYhomjYQSIhxs
eDKji2aCNI5IQWePzflwl5Mrtl/cmCzZDSm44r6cvrbaWrc8ITxZMRikMZavGwrwvUWKSdjxJYYe
Ia29IcBHBy9ccBAYPDFJG+1be8T/AB7CMSbFPFANs3ZIHhz5IAMZoCmOIzk5vjk3Xd4kPyGVKj+R
7dIybptzv3RtTbRtrs/xbGedti3KLIzMWTGy5IMczh85Zj8h3fGaP5dhg5u64GU0agorkvZXrLlC
rg/kwSmMspASW3NVtQ4nYKFa4UBiQxGLEkcEwUrzr5hnZELLjywYqn7HtdOYtpeLx5GSQuPlTvG+
HlSv8e26HNmmxMVk3nDODlQs/bkHEjj2vIkau2WSlhghMSZOQYdujjJUAA8mNEiuYrsXtzcbH3Gm
+PSNUuPu2CcXfYRWNJi5VZAYiaPGmgw4ooowAVG0Yv5DwZ5laeRGgka0zY048G3WydmDrLE8TaA8
hRtXvc0DW35SJSsY2VlK+WWKpMTHlZ8LLiPfkApj5UtSY80FKcLKJD4pUhq3tJYXwY5ZMrCxVmPY
qr8oxIsXMU1AbGGaFqkldch5sfKqCCOJ51SOMSBDHO8brKsiut67YO6M+RZcbws2QJ4DjqE7lSOH
vLp3KfyoWbInQRY3ix4PIzLl5DeWeQls1FkWXDYD9tzGTGDCCmQsx0IF2P8AnDXqeNo5GburuuIh
zyTeZmNY0f2k2DczlcymRjrTOAPjuXCryPHDHlusuR5LhPGHnewJtUZ5lwGuCncbL3Ggne0sZenk
aOQFWDMyiFxTlCM9kWMkmgSDj5xCwSxqVANAA120VDU+MWMqlS0EjGNSgyI45IZHfsxnZ2K/490z
vKMCCGOCPuyJNJHCK8hmckIuDhSZbvs24/m7jnHzQ4m5/j4eIkCMBGASamZI4juc5aNZGiadYTjI
TjjS9c6N6AF70+PjyVl7Lt86+XP2qbcM7KXDxfFJNJuuTA+FvpiEm7ZEqSy5EbrPMxkxXdp1EbwE
M6qwkeAFYoQ6jFQlsWJVx3QZGwbHt80EO24Ibc8DbfyhhYalcXGU+GAHtXTnXOrmrnW/FfgOtqvp
er1c1cir1eutdqijHESYIGJxsU1+LiV+Bg3O3YJP7bt9jteH3NtOCa/acUqNoxlY7NjGjs2P2/sy
BztC9w2c9v7ZlUdtylYYGeaOJuyr+NuYoLuhcHeIwZt07Dm7iKOdnIy7jlMy7s4U7o4Rt2hCjckD
/uWOSm4471+44dfnYxAzcQ0MnHoZEDUZYrBlYXFAXqxoAmrH0pG7EyXLy7fEFjo86HBPiQZAyNun
xzjbnLGYcmOZSbDJn7W7i5VrVkyGo17yRPivh7mGpSG476X4OnBfT2Fctbir6WtVuDmdToKB0tQF
q2j5Is2JuUKOcuGOSoJZMKVMrHmRI4jC65DsmBNI0G1ckjx4A2Qxp2LnnfZb+XdJmG6+eQUjSZeX
lypg7QMiKNly4WoTBqDEUuTOlDPlovgz0+ybBkCb4cpqT43vOO02PuUDNMjKT3Om2s+L3Y1zBjNT
Yor8evDal8StteZtoVoIpBHNJDJhyRT1lbPizjKwpMN8OKbIk2vCaJd93RmeOURQd7d17n1raW4R
04B6PtqOCwNb3tYkWxRnYV5RUGSFlwpBJj+r7ctTVtbUOIcN+MeqyqynAwzFJsW3vFJ8bj8M+xZw
xs7btybF3DGljjm8MMq52SmUm87pHkJ8kmjlj+R7fd22DcxPsG3ZaT/H8/GEmFmwSSfizZN2835u
ak0W9xhoc3a3p3ikaLKOOXtJMiUKHAxCjvBqODzSTYxV5GC1BN3Hb0kiw5TJRZrfINwdJZcyTLx1
H2SKBHG1Ci1qzZGCYmB5KTEUJ+Iqy/H9wTEnaaEr8n3U/ugmBWTA/KqGLDxljxcrKqDbIIqtYE0T
eibUWo0oFSO+bIIgiPSTuplxMLNGTseRjA5G5wNFu+HkNLHDIkDFBHdjFLnmXIycrFkfDaJVy8vE
ptwTujdGjzcSHOxbEUBerc7V0qLAy56Oy59pYJoGvWDI2TQUoPPk37VKfk5UE0eQ86/ktUW6KEiG
NnKZcwNjqUG7R3SJ8KGPH3KB3f5Lgxrve4ZO4Zi3qM1jJGZJTaYGoyt8/lF0q9JIyGKVHV0Hc6S4
0mdkeKLGkJGTkpLURj7pHZajZ2eKCxlyI4okmk7+/uimhXvdW8sGAJIcaBiY1ix0kzcwzYgyCmho
MFbJBWVpgQbSV4zdhYQrd5STOsLeSNQBLew5tk98kkSznIzTJHi7V8WyJIs3bs3CaSjUALSSnuks
CQL08Uiyd3akf+mTIe5LRRjLYM8SNH4ZFdppkRJVWOLyI+4l2luBRsTj4kskZyMlKTdJBSbwq1+8
QGhu+KaG54ZpsvDI/JhYrLGaaUOYmjFT71jwSv8AIYjDGJMiTFjxkoY6Mwpm7ayJjKyqqCaUBdj3
YZLzzhn+O7ZjnP8AECudCkWZu8r47QP5ElXGyBHt0GJJPHH+JFuSBIZCyXppI1o5mKKSWJ6vUsqx
LHnSyTDlpuGBFnQpEuNlnZ9tkjy9hyYGjhc5GxbXgft+77PtsuF4IWjaJDBueGY6xEjklbDxSfwo
mH4cdfhxXnxY0jhgwnG07bOmPFt8ssWXtWVFP+3ZwJwNwu2Hud2xd2r8feK8O828e812b0a/+6q+
8ilbeBSSbuG827hmyN2DHI3UV+VuoH5m5kfm7jZc/PpM/OJ/cMwu+dk98mdMo/cZRGNyey7o1/3Z
RX7shI3iAk7zjAfu2MCN0gt+5wGv3LHNfueIVXPxmY7jjXbPxQTnYnYubimlzcVmbNxlL5eKtLk4
7JHlY8hbKx1ZpokCyxOqzQuWliQl0Cq6OBLESzohuCAyNRYA3tV71ejcVzq967EWjFExOPjsPxcY
UcLFJGFigHbcchtsxnVtsjcvtUTMNrAkG1ujLtcoobfmxgYOb2mLdFXw7h2O+5KrvuCg5WXTZOZG
XzpQTnyxudwsw3AeRtwiD5OUJFBLTwZWP2tl46UmTBIWnhWknhclkFB0NXFd6A5W3Y8wZJ8WSDOl
ljyH7mFA9iMS9IhFI96mxSKxdwlhMedBJTTRLQyIDTTxJS5ET088SUk8UlPkxIUmjkDZUKlJVcHL
hBWVSv5kIoSKynMiFGVe38yIUZV7PzI6MihTlpXkXsOUooSKUM9qDoUM5pWVk8hIjdJFErGo3R67
nLWIfMw8jFp4THUsZjHjZVNlX7+1mcHLzMxVXFmmoQS5S9suFkRbjDHDDkrJJ5kAaVjRN6POrV77
L/s3THVtybFqCJ8fJy9wmzFnjld/HICCwpZ2Whmyil3EUubCR+fjXnz3FbXvmRhK27PK2/5WHOmO
vdNn55GJpc1c6WFAEHb9zkgOPPj5KQBopcN45EmwI5WxcCCKsqVYce5ZuXD11t6XuK5emNLehy4n
QOu9bUyN2mrWoLz2Ji2LxjgtQ0HBarcNvQt6nv61r1LjwT1LtmDNIdhwTMnxoxyL8f3LHD7PuEEM
uFPjwfk7jBJHv+7Fsvd0zGvguPGoZ5J1DGAiK0dJJuMYh3SeNYd4w3WKeCQC1E0dJ2Aix7igzLUk
qCBER6mx+wbTuobGOTEz2Fvk8G3z5+UYZKAsCKh6BrjA2nI3Gsn4wyxh2RoHRRmP9uBmJkRTsCMb
Zv8AKpggqDCzMqsba8WGiAQwtRIphc8wZF0AvWV3EY0IKynlLKkNfkTuZWKusrgd7EvgYmTS4s6y
5W77ljL8e3iV8sSSx5m4Tq8i5WLM026RwmHOgmjhXxS4brbPwJ4pQAdLXrbMCMJNkhFiy43LvDMn
422tUO2x48gJoTSgt43G4wkx4aCSmy51olZoziIxwZ5ctPFNDNkx+TH3OSfu+NxyPJtGBCXzMODL
hlgMeQq2qFmqWL7TIQ0eMjpuXcGo9epxmFjIGUgTY8+FNkII5Fk/GdFe3Z5LVFZ5GyDjsY4nQ3sc
p46E/mbGx1FRTqaeZ5ZfNKojxfMkMQiTQ0VBp1DL+ErPk48kYjuKbrGQiOI5pI41AqU8kBvghTKv
hpipbFdJcf5BIi4rVOjq8K2VSSGPbUUDOnkbuJvUkwWLHDSSPZ0AWNTl+VVfyMskD1lQlqbywNOn
e7bY5aPbJlOPNjwBWimVtvh7vHlxjz4gKRbbLT7biGptsxlT8GI02AoLY6oHCImRidxrZPx3EmKt
4IlhQkAZM9Itq7TkSy7PFkYceDHtEez7TvG4HHx87aslvkP2Pm975Mv+KKDJyKQvZyyhGIrcYcaC
GR8eVRHlShtujYjbcOhtUFx+4QHIk7JEeIG9E1etw22LNiwsybAlVkK5m2PDkYW/jChzd4l3FfxI
0jhdcfJkUz4keHGoCAkMC7MfOOmV/pgxPxZNpx5HxRG0cLEljxHS3Be+h0uauaubXNXNc651ernT
nRAJYBhYWVUSkiiRmhhZniikoxQlEhhjoY2OpONjszYmKw/ExbLhYy0cDGJbBxifwMXtG349/wBt
x6O343cdtxbftsBr9rgFHacWv2vHpdsjAbbUMh21PENuTuO1g022m67d2hducVJgOZBgOIE2+QM+
3ys527I8abfKrHb5iXwp/Eu3zB2wMklsDL7BgZPccDKJOBmhBgZPd+DlFhhZYQYE4MmHkM8eJkIv
4E5WTDyWAwMgN+DlE/gZXYdumYDb5ldtukdkwZUWdnFYGM8xbbC7LtiAvtSMx2pCP2tFqTbJWobX
OGfbcm77blK8uP2I1/JFGhjbB7m/BtUmI7AYUgo4z14ZLqZYzLjrMsXkifHyceYmHDehj44ZsfGL
LFED4YLrHEp8MAKpCteCC4WJR4YAR2KvihoBO0RxCu8dgWJaDfaEjFeQ9oEYIlYLZb+RrDtWhLIA
vap8slfbfyyCiQSJnWmbvJypiHlZ6EjANK7C9eRrEg1L2yRoixi/PLxmmMOLKhUAA6jQ8q2X/ZuX
/oUbVyqwNAWqOcJT5OLIMvBx5al2zLVmjniWOFDWLK2LJNlTu8ubODK02Wq4rwSZN+8aWNBSaCGh
CTQxmpcSQnb8eYNFHmWhObj1BlZ8rY8WQo3XdY3jAAHGPorUOG3B09MaSwrMm4bb4JJYm7oMWaeX
bsMYmNx24b8r1f1hpb0RwDjtVtLVbS1dPRIBqTHx5mk2rbpZszCw43TakkxzsuSskuDlpL45mDIf
CCEAy51psnFkpMfAmMce5oqbpuEYi32BhHuONIM+eRaw8tVBlMiLyLr2lrBY4Lx58LyY+xfH8yCL
5PsGbJjTYf4kSnuUKWIwpVcKVr40yjbHFbplYf7zLLkrJnPuOWMHbGgisiVj7Tl5FY+3Y2OO6hc0
WAqR+6rCriiAaZbVIljGLtI1Q8oDYFirMEpWPa8UC15cRay82WcYcIxsf5JmAJ8YxBNM2L2yzFok
XP8AIPxcQRwQ56xtPuMUsWWkzRZTqJcPb8mn2NKi2mGNna9dkEiz7FLHFJNIsUU2SJY5XjLdvcKi
JJygDjBgtMwtDIRUzNG7P5BOsGbiR+SxhjXPDYeIMHPSBdw+V7bBDFkvNMx5wsQT3iHyxS1+QQmf
ZoCbg1iqzSyThHjcGlYBIwVjyFjjo5EuZLnIqQxqFYxL2RQqiSCOIhzJG0bEwRGMm8hTHZx+KpXH
x2hfkKMiCvajVtCtml7ywo3Z5m5wR3KiwqZqc9sWZLKi7HJLKggztwzl2r5Bt0Lyyysf0zM6zKT+
KBycEjDyggyMZy0URUZUihsbtaPIkeV50kLQIkYgaJUCRR0i8uyMkeMSdgISMAWBqTDx2JTNipM6
O/JlfCxXptvjuuNmRNJgZa02PmuRtUN2GLEMmdRDt2Ks88G3Y8DL2vMLKMmcKEUkySWOVkT40mFn
MuBG8cwwkj/F3YKuGLXyNsU5UkaSw7JjLBPmSJJuG4TpFShhTSEKEWIs7kliKUltHlZMcuZosck5
BJv1oCgLVum2x50eDmy4koIYbtDOox58c422o+5Nl/E1cCcRxq7rNMSrRdriY3lFZgPh22LIaTbj
34WbkNDH9SeG1HhsdOul6NHg99T6HOr+j7exrlR4r6c6vV6563r3NZMvjhne9YMQjx+mt9DXuToQ
DW4Q49RZMMSHOipcmNq/KhJDq1dwq9CrCu1TTJHaWeYSPkzAJkTMsKZCx/uKChuGOT+UllZXGt9B
y0HDfS96Gh1PDarULUetXo1e+p6120BXKiKsKNW5bLbybiP/ALHqKtR4poVlVcGWMs0avMnavi88
vxeDAU78+LNkZCN3RjubwADxC6QlqjxHAXGAoLGtNnOr/EPxJI5I9spvkeCJT8gAGRuuTlKKtVvR
FW4L620HoW0tXP1fbXppNBHMrbBEz4uBi4i8dqtpz47fX2+gtwMyqMzcpJmnhjmOOPHTzRxAhmdX
jvlSowxcfGno7ViEvsaFJNhkaQ7LmJTw7rCGyZWxpJoCk0gkYSS+SHd1A/NxpYpWDRkADa5s9pP2
/IMSMHiySqwmWWbJxJu6OAgP3kjKZO7bsubFOb8h3DxQwZ2ZuC4twnaoh26WeoMOKGlHIqaJUU0p
FMZGruK0CSHmiSo542F1YMt67e2sodsUADY00MjwTQyxUrAV3CioYSY6q0SeSWX/AHb8IVXa5Fxa
WDDz4hjTR4ibdnE5K5GDH+RNllotxgMTx5QEMqKmVE1SSgUCSJcWSeLGWSNpMkpDj5COjsA2fuEc
UeHvcjv+RmB8VZUXIV3hGys9LgSYTQRYaGaRpWhm8L4ckcU4cM28qyDIyZ5Z9ojkk27D2vDxYPk2
0Y+OhqM84Hsc2E48y8yVMmGputYUyxTZmGkxx8eVKllIPf3Y+TOkoxcRYlycdRCZRIISBG6E0kcb
xrYRpE7TZDiOISlQswiTyBIotySVvO8gmhbvx8pJ2o6GmIAyJAFKs1RgQLGpkZFCqKY/a9+3KNkj
x8d4FSGBPjiw95ItklDk9RmLZ0+7GVwR1pUuyRLT+KJJMlskxPLNHDiER5yoscblnKslTsSmK8zA
qbrGGpnKtFlMa8ighrnqGjSQHElhpc7tMk/dTIz0sCmuzHiohHjycURRyMXfbmjUYkXmyIVMeRLI
EQkyM7iNdrhhMmZtu3uZNun3LLxfim3wQZGdlbM82dNlrHmQSNI8wlbJiWsKdoU7VbJzwMrPA7mU
qk05R5d2yPFg9axxN5T3diKZGSFIlynmST82XHI3SDtxMtcgVat22lM1NvzXif8AWMvvwq+Jyr+L
kZMWNFLGcibKgZSVVj4wRP8AaFrK/wBWPO+XPsgJwdyFoD6R4/ajXLX24SPQ68B0try9DlodL205
8XLgIHAeA30tXtyvrarUx7Rl7g8j4ymfJC2HHzr2PTdmYuvU0bKi8zFYA8ybgd7VJkzKVy8mo8qS
2LHj5Bbb4mP7dGKXEiUGGE02BiuTiKoiiWNTV9evo+/DajQ0I4T1vQrler6GgNemthpsv+3cf/Q4
bcT4qNKYkcTbdOrg5sZkfMYZzTuIoWU4mybjkCXap8WXbsrFw8vP37CnT8zCd45MSQ5+yvK+07VF
jYckJZZcLKkY7blGvw3hWNtwAEm5d7blLAz7rItLucvYm6QyAbrillzsUgZOO1LJG1BlPAdRxctR
x+3r3q+t+Ea20t6HPS3FaresOG30+fk/kGRioUtd5n74pVFZExZ1kIQWMmEFYAa2tRqSGOVZ9qxJ
lztrjjkbEnSVUlA7YSiHJSQbnuCD90zq+FZ2VmYPyXd8vaaycufccU4GLtsAmVZ0nBEGazpuefiL
j7YJUg8KkNKsaLnQM8c5D4jqyqAR22p6nlCUlrE1l5kOKGnzswttzU+PHEYHzYo8fcceeF58vJJZ
ZDibm22Mu54GRKpAGZtoUJzoCpy5VWCPJIJR8gmEkmDjoEMMECzZ0rVDmkQY2blsHyMB5X82LSAC
ZDGxkilcA+ClyGYwvkU8aySCZVU/jOMvco0kyNuLFvJC2yLk5MShi+VM8QfPyGrFyZHXPxvxpDKC
IYopgkPbA8YWbOi8uNhbdgZEaNGoi3LDWL5Tv2Jlxo91ViKhbmyDIhmxmEkE8avPGYZjzo0jyCgX
8SrFEuYGlSLEvHGjPitAZMSLElV8mKMyoO6slFjC5PbU08Slpr15QahzA8U2RywrF1xrNLhJKcbC
XGejoSBU0gszFjECzTMHkx4goIoU/Q/qzGPcNyyBNPIY8fazv0ss2570yaZq3ixjdZI1RibCOMBE
iFnhiZPx74+Ljfjpe1Zo7kxoOyNoVcvCorFieIO3cyItp1AqGNmcBTIOlgwRQo51kBSjYrBhkPBX
5QcEk1jlxW855kGLCktYiDv8kLgkRpLIZX5KqRyzjA/JGdlbrGsu058WPmRy45Td8jHzMsAWzII/
HJkNeBHkBksr5C40O3SFzjSxo00cZyO6OFM3KbKmALHGx0xY8x5MSLEzJJFzN0PbBPAkgczCaeX8
mFIbAVeioYbvtC5Ue3bgbuqSLM4wsfafPnwAFVy8TcWjnftWDIuMwf4om7knW8OLPBFNt8SR42T2
LDxn6O3D7egfRt6HvR1tXLW9DU8XWjXPTnR4jW4yGLFPTaIjfhtwPKkYkznvNI8izW70ZjUxtUY5
sSXtajzpuSklmjQ2RGY4mL4QavqavfX21vrz06UBVuD2rnXterWr25VcUTQOgPM866aXOnKva9Xr
3sSK50dNlv5dx/8AQ9vU9ygYNhyVHsyTti7NgwU+4YmFHlsct/26HubEgZfw5IagwZzJkw5wqadl
SHMnSOPOyZXTcl8r5uMgXPgNBlkXxwihDDUmDjy1+3YthtuOg/ZwHOzFmk2zM7mxc5i77rAPy8tI
13XMjEe5zyCLesRqG64ztDn40pfKxo6jyIZKtVxry9S3p24QdL8N+L29UaWo/S2+iz8nwQ9xACOa
bICSTgPUsxOUzjtRjQY3wHTuoVyAvXUhaZwBkTKpDEkc3lxMeWOTZ0rL2/IVzG4GPkPFDPkZuaNl
3eMPBu+2ZQyPj+J+87lGuHkbbOXpcOOnZYkn3RVrImyJjDJ2nBzjG215IcxSUW5M1SxXZOQy8j8e
LEwHnk8McYdlFMqsMJscy5mAHlxG3UnCgEVbpCcSbGm2pljyscuJO0ThUyCacAoRcfkOjbjMs+Ti
Yqy4T5MgiWDHiOZIMePzMTJFjzmTy4kkbOsqWJViphmMhcyxSBy1KKYKS1lO9Y7Rz42JlZomilgf
YsuVlRVVct+6RjcqSrK4y4W5xQKcdYZmkCG6kAjNy5MBdszsp8yLbk3CXJ+P7VNDm4rYWYpNRm1Q
OQc/FE8UbspnH5MCuCFYNSMKmPaSrSvND90Uy9qn7Sa3HJkeaL72WIClW0kp75vHG8uQuPGVjiVF
yTHSKWhhl8dJlnsbc2WPbcmWag6k1epJO95pVDr3SsduyRDFC0EsVyG0lpebwDyZn7RheadUkXbI
YocLfIkfB0mUPHhc6JYyWu/IUnMMoKgABrV7kA0Td1juZI++o1VYgvdXRY4jJKYlWrxxmOdWYAg0
LVNzMBUvlkhnxl7hNPGYsqONHkMssUkFsb7EZozDkTXKL21PIpO1ZWNMmQVLfG/j8cmTJt+JJHJN
BhZuTGcl0nzIwuTnsEWUlnLU7xwpmZcmVJh5Rx5BkY1PnYyDJyZcg1g9oymaztOWRGUjMjyJZIyv
bCggly/H+TCJUegNOlbxtH5I27cC9SKJlxUG2x/G8hM7NC8vkcMC7imMI1zI7rhsDAwuuPjxRPsm
SMjCmiWRJ0Ecv0dvROhrnqRxctDbg5a+19b6Xr20NdavQ68HTQ6ctPfS2nv7UeLdULYzEk7fEI8b
U8ORMsQkmLlWJbJftjbnUCC8hu6CwhF5DXvkNZUFyiViwqoNEUb17/Q8tff3OtuC2nShR6exFe/X
W1AVyq9c6B5abL/s3E//AGA0vf0BwpO60uWwp8p2Fyxtpfg8URrtFS4aOYtvx0Mu2wNUuHlSqMGe
NBGYWxZMwNJnZXc+4SQFdxxe1dxhYRTRyqXjBBB07EFEBqfAxpD+DidrbaqlNox7NtKxt+1ZMksu
Bmx14dyaV8jeYw245cch3yVak3kw0N7w6/dcUUM7DKpl4zm4sGU6c/oraDitxg8NvTI1t9dbi3HI
E2Q8nYIBkzQy4Jjk/LWKAnulZbUDXdY4s8MKQTxyryFE3ABoAVJJ2CaVVMt2C9xOJjIa7S0p51kW
Jkm/5aSwCTE+I4s0WR8Tz9vw8t50yZMmRnfDyiFaARtlbhFX5byFbMGjvTx9tRSFTg57QthZaTxq
SQzdpIvRQrJK3lnhgGPjy/40kjjDwzCFjhxGSHGiVZP8dDPWKhLDKmR8dwZJItvhx17rnLzMb8mb
e9ujOPuuNlF2BYqCbmXOjh8Z3rbjkQwZkOTE0TZODE7SRNkRzpuH+SBpL5MLFgvd2g0F5KqrTMaB
BDkAZ0Ec2Ory4srO7ttb9mS0xMMpu1cgUkKPnIrrflhm6R/pvatygjORh7NgYcmDlRwzZO5bfjQ7
pmrn5686QgVEahcqdywRbGyGiklxE7UKmgbVlc8fHcOJkaRFb8R1KsprLUpFixTwvnZYx5I8uSdZ
fFBQtImWY5KSNolgR2jEVljWHxqrSPHhli8zYqYWR3RKwZch+yK7JRQuY+yFUnaRXZXU2FHSUfen
JXzmxq2vPkyqyMiQTYOdvW342duk+doDUkgQLeKaYdrQqHeQ/wCVFstr0RemWxCcmF6EZBCkV23p
1UKAKNxUf2iQFxJUf2yKptevJY5J+xX7TLkMy6bhA3ikR0ZSQYcbPkhkO4oVOajHLzAJvyGjwMp8
Fod0cybP8niwsmT5vsywyZ225DjNxMZ4908zvlQqR3SVkZUWMMnIlyH6npV+BI5nMUucaES2tdpv
9v4GLkQwxKBu8ERyW3AiUZyStJMsSQzrMl6JNbztIlrbs/8AIDRrMm2LNtjw/LM3JEaeRspkjSVw
VxAUapIJ5J/jhEeK/Nc6Dwz1bW2lvXv6VtTw8qPoW1tpbS3CaFWtR0twX4bcR5ce4PeoEEuUqhRp
bhlcRpPKXPaaiH3Z7i1LZYkHcSpVIBYMeSip27nx0u0SXKKAunPQX0NH1Dyq1C9c6I5Vfnob8Fr0
RysaHB0GgFWo621Gmyn/ACbj/wCgOG9W4PahqLaDQVz1vrbiKq1AWqWFJVTBxkeTCxnD4OQyDZ1R
fxjA8MW8RgZW597bpPFMN8hr9zRAmZisq5mM1CzDXnR50qotPBC4XAw1aXbcSSjs2O7S7Fjkvs88
zS7RlIDiZ7SSneYwubniY7rnF03dkm/d43eLdsV2O6YQaHKx51M8FxzFuK2p0tpfX2vxHgtxW4La
2q31l/RyphBjq4J7C5eRYldfOM2QRrCDaSMhL2ruAEbffthBlflQ50KchFmyVjMhIoXcww+VouxQ
gARiLRWUIwdvj2wbfPtgwpMKptyxfxdyG67tl/H9lyBl5Hxzbsit3+KPHWRtO5RO2KVZRLGoyQCG
icSxWMchVsHOaJsbMhyI8l3TG8c65D7nHNjr2R5kreRpEHdLE/euMJI0xZEabHyXBwAKeHMQtBEW
AzRROSa3rfJYXZmY0rMpxtwnjdtzx1qLJaLJ2feF3FMvccKBQ0aPj5izR5GNHO64szUzpJH5THJt
uR5FD2peRKdoBJolqVOwNZgwJrd8Ropor3x5CsuGWlhkkeGQtR0h/wAuIFKNjwgRJ+kVuERkx9y3
mcR/HcqXIGHsGJMd++N4kOMnRSajNIaSSwz8AocKXyxBHicdMU+aCLIaB4ZgyyY8eSI4widK5VuO
4jHll8uZLjRmKQxpkCKV5Y54mxpVzJXr8qVFhySrSP3NhY8AjRQEycNZVjhjjPtkp3pkZEXllyDA
HmklEMdk7RV+fSjUh5yntgTbIsrGw9vhwU+OYsL7gUuu5RJDn0OlgwyQsVGVnjXFm7EJZ16UzWpa
PQAV00uRTktXSgTeiQqqrSyKGSSN2aOzEkcpAHWaCa8rhtIk7jLIXKwCZ49vxFErhF/WzsEQ5oxp
ocwSYzRJlHYNswU23dttwp8LIxsUOMTCJh2PGyDN8ewlI2KEGPYpli/j+Uyy7LnJQ2jdDJPjbhjO
WyQvlnA/IcVi5uDBQ+Q45r97x5K/csQiPOj7JEjneNmEZhySxxsYtJhqyiBUjxpJWjSNYxpYX3na
W7ts3BcpZYY8iPaYZtvykzRkZUWy4Jj3rbk2uRn/AOaOmXLJjt8Lmbst9ueZvyLcFqtpb0Rw24Lc
dvT56W4r8J9HrV/Q9uG1cuE2AzZa2eLuktxe9Z0nL39kPaJ5fJIKb/VEt2YWVaY08vaO77oJVDYq
XocHvarVY1bhPodODnRBq9qNWoV76W5Wo9AdL20tRFCuWhFWoW1vajWzH/JuP/ocQ4vfQUNb8HLh
A05a2q1W4OZ0eKOSkhijEuHDIDtMMjSbJiGpNuymjG3bnEvdLEuPm7hFGm4ZklY+9wPQ3bEZodwx
ZiZoVoMrVY2tpbgHKpIo5RHjwRVNgwSmPasJA21BSmwY4U7RkIn7XuUUXg3ECXI32FF3HOWX94y3
kh3xO5d5SSk3LDaKLdMOUvkQRhJI5Byvw34R61tRVtLaW+mPEOG9ql3ZQ2dnHLQMysCqJcu5uDOQ
7x/aXlJF7kkGlv3YGMYhcuEWwZlC5OQVqKMRA907GPupSqBI1NSv5DKfJU86JjR2WPbMc4+3fKs1
sXGw8OfLx9mWHFxmUWx/kuHKcjJfIdmIqT8eSpNjwZhN8bNsnasjGkCS05cGKcA7duLQSYWfHLJn
7VIzbZtSwDco5opZcefNy8XFysXHkaN3kY902XFAv7vhU26YFsbedvdp8KDJj3PDnxhH2kbxEyZf
BH29iIznbMPLhx9ykyJZoMifGbH3mWKfG3vEyVikU1vc6CKTJsdq3SHHkiyoZm7eakPTkCuZrrRt
RFhlY0eXFPhy40t7PtmUBQfuGVjMxUfaRWCfuklBeCWR4RcAUyh1Ta8XLkwsDGwYtvyoXi+Rbrh4
+HGaBqM1Gbi/JGFszDMTqPOs2HNFWPM0SZhR6xGskbC9HmH7oxLEctMnCWGNZIVyt0yxjxnHfw5T
hXSN0WJwXk7S6xxhwAKXdGEmRuInnWSMk01dxYzXZvB2PGbiiLMepNg/Osxgsf7yIm83fDgbtNDl
/wAumkV3aR6FDkc4AwoytRnmC9peYV7SEAA8udHovcaIK13G3/ydTUfW9SGwx+6SYIpHKgKNOVvI
6Ot6Ud7SoIoI4vMIYEhR2CiVzK4AUYsD5c24bKs8uUZII8HatwbHwd3ytorI+RS5rZsEy5Kly+JF
2RTyd062uzs1GXtklkmIxlmQ/IEYt7wYGVNWPgw4yyY8ZRMbbZZZNpgR2hxguLtmPLEuwYzxtsRL
z7Nmx1+37kKEO8h5I95nONl7tjN++Z6sPkMoZPkKs6fIcZnG/YjtuL44ycHd4Z1kycPIraJIcXdY
fkG0Ouflpuk257YkJFwMmwb4/ujNmoLjeHVpT1PDbXrqeA6nU9K5adaI4D9Jz4hXTT2o1bS2pr21
9+VWo0avqdPfTJkKQ5sjX2uER43K5r34D0yXLOAKIp0ZoQOaiu4ePHQmpCKUcrXOTYKi3aKNax07
U1PD768tLm/OhpyFc9Oelqtr1o1cVY0K61016171arUKGnPgI099Nm/2bh/6HpjX20FctOtCh6J9
YaA12pQPKTAxpWXDxQr7b938fxyP2iTHkTbd0Dl9zjkiztz7o95yWdN+TyJvGKzJueE7HMxQUkjc
cvQ51yICqKmx4JqixseJZdrx5T+1YPil2Z5TJsUQK7RuTStt+bjmNfkAx0y82F5tx3ESY+8ksu9S
Mp3jCEcO5Y0rnccJZBLGyq8b624eX040HoDhtpbhtrm3/ECR9jN3VGhUzOXcWJmJCTY6IrkENzqx
vbngKFyCwIRRa9knnqNSTMGkbsYog7qWMLXeakdicXHYy76Y120OL4W4OhnjjyCVSKLBzIcnHy9z
kwqfbdvz1fH3Xb6g3XHnMadigcioNTwhkzNvDEGzNixsHxgpxcrcMU4nyuER4++bdMGC5CZTTYWV
HvOBmx5LpHFhbhlJkHMhwl27cY8+F9vwpjk4cYbEysjapshI54srF/Gk3vH8kB5cPxyPDkkVoMSP
PzZNwyYsKJFlw8ZgIGx3xsyWEbjnTZKMjMWhdTg7hDCzOLqnatxdjzsxBNISzFwGyMeHITOwpMaS
KcqcPcXFLj5b4yxyxP7YZ/yyj/LjKVhoVzvmP+JlT7/lPkwLLmYcXw/bfDu+0ftOQtRkVG3O/IGo
Gxe3IhfHdFlWJNwiKTLhvjYzENG4FXNOHYRwlT2isvxiKXb0yFytsxpzPjoaMTCWOENjyoyt2KcS
M9tO6yRxmIhIIRWLOripz2xxiyixppi0OKVIptJOanm2QPLkS7CzzNEq4+wbfBh7fveJDLiC9Ch0
kkCCeU+GAlmhJLJ+tdJWuVvU08WNHPvU8hfNynMG45cTYmYmSgHMvR6i5p07lxoOxALUaNMeUsoa
ppqW70WEYx/KSq9oLADJmN407VyJO6sTdJ0yMnPU0rRquOpaDeZkedUZ2tHHWK0bFSsUWZAksyr2
rGSrjxiaRysitOWXJdV83OXIAo5chDyzSwTiKGdoxmBO6Zy0khz5TjN3kPPMZhk5EOPFA6Sx4zET
TIFn5U0UUlPhYkgO14BU7Lt/Yuw4QXcdmlxYsDEizYxsk9kxpxlCLcIi+BuM6ZGJkq7QZkow8R8a
TD+T7h2ZXyGZ4hvOWgXfpFZd+sV36E1+94tNu+GKO54QRc/DMcWXjTLHkY8p8sfceVWNWq3oGrUe
A6876ngtx219vQOnTQ6dBVtPbT3PFy4ede9bhLYKGnyo17E5anQ0KyH7Iibm9E1GzWlieOUNYkkn
EYgsgNMCFHSdrvAl2gW7oLLreva3oW50fQHBarVyvXWhpfXnfnoNBp0rnpccWzX8m4/+hp78FqtV
uK1CuWor39G1e30I4yAakxcaShhYgVtpx7/sWHZdnkx3XZcxa8GVA0Db5Eh3DNRYt9kCfvLiOPfN
vdRu2EaTKxnCyRNQ51b03jjkEcUUYfbcOQzbfiyomzYvfJs1qPx5fD+0bkJH2/c4WU/JvGdz3SBv
3ncFK/JlZRv2NGU37CkJ3bblSHLxpwuRjs1wOO/Ber/QW0HpDhZQy50TRMF8alu1V5hReggNZ0i0
zGkUs1hSn7sJUEQFyOmRKoFlpvsEcRcn7nJELrKsheTuaGFYiDJHW9bwmUkU5STFzZvy4ZI/Dj45
yGVVUE3LbYI3TcnhbK2vDzlI3Haji5ePlK7LGrtFNFu+Vi4wj3TDnmkxnhDIrhkKGKVDS4eHKIdm
3SNZM75HCsmbtsxih25i7ToI82B8vHXGJdO0wPHOZsXtraMtwmVCmRHJECNw2GQ0ylTrjTmCZdxw
8nb4YsWF2kovcmxoDtLkWEYllmfIjGwY8uTkcjMxFczQW5bkLB6ZAqyd1Ke9cmFZo87EbHkglK1t
+9yQpMPyMUmwwnbzyEGRFsNBW5xCSCP49hTPjFIHVlkX5fnQTTKbhL90Zq9ChQ7ZEZXxGyHVji5J
rIjEE/d3BSSC6ir30kjVxYCtwyPBj+bJnM/bC0OU8KxSs5lRY41IpJIo6hxk7MKJUCRqgrL/ANcs
gjSGLvx5I/8ADgdoq/JugqQ0ObtNHFk425R5JycuOKto+U48OPuW9jMiGns05LTO/jxk7Kxz9yAk
ryr2b73C9o3HKfJng7XVdrypCNjy6hgycDJJFiOZHNeZC3I6UTTuqiaYtTyFqCLIWIUwY7MFQIDW
RL2iJCTK/YuFjQtG+14sMmHtGTuOYPic0armSwSLBCZPJ20kZZNuX7mCtU8IWX8bMQxxyCb8DHLt
EqT+Yo0kUckSRzuZo1iSTb82SoGysebNiZ8h++NMIHIxIlLN8glU5UefG6PuMSVJM8r7WLY+CoMs
9u/ibs7cvCEGac1rPM7v3yGmkneiDXOiTQNe4dhTxJKIsAAR7NmAnatySSbE3NTIuUArRsokxGCj
CYiHCK/jRyEY8z0MbdYljg3tCj72S+XvAZtz3SOhvGZGP3xwRv8ABb98xQV3nCYDdcEn9ywDX52G
aGRjMFmhc3FWNWNW0tXLW3ocuA+n7c/QNcqvR1PW3PrR4xW5T3baIe+fS1Hhz5OfSiaJJqGUxSTH
A3XGyQBJYEpEVqOUEMO6nBQE3bGSy4qfdw20vx3oejYV01sasatoOtqtVqOnSvfnwe4o3r2PSulH
qRps3+3cf/Q4RoNR019uHnV+Ia29D2rlVvp+dc6MEDEojK214TGXacKUPsUMtS/HwS2x5LP+Busb
iLekZdw3NQu8bhEh3+VI1+QY5H73irId3wLw52JMPysa4KtVtLcQq+lzTKj14oyo2zb++baMOYfs
O3Kv8eieST4/MFOx5sZXC+Q2Eu/Qu+47zBR3vcmEfySKyfIIFCb/AIEjfu+2UuZiMElikq4qxq1W
9e1W+g3mePtPOpHvQWkWjyGWQF6sSAzNS86wYpPNyFSyhVICVHG0lSlWHd2CKEJSQtPSLGtFUjOJ
G0kckZZd2xUxcyGCSZtuSFJf23Nun8khU5vyFQm97vEP5PlCj8ri7f3rBUxfLZlORuW2O2H8mxyM
r5HgWz5jl5Pvtu7ZGCr4cc6EBxJEyVDORW27jEsePmxS1LhYmUuT8U2+UnYd5xKyW3+IbX8ojx4k
3szZKZJAdpGSYnGzPN458zb2eWRfFDufxrHyUzdvycGTgvWPlujK/cL0WqUjxwzdkkJhlkxRAsMk
kMQ/PxFqKVWoxi5UmlVkJ5n7TXZ20VIXdMQZMUiNE8ctYG6PGsUkRcwJUQ/yqQW0FSr3xy7p+Fjb
XvU+Rk/tuRuL5/wyNIgCpQ1GaU8xQrpW4Sd8KydwxXBDDzYmKwK35KEZq56PcqMCR5ZJcWAHOw3G
NOuU6SwxznxLM0iPXaxO2KzLYXoVl9etL3hwGkqKIRrajXvIaDdqPiSZabZts8M2PtK7juf7FtJh
zsT8PJq9ZLyLHJiZKMYW7u0pJFyWLoBTX7UULW5zGLDxsSTJODBjhwgFdorJCpHE0bKym/ZYKtKO
duVZc8hYuxBdpCkJNGRqgxAK9qkftFzPIzKi5bTGPZpnEoz4pZvjWTGM8qAu4vHk7k7BavQdrYKd
scoWzCLJiUKq4kTM/bX21JFFMy4GIlBUWsvIjx2R82SYwZkmVNlzJnZWLjkY8LxV5kgTImeeax0h
2zMmEEH40MH2pe50vqWCjMyWkYgmgLaXOl6Kg0Urx0VYUSBRlShldtYe7LG8UqSx1fTxQh5MTFla
Tb8SSn2bAdX2PGkr9hXtbZ8sH8Dd0Hh3uOhNvq1+47pX7xkrQ3bHBGdgCnn2ZzfYnSPG2WQDb9t7
22LFYHYUajscwr9qz1P7fuiE4+9gAb4refeUps7d1I3XOFfvMyqN6JYb3BYbxjdq7tiM37pg0u44
T0czEFLPAwEkRPKutWNGrVaiDqfQFe3vXuDXOudWoadNL8J6+9tJ3CR5kvfLs0JWH3q9dOAmwyJO
+Q0dQGNZDlmjWGQAvC9gwjlKmcHxItzCnLHTtFjxAcFvR5aWo9KtwgURarac9DwWNGje+h4DQuKt
RojTZv8AbuP/AKHAL1er6X9Tr9F19cfRC+vOudFVahiYoaXb8SWl2fbwTskKOfjmL2/sE5R9kzo6
O27oHU78p/K3fHU7xuxA3+SN49/MhHyDC7V3rBIXdtvLDNwyUlierjTnRrnrfUE6MquFiiQftuAX
yNpwZx+w7aoPxnDNN8dkLtsWfEP27fIqUfJQv7jvoJ3zORl+SBQnyTBNJvu3s37vtlhm4ZCSxSAc
+C2lvoZMiCITXyJZASwt5FW9KtqmfsWSNpaliMRN7sABtsEc0sUaI0rqq94UrGcppe6pSIhiwLaR
vvlDqIrkz5GCr4eScav3HFmrclXJ3LYoYoxBgwyzRosahTSXonl4Us+PjNTbbguH2bbGrMwsITR7
fguMrbVjG64EKVDG8kwlmxpsTfcvDmRsDeY3BV5ISpilKnGnZmjJES86ERNLCaytrwZ6yNhM838Y
3WOpX+Q4Qffcl4f5FC8WFuBeLJP5UCFiu4YCZkW47fLhTcOHJdKdgFeQNDWJkFTtWSrHPLF8+SaN
4d2aM4W+A1FNDMipdHiAGXN4VjkWWIc4ZEJXfNuK0rFTG9YuSRWNyhGJm408ErHJ0FDrn4EU+Vtm
wJgzbU6VO6QR5MokylpDyTqundYTTMTFADJA3ZPjEGoftlPdeQ9qxN3roayB3Qy5uPkVkvGUIaLF
xE8kssAEmQnidUldsR5cSL95xyysGVeubyZFNof1RKALV7EUeVPUn24+PlwY8QlWRcHcY8LMG+bV
2Z2V+ZkirVzp5JDTL3Pa1NySEcqY0vM77IBB55fFt8Jhxu83dwg3ae2JCMlmiVxGbVGCaC2r2mk7
RBFAiOFY+SFax8pgIsUxyaOwAyJC7RoEWV0d4Ux5YJYU8exbLEVyNtgxYMnKbJik7YIyaRCzSLJH
NExRcxmEHZ240LebHnyIsWLGzpQFN1HKieU0vaJJCGxiIzj5ZjWSWHLypGLvmyxYtZGRJO9RxPK+
Nhw4wyJfEkEwyXyXTGg/KiEcmdMaTNyAY5VlXTMJEXKjVtL6XFXpn7QdxVTNkySNzau5BTOrqpN9
lz3il5EcXXh51c0bEdqlVxMVC234bNJtOA4fZMJ6Oxwdp2Mhv2nNQHC3dKC70KbJ3qMfuO5Rn96y
O0b6jEb5jBv3nGAO47fcZeBOF/byRiYTU2BiNX7bhW/asYqdoxVJ2XGYnZlCts1fteUGG35yB8Pd
wqxbmGJ3RZVm3URpl5pWXPzEpdwnVxuUzn95hCncwqDd8RmO54qsNyxGK5mIwGfiEmWMKmTBIWdU
pWR6JA051zq2h60OE11qWURIZcjIaaIAmACkyjCE3SQUu5ua/cAKG4YxK5WO1KVarVlOUiY3blR5
VeudFgqhfLK2Ojr3GOirQlWVw0pjWFwZIgCY1Cpr0qxq+prn6BGluA17W1F6A47aGrGrVbg517Ea
EUOmgArZh/l3Ef8AP4BryrlVtbV7aDgGlvrhxWq1W+jtxe96PMCCCpcPFmA2rbhUmyYUjnYcQgfH
E7X+PzXOyZkbDA38J3b3C7Z28xH993MqPkgidPkIKfyLALDeduKxblgShczDYlkAHMWqx9Dno8cc
gbDxHU7TtxH7HtoB+M4Bpvjas/7DmRltu3yIeP5LAi5fyGOod53XuT5HL3L8mxTQ+RbaR+87bQz8
BgJ4COVW9GTdSMnN3UMqsWLkguisSPvQcqyFHiZDjwzM0sr9pIj5Y4kY9xRXkCLBAjgy9xJEYji7
6C2AhBkkaKMZQ78YxuBtyztFgbk7srlxtEDvDjxhKUAUovSqALgV3E0oFdtMErMg2+WpZ8WSfDii
Ybh8eWNsnaI8xJBKkhINbSu4HM3nAQYkM4epYTUcjIYs+aRdpyWMy2okAS7xg484gx2OXlxw1GrZ
EsmNj+bP2rGWDG+Ow50Lbd8hwaG87ljNH8jw3G7Zm15sTABuDEBCk1kTXCv/AI6uQYsqVG/c3mDt
NeKLDlo7TEximkhfH3oRibd5syR8PdUrBZZKCgK0S2mVZk3PDOLOrEVG5ttW5PEY89pHnigM99BQ
rcl7a3D5CIm2nOfKi/YJdwi3jYX2l16JyMfVdJX8aRYWfIPF4kMcbS45IeQduQtEXCgKCQKklEaf
uUkso3CZkTbsZgMeETTwBaxEjukxRjG00mPIrviyyJlfiYpblXeA2at4o2DLD+qLrRsK603SS9st
u2LMxMkSYEEkWHsnxmDLj3H45j4iDS1Wp2odB9xa/cskaAEEEipposaLIyZcmTasBciR1DLHCqLJ
G0py182WZ1iXHmE0Koa6aMwAd/IzuoWaRiMaAyiMeNSSTRNqyZu1YIyBNIwrccHKRMV5sLA24ZmY
NpkbFTcNwjMOKBTt3tGne8+MIKM5aoG7nzTWVnNDJFPKgXLZJcB4ZGDqaJrKykgSTcO6naVpYsTJ
nMmJMVixFx44gFTInbIm022ARwzp4seLM8whcI2Z+4LlQwAsIo7pDjOYoDiz1PMIYl3KfKmvyJom
1czXKrigbkC9ZKkxMCGHIFya60oNMCrY0hWTDl8mP6h5elz0vQJqwu8MMtHFxe39swbHasPvbZsU
k7HB2tszlhtWZGfwt4VT+8IRlbrGRumaR+9kD94jjcbtjNSbjhuq5+G580VgwNcxXOiAQAFDQwsW
AYLi4ymTFxpAMLGr8DHVv27CKrt6Ksm1YrKdv8lSbZZ1wHL/AIEqyxYuTGq404Ijz1hUZPZK+esA
nyLT5OXHGcueMy57pTZ0kTtuSdxz/G7bhDc7jGhk3HFdcqbKnVMuaEpmESL2yI2JiNT7RgPTbLDR
2mVWbB3FKZMxD+aUpswTALevHMKIcVdrlu2m75ajUKBzBVHVlkxjJHYFmeowe7HmS4II096POhQr
p6o16a2oUKtyOlteVXomr0CdOdWvXTT219+VAG99Nm/2bj/6HDbS1c7cNuXPhHr+2g+nvw2+i58N
vQ51aiAQsECVLt2FOzbXgsq7BtqiX49jSu/xyNVPxpwE2ferQ4e9wSytvkLz5u+II983BoU+S+N0
+SRgJ8kwZGO/bZc7ttqpHmYsscWRBOFkiduVc+O5q5oAA/jY/eds29mOy7YWf47tzM/xnFZpPjkw
Muz7slS4u/xLPJ8ghSbct6hjk33MiQ/JokUfJNvIG/7URn71t4xzkxhi/ke1qEjXM2OTFA5IFqAN
FQRmxl8hY3EZhYBmviKyRxT5ipSF8lnldRFIxYqAOgd7V3Gtv2zKzpcv49mRq0JyAy+LDhwEwMWI
qI9qw1hwhEsaW5ilYkmiwAy9+SIiXec4rtE7V+zwqj42TiVFu+KGG8Yl87bc/cJZ9kx8itt+H5+R
kbX8by9q3L5Htm4ZT4XxvO/MlDY80sN6Ryh2fNjjnG8r3pOsi7tiELh7jtniDfkNmH8MfvEyMk8W
XBLgPE5zN7cBitZWBPOu5q61i4cuVRBU6YO1ZOWpgERy1mjGg62BEYJdscwrtW5RsMvZiKXJKSFo
pVzojjmFsh5IcnMXNfJjTMjcSIalhVzvmCJoCO1keo5CDt+UJYLaHQVkoJIG2Vs1tswFwIIHSWL5
jkQjFiPJaipdASC+45zxZCBsZOYi65A/zLQppI1adlZXZirYbK/ZGXeRxJOhFRuEXHsUzVEZ/Jdl
2xA8wxYhNRrIjZXALRTNFFJE/wBx+yYcxeulMeTc3zOciZEPc0grYtxxpMbd8/GixRVq507doJJp
pVFY9ioMbVOztNjbplQJ+7ZJrLzZcuu1a2850UX5O60crdabN3GNPyZjNHLnqqS7klfmboKOZuVf
nbiDJnbgVOVmU2VOaSWZmTMy1X8/Ls24ZQr9wy7tuOTZsmaRvzZVMO4COf8AepbZGWco43yHw4m7
78ztNu5kj/dO2I7kLQ7r45czfVyoVz+WPvmNAuTvsUzNvsbD9zhYruaJUW9Qiv5Gwo/KMULmbuuX
LDlwh4d8xEH8hw+1vkGLU2+QSzT73FLhiWMqJYyomjAXd8dYm3qERvusYP7nEpXd4o3bPxknk3TE
Vju+Kpl3vDIbesVV3HdIpcbDb/MaNGr8qFX5papWFZWIsdMTcC9JGALqKlHdUQ+7bFIw6vR0568t
T0tr71bjPrXNXNOqOBBjijg4l32rAYPtUDqdnjDnaZkY4G5hVTdUD5O7REbnkdybt3AbtjEfuGIB
HmYspLxihYi3LQ63r3Nc6ua51zrnYKq06LJSxRJUkMUtCCBS+LjuVxcdW3CBDD2yI8iZeTG+0Za1
BuWXhtjbljZFXrrRCsJMdWE+Hkxl9w3LHK7nE47ttkpYNooRbdUiYVMkFEE1ZrBWNeM1a4kheMiN
2qN+wrGz1jK6w1Ya21FX0NHgtzq+tuO2l9PY8q56XrlXKrVbg97UeH3NWo1s3+3cf+/rbhv/AEH3
+ltQoaddTx24xraregOC2luC2nPQ86IBCQwoXwMGR59swJw2y7cVX45tiKPjeIah+P8AY2PsU8Mz
bPuKz5WDvKy5UXyFTJL8lEX5/wAiMQ3ndWx4d9zGgg39ykHyGNqh+RYrVH8jwGqL5Dtkgi33a5RF
u23SiHcsCcw52FO0eXizGOeCUq8bgWNc6tenijkMuFizNNte2M2Ttm2sDtWEWbaIO84MqSriZiNj
w5xdJN3jj/N3FIjus8cb7wiVNuMBmxc7CKS5OLkiLIhG2nMjjgx0MxgLIHYsccJGrP2hCHDj7nJF
fG3Q7UwFfIZZoN2ikZ13nIIwsR3eWGOPHim3Lbo6/c4TRzNwajNvNCTeLZWRvWacTadziIxd5ajj
5qVJgzTA7JG+T+y4sFY8sm3tE0ciZ23w5kbbjkbaJt33CTOhVvDJGCPkefB+diSSGGSMOEdkMGST
WBkf4Hy1iQSQzCfEh/BbwQrh5uJPDixrCrZwgSDMxsl1yIY1yMueUfIMbzQ7fhRYsG7YYEmnx7Fb
8LdlMc0DeRMrGaB9A3LEkWN8jNMgi+0YW7TxNPhYu4Q5GFnbcUaKdYsB5S5kjJxLHHytxjqPf8iM
pv8AgPX5uDkx7lj+HIU0rkViZJhkgmWVNb0ByWZcPI/kavOgzZ48v4RPIhikx3BtUJBKG+t6k/68
f6QwSsg84cjukkcRxu+RuEg23NV1gK1kmaAPJHNUcd5JmjSsQAkIHXOxv+Ljg+eKFllilzZGpqZE
KlZWZyWNiKnHdEk4KpMxa96e1lF5M7vebFE/5OXDNLWP8LjEM+3y7dIDQINFgAW7qYSEFcZ3abHR
NwyRW3YiwRscbCmjz0XD7mrBxXy5G2mJR+35Ckw7orZeXktWCJkRd1dQu9Y1Lu2EaXOxWJnjKuxN
SykGS4GHHjCNeQ0c2GRIZHRAipG2VPm7FlHIyZ5Yax8XcJMfF3uaCHNzpsxsle+aZLVYGkxmkODt
+0PjzwwrjhYMUQvG5mMDSOVjOLjpkQnDKT7vIu3okE84WOMDD3XwjB3TaMlXjwQrw4kmOY8eoEx1
CY+3sd2wFxizG6li8O3yyA423qXx8N6XBw5TnY/jyIttxkij2/Ddp8LbgYsWB23bHh8MXfHKrBqL
WIdWo1fS9KxBy1LJA4yIJsM96YfbU1gCaVr1hQGWeJBHH6A6egRwW4D6Z9TnVzTwwyE4uKQ+14jC
TbIZCNqaOVMHMhrx51HL3GGNtyyI1G7wNS7lAW/NxRSywvVr1auml+C3Be9cqvesrCXIX8+PHKLN
nTzYkcq5GzMpi3DMw2xdzxcjXqJceN1m22FzLDk4rR5CmlKkGwN1AuaHcaCOQylaRAaMcYE2I14+
zKOLhLAte1X1NDS+nOjy4L62058fPS2l7aWrnV9LencCjV9DRvWy/wCzcf8A0PQtz4Bwe1c+PnoP
Q5a24La+2nM8PTUVz0twCuX0Nq51bW1W+lvx29DnR51YWEcarHjwRVFhYkLLtm3rKdm2xpX2La5J
X+O7XJI3xrbWk/jWCZT8YxDKvxmAOPjKK6/HJEb9hzbnZNzJ/ad5YHbvkDpO29ZAZN1AA3CKnfdF
oNupLy7k4XJzkRtw3FCNyzUb92np91ElPuGIXTN25pvPtnY74xitEaXwIwnYSR5MyKu45UcL7nMZ
o93KKm9QFGztv8uFvcmHM3yhp4pkw5ssbliSPv0sy5Ox4BzsuHbsQtiY2LGrXpxzkW4m/wCuiocl
cdFOWciSUYOKGbb8Q1jSTQTTIUaVEMm1SMELXrJgjyYo9mwdtzszfsbbcLaPlMe8ZU/x3bZRNskG
NgEtjSSRhwHZDhbkiR/lpJHtqquXIxixsnxHHw8zDLZYdwBJ+OnkWk3F2oNjS1JHJGJAvbNgRTLJ
scUk2P8AFsWGTtWKPdpTJkYrWaaNJUlxTHQUsSCpvUQJZWsI7Cto3Bo5CqSLu+JixzxZpVppsiVP
2uVUjn7WgzYXD7Rtswk+OwGt02ibHj6G9I9YObhww4m44mUOdEUKFbzj99Y/xzKafaDHFOwtW/Sx
vuqmoiAUNDWY2gjF1TFyTNkNF5BMXksDSYsaEjk+SQ25fkSSjHcSPhDsmgaCsaZo6inKiRO5ThRx
TZ8hTNjMwC37TzoO6JIDLGnehNJL4qZY7iRGkFSdEFirorpuOIDt+7YEWXG8cyfIMmKbKcmzSvCy
u8mikqZgHQyYuJDt+M8rtPDirNt8ubNj4MESTbCfJi4sOPE33KjPbdNxeAQxSTzY8P48C2uY4zTY
eI1PtuG1PteMC2Damw8i74eeojbcUr87MWjudiN2x7TblAVgliAmyFAwNzSKWeYduDi482ekaqu9
QpFlSTor40gYXu2PCzmOIIGKRiXKbIObM8k+KoEkUPjkllKnGmKtE6Sp8i26Wd1wspjhbMk8W37R
uGE2ZsWPlSfsebjkzbxFEuTOkuPl4ctQtjI2+ZEfhijkmeDFhxgRLKFwoXptvC0JUgGRvuCGyN9g
kD7nlvRyHuu4ZCl90yGH5JkKTDtY96mUqyTggMDV6Dc+V3+5WZ8WWOaKeOQJaa4NJ12HEvxX9DlV
vU5UdDofRIr21tpblpy9GwFSQQSU+BiMf2uAP+0lCNszEAj3ONPyd0RP3ZlX90RaOfjLSZeK9B0a
rXFjXLiteiNLA1PiRzLkbOymLOzcJsbdceehYi9qysYhlMi1Nt8Uplx8rFMOShMaQsGOMlHKwlqK
IOk2Nj2Mm1xU25QrX5WXOZMJpGhRkj1tbi96vpejQq2ljp04CNLVfnpyo114efGdBRr2q1Hg2Yf5
dx/79q6cfKvbQ17cHtVuPpQ+ovxDh56X4xxD+gD0ra29ZI2dzhOAQQcSJOwJj5YeMI2UxjxhG6CO
WPvOO00gmZQrB42uahIsL0OjE351YE9qKcjwrHZZZPBErPFB3mCHx4+HjSN+34RE+17ONuXGglki
2XGVZdu25GzNqxsbF27CGVFLtUAEvxTNOPLGSPiOGY8fkDilkDG9Fbh05Sxdy5CZLxwfI9zyWxdz
xpnljBLL46kh8tZ2aIYY3MmRg/7LnSSFJk3/AGPMXH+IbLkYOQWTt3UyviZO07pBLl4rYEkkYdVZ
ozBkGoZDHL+44+Tkq3iypdpgdseKPFruEseOZEZLWRULg090c2rLjWSKCUhNwy2igmbvePkZ8sRq
8jOykqTIZDDt4mikx58Yxym6spCsaxN3yIFmkMkm34sm5TZO2Zu2nEzbsxw80TYuVh1i5rIcXcIp
xm4ongy4WhmvSmpR3KjujY67wIl3Hd4qT5AQY9/wmrK3DByIf3eDHw9u3kZU0uPvm6NuPxrMwY4p
FJjNRnkDrkEeOIxgtD3UiEPy7RQrLmMMCZbyGNnmljjaQphsVnx8ieYwBo9vxViJs1OgYZ0KiZcw
IEdHEYjAY2DhTXcxpmpYWamh5eABqkIKlu2LKDLi1HG8jQZkkLDMxaky4jX5KvXljQ/lY1Pl49fm
4wVmydwmjwshkiw8aIMvICwuaH2tIwUBABk4MWVWPjY2KBYgWFEii16BqU2S1RqDXiW4FqNEA1KI
gox4ppJMTGUYm0LkRnYVR8Lbdxzcxfim+RNL8g+S48k+77rNNNujND+4xhYc+IjG3PBATMxXfKyP
yDDiKlbh4e+aaNUfK+2LDDCDG8bCaMrG6yUqIhmgx5ljhy4gueikFWBJAzsaOSJoi9DbHmqWKNJs
dJYq8uehO49zpueC1TbliYs2bmy5cvABRGmPKSkU1SdrAqUpMkqVyQaEitVxSm9ZUXcqySQPFnQy
1kRqW7OcURZtriWPF+lPpG3oe/Dyrl6B5cZ6Wo6CrkUwDBsfGcHb8MmXbYWZtrZn/BzInC7mHXMz
ImTcJpGi3KKSv3DFpMiCQd6E8BNEAibFjkXJ2fnHlZ2A+Nu+POBZgY468MJowRkZW1QtU+JlY4w8
zEjV82C8ublmpJWasTFmyGi21YwMaMBVRKFc9OXBblp72tRrlrbgvVtRVtRQqx4ed/brVtBRt6B0
JrnoaFbN/t3D/v8AvVtOeh9Aai+o1HByocvQGnt6nPjHAat6/L6PpV6vxircQ9Dl6A47VhRSd+4S
tjY8HyOJczN+S4phw9zzsTJX5C94dwgzhkuFns/aZJVDPLMyv4lv5WhKdpdUSKdZT7Dr9oqfItU3
fM0YTHLSKtEkUMNjRhMIRXAmgWUfHdthxtvyMeOeOTIhxdwlZZFciJdq28ylhEFi+N7XEceAQK8I
eg7R1FODQIIKXpoq3SPFZ8bIhFYmC2TklE8ccRrcMmUAbc6jEkSVMfIeOfCyvykUAUoFOquuRjz7
Y/ybeJZ8n4n3PsksSSL8syMeIYmQXMiK6gsjQZFY+TJG2bL+TUWRj5ERWJo1doKyJRHlycgWNIwV
cyV4Fj3GKRFmEkSZMivumapLZC02RIaJJ1BIKZkgqHcg6nDxsimhlhKWqwtKhZNlz4MOFXSVNx2G
HIp5MnDbG3Bo1yYNunXzNGcfdpoEzv8AmyOpVga7uSr3Ps8haHlTQQNT7ZgyF9iwXE+2MJMXY8xh
tezb/wCPfNo3Pb1iyDA8bBlhNBqMqK3kFSvdhSlgzEF4QVXnRNg+UGCMj5OKuVBPjZUXchVlyCWE
eOySQyoDHKDEjd6yY0Mw/Dg8ONiDHJAFHmZLd45AIFoC9EV73qQgVkt2424sFgBS2I7RSrI4mGWL
Jk4lBtvLBNuZUTGFS7rjq+fnxSJt2MIIRXSiCauwoMDR5rKokQntSKQs04LNG4AcEgsST+lWKSZV
xQFyFAFX0Y2GVKWKIEXLnsdp3V5Z3nVpNinx4s5QgXPmSfMJAGbKvjixULy7TGHGy4DU3x/CNN8d
UJPiZUIV80hI8l3lyMsuN0yQJt2aVI9xx1jx9yw4zj7jizVm7hD2YCzGN0Dq23qrHIy4KGTjSt3G
kKxB8lYSuHBkNkY8+JEjZZbJm23w5UyyyUENeOuyitWog9tRN2uzWZZCytIykFZKeIrQZ1pciUVH
lSCjmY8lZGI9uxgYklFERGkKCsXNmxmxMuLKj9O+vT6a/p216+ga6UeC2ttDVzegauaZEamxcZ6X
CgUjbIhS7YVpMXPhpRuQpM3KWo9xmkqHcY5QNwxjSZUEgMsNS48Uy5W0KCmRnbe673CVhy8fIHvu
eQI4sbIyS021iVZMbIgZYcmc4uzgFESNdbjW3LhPHblXtwHW2nv6XKjoNLCrVbmQeDlVqtRGmzf7
Nw/9DTrwm2g9EVz4RVuMelbnpb1fYcNuG/o+3EdB/TBwwKCWdoxl/LYQMkYWVuPtV6wsloZ2BMk0
LTSBpDQuWmyYIlKpIICqrlEiOPIkiaPnRdS7sSJGEhJESCARIn3Ng+NnzCqw/l5GUVUStIrxvtO/
w40U294TJlY+Vk7sSI1wsT8iUFQLi6cqKC9uc7xxVJuOJEY9+w4aHyfBvL8liI8gllgjXcHxIxDj
5En5uRtzyCGW8mZDEJcdIFQ5G2YeS8GBFjoAFGbnLiJD8gYuGimj3jYcby7XlYceNL8wwDl5OyYG
Ww+L4uLkZsT4WSyJKv3RtgZgjrFyQjthLHkGefGmxpp5XmhilXFyChkhKHmKjlhljO3YcbSeNYMg
wR4uVIHl1twpNIhg3MkLDFPRWSI91JkqkePvEmBMfk07lRibtibjgnbJfJj5gLZOGYpEapWXIEyS
RvV6gcK2xzAnUGsweHKyN4hik/fswNFg5+6ZJJUYk7sImoGsjHSYAeGu+QFJAwdFdIsRpBHIyMpo
gESwY8Yw9vkLDGe34MPfYAWvTQztNkxKskfjSLHFgK56NaulLzaNbuR/ksADoak5nPNo9wBfKOI0
Zwi0uXgLcFVNGCFqODjmjt8NSY4iGTGFaGQxSQsJIxRBr7r8zXaKXlS/qdgAptTDuaNewdQVN1Tu
kll/Hea7LCvcffQm1TyhFx0JOQ/amLgRtCu3YuFXxz44mYuTsGKkZklYMwVch5y0rtI8YZoIou0A
UKyZpFE8+JlMPtR8pe8qsVQmcjB2qNqOz4qxz7dtaBdtxJmHxwFW2HMUHb9+Ulvkit+6b0JMjOaW
T91lxivyBCYN0w8pI8vEUrLBkVFGkQ+QsFxKA7avzPLU0b6q3cEe1OecXVWvXjBLGNKeQtpBleOp
fG8bs4ZXNCSo5KwsrwSjdcMkZmITcVYj0ffj56nTrxW4joaOl/R66npxWHo9Nb0TpagNReiTRRGp
sbGYrhwJR2zGuNveOQ7ZlGsqObFEOVMRkRIzRmfGkj3iV0nM877bhfjoRXWr219+C1HW9X4OVcq5
ejyrrwjgPPW3OwrlparVahR5aHW19DqDrs3+3cP/AEOG+ltBVqPF76DS31luIcA1trb1LVzq/D1o
Chwc/Ut6FtBrfjA0tVvRUXZUjijzJE7N/wBuw58SGKKDI06AP3CYs0ijuRo1Y+FanxZhlRqUoEGs
mSSo3aKsWVHgiyBHO8ssjC0auVjEsvkNgBEsOMEVO9Zqh/68iRhcoePH2zcHyMYsqDFxXyXjhVVj
TurcsmbEhh3ncopG3SFcTJ3yWWotqzMxo9oxEdNtwK/aMJ6l2lFR8Uq215+OwzUmbHihjxlwWsXL
wZkUF0m3cQnJlbKMMm6fhJi7hK8kCyLmrCsmM+VjFM9wd0xI62/atwk3BSrplSKE3XE3uVlwpsXF
ZUlX7ozjzhqg8ixZEcprFzoyWiC1lY4mrHyWRmjsQKKipeePuua4h1AvR40ldDj7sSDHBIveAZV7
wsgjxts3KbCx8h58zIfCSOODOeKmxopAGDVLGe2TFIogjTaslo8hG7kp5I0E+840VZOfk5tNi5bm
InbsX43tv4+3/JNq2vbwnIRnmrUIwyxd1xBDJWRjyQOh7hBIYpM2MMIW7kHOt4uBiyvJEnfYjU1+
OGCOEdVB4DT8lUdrxfrvaYOpBqwpjam5HJtLPlbdmmdZcqeo4MnFGCLQ0NCeZIsynNy1x4EVRYcr
kihbT2IohhK1yQDdR9ygEaPMqSP3K5JNYw5Air6SPYOTPMSEGXLIE2XNmqPOgnl+Pblj+HO3PEhg
klWNZMlpnSRImxYy8kKXKDkKzMhYklAyBi46Ysfl8USRwKZpYiU3SOI4e45GTWTllFlDSViwGBZ8
2RKxvzpYGkCLNucCU+4j8pzG8m7GJp7EVDO8Riy4paxcJGWHbs+FdwfK7kF2cG45EtegeAjSPuLG
4q9RCwMgFNK7cIJBjy0dZMUhQ1K1qjltWFuMUgOFhT0+2QlmwsoOy7qrnMzkf90j7huGISJ4CbVb
U8dtTofQNWo8BNH1Trbh9uO1e3twc6vXU68qvQ6ac6liSZIoIYFkjjkEuPDKp2bC7ocLGgPCdeuo
o8FtToOAnl6XKuWt69uAegeEkVcV1o2vyr32b/ZuH/fo100Gnv7620txj0hb0fbjsdLUPRHCat6o
q1W+jtwX0vpf17UPSGqSTTROqhfkG35EgjwMjHxI5A63o0O2rzy4sbkDOknUIVaPHykyA6EOBzZ3
kBPLBkKKU7pQ3ZTSeNQXlYiwJCDG8csKNHI+17Mc1Zdhgjjdi82fAs2LiYceDBjwSZcyRJFGlqDB
RnZOOKy8Vo5ZoXVdtwkCZDeJGLTv/wAlGi/OesWUzwblCzZmfiAY205b5OMhPbkk42RLFHkRd7I6
52fGuTlyzTPlSzhd2wIYsremK4sEstJgPFDiRyCLIxRMpVoZd7+Ryph/Cs3JzXZQw3mGBMNMhI53
RJV5xti57quLlPMcnb8bJU4uVDWM+QDPFHlJ558YiaEiOeGY7ruLRyZOQZ31AsCfRSeRCmYrVBAZ
xJEUklEaKucIQ+Q0rR47yUPJjPZMiI+SIySt3LPDMJYOw4xaI4W7QmCTcsmWkx8rJeHaIhTQxiGK
b8eHJ3CeZcb5Lv2LAubl5siGlNIaU0DfSwniQdjX53LQY4/xisrHXIhiycjDliDvQ4ZERqW40Ol6
cBonbuQS2rJftolXdGfvp+g5tkziGfH3WaeSRIsAO2Vk4+Ex8ZIAEpNLkG9zbcMjsTbMXwxKbEOW
Io2oLQFtCaNqsBRAt7qLaSMFDM0lfb+PWJIvcykTRggMbDLmKiGMRpkEO6RY8sc+Mnh+L/H8WHD3
Ha8c4/sew1NHs7QjAUoAe5FChRWZLNFBbInjxsEvWXFGZctLPkRpG2MYCyLArJNkR0GaWGPHlUpi
ZNR46FcmbtM2X5KUOKDKw8cTVnRvHkMwom5BIrGyAx/OaOKWTHZjk9tR5LNUnicGFqsBqasbWvWL
CETIRq92gCQH0IppImEuPk1LDJDSPaopCKj3CWB8XN3Bx+4ZcNLvGC1R5MEwtzfFx3Y7biMTtlmO
Puqt5d2Vv3KZW/dMW4zMQnuUGx478Z9fpodfeuXoiuXpWq+lqGnvevc9dRer0dDperamj6Jrrp04
LVahzojTlp7DgGoFW4L8HLg5eh1rppzr206VfQVsxPk3Af8AP4BxnjNW9L34T9Db1rVb07aireoP
WvV6v6NvoFt3KDWXEsjZ+2oa3PIyxkwM2PLoEAb8ueCNamh86HHZosbHlhhLKInlChiwq9OyI9yI
zLYKGkPaEruBdsOM42JB4xbtHxzLjfAchRvU/l3pAe1I5cqWDGix4uw1akPZWfi4M9YVoROsmTLi
beMCKDH/AD8u0aR7yGw87bsiOSQN+FkN2MNw2zcMltswmgVJbytkSqgSbFWKfGzEONJHU9gBFA1F
9tjMWFEyMGQH86RGinVvLuMCwzYuVW6bS0g+Ow7dixZvyTbsN8nCh3nFm+IpBl7ljSYWQVSVG7o2
wsoRvFkhgGjdcnaYpavl4jJl48wXbYWd5Gx5twctLqBzPqYmSkBG4B42/GnOXjv22KlcpgEmSVMK
UpJMLGQx2dgH8j3xYRkMkSwHFSF4YZwGHSt3tjymN3qVyIMKVYck3WUUhpaGkZs06gT0v+mHkg0z
8Zpo8J8jzRt3LRq9Xoi9AgaGj0UWC9YsloJTPiyCOOC/iXuAUGpOnNahxVyxibMuNIm3xZudHBFE
nybFTDzj2uoWVZJ4uwvO0SYOO2VM7qlIbqpuTS3JqwrmK5Gj1FhVhbsAOjnyM1rljYRu5XGNFJg1
rVM/aIgZpZZBGuYZQNunaDHwssy1sO8rFjblvkEsFqlkCBQbMwUYsVgBXtkANGihFzMyPGghz55i
3kQvEskXf46byuuJkIEhhkZNtw8hG93YKs0bSVixJeaQXLE1Ezq++Y6rI+qMVaNu5JcdGp4nQg8h
eoiVowRSCTGmWirLSqXaYFVBIK5rFZJA6YeMGbLnMsnpQ5UkVLFDkUO+NpF8scORNjPi/IX8OFLN
ueZucOBiZ0LxGo8jc0b90aMx7nhSUkkclcxVzRiiZnwcR2fasYs+BldzLvClszcIyN2iFJueE1Lk
Y711BGltT6PL0uXo20I0Ne1Wrlrej19+uovoK6amrac6OttLV76W161a1c9OeluA+hcehbhA0vXK
uWg06ac9Dzq2o58VxXKjoa5WvXXQdeVbP/t3D/v8VtbfR24bepb6S3qXofRD1h6A9M8ePPOGZe45
/wCYUyMBok3DcY5sjFnEi6LYuORViAZW77kUR9iAlskFXUMalPYUJYW7yxSJXlqNHllvH3ZsnghG
dlFsfJleDG3N8yJceEynvyHxMZcaO5uL0bVn7nDhvuG5bXnJFD2wxZEcOflgs2CH70yTHJu+EueV
ZsWePdsZ0jWYJHl7nEGbdcqolhgRgCsGOVnyNrhnIl3LEqCbHykyImbJaJ2WKTI28zNDuEW34+TA
mVEiRYkrI8+Hj5FQZ2RhtmRNHNnxZmRmbNc7bOLJ8llyGnwjKUZEmSzRNj5Bvj5DERyGiEkXI2eN
6K5uGZciOQZ03ln1AsGPqhiK7zdMuZKV8LKE22zLSB42jb/NMbQyuzVYjRWZGxMkZCY11mOGqkVv
j5sWTLlzzMZkJyLeVSQS4dFNJQNqBryAVDKrtOQZVAueUMfJA3Or0a5DQmgBcm1d6kqoGp0WpCDL
hyrHKYonTrQUDSQmzHti/cRiLh5v5ULbiYZ4Pla9mdlyZ0qH8eTuICznuz8lsifb2gjooj0ULEcq
JsFFqJ0B09/cDmzAVHKJGkNlkPavOkjvQFgGsS4amPLKkMjooRJ5VLJmQSscRcnH2PEx1wtwSPxg
1NkqtIKeULWNC0rWApeVc6mycaKTcNwSEqHyKzBGjCTuYZcSu4kZtpgyZMSX42HrHiMBxIpo8o1k
XJzZ44jDnRtRF65iopmjx8kGeOZVvQF6taoJQtFlemsA4QFJI6DIajoGiSwaIQs+YWDEE0lHMk8X
qKSDFmiRQhAmv3o5jbEynEWIqPmDYRE7bfkIwkMdBWkL4kik5WTBS7xkrUe94pqLNxJquDR0vV70
2LisX2vCcnalSvxdzjrybtHX7lkIV3bGJTOw3pXRhwX4jpy+hvRoa9eG3B0q+po8+C9H0ffT347a
miK68FvoOVWq2t+I6ChoNBoeD3tXWrUL62rZh/l3D/v0eEcPL17178Iv9MKGgq/CKv6Z1t9f7D1L
cFvRjfsdciEggk7osRgydk7szLWHHnR1kRWKl2LO3dfJMn48J/wvzcyOKBYKwdyqBVnBklQdweRI
UExldInleHEXFRElnzMjb81hNt9qjTGh2/ZMGSHHYtI2HgvACDfyRxxNuW7ZJypd1lrE20wrMsBp
J7GdY5jHN8iwa2iDLGTuEqDBImkTdVDy9oKozMomkVcbc3ZkyVlZLRqCzUHYAs5WcPiZAQPUkQdQ
LGWNcLIO5QQl5/yZMfFWKDHiYJmpL5ETxmfbWMS70iYnx/f8/dN1eGJhNtmJAmUPxchkSZB3RNiz
g1BOpoOLK9cmHyOaCBSbnQdSbD0QKPHgzU0TEHExmMqdsLBlLV23ogioHKSR3ZcYloq3aWOPDg+P
PLDk4qLttRxSysmLNFhR/pSl6C5EaoZGikxSiyOXlTHAkMuIn6eVA1fQ2OhNgs0gkpVArpp1q2jy
di2JalP/ABgKtpMayG7cebZzkxYOCMKH4xtUAfLw8fKhIKmeJZUM0kAz8rHKBj3Itmg3KIBdxxGo
ZeK1BkYjQ0L17XocwgIV3S8QADi5l/XGvdQAHBPKEXGUtWRIVG4SAaYWbNAu07gcWCfc3nZ8p3pY
yomGU8eLj8gVFAUzADyx9+ThxZLtExXckmjw8bMSNI5g02VBkOmFLFG2FuETC9Z+CWlVFuTUzlpM
+KHJjjRIo72rFj8smVLH5kWVzl7cakUqwo3oE0HIou1FjRpWKnHmDUKtyKg0+JG1NhKKliCV2N2e
vBkywMZcXMqbEmhr4/kGPLzdrx8144vElHnT4OG9ftoSvHukZeRO5Y9nlo7PDKv7XuEYWbeoSm+y
hk3rBdo8nGkNuG5p445afBw3ptpxKO35CKI92jC5O5JQ3ZFpN0w2pcnHehY1ajwW19qPBbg539C3
CdLV11NuDlpytwX09uD3N9L8Htwm9craGjQ05WPSjpyq+lq9qAq1HgOl9LakV76CuepOnMH2669a
6ai+l62b/ZuH/f8ATtpeufBbQcAr24L1fUcd+fp29UaW4L1bgtxWq30vLhtpf6gVj28gvRQB9wx4
5cWbb5m3NFfFmo0HVsN4klWU5ByjTKWYqQMbFRTkqUk8XcXcIrwZE9Y+0TquNFBiJNlJb4zixR7V
NGsiblly4GdhYirJIxNbdhmJQLVYEeNCIGlImTxBpHUZGNBAAYWOJs8py23Hb4pJd3aUx4eRNJjS
9w3XCkgnyIpUpVWSJQbQizwI8qwx+GvLCFP3rlZ2BiMuRFkRY6JAs2RiQ1+RHLnS4/mxYI42LQxG
I4TKRmZsjtIzxCIeHb5+w7vs8eZF8a2qDFzJdxxIUmds+HN+JbhAc7bn26mVJU+6JsbIBMU11XJi
Byt3WOt2y2yMjVRTH0RR9GLJyISu6d1JlY5BdXD4mJLTbbIplglSuhxZn8OzzGSK1SX3Dc+0ATRY
8eSm24Tvt+JJgK+HvOXHEHQpS0pIpnLsJZO2d1MZYs2MwbAha6HS9c9W6Su6nyrXcDQo0WUUDenc
Cp2lyGbFmQXFTMkcMUnfXKjUnXNcKi7zCrvMOza/kceHPP8AJ454L07qtM6urBCXxldl2/GTGXbF
7fDlxV5NvNLhYktfgZaDs3RKObnow3RhS7timjn4jVHPC1BriXnUfcaaRVM91ljIWBHvXLR2sJCZ
pSVVZZfGjSO7KGeoMKURnIilezs6ZUaVhyRmTLVlkxBlWVRc86yCbLt2XHmYkk65W47z+NHmZcM+
KcJmGMEihnySEgiglgwp3lzoZUlQ2NO5rIlVUlzWFCeXwtIq1YihOY2WB8lmRVqbKZjPhJOJcGaM
lGFWNAGiODFF3UGgKsaItRBNZQs5PL6CON5DDlTQ1jJBNKpWNI5dwC/mbtGo3tFWPe9tkEeZhygE
NXOnx8eSn2rFJ/Ez4qOTuUVfueNSw7TNTbHhkDas2EA71CP3eSKo96wJBFlY0yghhw3rnTY2M7Pt
uG9HaYxRw89CzbrGPzcqOOPcoGqPNx5Cs0LtbX2v6Bt6V9CeA0eWvtXtRq1HT3r2q9W1OlvStR1N
xrbSxo0a9q5Vy0vwjiPoW0GltTpyrlQr351ar3q1DTpRrZv9m4f9+vfUepauug151bQCrcB4reqf
oBpercFhw29G2o156ir8NtLaHUagaj6a5FRzysQKmx45xu8Jw4ZMjKny8eTyR0xsyAGj2B44e6mj
Ici7TghYjLM0jKtJgswQRGnVjSwRyV+IgG27w23pJv8AFMmVthzM7tEYwsS9KTclSADWVkpiweOY
TEu1bc2NFFls8yrNuscezby+aBj9szLKKZZWpBKC/fLHuW1RlIiVZGWOTslLbdCq4+OpknysBZZI
4srbnmxzk5G3YwxUhSOSs/bcfIjxoIxIe9Gxx2ZKwB6eBoxLHHMBNLhuriRWYM+POJY8/AMtfIc7
MyX+FL/9O6/b8jyo4YYGkDSCNwGRKkyHNTZLosmS9MSx0A5nkPRUU3oAVarVallljpM+UVHnQkrO
HDrA1TIsWPsMjiXccoY+PteP4YJcy0mfizRmZp2yNpxw1dth8shhjz0NIeYoUL+PMlYRot622UAI
TG4a4JC0GU6HSSYlnVsxYcTsewFd6W74zTqXjSacCTNQBpPxaxMoZIbBRZZCC0Qspo3pub5Q8k0m
yZPmkxTLDg7VhYePvm2Y+JKesxClG7qjxGMiw4Pjxm7o1cA/fGHSKRZNtxmIx8+I/n5MVR7ljyOr
I4eGJ6fb8R6k2rGptsftGLnLSybognzclmbdoZU/cEKx5GOEimiILcsqXtEC9glLSPnZHlkqDBYR
wSyYasMXLrJ2fGVWxMuONopEMOdNG8W74zlMmCWnkCDuHb3Iaz3kRPw8kKJI55RMxE07y00ypUmd
mSRyQrjDbt2zcNx8kZi++owDJIZowyKs8gjaKF5YHyJoMCGGp8tUEkjylIwKmmSOtvwY5YZIIFab
GxkpocciSAimUjRVZjiYpQW5W5EURWRMIlZi7fQgkU0zOMURyHDyYTC2BhTKdtdKYbpHUksNfibD
MF2SFQMDeIq82/xH95yUZd+wCy7hgsT45BJt2HJR2146/wDtYqO5SR1HuOHIHxMLIE2xYL020ZcK
W33FjXecyOk3zH7l3PCYrLE+nMcVzRVSBiYtft2PdtvkDvFuSs0+5RqNyKhdyxWKZWO57lvR+jv6
ljV69zQ4uWvLQaHi5686vQ9G1WqxrrpageDoNBXLgNe1tBXawBkQU2TAKGTAaE8NeSM0Cp4TcaX1
Nc62b/ZuH/fHBbgNtff1fb+mW0HAPobaW9AcXL0OX0qN2tFIr0YbVuGJPkplRbZtkkE8TzVKPtjI
dSR2pIxAMfa93df8gmRvJhwDImCxyStEqymSwx8hXkcukcsDDH2Azsosgw8bupE5dqKrMgTJ3LGx
Y83dsCTHwJC0sDSSQrNJLtMO15G7bRjyfLsJfw9yxEys7coVXfN1C/v+4Fpdx3OSot5zYmAGbHn7
KsjNhOtRYLVjxMiMEgiOfnQSSSpkMMfKDGfcHK5+5LUeFn50gxY9vxF+xZHCZRkZKlyZpHkPilPZ
KiRzwvjqWlxX8WQGIrcduTIrb90HbL817szfd1TJmjynDCQMGYUTcZFyr9NVHJj6I510B4xVtOdX
q+oJFY88rNmSlpNgJeV2GVua+TOMUMcK5kfkx8PwvBHkxsx3/OUbw25yZSHkvVTSgmlIVs6JrISa
iwMlgZizK7xlZ1anQKy5KlRkd1LNclI1Z0hRVmRqlAISM92UqwyJnBRKXLKl6tW3oQWglLrKoYdD
0c8urPMkeXj7pBkNLlrDWF8t2+SDc9z/AHByaSWJJ8qcHIypGEd3RkyZUkilgySi9hWYCjMoJk+0
+QtNgDzDbFFKu6Q0u5zLSbljSDzI1MhtLKsSOxdttTDaI40FNgYrUdqhtJgPHQiynZ3zYqhzmhbP
kTIbGSMTYM+OoE+O1HGjvNlzINuyMVpcgYZlkw8aWpdkgNTbLloS24QU25Zzh8vKNLucrUm4Ozq8
CvPkmV4cuWJpJJciWHb5DX7VjT4UGNO+S+zQLAcKUVLBNEUkVKVNxlSKSKCl3HC7Z88EWaRgoFJ5
cmXdtrngTAyzFDJgxSyS/HQuJk7PAzPszLR2ma67O1RQY8NWNWoirURWc15PpNiVGypMLEkDbPjq
fxt0ir8zcohHvGKxJ27IB2vGJOLuMROTuUVLu0AKvgZJk2zAkLbJi9z4Gapf96jp9yz4F/fMcJ+d
s+Uq4e3S1+PuMdNlZ0VDcsejNjSq+3YMgl2OBy+y5QZV3CAx7jlik3OehuKrUebiuVkjcEang5ii
b14YbfgYhJ2yO8mJlhid0hX87KjRNzguufisfPBfrodBxHj66c6HAa6UdOWvO/tXTXnqBr734COH
nXvYcHTTpwHXrXLitoOnXU0elY+NJkSbnLjbbFPueVNRlY13UWruoSuKGVMKXcMhaTd5BSbrE1R5
ePJQZTparUa56bNby7h/3+AaEV00v69vRHpiraddbDh6fSW4raD6bpraraW4uml+AeuKwpFR5AGQ
ry37bpsotssuJgRP3xsLjEP2kXCtalZXFgYu8RhbBIowhlK9gH3bBtMeWk204EibnNHtuczpNFGk
ePHhYjTmDDIE0ccEEW67m2RnZ23xNkYTYmRDt0LwftuHHUqrhpFsq5ccWecGDH3PHzG3KHzw9kmQ
0+M+3ZccuKGj3OYocsZM0e5T48afJZ0M+6q7ybviymXcMqXIyMtFAf8AJnhxwobJNu57rIyAXWpF
UJMxWLKRXEO6iOjkNMEKyLjsaZbiKWQFwVpGDoK3PbEyo5NrkglmlZiintScoVdWDGpCe1xqKvRN
z6CimPpXq/ABRqC0UNyxxp1wsGHdPHjYWfiZIvRUMM+SaFdhEz5ezQoIN2x45sGM0tJQNlkt3WWV
cbE8Uk+6ZC1kZ0k4i3GdBBuMbE5rxPDOJxt8eNmSSQ+OaZwC7hY1KzLIWbHE8ZSXIhlJZI2M3dXI
VFA0jKiKh8kdduVLCnlSg1xIeSjnLjtltt+25MeRtu1R7juUu1bbJFlwfi5DyWq7EeNmfNjYR3DU
RzxokoTZHfH2OWiAF+0dzmg5ajkKCJFppBRghkdtvgBdcuBZpshz3isPJgSNWVhRa1ZMpNJF2rGh
yJ8jZJmyMlciWfH+FzPDl7PBjznZsYq+2MH/AA89BFjblHJ+47rBHFvrMy79j9mRv6IU3TGkGXkY
kkoxUd1jeNnSSnypyUy5AIxPkNtWJBFBNCs+T5ZAv+DHV57yRTSiTJjllobfidr7XF3O+44wyctJ
KjigcGPIhojcZYdrzHgy5chc6tk+NnIkm2YYUeP8lxxHuWSuZkvG6U5jsVgagFA6121arGsiRY0d
y7k3+jggeZtm2hsU68xTxxyCTbMJ6O2SxgfvMIG6SIBuWBKPwduyB+2PFVt3iH7k8dR7jhSUrBg6
JIJNuwZQ+wYDBdlnhqPH36JpZ91Dy5mIHbNxQTnTov7iqhM7FcNDBIX23GYvgThpIcqIsIFpHgqN
txt+VnIRucAKZuI9KytRFuPpVyKsLNhYho7XjU+3OKEG5QxrLuKFdye/7ljXGXisqOji2vLh5a+2
nSuvEBrbjPPS9c6Fc+C1c9bac9L8Br20t6Jr2471yqOJ5ZJPDtGHuWc2VOWq50vx9xpMmZKTc5lq
PdUNJmwNQdGHtWzX8mf/AN/r9d11Gt/oeXrX4OfBbgvVz6NvUHEfXB+hFYriSAgVOqtHuMmR+fin
tJpG8eUvS4Crdl8jpHI0spgXtDMnfFFGkaxctk3WLEqTccFU+RxTbllRRCGPDxZMyZMIUjCNc7yk
ZW1tky565ciYUWAg2xjn50224iQ7n+FCvxqdoxuBRMqcNjyThcnC3FS6M0kw29Yvys3Bx8Z1kxYp
V3hFSabcp0hyJKLuHXymjCijDdTGWLEsBXI1bQMRUsSNHjZEcLviQyJNjho3U4kqZmNKVPKdSrgh
hgse2ga+U48fhx8LvMkJs+HKaUSxEkESN9rHnJCe30xXQH1hR5CJC75Ul2iF2mgkZMLD/JyMbEgx
UoC1ZiBMsGDHGFuUmNW6/JcvLqNrstIa76I7g2MExYJi+PkRlJa94cqGKNZIJDDPD3QzzJKj/dkR
h5Ei8gWMwSRZIFTQXHidSLCgrsY8WmlCCEMzgLYXs6wJQHJ2JoGywZUOMIcyPIXC3VNsyW+VbWVy
Z2nm8VxFeITreOJhPDNjNE1gaIKmLJlaRXZJJZiAZO1jIEkkBDpHGBIYVSAJFJNmwd35aFozDDJu
kWO8OLAsj/i40pO2srCLcYg2ZlLSZSGWTLi7ItyEEkuSvbtUmNBnRhWHyZ45cxWdVVbUWLnEhsM5
gwXufIfMjhlc4eSfx8ZxNhSRypiZEYK5IjKuJJwr0YGFYeTFjmDc8JxJlx5DPPDAk2SawttaQQxp
AGbuJJNTTrGs07yNHCq1NDjipy2O0G6ZsWLOz5uTF8LgaHAxX26fdM/cYYIp5VeLP28KJIZVfHQ0
cZQfAoopaiAKa9T5KxjIldx9GOu15mPjZGNkw5UduH2050+JiyF9oxiww9yiIyd0iK7zBfv2zKLb
TjknG3KKjl7hEV3XFJjyMeXTnpLi40xk2nAkp9mjKvs+WtLgbtjlNx3aEp8iUUm87cw74pEkxceQ
NtOOT+JuMZbJ3KKvysBiuPtszfi5AFt0jAzMlKG449JPFIoZTVuHpoa6Ek08EDl8DEajtUNfg5aH
t3ZabLzUI3WIBc/FYieBqFjXPXnc8Ptpy15V76HrRr2q2h1vXWrcRHKrUdeWp0OvvwjS1dPQ53w4
odtx/kue8p9e5pJ5EqPcplpN2vWx58Uj5/8A365fT2q3oj0RqKvr7+hf6i1DQerYcN+G301vRXIl
xJsWeaRpg0iZ+RHhSLnDKyDWa3jMbXXl24xEZELRFkRSqntZmIx+wylSDKjM2JmSLuIJIx8eXNlw
8KPHj+0UxDVJIRQhiat38GVnY23xRY3euDIm4Z+McjH2rdQuPFilZDk5sqs0eHlMm27jIkWOHTJw
8XEmaOLDjRnkxbSiBKUYt4jHHMyosqMpAixlINA0VvViCrA6jlWbgx5NfjZuOUkymEGABDNtqk48
hBMsjTQylZEkEeRyOnyNGfHWMBWAFPUp5FgKcXpxzx8p4ayDAx9JaY+hbgOoo9ce0MRNziR9z/Hz
jtDFhQYklW03KMtj7hPkPLsqyvi4mLj4sHyzAgipDS0gvTsAJe4sAURxBkrLhyoUQAFFFCNWqLGY
s5jNNBIhlyCIR3BIspvx18kkKyOrNkIa8+PQyY6Lu5qIdsIW2lhRPJzUp7YcnEmlTasWWFMbEWTJ
fFxBQItYUwFObAO6GOeKcPho1fhWqPEEbTRuUx1lMkUCM+RDYqwaYv42/IhmR8fHdMmNEk6FfJK8
SxzJNDBHIRjCGnIFTsZHMMXZBhxzyT7Sq1JHM8kUuREY/kLAQZu2yVK7JKgnnOLigAKO3JwoJYRj
QY65EWM0JxWaJFyFr866QYXcs7t5cnK8og7fLPjwz4cG040anCgeRtriMMmM6nHxJ55ANzSRc7cF
pd0m8b7mBHNkFijRLUkyou1PjzT7vFhyHbJsqaXGijhzhgbZ2bpJktnRb6IZMfc9typc1MVsifHw
ozH5mIfNRgQwapJFjEmS0lOxYutwRY2+ixcHIy2j2TcFnxMZMSH0D0o6c6dEcSbbhSV+1FCRu8bN
uc0RG4bfOp2/AnUbdNFQO7R0Nz7KjzsSSgVYe2lzTwY7mTatvkptkw6O250Sd2+YqruufEU+QY5d
902yUNjbXMg2sCvHusVfuGXHS7nhvSpt84/b41H4uSoB3BAmXKSubE7eeAUOYo8R0uaYBg2JitT7
ZhtX7YFb8bco68m6R1+4zrS7pjsUzcWQLJG1AGiKJq1dTXKr0evCRXIaHQ62vrfhtxDnpevc9PpL
1seAZ5flG4ySZ2RlSZBCMa7WH0Xxv/duH/e9Lnwc+EDTlpfi9vfi96vp7+nbhv6I9EfTX+qH0GZk
D8nBU+NioG8YonyJdvjxIEbvTLi8sG3yd8DdF7bKzdlgtPKA2Fg5WbkfxiNFz5czCnhceHD26HHk
VHyJdvxY8aIEGuypGCVm7ng4Ql+VbKMfH3jMObZPFtEAKtGHM2yRSOnx92OPtsECS4iNGO7Dn7UM
b/H4Vlmh/GWeGValh8cME4hnbGxZ0z8B8F8ezRQy0DekN6FGjQBBHQEkcrhudc69wTWdCEYdtZCK
qtnROmBukXf5YWrdTA2PNueFFU28FqbOyWIy5aMyvTNyCM5MDUECs+JdWwpxH6HSjxjgPAeQiQu+
ZICQLnDTtGDP4crcGY5EJmBo1IoePBhhehNEDFvscUXyDcc7MaM8lNIwAIYiMkhexlyYTC8eYpru
xXBSAV5YEpsomoivkynHdAscR7ZA2RkkOs7x42OmM9FAHWMUFUVyFHkCbQq6sKN6Y2DdMo2hhyce
GIuCsGXAkrTwEsKFxRPKZwEHMeM0GkQ+eYgB2Kf4qQ5DoyyJDHkKUZIYpMqBnhIsTyr3SJ3rGx/G
GgSWOJVXRjYTyWWFTWQxI/NljyMrIVQrEpFFH2ZSQsv4+LEyxSOY4N3Ai3XcIaj33F7fzMWZuxZV
TbYiDJi4cc7yPBFFDJUMsmMMjLEsu34IcLs0Mss0ywLKzSDtAqVTIwxcbtjRY0uBUjCsrIVVHdKx
iRzIIXnk2lVoGPEnggmlfYIdxhmzdy3CaGPMdlfMMsLEks5FYGRktMmPkGnwQ4T8jGkzN5jaL94i
J7Y8kTY7gNE4rsNTRkUpIJ+giQu2zYAxsfitxniIBqXHgmp9oxifxdzhBzNxgCbvjEhdsyaO1Qgi
Dc4iMzNQpueKzJPBJVjXPgZVcSYsEgk2XFapNjZGOHlY6wy7nGU3DdlL7nH3M+zy0qgKuTuKhNzI
qPccSQDwylsaLvkw1anhylDvmRqclo0TLiYJPC/oHQive5po0enwcRw2149xt0yjx7olHI3GOv3L
tpdzxWpMvHerrVqI051bQjgPKuuoq+p4PbitbQ3HDzrnXM6Dpblpyo0DpbTnbQ1h4z5c8mXBiHcf
JkbhtnxJY4t2zYFZpL0WU1ZTRUVb1vjf+7P/AO8LemfVt9KOPlparaW4xVuDn61vR5+vb1TxD1ch
ykL9q1hbhjyRKQw39BJAcadtzwmLRkXG3f4siML3S9tR+ZEfv7lX7vjGTDFmOOXzeSCVcDLggwJd
5xLYu94GM0W/5szxz/JpFbD+TSJLsGXNJJ8QxpK3749gbRtsakV8O3H8/Z9q5QsKUAURQLCgSa3X
GxZ0/Fy4DFOxfNxpHxJNxRE27KMCTYMqouW8BWb8tMiKwbMaF4N6xgcfLilKWs5sMj8pTEH8QppF
Ws1Wd8SHIWZGZltoOuRF5Itw3pMKszcsvMa5q5oSyCmdm4k7naGFUp5Iam8bVGGvFHjrWVio5lhk
hbhFE+odQKJ546+KEkkwJ3NtmGuSF22aPKkSe8aSK1GrVuJkx5sF8lszY4Y2OVDHkQuojnU8gxsp
YUKAqde+K3PxNRjahG1KoQZB5RyiVWxCp8/ipYYzUkauzBkpI7UeVdxJSBq8Rpovt8RFRydwqS9d
WzTaLLxshZMaOVcL43seJBhb3teKuObXeXnYmvEpDYqAEMrEcoe0yziz5EXfUDsGklGIMaZZTkEz
PDlPAG/CnDYeNQxsRKM0SCGWY0IZ3dYyGvydrBrzSEhRhRh3l2/FE8uE82TjYwiLThWypwgJZjhY
pNRqFUorGTDxZal2DEYNsmbCpyN4xZJsyPIaZ0RI+xaZczcJMvFmxJ8PPnx3h32PsjlwZkVESN2N
QwiNaPKpZAoysvtCxtKwAURRy5km4YeRHlwz/knF2mLLzotpwYYsh5trzcvcs3MAiuqY0UiSLZmB
FK5UwZssYfIkdF3FhT4kE65G2sDtoWF5EEgnXKjo5cooiKdJEKNCwqRIxRBHqgXOxbb5ZOQHBbgv
9A8ccgl2vCehtuRFXdvER/dGVvy9ulDbfhzD8DIjPduqE7ksZTOw3rkaPB7PBC6nb4QBj5SK0bsD
j7dNGm24zV+2bolPi7k1OZVaPdMuIx79IpTd8YvHmYkp614oqGLDZcMIfFlBzLmq5zSh/JjDLNE2
h4Pbg9hVzTQQOW2/EejtkQb8PMjVRuak5mWpG5IAudjErNA1daI0txGjVtTXLS2g1vXI8HPQ6+2n
vyGt+dchwHpQDE7fipt+JNl5ORuuz7DBgLvG9idJXLN9F8c/3Z9vzqA15a8uEcNuC/EK99RqaHoj
0raWrn6Y0B0H1I4BwW4B6o4L1f0s/MMU+5SqzwwxTSbdDCXYXrcNrgmj3DeMnFjw5C0vICQpDuBz
cZA+6Yqqd9RVl3PKaZDvRlOBuvmOTusWSmxQlBs+Ap23YtuhrFx8WAhQKaQASS0XvVwa3zYl3eLf
NtXa834vuM0G4ru2VBlQ/JMaV03GEhXSRTRJAlhaSSfHLtn4LSY+DlflYmfhRNW3YOCk4jCU8KPU
iKK3Q4MMG77jBkHnUE82PJse6DccP82Puky0yKxkxjCMdCshu8kiKmO+arEi0+WIoNvlmlitTc13
9Cu4+kqOxQ5Ea/koaTtndoBG0fN0ypCsmL3pLiEEgg+mBxjqTYRRmSTNkF1FzjxBAGZK8kk2JfKy
adp4T+Y143Eim1bnGO2JcZEws9sd8redwyYn8iZQoGlNLQAo8x4WDdriu5KIBq1ICz+MloociPIL
K5aApU0pRmbuQR/40ilkMUaRgLVgAbEBvGTbvJpmuU5tmsK80S05AXafkEEGNuW+pnQysa5KEUkg
cgalhEisrKfE1wr9sWUFlmSUM6xyq8bEI8kVEfaRVtMeHvKqFrkaVEjdmtWRLYRJ2Lkszn8jIjys
jORGAleN5IY4fLjSRyM80mJjNJPDH2AUKAJIrIyEx4srcZc5DCC37LI7/tKS5MO27lhR5OPlSzRJ
tkQlbbcg/iMxfxo+Oc643N46j3LDcPKgGVl0sZZrhBOZXg2bPkvlxzZWPsHx0SRZOAcBP5Kgiycm
TKmxsQvFE/jWJVc5mwTI00EkZaIkpiyxJLkyB3nlkkCZEb427XL4+PkKcbJxSmZC1TQKVdQjzS+R
gpJRkSuy4t6m34rZE2NjrjwkVbhFe/oHh5aWHDyoqrVLiwSmTbkCCLcYETOyEKbnjNTRbfkltrjU
iLc4j+blxlN0xGpJY5Bw2U02Hjsi4TRhTuC0M5r+TbJnk2rAmafYcWSpdhlCft28RLFm73GI9/sV
3jBYpm4j0CDRF6MUZDYuO9HBtTR5ylps6Ovzu0Jm4z0JYmNqNe969rUeA3o1c0yhh+PjXfb8Z6OA
i0IM0Hvz0Byp0Iz8e65MBHfHYWNW47cAq415cPKveuWp1voeHqeVGtmwwx3jIkfA+ObdDhrvnyMt
A2Sxh+j+N/7s/wD73riremPQPHa/oDW1e3p2GtuK30VqtVtedDit9Ff1rVuLBsuKaNWi3DDikxt9
w4dwk+U4HZmfKcp0y/yMqQJO0iYOS5mwTBHDtkBWPbsZQ0MARoJXhXM7FilRjLhSZO6uyxrte2hQ
sIsqlaDsKkkp5DcygFZ1pJxXzaMNLidgy5c3dIGxJ94c5Me1Rk7bDDCkGXInj3VaUbmKaXPBG6ZU
cu258kU+576ZJZtwBaLeoWiyPk+BEX+TYzVO2Vu+RkJAmRJHMiu/dXxHCyRhybfPDLFgII1jxY44
g3b5/LWOsJm7ex8qVY0v5Y8mWYLhZQMMU8U6/LNvlEvoQ42RkN+xbighyMXDeH4xk5UeX8SjStr2
fChyd8xsYZZkaGWKGZ1PkimaKDJXKwGWpcaSP6JRTG5xQsMJJYwgCoYioArHhZsCE5WC/iMxfHhv
CvYpIFZKCSDNyMgD4+Z5BAixQ/MIIhCDcDlQNJyBcmulKAaEYNGJSDjC4LCTzxrMEJqVmaGKZ3ry
dsCHyUccNUcLoVUkgaE0aIBrtApzy7gRFzGee5+7IE+Z5Tj7X8c27Fxt52uDCpkYssYFAULV7qay
ZiqRymi+RaQoskL9wMfaSsQXkKa1qCmkjdmRQuhFE1I5FIPI7EKuGVDukHdsG0R5G6CBGTMk/A3P
Oz4crLhngSXHh7cpZhUTd6i9AUSb5Hk8cTzyth7RI07OqBI1sYFZnEaDJZMmSPYwUG1BUbbdwTGl
xcqKl3LcICdw27Iqbw0sWSp/LaOmzonlkyIJMYRTy5GPs+aYtv3HFx03jcIJ8ZlUU8hFYGVN4l26
QSQwpAL1kY8GSrbFDHNn4njDC7BlZLyLSSd9I80FY+4rIJIMTKEu35UBfEyGMO0zyVlCOBRc196G
6SUylTb0Y0Ltse3iCLg5enyo0aPokcdzdsbGan2rGamws6Mtk7nFSbpEFWTDyafbMKSjhZUQEm4x
BNxS8eVjykaHgfHgctt8dePPjVcrJRY8+CWlmjegiAvt+FI0uyYbl9ilWvx98ipdw3GKo98mZhvW
KTHuWDJSTwvRF6aGJq/DgBGF2O0eaC82XHRzURY8qKUeaG9e2ntRGnThudBjY9Nhw9wwwKEOYoaT
NRhlSLQzIiv5OPZWVtLc7UBVjU2540EkW54ctIyuLG54TwG2tudxoLVJkwx0M6A0s8bUOmuJi/kz
5WXHjrt0LTJ8kyYsPE3GS5vy+j+N/wC7P/72o+ntQ9e/ANBofTH01vUtpbgH1FtQdfyccUd42xQ3
yPa0jb5ZjCMfLJ5I/wCR7pOkuXvzQzwbllMu0TlYNiRqi2nBWWDE2bIhXEx0PyGF3lx8WaCIi4yI
vJj4EvkgS1+9RI2M9s7GYw7K+RYdsUeBjKWjvEUJs321JNTyMS0yIku5TmpNzyVrG3DcHr5HmZWb
i95DY+2wT4eR8e3KGodk3Jp8VBt+Fs7N+ISKVuWRnYsBn3nbkli+T4mOn7/uOQWXfc1o/j+5TND8
X2tRk4+w7edwxtyljxtlY5DbHi5UeH8W2qJ+/wDHiTJfIKoy0C9RS9oMovF2ks7RyySwtSyQqZ4E
hTDxISkW8YWNHuO/RZMU8ZR+CDCy8g4XxHNlOL8e2nGEcccQ61k4n4WRNvceJhbPvf7zkLtOMi52
LMMpSA/ntTIZW/G7QjsolwkkGdjiP0RwnQUTYRoZHzGC0ilmxMSOIHnWLAZZcmfxJjMfBztcihrH
DF+YrQRVDvc2PDvGbuOYITdAb1emf7e4iuQCcqBNWNEBqkxUcDHhjprOsSCl7rz9xix1lsIloIBQ
FqBomu69Xom4J5sxux5QiyT88wqhklDFcL5HhfjbtusWfV6JNc6WiL1cgu6gG1nBDNGXdGEYDLKs
vehKh4o8ViVgRR46EQJZWVgaJp2qVizIoUZJLHJTIjyfyilbdLmwO3yhgmVLmEzY82LFetonEeTM
MmCbFfuQEUbVkZfiMsxigjyWlzvGCSBoxsMiRsiQAKJMlox+RJbKkP4MAKRZBgFZQheQbevZLjTQ
L5JC00ZZ1LK+25UkOafkeJFLm5235Usk6tHFFJIYcKKGpZ+5AqGmbmWsVN67F7nxoXTN2cRs8RUr
LPGHkkcpK8ZEkM5E80NCeXtj3aNG/Njyg+FcSYSgnFlUvjPUXOp8WSKrX47Xra9uyJqOduWOE3vG
NRZ2HKBzFuK/q24eldeM6XoEipFWRX27CcLtzxsBuqF9xdGXccR1GLgzA7Yq0U3SMncJ46j3HEkp
XjcEcFzTxxyV+FACmLNG3kz0c58aHzxX6gqGqTFhkaXZMV6k2actNh5SBHeNMfMzmZdyzkC7z9w3
fCYR5uLIFkSSjDEVGHAjHEYPIMxaaXJjoZqhEy4HHkjrrR4vbU1c0L0C1vHDd8aFz+HED+JMtOM9
BJuEuO2VvLsHzcqQsxY1FkzRHG3lhSbnhOfzMUsLEWq1WOvPg9zWNjTZU2/wnbKiwsjIobUBUuHn
JX5eTCY9yDUmVE9RgyNuOTkw5G0PNkZGK6JF8h3BsnNy3WT6X43/ALs//va29Uela30o9C2lvX60
KH0FvTHBb1RrY1cXfLxUeXdMKJzvuCZG+UY7vJ8nymOR8g3ZzkT75KZNq3vIEvxrNyE/i3kRfjOF
2JsO1oo23b1XtS27MwhDBWXmXdUEUn+TBEYxz03vIddxXccvNPshY1g/458eFCuRjqA++z4QfL88
SJFjpjwySGGeKVVdo6imMbtPTt3HkTMzZU+FtmKqu2AqZmJFAm7QvuGIW5zfKfFseN8n3eGT90Zk
k3falLfM1AG7/KsqhtvyvKOJ8Z/JBwsTGmG07dk4pwc/ATFjZllysbGM2NuGTSY+DjhssmGGGBai
aSURFSZFLLIWrzMREXYzKitJIqK+YsLz74jwndnZIv3TJhlw50eRMcVHjZTphbImXJi7RBGd7+KR
xL4Xvs23YOQcza9pgi2uaMzWJ4CoZd+2qUR/FNsm2+WXc9ux4t53mbN3Lb5P3KE868jR1Jjswjnm
gcZqMBAMiPM2AGv26cAwghkZTwW4xRNziqIomYs22wi7PcwY5kPcmKhJYxm+FG3ehGltN2DRt+Rk
+f43D5DKiyrmxJj54PIdAaHOhzKCwBru0tQUU0QJCBaC0QCAAKvau4V3Cu+u6jRrvApnADNUj8o+
mdIyTYWbkflZHnd4fiUBxszCk2+e9G+l6vTc6vR+4dkKgoKKvSSHuYA0saXVQKFGgeSn7QeTNapZ
LVEtEhVhniSWWRe3EiDZCpKtSkmTLyXGPu4bsqHHDLDPHkyY+VNEwyIgs2czDCx5Xl3GJZcb4/NA
73r2LADKlLAqEErtUOEBXYFAjVGmmCLk5LyGGEIJHEayYWRNj4Q/ELbbDJHtey5mKm6HZ2c4mHM8
G1Td0WJmOcbP3LFqL5IGEO54cxXLxZUDMixy2jMkSB9zxhWTnsWhnmlrNWEu+3zFHjK0y1aosh0p
QrEyxTVLjSwVBuUqVHmBxbHevGVpkRqUItZO3g06vfhwsV55cPFTFg51JDDLUm0bfIf2Z4yI97hr
90zYmj3rDeo8vFlHtwcqPFz4DwHS1HjtrbVlBqXGhmL7agRIdxhK5+WpXc8dqaPEnD7VjE/i58VH
I3CELumPePIglrrwnnUmJjSB8Ll258aLlzqsWbBLQkjaudPHHIJdtxHB22RC+PnIJExyfx9tkVNr
AIxt0Vjl7pDS7pkK43RLLuOGwjyIZa8URoYOMryYstduUgWTJpMosRmYppWRhVq5Vyr3q2ljxSwx
TplbJMGlxciGiCNQbV3iu7upZpFr8vKFfn5dDcsy/wC6Zdv3DLDLu2YKXe5RUG6401KyMunx/ccf
bc7dt4j3neNLUyK1TbbjPU+NPhybBiiHEl2fM3HJyFGNLi77lQCaUyyE8PjcjxvRBHrfG/8Adn/9
4a+2tvTtVuK39MHANOvAfobURpb1rVyucvFDzbphQvJv2AGk+UY98j5Nlgy75urvHk7zOkW27xPD
D8e3JhF8YbyL8axCybFtqlds29aEca+jvAbvCAnxkLBE88s2PJC2LCuNjSh2Tc8qOCv/AK+R9MDY
M7Oyptsnw4pJ4u7dIVz48fHjx40VWkiyscCWaJzHlSALKwH5LGlmDUwJWAgSwTd8azCRUZkrK3HL
xcBv1LHlS42Ju8M0A+PYDHF2/YYWghxEQqadSRtxMT79tKPGJ89GObCskOBmzR4+MkEcsQqWCFy2
OFjOJGFh7UEeQ3leaOsjctuiRt9x+4bpurBsbfnXC2L8pF2Dbogn7Jis25uQI8yaXD23CiWDc8SJ
vGmRC+35WJPkzz41ZUiybcnJ4MfAzFOPnQNkeYHE3uFTHNDMLGjyCyo5maARz5jYck0C40XidhhT
tiStLBuUBtKoLRMEgylyMGaCsXcZYTHlRT1NjJJWXjFKmMhpolNEEa86vVxodTyEaGR82QAQxNLJ
BtRSNcPGjpsgICSxArCkQN3PjuuUKWRXF6JNZ6eTHxsbEZMTMaCSXes/ITcI8mPJVwavzpWoFaBq
9Xq9A6XvQ6kiu6u4UWosBRlUUZhRmNeZqMpoyCjIKknQUuQryoRZVWTMi2/HikgcwZsWZiyR75uM
GXmd60GU1er0DVzc86FFrUDerrQtXKr1fQmg1j+k91g70P8AI4tWU57c3CyPLI8sUWDtOfkQSJPA
GbtGRKS2ZGuZgEFWx5FjeCKENJl42TG0uIVhSCavxY8QPFLJj7RhJiC9EjtmmDGMfYAWqOFUo0CV
M04WsmdpGiiCiSZYxk5kqTLlXxztEs+dibBiwRybnJtE25bvDuMX7cryK264T4udiRzM0WUj4eIU
k2PFcTbMIY0wNxiijzs/HhytzyTjnG7k2/xA4ww2UxJUoWRH2/FkXMwYopHhZSVIrmCs6tSiWGjH
jZVOk+M2DueK4jwMSYNtagZqJiNFkwuyCDLfMw/AxAOq9fj2DGsXFepMXGmMuzYbt+1ZUNX3vHb9
4kiZN3wnrzQmrelarUfQtwmud/QuakxcaUybZCWOPmwRplZkQj3OAnz48lPg4jgYHYLbkhGbKrLn
4jBGVwRXTW5poIHK4OOj/j5au8+bETnQJSTRPV+XOzYuK1NtkHccXLhjjkzkZtwhenO2OoxoJUO1
KaO3TRtJ+5w0M7LgK7m5H7hjkJkY8tEY7yS4MEhOKRSplqolyzI2Wq0MiI0ssTG3K1GrVY8N6IDC
fbcKVZNhk7hsMlHYZ7nYc+v2fNtJgZcdFHWrkUWNd1HgjnmiOFuIeJc3GY5mdB4MebxzRyLIlHQV
uBjkx875GpxJN8zWiZixvxRJ3vDEgjOPEy5G2OpbHkWihFFTVvS+N/7s/wD71H0Rx2q2lq6fTj0r
epbS3CP6BY08iRh8mCNDu23AN8gwkU/JYmD/ACTIlr993TJAyN8yCu3b1NX8d3N1j+KvGV+LYCqm
xbaFTbsBAkUUY1t6ubjfkwuWV43sGhDNKsndixtBj5M7RxbpgnNTEw3xci2m1vG+35hAxxPkRbtG
gVMnJVFTILGPItSZF6SVTSm9K1BhUcpBkPilEjx1GxUhUNMymXfYVTdvjAmkfH2KOIPt0ShcjJiG
JNHKsWabyEMskixbhuuQueV+OZuWuds0SY22mFsCVpYzLPEFky9uieTfsEwndsmRGyd7fIO07nMq
bUqSR4GPUeMY4cnLx8dMjeY2p9x3ZhBt0uWz4+3wxwYcSnKjhZZsxmWbFiD4kEkpjsEYAjcAqbdi
gGbM2wgx5vON0YT40GQsuzxqyJAlSnCZWkwInXMiZ/xd5yk3Dbp8epI8jGqOFpWDfgoeybHNpVVj
G0E9xkbXFOJoMrEkxd5NkWOVMzZUmEsE2PJ4mZfx2JZGUiuVW4FFMeeIFiiZi7bTjFpJFkiZ47Ai
rUqGRosMCvAjKpjucYGkckEin+5MvKyMeLZ8ueWbbo0jxPlUET7apq9dwoOKVxQeu4V3V3V3UXFe
UUZxXn5eevPTZK2OZGC2fGKO4CmzpCTmTGjlTmjLKaLMdIT2zRcxNO0Em3blNJkBZ8nMPxfAfH3T
FydrzBlyChmWpcxKXLUgT0JVNeQV3Ve9c6F67qvV+YYWL1eu6iaZxUkl6iTtW9NlRpPFP5ohEssu
2ZcGTi7tmRl5JW7TzOHmNjtkYcWUFwJUeaAd7siLjxhmLDFjGT+RQAVfNCWLALlSqY55RCuNIMna
nz1jx4GDRk1LMFE+QztHEFqWURhs5IZhi48uPs+Fn5GRHgT7eU3bEZN0xly8pIpVfGx2uoKrPFDI
JdriuW3XFSXd3uFaQQp4Vke7Zm27k88Gy5kyT7Jlqrx5OJUGdkqse5Zrr+6u8QPewQuW2pnjmwZo
qMTCkeWIhosimaeAPjI9Ym45eE+DvmLlDfVBrGaxwG7c/dJZIsiTCEilObRuhSEX27ElSCPK3HHE
G740hingmGp0GvWpcLEmabZ4HaXb86MtkbxjhN5sqbngvUckcq6n0udHlofRPo3owY5ptrx2D4mZ
G35G4Y8cW5RFkzMaQtBEzSbfAzLBmxn8jMRxuGP3LLG1W4WAYSYeNIBhlEjXLQLlS9/5uMaV1YUU
Ql8OB2bbiiiHc4ic/LSl3LGeo54paGPBT4GOzSYEZLbcWRYp/Gj5aOubkGvzVWhl40hP4jAQR+MY
hQrLkM7SZMZ/LC1+RDSujAiufATXOuddK7mFXNXNNHE4l2vDlGZtpgYix4dskkEubhZLvZgWBB2y
zYoU2knhjLbnGW7sues5XgeKOXIcr9xFiRbi27EJoDUqpqTFhkqTa4zUm1yinxpULQvXYaItxfHP
92f/AN4fQiuepq3qD0DpfS1e3HbjPBY1z1t9Y8kcY/Ix/GN42/ul+RYaGb5Tj90nyfLWJd73ORu7
fJgNt36WRPjeY9J8X76T41hBxse23G3YFdq2v9Df0N3xBeGMgTI8dHuasZ5nhyHRI9+V8ePBaQzx
RGRmxhaLMzsFMbfJ9whiwsZcjKyUjSeeaeZZXWo8uosrlHkiky7CLIQ1J3SRRZaYmTiSDNhHkgEM
0ePmxywlcqURrvwkfO2hH/Ow8qaCsiRBBM+M4fH7mxN1hdX3OaQthvl1h5WPHG287Yse8b7G2FjZ
29RvDFveTF+zB412jb1YYGEKy5I48eDKMc2ZNBjSZG+4xjXP3KaRNt3DNWLBwsKdsbymOHyyfjyJ
FHH3xmVe8xyXMsRMUUTQsmRjGKZnqMy1u8qjCxJQMqLe9tmnysfFdWx9uibJ3CCGQbvnyq2FvGZU
Xx+MVFtmMSNsCsGNsmBMiLMxkkjmgmxcgiWXJzUwdq2+DKDvIiuI3ZGx5VplinTL2Q0kuVhSJu8U
1TET48uIiSphZMqy7eUM2FKAVIrnV6vpeo1Lvlt2ViYzTy+COF82cSpJ9qEVHGXaOBMdCb0bgyds
leJVLEFr1esiKL8zHw4seTE3XIxU3TI3HcI0aw7qVwaDUGry2oS15rU2SopsyIU2cor9wo5zmjlz
mjNM1EyGrVYVyolaLCrigrmhBMaXEmNDCc0MNlMQ+2KBJ5MXbYcaTBlXF3D9y2/xfI807huRuKuD
XdorOKWeUAZbClzhS5yGhlA0Jwa8wryg131313V3Gu6i9O9qiUsUFhO5WPJwJZFSObCwdn2vIy45
9oXDhj7ryY8UsZUq1AlagbJL5KRZZWAsY0eKeSMSuyY7xRJmTNDJDjPPmqwkyhGE7p58fGghx8nH
WeSKMxLNP2jInZ2jiCCWTtGXlCIFWrbjMMf41JjmPcXiixHyHkpZ2jF5M2aONIkLAU1zRFFbmTGh
lY7fGsrTZuMI9wjlWJ4gp6tYCfukXcdqgihELkGaVAmT4zDLBJPInc8yzoZvGkBhxysePhMMZcJ4
M7YXjLy3L47oDmZBRHKtCQmbJiY+XHt23QTZO8bDsYRtveB22zNSba8BcLGqTFx5Uk2eKhDvGKI9
3lSoNwxchlZW4zq6JIr7bgurbJDdMPd4Kizdzjdd3Tu/cMLuuCOC3AdOn0luAqrK2BjtUmFP3PJu
GOkW4LaPMgdnjSSpdvxZKOFMld25RhM+kzcZ6BVuD2tenjjcyYcTgwzKqyZKomYrgZEJPUXN5I45
afb8VqO3zLTPuUZXcWQLn4zkSI1Fb06I1bzjOoWWRTt+OZYWhmx1y82WkMivDn58LR7lGyM+C6rB
iPQxSp7MpVkyWib8+ARLn4jBJYpBbQnU9dHVJFzdp7zkY8mO/BtEd5edbwmyfjNEFGNHyU5yQfti
u0WFjxnlW5TRytPhptO0sxJvzPDhYxmkjRUUaX097Vyratpfcp8jaduMMvwrbpV3b482EzQOtFSK
tr8c/wB2f/3uIcY1tpz+sHEPQtrz47fSNLEhkzcSIfve1in+SYaLP8njV2+S5r48O87pLM6/IZmk
2ffZzF8Yy5Ei+LIHHxjaxS7RtwEWFhw0FUVz9Ua30HrvGsiPE+O2Sllw1C5AH25GCmTW5402MfzW
mlwshGDyKsbyIY9nwpkgmyEjXJyXyZABXYDXiU14rUHkWo8xwY84Gos0ivLjytjT4wCLgZA3HYI8
lfwM6KmwNxya+T4bRQY7dk23eSNszHSAzb9gRy5O+M7Rx5ueJod6yceHFXMGJtm2HJzI8XHzZcl8
qXFiGPCc/AiQSxtTzwIs2/4Eatn5+TLNhboiLtuE0OJjiCcok0a4jPDmpHDMrzCoOUjMYhKXWKV0
kbFRxUZjkLQwSQwCPwwSYkjGRFb5LvHjYuSdpTHyy+zbzOE+Pyxz/tGJjVHnYvjTIikqTnHBbuK1
ahet3THfH27GjOXnbxjPmwbViPFn/HtvjbJhOJNIgIilKnHluqMLZOJBkjN2mWA4m45WDST7XmxH
IfHbH3QTPPjY8dS4SSCba47y4c8VfZXjJBBFYMZURbdNOcHbMVa8cUkmThySUkbOxgmDYuMIFdu5
iaJomiQaNXFE1nCwzN0yfN8cb8yQiw+S48OPuZyUFflxrRzK/LkNfkzGjJKaJNcjXIUSDRK0WFAk
0IshqEGQxGFMaG3tQ2+O64ENxiwlzjohsL9grtAoiiLiE3UZgxn2/cRlNixyZudL8YwJI8rDbGna
JaaCFim3QtR264bEyBRinWixFd61cGrCg0ooZEy0uWRQzUpcpGoS3ryUZKMgNFjI0fQVlTKHw54s
mpAJW2XPihbOzcbxXqA1uEYWejEscM7tiyJjxPUbtjZ+Y08U+VlzCSLHTcIHyZ1kjy5sl8HDTKny
8cY+ThwMGBbsqWYIMjIaRlVlMkhWs3L8RVWZv23IUriRxR/mLkxvLmKy5WP2QQS5TIkcaM4FMxJP
eK50aNErRQM8sCyCXZ4jQbc8Zot3x3JyYZUymZYVhhyY2DYcJGNIxwsoLAkLlcPOiqPKnUJlYUiY
+34WQBsrxF8Pc1lyY53qTGnxjfGyBLiTRVHO8bfyTMCS5WTkyYW3ZU0uJtMpgt2k8LAOsm2YLqm2
TQGOfdYWh3qBmiysWdqPoXNXNGGEvJtuLI8+0nukj3LHiGfmwxRbtAWizcWZq51b6M114bcXSiAy
tgYj0+2MCf3SCl3N46j3HGciRGpsbHLHb4wTHnJSz5YI3DGJSSOQW06ac67QT4Iw74zBpTkxr5mR
Y8lHAdTXueYbFxno7Zj1+NnRgS50TZuScjFFYO5S4gyt0myB3oAzElXesbGjkxJMbyUMNRRhlDyS
5MI3CTJlPe5AvYOwrF3GeE458sJ/IMjSdgEikeRL3Gk08UKTbxO5llMh1AJO3Y/hheSONMbaXznj
2HYY4/w/jzPPibnjEboVEm8TXfdnli+LfHxJXyrPGRnk8SL3NiY6wx2q1EUatR5UK27bpdwyMbFh
wcdVLtu2eIIc7Med2ApoY2psRaOK9GJhXxxGE2ef+dy1v9Hah6A1t6w4BQ9E0fRt6Fq6UWQB9z2+
MzfINpiDfKtvAf5eby/Js9iu677lRJB8gkmf49u8zRfFXKwfGI4nX41tICbXgLSYuLGelXPCOMem
NB9AzBFlzJJZpckvItYvnLmSONNz3zbJ8mDbIMJIclvO2c60uekZ/e4lWfcvyCJUpZY7eeGySRNV
hbxg14wQYuYWVaTIlSos7nFm0u6yojbtiRtjfIcJZN53ebdcVQpeLb8ybbZNsXLiwNhwcdshcSKF
FjZk8Ign27Dy3Xa8lUxds8KY2dBtYyt5xBJ+6ZstNiblm0dkkhk284cahrUGWShGkQeZHqLO8ckO
UQCIMicAAJDNM00iRRvM+XLBDd0lxUjfuhV5UCNjXg2tYjWQ0qne8aKXBgi8s23/AB6D8gvGhuwI
MnbLjxwyzJNK9vtMpgyU5jt55GV2Pj4YjfcMEZUeLsmXNnYmbjiDcMuKWPdhuiOvkhEkdQTFTBKT
Qag9xl7TiT1H8cSep9iwcISZeEjSSySFM3Kxjj7wkio2NOJ8CNql24rTY03lxsXBx8ZYMbOj/ELp
Dj5GMx/JWST8qRoUyrk2FwKLUTRYWJtRai1F6nIaOLbcfKXEDYof5JNHFumQcqSQWflVxQYVdjQj
lavBPQ26ey7cxpdujpdvgFDGhWo0jDGRQT1WxrlVxVr1cAqDSqO61KncXjZCKgAFDCOVWBtrYjbX
krhbm+6bdGm45gy8yOIsrxmgSFVQasK7QaMSsHxIGZttx6bb2WmxchaZZlosBQcVcV3MKE8or8uS
jkm2OB2LRPaPwny4cHBnw6+ObZlbrLP8YhhhoGoCC+4oHpIJGBjlx8fckQx5QaDGhkhyY8WeP83f
4uzLjleMyytMVJB2pJDJMsi5uMgEZ5VNN2iedpWjTsMkoQYuei5WUkk+Ts+EGk/aoQssCKw2uLIe
eDLhbFx8CWly2gqbJxo4s75J2tl79lyhc/MWTBnfIxKN2ox2rktRBqZu0DlUsWMRLgxoMh9w8eHu
GLCWVs+ZsXEQGLcEU2ITAlFTZWbC2IuNNEdpx+z8TcYVObnxKu7YrDPyI5IcrDFRtmYlHcseSvyd
ppswlviuzZOQkEXgTMH+f0L02PA9Ps+OWXE3OCT90yY5U3PDduvoW4CAR4Ye39pxCTtMyvMd0idt
wyIUi3PGcRZWPObHW1HhtVvTt6hFw2FhtTbVFYw7lCVzMyFU3KI0MjHp40lEmDjyFcPIjMkufFRz
40KZMElFgFBDCjV6PMeKOww4lPhyVd5sqGmy4oyk8Lg3q9ZATwkXZEuPCa8VqMfLbo1ORysWCnuB
q9c63HDWSKDHKmXFC0yc+2oJ5IWtuUJGdm9h3NbQZ2JHRlxnO4TxwY4yH4okeSSJWWLf8vYji4eU
ZK7rVu2I2NPlZ8kGMu9mWkh2vMLbTmY0kfy/FTFypjNPxQIvch+2pJo46/MvS5IoSKdMbHfJm23A
hwMY3kbPzEgj3XcmnkY0TcE0TyRrHtUvskCiXP8A+9V6tVuYq3EeXoe/AiFiQQdbctB6NuK1vQ99
LcFtSNRxPNClSbnt8VN8j2pTL8rwlp/lrED5LucpbM+RygYW+5VR/GtzJHxGRqj+K4S1D8e2yIrt
O2KY4YYxXP0B6g4x9NuTlMZx3o6NGcWbvrAC/j5gQ4+XtX5+5rs+AIsz4pBHuvyaHImbaNpkedNm
wSybbgpQxMYCXGxkVtuw2p9lwGqXYIbNtmVBTDKjr8pQTlwdrZTFSuawONlXWDtaCPFVoGhirIZZ
sU37tvxi8GIrS4z5U+Fkvl4skSKgU4uKiZGRs+DkybxIWEe7TvHsUbRx7XjRExVkLkKUmhkXMxgW
i3CNIo3WVL3qdI1Enb+VkoFysKOPLR0njncmKstpc2JIpIDPG8kjCNmdo1MIXNxIsdHSOMQIcnGk
XeWBxcaP/JtqvG0zO+TDkiY3NdgdcomMqwqaFZkizHwjJubZa48kIixsmTzC1ZuIWret7mGL8Jkk
yMHKgjni3uKDxwSgiVO042R2lJAwD8lksN1z2VZHLGoiwaVWLL3A7fNihmnhKjIjlrCwYZpY48Iy
LjJjRDLVhjyySwCe9CVAksi0WruotRai1qL0XqSdEEm5KKkyJZDBmDHiw92kmn2XCiGP8g2vGycG
TGLuuAlJhQilxo1oRgUUXuYKY1ULQtRIAJIpujA+MDupQ/ldioxg/jMZJsRXYb8hSAO3YQStM1w5
KhOUeRlZEA2eefJgjh3HOysnatxxpu21W5HoQAoAYWru7KBLULFrtSyXcAEFFp4I3DYsDU+3oKOC
9NizCmjkWiRWL/qTpkPZMLPxQ0yBl+Oz42GuZuWHi45e5BqJrPl46ZNFJo1LyujLC2CZscoPOkjs
nkE2PuME20ZETfhzXj27IAjSXzY8ksUryFmmmAE87SGOMKJpljG3pj+POx48afacfMnU7dCqply4
lTNJKIZsuExYLE5G34k9T+XbmykmdgsYTb8LLmGVgT48+FD+PhtQ503dXv2Sdq9G50Yg0kjr3Csh
EI/bUlkEe447wZ0KucmGakjeGvzFenwcKSNpdzxUXdMd1l3FbT5XloopqSElY/PBS5GDIINn2rIC
4WFgZOJvcohTcNwkzMsIcf1L1Lg4srS7S3fLLumOy7qqiLKx5k9tLaW9B1WQSYuNJGdqxzSbZlxs
826RyDdGQRZ+NJUeRBKbHg51fQ20PHzq3Ea6cZvV6/HgJk22Ek4uVDGkm4xMu6pb8uHtbKVlkEbj
PJTGxszIx2i36PtTeMFx+57eaGdhtQyIGoFTVr0L0VBDY0LRiCGCpd0aBsndMnICKSYIVqSFaMag
lLtgY7Y8EmIl8vMWCsPeZMbIzvkCzO+7ZzUc7KJiz5Q8eTjy1Jjg08XbR51i58+LWJueJIohi78j
Expidtx2izEl7ib0ADXatxE5HaaVGY7Rnja5cjdJJMrLyUyEWWRDHuGXGHzcrMOS3dLWKZFyId7z
9uZd5TPp8PZsoz7LnRBkdDwQolKfth2mYbecJAxZVpuxqSCA1Ftvlr47tckeQ13OZkrjQ7tuxnd2
ub10o6e9wRsSOEz/APvaDg97ehbi99MbuL5OM0TGJhVqAucjDSPbtOnD78FvTvqOO1WNWqXLxITJ
um3Rq3yTbQZflWKKPyjIam3/AHdm8nyPLCbdvsjN8WzWEHxRSF+K4Csuw7Sog2/Bx68UV+McVqFW
q3DcfQ30vQ9W/KbLxoE3z5Rmw53xrdW3HC3AXxshkx0klx2hwAQ0cssVSzyzDZ8DJSd5sdEz8+BM
bZF/Oxcrb4WUWAuKV0BkzIHmfeNtWn+QQAybnucinD3iRMiZklTKZTi7rs8ka4m3TPJgbjDTSorP
LAAciFqmmy0JyNw7VBZsafthw91/FrO3vb5mk30RDv3/AC0TZO2XCfZ4lnbFypRA3emfIZY2BDyj
yTMpOZnT41Y08+VkTiBZIYFxy10CRDtdA0kmCnkkhy8fNlgSzIqCAxlJsRZ5xFKtZOSnlkV4ZUys
JYo8jHWMyfbkhmi3Br7Zh2eZZrGNJpXxg8ZVlammRTNLE6BpIC25Ysafgy5pOEIjiRSzSCKOIIST
W7bVFNWzzY2Jj/yTHyp834zi51TfHnwljfuqRChx5yCrlqysoY8WTM0r0FrHxWYwwQpG2FjzKnxq
eWn+NbrEkG3MssrQwrBOciUPKjuqRPDKTHLi4kqoAkbNRai1FqL00lTZkSVLnStTMWN6VGeooRbC
2mDHl27dfw13befyYFt3AUq1erXrsIJBNMTVqIFANeSASEwzV41IaMMqwRirUAwo91BWNCE14KEP
MIoqVFCsO55z2xeHb5oVWDGj+MiBoM5EfEsKC2NgaMZooaswq3NrmmjBoRuK8QDSyfbCH7FBqTuQ
D7qC2JXm0YNTYysIgoRb0IzNlrsUwycCOTcN5TAxPHvWGmFnXvQNXtXcI0iaKVmiKTSf9QPgR4Is
RarClllUF55DD+4RxRRZETFrVNMAJpncxxhRJJas7bZvxcOdsRXETLtkEKYO+hYw/wB6/iRxwY08
CuzBRuW/Wpp5JDt+DuEok2kxBJsnEY5cOZldyuLcyKKsSkHbTypGsm4lmUGU44mimmIjoz5LFfI8
KoAqwMQ+JHLI+0YpH7ZNAIM7HhWbOwJlbJyAPNNGYmxsgGGRKsKYCmXlOGWttxMbIw9wUY00HyCc
HbvlQhkD4e4RyxGJ/XlghmqfacWQ/g7liqNwzsaod0x5GSaGX1Dpzp445FfDxZE/bIjX4ecrS5Gd
EV3C0ceXDIyzRMbaHi9iOA6Wr30twWrpr7nS1XNZTo7Crm163A3x6vV6vV6uaEjChkzLQzssV+45
tPm5TgsxI50qkVCov3BQ0tw5vW2xKHnyoUTI3NnLuztrbQEqcTcoJEysEAGOmW1EGoM3JhEO8xMM
zeYzHJLJIdQzCllda/IchJpHDYssLRl+6RSrVBIInY3NbQ8Cz5EBaNHZGkILwZ2XjmPeo5ax9u+M
biNw+Mz4ispU47YyxoFsvNf3A/smXfySzRx0dwQn8xgMXLR5cbNgEQJji+RSn8FyWYjleje5o8qJ
FY0RZtnA78//AL3AOA17jgGnPQcIqCQxvlRnIhTFE8E2A0DTQ+KXOJj2g8z9R1rnRZVp8zEQSb5t
cZf5JtqCT5YlSfKsl1O+7wz93yScxbVvszn47uIig+KRAD4vt4ZNn2xFh27Bx2KqaH016v69tbei
KvQ4iQKnysfHGR8i26FZN/zslMiHft1hz4TDlb7sibYnx7NkwNxzWIxJY0kXMRwu3F1TqEsqKB2z
Yy5MeNiTZO4Jk7TCnyD5QiRw5W4y0m27vOItizWQfGV7F2PJjEce6Y6y7ukNZfyXblhm27PkjS6n
xsa8TGos3KxU27asvdIdxjxsKeDzNCs8bVlSx+KJUjXvz7pEWkxoMaMRSwrHi5EUK7hjeVPDhtLF
CgkRZDW4QxzrDuLACISF0kKl5FoZRWp4nyY8TOWOk8ciJzoRBKYAHnVmvN/kjgjKBTcEXVQss3iM
mHKuO7TxHJeGSeKJ4mQ7wynbsNj+RFiFxHjolCS8cssyxwoi1JPFIw8mQo25cUxzm/hWQY6zz065
2KY3VxarC26Yk0C/HMHOi3ZWRl3vJjSOScy5CsJFZSjDK7FyslpWPOlQk4mE8zYuPFjRjHQOMaMC
MLGZAkwbCkNftZLKqRh45YxPNA0Jai9F7AvRei9PMq1LuCipMiaTS96jxppKiwEFCFQMhO2PI3jx
LtOQM4xQRRR/LsCLFyMclolUse5u3nZACeYfuNib6KhNeKjGaXClfHQxsPEth4loEHS1W0Gk3JUF
3yQXd8XMORkxS/ibL8e3PDrc87dGNuE2qyUbVMUiWMiVPGaAa7Xau0iiqsAiih2LRCVa9MpBjv3i
wEecmM+PkpPj48oxJl+Y7Wse4bk+4ZYa9d1XoNYhl7e496RD8fFgWUsJlk7ZqLlaijMlRQpGO0PS
ZMyY0s1hLMzlECiSTtrJyJsbI8rS1t2yrNuQ2rAETT5+3ZE88kjRlpTGPtlWOVdyfORVRpJNt2ZY
F5VK6LThpVyNphcNBuOCcfe0NQvDkBFVAzuplTyF1SIJKVryuUM00rRR5LuGRVWdvI0hNPMiVNuK
LUu4SuW8ktPiqwaCZKE4SnWKaoUz0f8AFzCJoVpZMl3kkctCTFGe2V+6XCm2l5Vjlj2WfHbbc6Kh
k7zj1HvuPePPwpT65AYPg4bKu0hGA3eJzu5RxnYbUOYq1Hh6cB16U6LIow8Wx21Kkgzg35Ofjqm5
xdy52Kxq3CdevB0B1vwe1GuWgqZ+xGJNEEG/Jris0Xgq9Xq9X4BVqCXowtSoQSSa7wtfkixmNbYs
cs7rdc6MNT4s1yLGlUmu21FTYjTlWBKzPNt8WTFlbdk41Y+H3Jkw+E3JNdtW4483/BlHFRcjm1e0
CpIhFijFWh8csEy9khtbQV5ZQCSaT9SkBBuLqsW4SyjIlleEmbKkgieKNM7PqHbcDPiw/wBx2qWT
5Wjw5G64+XszSgUZVahzq1EWCoWpcViyIEXZ/wDZnf8Ae4DZQcvHBOfigx5mM9AqweSQUuVkrSTQ
y0VK8FjpbW2ijuPYRW3TXGN/hnyYllTKQM+VAr4MoUSRQCVfUHASABJE1T5+Hjld621hL8hxlJ+T
x2m3zIaOPdN9yay13meUfH91mEfxTIJX4rjCofju1xUNn2wVHj48SgAf0e30FqtoOGWeGAPvGAjS
/JYr/uO+ZZ/bN5yhj/GsVDjbZg4tIiIJYo5Vyfj+44eb8hyI8/Z0uajmE+wwVPlPLU2dh45/fsNI
pd7mki23dN+iikTO/HwdtgXKTG+OrD8wyY5s746hOQL1JPDEJt/26Gpfk8hqfc95kj/FysiaPZ2V
ofim6TZB+NmCv2+Rg+xQTVB8fzjlD9v2mfOxI58vCx96grccLLxsjbdpnzcclsVlKOGiU0Y2ShkO
hjy6jylKySxTxq+VjCbd8iNTuD5ck8MfZJhy49R5kiHvjlEOP3PKixvKl6T8iAjdcq+LvZZlkSVO
VEXq3NkqNrUz2EGPBE9ZfgQjNiGZMZZGlyya3nP2tMLaS7ZrzKYu2fsZ/KjRSMv5ckDbVjwbnNFG
gEqHuycCLIUF4JBI6NBumUtfYlPm46FHSRWUMrBtsn3f5BNFk7djr+Fue0YbSblAMaeWWPsmmvRu
ajjLVh7c8hSKOEBvue3ncgAGr6dzUWosKLUW5FqLCme1S5UaVJnu1M7OSaVJZSmHITDjwxUFArlQ
tUi3RNt/LfAxBhxn5KmNBuu8Hc5UMimJwwZrUpJqwauw12CvGlEIBJlxR0M5jSzNNTSnHQywOGmD
VaEmOI1DktcyKFjnVz+Woccwek5qIXEmRHBPh7tHlvJmxY0ySK8fySSMzFgAkyuVljYvKiCTKkan
eVjGXB78k1i/iia5jcTwEixo0VBoxivEaeEkFSSQymQ3pe1myJAqPs02RE0Eu3bf8b2aF8D5F8cw
pMBJmSo51cBr0GoNQPNrPURCUexiGAqytX4kEojiaCmamepZrU8rSFEAEkgiXb8qE5OeqPWxbbuG
U8mFkbVHFvDywSzZ0ssWMzMkKrTWGk2PDkL+JiLGuPNEr70sAhx8rNe3JeYIJOVtuNNUu1ZWK0O7
ZUdR5+JNUGWzyPJd0ILPnT4mN+4QRCbfc/JZJ546fOhUTbkxppJZGEd6ICjyKafI5yZig4GVCyJg
pPJJteYAm4ZGKkGZt7w7lLC+TFjyzPHtHgg2/wCKTy4+8bTvEB+ISzz4G64eOwK5uEYd0ayy4OWk
uz4Mi/tGTFXn3zHCb9DUe44MldfRPE0UUhm22CRn2/LgX8zcsaODeMSQxzwy8FqPqc7FEIbb8Zqk
2z7hFumMv5+VEU3bHcJlY8g5Eeqa9qPKhPAaZlVXzEBfJkNGQmsqYpBDnPG0e5RdsWdi2fIxHRlA
J4L1eu6lYXSMNXhIo9womiQKfRELHaYyMj3day+5I3/VWMo7JCO5ZEClSaZSukUrxPsOZ+XXyLcI
YBDvKlZSJ3ZCKOl6sDRHH+qLhikdGyl58IF6C2LAMIsEJUzPjvDu0K/G9sYGMV29wZWx6XcJY5cj
dWlXHaCaeba5rNHNCVnIpZ0YXU0ZO2sbKLy1s/8Asz/+9rNJ4458iWRizXJUUJFqCdkfHU5EU6y4
zeWCWsTMtLJjIFKkaA2IdDTIRVqtQjY1+K/ZDH3O2KGjg7ocnLQNBj7iWfMkU5OUb4Sxlj2CKPh5
Vzq1qPINmYitJmYsaDfcO82+4aIPkGQzPvOZkoMzc45DhbpnImy7h5V+PQFYfjgWRtn25o8ba8DF
JjjagAo5+jbS39TBo1amdFabccOAzfIsRQ++ZstFN8zGh2LJZ49hwVaHExYTVtedEgVkZ+FjDf8A
I2LKxkkkAxNy3b9tixt1kji2oF02vEirBw8RI51UVKLP2O7ZCvBvsufgxvumSubmbbkZ+MGm3LIK
YuK9N4Iym17hkQ42xY0UUWFixrNniEvuWVmPmbXLiRzfLs1EyMrK3aDH+OT5uJgbCsOaIIg0gWSe
basCYQ7fHiR/JIcl5dn2lsyDKxszBkSeKSmQGmhoPMlR5VQ5JjbuglowzApNm47SblLKimNx+Jjy
UIs+CsxJoqh3BTUf44Zp4rybGxihGbiGPdCDDlQzC4FHnXabs4sr2pXBrJaMywRKqM8XZl5eGiZc
zZE3xVFTcJMKJseFIsTG/Jxp6k2fcJhg4MUEM224oMkOXiGPIeZFjLLNjR5cSpJHKzxJWNmpA4RU
mxme1TwJPHmbEi52HmR4uLNlDNreYsyDJlV5aKteLHZqhiRDBKBV7mN/8obvleTkGoNXdRe1M9Fq
L13U0oUTbhEplyppKFXqPGmeosBAVQxy2VwiXf2Ma26UqEiWV8N8DNy5czC2aLcX3D4ttzwspVgD
S3paW1jozBBM7MvlBpIJmEeO4pY2qVFJ/Hx2GXhNBUMrKymGVZIUQpNK5YBWTmGNhO1L9kceAubF
gbP+FJte0w7hmyYAXHfyNJIvdRcQvJGXaHGQjIye6RYVRIHglmMSrG0SkSiWySQErG+PSEMDyo6e
7mp5e0CWRkjsgyWkWLZNxnlo5+NkS7Jv2Njxb58ghnwcjGfHMchvFlXpXoNegaBq9A0GoNSHtbLk
He0htJMAGkaVkjCBnWNcbGEqvANvlwXkki+PzwRLvObj4+FjpM7mGTtiU2tYE30YgDdt58dJl504
2za0xomwou7vzoagyseYVJPHf8qBKyp8WZRiQunZFjxDeX7Yd3iEc+4TzpHi5EtSw4+OjzZObMoy
IjFPEaEkdTJkxQ42VhmObKmmMELyMYZ2OFkDGMP5jKYs41uc3YQrGEdpEE8yk5WURs9jt0pifMd0
jjcgsTU23Y8hnwsiMw5cylNyQGOSOVXjRxJs+BJR2WeNml3rHYbu6GPcsKSNGSRdTXL0LUafDxJB
JsyFjHvOLS7u8Zj3DCkAIYa24B6B0uRUmLjSGTacRj+25MRL7tCF3SUNFumHIseRBKSD6A1ysgY0
T5suTKcLOskZjjvapc+GOTIylhjfJ/IxhqTV9eVWq1Wq2iu6n8uevypaSSOQeI0yNXjJKIVEMpTI
BzkjXIdanL5OPlQ+NqxJlFPigo0ZWoQtSxtK0sSxVatu3J8BpZXleldlKyq9NGKKkURpfjitc8uK
xaDhRu03uUUFcaBJay8uTCyc7ObJTbp3il87Vs+64mOMnGmyaeGRaAaphaZfLEo3B6aLbsgybNlB
ZFngPkY1hLJ5VvbZ/wDZnf8Ae1yReJo7ho5DXhNCFaVADiCaOokXLgysSeGRA4O4Z2YKw95yGLZ0
qsu5w3imhlEd1L4qsjYrJEZSRtzBhBigP+R42zjGZUymkhgjHmkYtPlT+PBhQkZZQUTYNnYaH87D
7W3uK537HFPveYWj3Tccip492jMGNumUsnx/KUQfHSSdgxLQ7NgRU2BhPUcccS+hah6dqtVtLegO
C2ltbVbnwD6O+trB9wwIzkfIsPHWf5RMqPl7/khdl3PKWD45AtRbVhRMkUUZvQoVapcnGhDb5tgD
fKYPH+9bvKyJ8kyyPjufkmL4vhxsdj2rtYyIdgwpct/Pm4yRZuNLLJkQJUW8YMUGZvgLtveaVkzM
u6qZWgwpZKedxJgrhvDHjM4Xb3kqHHgxxTZ2FE+Z8kiAjTccybbtkzgMXYNlik2fAxsB5V2/KWfH
CzY8qZQlgjmjxsJsSfI3LDgTGycfLdo0cLjRY9bzsOZuKbb8eyI5s/asjApMtGA7WDRCrOhTIIqP
IpMi9IYmJwMecvtBB/Bzkpzmqo7RUcuOj+TbHqbKyuxM+RKSTHnqTbyBFuGTjmLLgnoGniDU8kcI
fdtuhbM3UMP3HMkMWx7plkfEsYJuGy40cse24EERTHVsfb8HxLHGK5XC3LryLL2oqBQAlRhCd1h5
QbfgtiiCJjiz4YjlXJSSbcMhGg3GbyZcEeXFu+fl4+Js++zYOJKDK/ZzOKHcRDtF1ZJDUc3Mn7kP
bRkoPy7jXfRei1M4qbIjjqXcCakkeSr2oMTUOE8lQ4ccddoAAo3FWChoJIwq0ENFWYqvaM1Artk7
ZgtsO74tZebGMaVJVmAvS86C00njCymRfzWjkjVpG3N75GFEJHDnyLzWZ4khixoxiKHCrKJ42jji
HeCJMXIjTty6l8iyoT2seUvNshu2DKyMmJtteabA+LZmWuXmbri4qSPzdrlXkkpxIKglQLBgIsma
TI+2jxpkwXx8tlix9uZ/LkoIpsJ+6AyjHZMryN5oy1GwqZ7VzkcLUssYOBNgZmNNiQeP498aw4cX
eNmxY8YnuEiXqbCKkPYxTdlJKrUljV6BoGgaDWqWbud5SakmApmaZo4woJCjJEzw7VlyrFLiwyLt
WFDj4fyrHQR4+Iz1j46RL4rMaY1Y0a3rLkij2/a5cyXB2uHELdVTl0rLXDIaXcJDFIAT4UppQtJ+
TKZUxUWScioYZJmjxMaFQXJycaV6wsaaKUyELi4MOXHLj5OPLlZeQ8OLHjSQ4uw+QRbVi9pwsYpL
j+NUHid82eJMiSSSZGdUUEnaHhgmxtt2vcGyNu3ba0i3vcZUg3fElpJI5BoKnxcfIDYWVELQrImR
nRLDuGJKfa9SQwy1NtuLKZNpljW+8YarvRjaLc8OUqyseI8bojiXbMN6O15cAXK3PHMW6xsVnx2Y
6+2lvR66i92UPUmFhyiTaonr8LPjJyd1gUbsqMm44b0jI46V1q1HXf8Au/F+PMndM3bHn5bYwbc0
cjHgyD+PHNFHjw9kkZje3oc9LVau2u2rGuYrHywpZ8ZgQt78pFscFw2PQCqmVtazSZW2TxN4pUOL
kOaOF31LiDtE0qMMeMiSJrstuGKa1FAwMZFFKK8YNjKLNwwMDTCx1tp7Yi+QJDOiPsOZuEs+0bXJ
teNGsW4vHWxbXDnZhxoIYd4WKNsyMBsi/dhsJMeSIGnhIqDIy8Y5e4PlxsjK2DL3SVs/+zP/AO9f
TlRAInxuxpYjTKRRFYeA+U2VA2LNFNKK2LNMxzcKKHInwUlZcHFRniWQHGBqTCTtb9zhO2bvlQzg
xvHmYnjbaxeRYe05ojgkObiB4N1wVC79iAn5FirJkfIsnJeDfc+ap/zxLj4m45DPseQhg2G7ybLh
uMXasXGMkEMgjhhiFzXOun0/PU/U29DpU264MJG+YVJu+C5jljkHHNkwQ0+/barS/IXCHec2dUh3
bKZPj+bK0Pxvb0aLDxYmAA0tRliWp9426Cn+Q4Sk/Ic0oG+QZBXZd1y1j+Mw3Gx7aGXCw0Nc9HdE
E254EI3SSL9x2LcZcLPy9xydxzHw2jkZYhWPt+4yuNp3Dv8A4fKcjc9iy0GJtO440owpGKKHk26I
Q4j5OLCcreMTHL77lSudv3zLyG+KxeVsXFhPvDeR1GQKa5pI5JhlebzyebFkfOjjWGUkBUcZE2Dh
5T/Iiww94xpZjEbZgUPvOzz7lBtPx18fJ3XaIsRYswtSyRvTRg12MtLOyVHk3qOeo5waScArkGlZ
XqfEglU7RhmpNplRWusreSQRbnNjMZsOaHJycaFot6zlqTL3F4w+JLPDs+4TRPsuFDUWLBOmDFm4
03cDTKpMm34z1KufgNgby+QyFWbtvQpm7aYrRIofZTKHrLHfCyY5aHnWNPJEX3fPEuOAKONjJFkf
I8DGfdN5GcfIe/FhfJGVsWZBADagxIlS4VzdHNK9K4Ivag1d9FxTSgVLnxrUmZM9Frk8qTHnlqDa
0sIoVUAghheQkUpFrWrmS3kkpUN9CCRkRiSGbbcvKrbMI4ePhRRw4/y7Hi/GFJ1XoQpDpJE5nIoS
uzzRwZDRpHEipIK7aZAKZHZWDILLTRkgYqX8niU5KCTIQlozdJSAAO6XM50uVtUzzypGuwJB+279
DHJg9jzMrCMI6vTwuCGQFsiYSyStKuERJitlRQsuQuRkoyLPlQxPLAPAZGE6R/lloorta1O9hM/e
yoAJCwqbYHyMPE26fasbaNu3vOOBlPt8O77zjz4zwAryFOnPJwkenxp4QklQ5SkowruSu5avXdTy
2DzcixmZEAArKylUQ5Mc2LJgTT5WHsGLHDkZ2fsk2RNk7hHHEEUdCQD0otz7qYmyYK+a1qtQQU8i
Is2c8lGJb+YBWSHIEkBSSPJcVLnyYru7SNFh3qKBUKx2J7VEUU+QfIVzt0nxBj7bDEmNjbJm5OTP
8amiLbbjQwTRrjZMWTLBIGDBjWRixSVn5OVisuYklHHVhhYrPlZO2RTwbBs80j7huMsEGJmrFmtg
4EynZ5oyuTvGNUG84shjlhlFqNMoYNt/YZjIBDHdfz54KiycecasAyybXgSD9kKURvcRO6Toybph
NHHJHKvT0ranFxXMu0RsZIt0xqTdZ41jz8aQgg+l76e2g4OdyiMH2/Fan2iw/H3OF/3DOiqPeISF
z8RqVkcEqDnpE+JiZBx8iLPkynfFhyY2+PwxgK0UnKN3xw43bFZJihoqRRFWq1WrtrtoJQivQhrw
XHhIoQSUMdjX4Jam2+QVDhWpMUV+EK/CZn/bYqGPnRUcjNjA3CAUs8ElCNLT4cxAxtyhQxRzRyYZ
FPHElZGSihmLHhilKkOrDsD0+OwpoytNxP8AdHwo3a0y2fVQTo36fhuzYeQu5vFjod1njkj30rk/
IRFHuaPcbFHOMvIzViXemSSmasuFlG0SAo6Xp46aMUyCp1Aj2wXlrZ/9md/3tBoyK4nw2qTEkUyQ
GoSY3O2Ll45QxNtk0cLZubiTj8zCIgGHKr4Miq3YhDqRNm4sTPm7fOIvkcGNDP8AI9wjcZfyFpZc
z5JM0+1/IJi2HkLLifG4mV/j2PBXiwYHxtqxViIjjokn0wPWFW/o9+DedwZFL1zNB5UrGy3jbAzk
ykoyRhX3TDjaTezUu8ZbVLJmTNHtGZKYtgdlx9j26FYoIIq507ogfdNuQvv2KJX+QzSL+Xvkznat
9liT45E7wbBt8Ij2/AjYAcD5eKjyb3t6VJ8gu7ZW/wAzyYW9y1Jt+2rX5HxuBvkPdPmKiFn2SdGg
2oquIEMWNfyFPLJHm5mIu4bjNHDjbmMvGy9xhhWMFVE24Z1R7Hlz1j7Rt+OY3hKRWhYL3DcoGjii
znx6xcmKSKPIWjOWF+7Gx8OBYsHCdCuyQzUiGOpNztLJgxT1kYcEjJiwCV8nc4cP9xnWgLjK7VbO
2KLc4YPiqbbm7xtWJHBBmFnWeNiYwaaA3EssVRZYNJkVFOKWcXTIBoPemy8aMbnuW15Ecj7hNJPi
7jmbbCdqieLIgajs245iwbbjpMsUuFLj5RcSwRzjOxJopsWZ54Ra9q7b0VYr+GvkCooBN650wDVI
psh++eCdpzKqJJETUcYVcM9sivUmbFiLue85GY4YsWrGxJJU+NDPh3B5MYx7rh9s7t4o1ZZFmiJK
tQe1LKAFcGu8CpMmNBJnk087vV6QFzHgyNUWHEldoUO/MhgjEKIktSvyMgsI2crEq0RR4LXD5LYL
YW75c+TtGXlOPke35uVEOi9RUhFjjyVDARA4HdFEQGVCEVHFuRRzJay9zOFRC7MaALMImFGPtqKE
M0StG05tUIuXiOVPjbHnDJzsZssbRtj7di/Ic7N8oZ0a3MtUqlkEbLGyOKWCTtW6p0oi7BgpDrmR
Isys+K7ysJIViVVV2FZEtlhjIDMsa5L5MMWybjM2NDuUGW+2iI4nyFUXBNCpIQ5ZCpkjcV+hcjFI
IkqPKZajlVwGruoubSzAAFpSqgAVIxLZe2xyY+JKyS4m3ZlxvOEkW87j+45W3gRbfDMsiA8vcsAP
fTpVqkkSJZclI0KS5ByZJYCJnaSHAKJJiY3aiRxru0xbHk8ndiTRUiLZiqUJQ8mbhvFDtKZCYuJH
nZuZJtEkkuL8fwceLElx8LI3fOxEw4fkeWKkxPK+QcmFEihkrzZURbLiMeTM08yKTXcwO2Yaz44T
cMZlyJVml3DEyETb555ueO6Z2WghzMaapcLElptlCsr7zitFvcJaPJx5tZ9vxpjbcsUf/W5bCPco
KTc4royuDwPHHIMjCgnqXaQI44d1xhDuk4dd1xWbvj47VbQasqur7XguW2qeKmyN0xqj3nHNRzwS
ijofQJ1tbXnwXNPBBJT7ZisW2ggyxw41PlTSxg2OzB2jXtssipRGLkUMbxJBEYTuOB58UQqaONRx
CaOG9fhtQw2NDCNDCr8MChiqKGGTS7fIaXbnpcCIUMXHr8bHr8eCjjY5o4sNLBEK+0Vbn70Y4mo4
UBcYc0ag560c11MeXBIXx2Dy7b5qz9qzsWiLEoyirG2qsQUyLUs6sJpUsTc8MVmTif74aFqEVKFB
sLm3b8JzcbFb5FmROwYFQS6bx/mxcKbywYmfPjtFvaTrmh4pBIkoyla+1SdkzHk1xTAEOKyjZNpH
3k1s9vJnf94VLMsYAPZ3CjcFHtXZG4ycSOpsftK7rLjwyZCkvkkV+ZzGdak3HJBx9y3yTHTCzmGP
tHdQ+PSd217Bt8oyNixoJosHbZaikBXLHhO5bpD5sHFkmmiHjbd8/wAY2LFVnnJKkWPXUeiP6Db0
r+rNKIYcnIaVy1XvQ7qUtUWRJFUeZlS46plTSJte5sy7HATFgYkQVUiWXcsGMy77gxo/yMov7hvU
xXF+QZqLsGSZU+P4KxptuCgjjjiF9LGiyqJN02+OpvkeBFQ3jcZaQfIp6G05BC4uxwhNw2OOQ7lv
My/jfIZgPj7OI9h2pFSGGNPnGP5MdIyHx1E2P2gVjxFGxmCyLuGLEM3c0U7hO7Rx4Ujx5mDNjbft
cCyZa3ABHbQApXZahnMZjy8WQZmy4zATDHONLHIQxDBe10kjCyYOQtHJyIwyB6UKJmyfCRNYxZME
NLNmBWlmUpnbjjom45kdYskWTDmugGVtwzseP4wuJuO84GImLi5jPJ5luVBp8cXEksNDcYlC7qO9
czcJk/Gy5jBtmCp/cNoxUn3LMzkGz50zfseCkaSz4BEvfjyRvkYeNucds6KMiB++OSNZVhjnj0Bq
4p+6wkajRFZOZJjvjZcGUmkkkMA88jyM6TN+MmGsUd6g+5s3MhwoszOydwmyNufHhjWo42lfZ9uX
EwyDt+RNu6vuyQRrFu20YyZMinFmIBqeO9LJTSgD8wCmyZWruJqxqPGmkqPa4zUGMkQNloE0XNFI
5HSGYr4EQpGWKxoB2otd66rLGzTTOjRt3rpmxIZY4tp2+TYMqCWPNmSHFBJK0rche/lkREGSFUXL
2sFDKtkJK9quTR6JckmzFri7BY1lBknKtHIO+LmuQ3OEdscmc+Idq3GfMWDNyE3JZ42T5JlxZOWx
CUpFja7LTR8pYpSe6RQymnc0Onh7lg7kkaUosc/klkd5Jegnl7RGplawFT5EQbBzcTNxp4IJ4th2
rHw8HdGkxMTJzcjKar1yplVg2K9MhVihBnxUkBxchaWRkMOWGrycnmEYRWlKqKC2qR+xc45ERxZm
li2nExRnhDb5CwSTDxlyXhDR4aIEhAIrvRmubkAV5lBVg1AG0koWsnHnBEZNSyu5k23OEeNiRYsd
6lyVSsjcbn/PKZMKKRHwpEmH5eKuDl4xCx40m4ZkByXwPjuScbZsSLHi3kxxY6b06xZReea0bQhY
4Vx8oyOzs5nxYpWZ8qE7rlwsh7WI5DCxvyJziy4KbHkfm4+XgYRLbfl45Tc5IikkGQku2oGmWRTA
01l3EIUkjkEmNjymXZomYpvGMy7wQy5eK2k2PBOPw8rHBzAR+BCwM+444h3DEl470+BiO0u0IXki
3PGRdxyoUi3OF2TJxpDbhPG+PBJT7Pikrjbpjn9zyIWTdMB1RlddbUeL24TV+DeIIZcTxJixTSLI
+y3GN9hBSGu16D5cZXKzAMve8vHTJ3KaeWDyCGPMRy2QiLHlQS1HiySgYFLhQChBAtAKKvwc9bm+
h0N6Ne2gJplV6aDGYDAW0658EEubun4UcVnlYs1M5b1oTaSRe1+GEdyEWNKxUl2bTmRjgorSE0st
gktjk5F8fAnaOnlZWDmRULgTRC8/NIXMeRE9wwBDxmnJrNI7dpHOtn/27lnYse4fnYnbuu7qyYfy
KMQ4e948OdvHyWLPyBumUJsjLzpFVMuWo9rneLI2yfFEkPaZLCmbnc0GYUuXkpUe97jHSb9lgx/J
8oLgfL4ojk/MsB0xfk2A5i+UYiTbjvG3y4WLafc4Gg8m7ZsePDjO2fuEcHijyVaMEMNMdVJcgtwW
+m5+nb6K9ZW97dimT5Rev5RMDD8kZ6x96wZyCCKZlFS5UUVZ27Y02M78yRQNK4FCVKDd4xpVxsqT
etuVT8ihKNvGdI7HesgDZsyVI9gxw8O0bfDSY8CNfgkycaIy71t6F/kAevzd+yGGDvsxfadviYr8
cxU/eoFH5e/zkbdvUxX49BdNn21KSONF4LV8mhEuzgitu3LFixv3yHvmy90c5ODkW2HbcDMrJ2iT
Lj33b/wpImbxb7JbC2NCcwMTwi9XAL5caBJFjiXGk7Et3Y2W65McWLlD8HPw6lzpHMriQSeVqAjQ
ZGXL2RStIJHPajMWVFC3AKSNYSZkC4c0eTj5LpabblzsdvjcWLmbrkbS2NFkz3kkyxIuETRwoghe
SApucagZ2fKI8KSZ4MXEgCTkqrglhdTHFkRkT7bNGY3EkEciwoYAMzIxzDuePIUYMBau2rEaN2g2
NqVijDahIBA0tX55UaIWSKXH7MVJJCQVZ3rsTHi3LaJs1tv2XHwRvmQssyi9bHtcU5Alx6khizsY
7Kw3CDd2SOZcnPrdIM+CaIkU61mRlVuzUthSgtUWDI1RYsaUiC1lWh0YckIuMXDuQgqLHdy8Esb/
AJUYabIc1I4ZgiMYxmLTTZteaNHRWlMTOkt76Zad8ORgZuS+2Yr40EMKLD8qwoIJb0rUGYENdlQU
ABQUV2nvZ+ZDGmRrRralWxeMlo4wBHEikg3kS5hjjWluFm5mc9sPbtrwwQ4uHH8ZxsaVszHinx+3
uaXJSNpGYhJD3ede9mUUedEdxnaMUsSsxiQtNHFd42dHsi7Xzg590j2EhaeRVCiZmdp9mjycYYbb
di7P8a3CePGzZdoj3rfBlrM7Y0jZczUZ8q6TZcdLkREPlRo8kIlWSORa8UwkJscvCBJuCuRIggQy
Uo5gVewfLiWabxtHsu1yz537NgCOTIzseffye/EkjV8WIGJ51x45MvJmGLCMdGNO/JVMrKAqy5MS
0TMWAVF7Zcl0hx4EZ7B5kQS54IdpZjHAqVdFCCWalmlXO3PIPbjYhaLb9pilyMza8jDjxvkO1yQZ
299k43KSV2MrKVjCg+MQxyUALHkGNOwVcvI806gGjetswJDGmR4a23ctpePcJZmljzJVVJMfKSXa
k7vy8/ErHzsXIqbb8eWnTPx1jXBlk8u5Y7Q7jizH2kx4JTkbTDI0sO44qpuc8aQ5cEysqSLLt0ZZ
snNx27tvz6OHkROM2WN4siCccbKrD8PEt+1peXHz0d8zNhEe7YZqHJgnoj02ANS4UDk7dJGEk3KE
rugqOeCQHlravbjtV6Os0yQoZHzpcuETMuIBWCiJjs4Ve53p4Vry9g8rNW6S9hhkDGeSWSoksJ5F
jXHcCTHXti1OnSjoRwe19CL6Wo0Bbh6VnSMuMW8TNlzOfyI2IGO9HHHa0Mgqx9QGxnF+LDKifddu
bEk1RGcpB93dyNEmgbGYdyRkpPHzpI2hIIYMLicACX7ZMNhJCVcUbmpFvWcAG2tVC1s/+zfWUb1h
yYapujQTLtu3ZjTNtJ7o8LY4cNY4lJANAAC9TxCVcjD7DNAtpoVv4jRS1dtdtWrnRq5ruIoSOKMs
hEc8iGPcMhS25TzDb9/XCfF+XY0r/vG35tSx99COpEEaaj0B6I+vGs08OOuZ8oxo6y94zss4mz7j
mGL4ghpPi21IMz49t0RCTE4W8ZeEc3eZo5GzsbxRd0kK4OZJjZuMMaUtQahagBUYW+W5V9k23CyM
Q4u3RMuUCFAelFhzp5I46l3Xboql+Q4iN+9Z8tEfIplk2qcIMPY8dP3L4/hiPdc9lh/kGQ0WzZjm
L4/hpUW2bfCwsK68bMqibNxYYZPkeGq/uu4bksqFG23DgzK/4EDSzbjlI23wRttuZkYBf5DGyPum
K2XJIbb2wMvx5LlRwMQoOXLITjzPUcEKH8aF6fFEbtFk4jN45IzlyYlY29MakzpJFebDYiPEkZ4Q
CmMzBsMXmMsNJBNKzxzMrYmWAiYqLJlwkS7piERZ2cqwHd8yHI2nFmyV2fDaLG2RIc3fExIsWDJn
7pZsq6YSNT4kRAbIgqHLV6jnvUc47VkqNyVTk00CTx4pbHnjPJ4lVntT4uPNX/JwJMfISdK77i4N
MoNKLBjYA3qRmVJI8qHGMuWVc5QSHABqfFyWdcKZivix1sWO4NmpHkbxuEiuk8jbNtGRlzLtuIqx
4mOhArecDyx7tvbKNpKNtu9Y8c2PusISVXDpNH3r43MkG3iooEQBQAVBrk4KsSA1rXqGAvX4sdFI
IhLOjIZ8jGx48jM8ckE8o/bJmp0lhfF8gBnlYyJkJSxTkz4qtLFKrCiO5Wzjgptu75OVkbZmzZCf
KcHNkANKaJYFDYpIxYC1FavarAmSWKCpd9x1Mm+5TmLds4nDzkyVlMhaHuAbnVwKEdmNwg5zZJLG
badwM7wMmF8e2XO28bpvGUiyntQ374vyJpP2/JalxZsdnnkkkmldJGdLyOiuLCvIwUmxbuVsjAZV
gxxFAzWrJmJMMQjWaXxjOgyoYtvypcbBw82WY4TRzY/y2RExGmldMaSRnz8yQPHhSvWHj5OMZ0jx
5uzGFL22KK1S4clPGykhhU+Mj1PE0RxARCtAWqS7HP2t2ODk99bfh5OJWTvsECTTSTyy+LcMVMCW
FcdJFl75cihjLjO0KmZ5m7rs1K0cUcuTNkHHxMfvZUjKQ+V2ZFWaZVrIzljBnnyDHAq0bKPOvdlY
XZDtUU6wYmPnzZUu35Uc+PhY8EMkx2/P3HJllQPCQIJoQm4dlLOJVEkcdCaRlOS8LK6uGNGt6yRD
BSiwwYTPPFBlwHFigcft0UpaHc8UnIxXeSN1MObKBFPDOMnbMWehHumFUe7Y1MuBnL+JmYoefElp
cPIiVdwliMU8Mw6VLDFMs+1YkiDC3HFSLdZojj7hj5BmwMaciLccWotzgcyYGJKBHuOME3GIUrK4
9AgEPBDIjbRCxGFnwtJm58Dx7tilYsmGbiPFJi40tS7RiMVxd1gI3SeIx7jiOeRB0NddTwWrMyhi
wtk5fgzc6Scou5SxwbHlzLkQywT4o7YP/i7MQzAUTUkqxqFOZkZrxiSCMkzzU0Q7BHddqyPyMSue
pr219qtVqNEVz4varGjWXgS5TJsmGEy8SOBz10E0oo5Bc+TFavDikLiRyU+2ZKtJhzwsYpLiKRhw
r98HCpIM+bh5ex254u1Z+WYvjUcKyrgwBiDQ5V1AFwaHNcgFZsVu5UUikUGnW1TRl6yYyo2qb7b0
TU3JchizbZF9vWtn/wBm/orbts0UBQQQg7ZjJky71tsGJGeEgkZU6QDIdpS4AprWJWjbS1Fb0ynj
BtXfQmkFR5syHaPkOQjYeTj5ySY16aFlUi2gr30HDbW1Wq4FdfTHpM8a02VjLTblgrTb1grX77iC
jv2NX79FUO64ktIyuODK3TBxBl/J5nrIefJOBtsWVWImwYhO7Zjsz7rLU0cXbGmJGHgm8OXIxmx4
ZJJcyRjJs2Zuawx5e6Zz5OyZ0OOxFwaBNLzpTW2QwT5O4bhj7fSZxyZv3HAx0/eWYfl7zIDg7rJS
bXhigfjuKy7tiAvn71M0mHveQV2M2j2LbESHGggUcvQvo27besmR8hjWTJ3LcMgR4G75uLi/G8VV
xNvxsSlRFG5Y0uPn7OMiWTDgx4JZifHOg8Lg9xVZJcja5pNxlRbbpdtx2GMriirU88EdeWMow/KY
kKvcxqTJgiqCWLIjeKRRMnnCBErMAOEzFWinmhKbtuQGJumRIWMjRtG8VDJEuPFJHJHNkYmOzb0q
p+buU8cmNuGWo2rFLwYSxySdgjjnCjJXExgPkcccUMm87iMb49jTp+x4sWTvcUKYOG+RJkNLJE5C
kSYysVkliqLIUiLIBqKUEBvtyss46tN+UofOmJx9ypc7IhMckcwsrrJHLhSQZCToLE315WYEmbNa
MxZEWUhlgKLHkyo2Jlmp0lcRSzGhJPFIbBcY+WjtWDlPifH8JWRURbVzpVrJDF912XHGRhSHbsfH
jn3BfkWyZeKnY8DEXoxgSotqAtSoKAQVFP3P5bV2zS1Hi2oDlk5LKZOwNDDcHGkSGbJWKCfBLYih
kWdsfLWTHyFVYMq8KlIe4mAQ5SqSjhZGBrLhjaaFNq22TYMmGbF3vJjg25L0ppukYuqqBQrne1Sy
pFGBl7nkDY4yubtz4gOS3bBlSQyYWSMyFv05Dv2xd3YqkCckLALtlZBil27ecqbJyMswZccqunyG
SNsmUxqRiQM8auKD82cAzYcRYYzrTvYykSUkhJE7l4JnV27pWQIRIUDZE4QY8JJdwiz5ZgkwtxM+
MWhzK2vCxUxNyebbX8zZOTk96y47TxPLFEsqABRlLC6tkvTgwvCuRjOLUCKngSYSQvFR8XfmYgIi
A7AKJ7RNlTQMkn5mP8d2+CPN7a3dlOb2MVjxpmGcFeoEgdmy4i4AtYGmQksAiuWyGeSOIB8iQwva
JZP8eTmKgmz5Z2ixeYWwaUAbecfKM8cEmfl4/nTbPjOU+Ps0K4c+6tHHhwbjlDHyJCZAr2ysyJ28
s0b40i5DSbdimHyTwKuXAxgIkKRiGrg0WCjccn8nKQXLc62XECx/j5UjbdtMUmDmYubgzwbnA5eK
GZW2sxGVpYyqjIP7nk4jY+Vj5K5GPHPHuu0TYVS7lPhQJu24SR7dsmRLj4u7GA/h4GVX/wBnjGLc
8d2BBFHmH2rBcrtmZA77nmQSPnbbkKMOSMfuEkBvj5KSbegppc7HCZuO5sfSIvUsSTIdtxjS7fkx
mXJzoJE3WEJDkwzHh6cLAMJcDFlDbXNGBPukDR7tCQmRBJp7Hh9t7yo1VZJZFwNsEpVQq1veIGmh
6How5MaZ7VPI+TJI4xYoYbmaURJGtKtWBG1Tfj5dW068F+IV1q9c+G9GgTqa3uBregWYjHzsrGP7
nmiRdwhZVzICryYhRI8SWm27LNLteQJU28QvkwtFMFpImcy4csQEaVBtGbkGDaI8SKJ4cac7xmTB
3d6PQGlpI2IETdrxEFAvbnR2GA/MGMBpCaVyzeNa3DF7UwZCkqsSC1qzM0GnN12sf4rVs/8As36Q
/ve05PZI+VjxhvkODjS7z8423J2o/KJ6j+Uk1jb9hTHzxdku6IDLuOQ1O7MS3J0ZiY2syNRjaipq
1Wog0VFKjMZYmiPDagrMY8Z4TsHyExTIizwKCxy8ZohoNbehve65OCuD8nVI875LHkJh/IMzGfb9
1xs5OL24AdCQKlz8OGpPkGGtSfIpjUm85z0+bkuTMTRloyV5OQkqMF2SKKFXnW6ZTRn96yo6T5Jk
Xf5PGiZu+52VSRTyth/HTLWZg4uPWBP4JQs3ZHh7hJEuzY7SJhYqGjzG5bQI2kgj821bAu5vt2yY
W3iszFXLx83Ckw5jyKsKU0hqNytZ8khKOaErVi500L40ubkRDaMqeNPj2EzQbZgwBY40OltbcHOp
cjHhrI3bDhaX5JE0OPnbtmmHadzef+N4xDbbgNS2UUKeGOHHX5fu0jbs98rYOdIJZJJEYQSY5MOF
s0UcW94cGJHtm6T5Ezfc+U/fn7VH2YQHL2fCikkigaORikJrNmMCEBhHI6NFu7A/uOKaOfjWyc2W
di4kbGw5MlxtuItPNFDDJu+KlS7ySk43jKaPaJTPBtWHjzfi48FCSTHIJaRitSbjBjpLubtIMfcJ
ZMTbsSFY4oVSbBic4+X+HjruONOowoZ4ZNmxcfJ31I0wsF5ppzK0blQQ0NiJ3jMOSKiyajmilSWJ
FTbDu3k7uciBqmwWiOLkiUTBVBL4k2LmQ5II0lZo1G4uWn3JJY+6F0MJgJgSZ58l4SJcqUFFxpJG
BoL2wpkeZfPPCMX/ABxIw7AxupvSregthIkITcZscrum4zph/E8+DL2ndZIvBu0cZSB+9cqRoqj5
r90aqLkILrASY40Whz0lYhc2bwjAVZMqVyzwteKd4pIYPLjbapekjYtzQC5BU0UkNG4pkEoZvs5W
zYy0Uu15uTJteI0AGLAY/kODFhZ6nmKU9tRuDQZTXWm5DdcppawUSLFLVKEmjyYPDLyrAyZsZlm8
sf8AnicrIBjiTtyWqH7YoHwwmJhYGOu1Y+Pm7j44/Hnq0GW0ZcKrXuWr7alCtURYRs3bUqqRIVKx
qGLIDSQuDiwRxtZQZZO1Y1ORJyAeeFppsLbdwgnwVGNg/G3eHHycnbkycmTJkKIz9tMBXd5XkdUk
EWO0yN3Pvbq+TAB+F1oCj0NiMrEjvzWipjdRUzKWyMXHeDacbIyMqDZYI4psnKxzMZBKq3ijInjg
SLzhUZgkTRx5UzORU2QkQjD5VSXaosWNa+4jInjFZe4AEwy5DJEqBmCVk5BibLOOuHtXhSPb9iGX
kS7BiiDB3vBOPumWufmQ49hLMUNrCOYq2biwTQnpBkQeJshYGk3OWWsXGSV4YsrHcZSyPBHEo3jJ
/HxaUWGJAZ51KQxx52Z48XKyTFepseDIH4WZimLdI2cEEZG24s5MW54teTb5WZN1x6ypdwy8OHBz
Jn2sYOFNuX4rY20Y0eaHwcrFkg3XIhCzbfm1+3PCVy8uAQ5UE9HSwNT4EEzybbkwUc3PxFSXAmkX
KzIjDm489S4uPKThTxOM2eNosiCerepIiyq22Yj1+25URfI3SAx7zDaPMxpa6178E2RjwDJ3xYmb
5Iwp/keTdd+xZqSXZ52SHc4UXdJYim6YLkEMCNHkSNWz8JV3CVMnM27bxKyqFXTPDmOI9rMRY2px
WazLH3LBSku5dYY0Bcol6I5RdckGNsWYTQae/sOG19emlq969696PDNEk8WViSY0no9jGvG9Q4WT
MYNkvX7EjyJseagx9qypWXZJRL8h2ZjjI3bWNtm55BG0YkJOdgYtPus09SwwyFNtdj4XiXtYAQll
GOBUUagBLMkRvNj3p+6Op+548RgHgEaqrRESRLIDFLFWXIssUf2THPgjTIzpJSLkhWkbDi8UVbP/
ALfksuOu9xyN2NJIxuaJvrciosyaMQZAkUtV9DRFFAaMVGEGnhtTKRQZqINd1qZr6WFcquaRSaTE
cqr4sBl8eTJhYI8mwzS+GbGUz5GMjwQ4qmSaHxTOAPT3bC/LxZUaGRudCsbKkx5Ns+RQSrHLHKKs
eHnTyxRiXdsGOpPkEYqXfcx6lzciWi9GSjJRei9F6767677UrXOP2NGrwmlbG7crKU0qzSHt8Y7b
0iuKK5gIzciscYWbjZCvDLtWUMnE151apvFIm5Y6pLsGRg4+FJv0Cxnf55D+57nkLnhJFIocqTnS
q1LevtNPjr5HRo3W5G27hJjvFP3KrX9BpI0rJ3DGxjN8lwEhXed1eULv+VKfjuW7x/GtrjEGNBjx
AADQCrxqcrddtxx+9ZuSN1z903Ufsc8hGwZNsDAXEbtmhebcJvx90zp2TE3jEli3bcsOZF23Gx2Q
BaT7jjJ2RAGjTr3KElhfL73iWzq3ZIGwsYVlYj45JIHkBCyMC+RCpXNRXwdwzXqLH3Wc4+1SWhxs
fGIbBCs/dUcB7CtqypPCMjdYI0bK3KR5NqznOLiYqrGVA/US7K0c5gr8qCU1Pgwy1DuGOsMm44+V
R2+CWJtlx8XK3+OJMXBbImkEw7mQMDjshjyWWockVHlXCTBl8l6L1lZAgjeeGy5LThZUeio7otyy
4aTd4TT71jqAJpaknk7o3P42O+Z3Rw5hfI80uQuCPG2KpjijggqbKgemnxZaGRMy42K6KP8AGo50
imk5F5FRN13YSVk5bytC12x5sdcXZUh3Gt1+P4ObjnFlxUlCyRxZjo4f7TL3PG1IbUppafccZHx5
EmreoZIcvaxI82QhinYSJKGU1NmWVXVKRgFkyChKZc0lNyrkT+KCSjCdR206hknz5sOLY9xy58jA
yzlx/JtqmmApSK607srRGTs7vsll8cUjuZIInaHsBCwqtbzGqTC1bfjPNkEBVJQFYx2sQgmPcchg
kWXtWc7bdiyYuF8c2bNXJz9ykwlllMsnkQlkNK1qZiB3MQlwJrBJ5T4XdqWRxUcaLC00JKm9SOFD
s2Q6KEVg80ufsOY5kjyNtwcLB3tosPIhbF3KZJ5+gBDBgxATmy8jEjmAw2ixnMudizjM8M0eHi5s
krdysOdTThFy8uopplY2YuwjGZg5LwYGQsUW2iZJk3jBMe5yDNEM88TPJKmM5ihyF7VyJxBHlbnC
NvkOTG8MmdLQaWdoY5U2vFcuHcKMjNVRJkTZDR4yJU+V+PIrSSpteRI+RmGRssQtNDtWz4cOPuJ/
aM0b8MqDtiBw8cLUoJTJikakMlDGSU7hnq6X5lgBDC87fhQIiY0uPRyYexIE7JMFb588k0yqbmtn
x+xMCSA5GRtQCAIi6zQxToMCbFEef2BHV1lhhmDbY0bStMrNNuGKuHLm5WRBuWXCm3vtyPYETbfE
9T7f2GObcsVMbdcTJE2BizL4twxhHuSXVlddCLibbsKdG2eWOp0zbRbnNDWPu+JNXJg+DjOFhzcY
LuCCkkSRTwdaPHenVHEm1YklNts0RMm7wIm7xBo8/FloCtwz48OPIyJsqQ1c1cGuwEWK1FkSIcGc
vivhwSltrliUT7jjld4iLbzuEM0TEmsaeMSYGbiZC65f+t0IKy8+4GmYVk/c0jEvELl1aSRVqWbw
rGSyk9pkAkTY8gjjtwe3CaJ4rW1I5ywQzpLsMDE/H5KbYssVLtWbGPxsi8Gz5MgTYo6XaMRaXAxE
rxRABEFdNPjxgdJcHKirAmnwsq0QT5FnPfFc4ck2dlzBrmu0mu0CneoHdJY3agVkrwRd7YhpYZo0
culRSqzXWVsiCHvy/wAZY1sHxIUlRTBDRzsOsjMxSJM1K3aPD7AL0sZNR4crnG29Yqtps/8As3//
AN6JiGde1uKGQoyShlDXoE1euunuAKZQaaIGjEBSoKyouw1YVau2lXmbrWOxeMq3e57mxXkjbZZJ
XERkZPMA8kH/ACMxU7WNz6XI18mwArAkG2gYio83KiEW67jGR8h3RTtXyNpnffNrSpPk2GtS/Kpz
X7zmypJO7nvouaLUXIovyLGi1d9Fqua5mgrGhEzUmOajXtGRFYAAUxS7ZlxFjM7IirTSMKZJXpo8
laxMo482dLBkSfHswwzz7jhQU++pTb3lGpN0zpKly5TTSxvSHtdT2N3Oa7chgyIi5GU4Kyd4L0gJ
oO60pDUq0QActR2RG9cwdonLxRXAGhIWps/Cgp9/w+9/kWW4V9+zJDsO4zUnx3DirHwMXHihgggG
g0mnhgE/yHbIqfe82djifIMx8L47iibedlkQLm52XjrYC9BjXMN3XEzXeN4pHlMXZPjySYmyxypi
Z0nhwsSPvljjDUUUKVtRsBLOshlDIrJLEGbc5pe+GEZ+biRwS5fMvmBvx5JHTHjRkSMNtsczZTEF
v89Ke+NYkAmlx4Um33DgcZu85csewTTLJsq4wimW0kncAFVRhxSJ45IjJ2kx4mRMr4IjTstHQNTY
UE1JuOMkRz4M5pMHFkX9px8PJ+SLFFFhZMrqk8cheMNXY8dRZNR5JqLJufICMmNnpNnBb8eCNMvE
WaQWNFiq5e35ePH5p4w7SuceVYYyshECkyO5JkLzMIchRJjAQw4+IyvjY8CssfmSaFVSaNYOxnMc
NqSI3yMjExBvO9RyVNkPK0aFmgxglbvHPHF8HzZsUS5Ubp8gyGikxvJ4ZY1arulQygGOQEI9R3Yd
88OQ5wc2XLnAjjypCmMSS6o7syoTNC5YMVjUsrP2AZncsUysozIC3ejV3C/lJOTdmW4BtUscS5Kb
ntmI+w5sJh3idY9vW9LQ5kLcDvSu7uWaMSwzCRZMOQPhgki9btF5osbZppBiY0GLC7WHeFKnlM4C
gd0+fL2CHec4z588sUXx+USbV8hdPwZG7B+oN3u8qSCPG73DKymKcl+wuMxJEdY2YwYTPUWOVBhB
oWjXJmLGCERLNIQc05UEsOe0G37Vk42bkrEAN9ijTMAogEBFAIFE0FFyYyyKrVJF48GBZspWLZOJ
BDP5IolhjnyAgycosyxm4UUD2Hc0ycKPDzWfDx8ELn40CQwb2ghyYnlnoq0WQcuTvyO5Z9p7BNlM
ZMp5HkXCWB5NzeA5eBHIWd2aNnSBcvP5rDJMVQAPIq1uKz+LG/IXbdngz5ZcLZ448qXFieHH33Kx
KlTN3fIjwJcVItuIKqFUgUvM5bI0i9j40uDlIU26Z6n2uWOPFULFOyzRx5DIHSKdymXj1m7lGmMa
TkIY2mkxshIhtke1x5GRnY6xcUkccqvgSRV+4NCVZXFT7ZiTDL+OZMUy5mJNUmAzASS47ruJQRyx
TK2LGXysKN1jx9wxhBvSB/8Aj5KPtiK35G4Y5gzsafguamwcSZpdiiWRod124x77GrRZONNTosiv
t0Ny2ZBSZsDEcwdDr724+lWU1Jt2O9NtfjrccWVnMNGJjTKwPSu6gQaYC2FuMuG2JkR5EVZJyJpZ
tvgxqyWDTHlpBkT4zbRly5WN7zzRwxjeJMvJVQ6zRFG72SnyTUuQ3lyFAeKVFoSCSifGkMZlfkob
nUTXryHGzIpVmj4LcJ4L8+XCa61z0NWo6Xrrpe1XJo3o8tbUNBzpXkjbD+QZEYmzcJ3wswy428LJ
+QVrsrx0wCB2eQrEaERqBLqI7UqXAjDKqWMw7aO4YcTJuWLFJuDHJmWBiZ4/G8UssdGWR6Cua8bV
4xdsKWaodpAqPEhQDtFe1qIrZ/8AZv8A/wC6DYyi68cEnISUGJ0vRNXq9XvTLRWh1dFkWWBkbtoC
rUBY2FAmlZrMB2xFb7RkumRgktAwUnLkET5GQ7t6mdirlY+ZiPjz9rCmFA6G9AE0htXk5d5rvNQS
XjJ5F+Rai1FjVjQuQFrxE14SaXHpcakxhUeMKXGWs3KjhfG3PFlGTHAizSd5w4GJLdoQuwLqlDmG
AqREIacIzOTUDgkyAAEtU0pQJjgnsUDIjNO3kTHkDLewk50+MXZMRFU41fjChHOtKlfatK92yWBS
JDftrZ5kikfe8THDfJFlcZHyPKiTY87JaD41BGRs22ApHHEmtqtU+VjYwyfkm2w1Jve6TmTB+RZd
J8Zxbw7ZgQtUEYc7jOcPF3b5hmtEgCpRrHchRIhaUhi7FMvCw5dw3M7FjGLc85NtPm/KG8uF27Z4
g24wt46ZkcT5EcIkkyJlzZ8LHxsvfBIZszIliXG3R4v2jImjOzxyBYMRA0kaBocWdX2iItHt2LFQ
lAXygzSZWNAw3uEmOXfM+GH48JEh27BgLqzFASL2oBHr/ET442YdiBlR1yYpMetmymhyJkXJwoco
xFZY3WN45K7WoPTYUZkfdceKNchM+SfasOdX2eDDnk3XGM0Ls0SyRyGSINX+SIw5QYRzmo5lNNPI
9K9g8LVLjEFvyYjnZ2VMIzHIFhY14ljUyM7QRDHjLl2mx1mEcYjWKDAlpVftMWU7RiJoV2zCRVEj
sg5qLVlZ0WGm4bixd5Gcx3keCFEosqVIEmTF3KLBxNn+Sblk7i214Uh3nHwttxVkRnZQwsUMUvNW
BpJCAHsGknD4+NM+MMfJJDWRFszKpoRQqz5ZhrwnKSTBkRo8vIgkeNXMsGOBiRhFvV6klvJG9161
nR90UexGR9tihbI7U7d+x48fdFpaHT7gVNj3kK8McwixI8ZboKu7VJdTbvV27aJuUVmbuAEzXrHF
6/Ix45sP9pmfBEGXuSRxIm6KY85wCJ5lgjiLKDMhqHxBpYTRMXjjmgVMlxJJjY/lbtUFVtRIAyZw
oxoSollEaruS40yZWPJBi7fj7rlLtWCEyd6z8GeaeXIkq9X5T5OPjK++IWxciPKCoFRFUNJktAA2
N3ZO3pC6okYyMoKJsh5WjhAoLS+SV485sfNyphk1t+x5uTmZuzZuGMXetwx4pciXIkmm7gsbRI2b
IqWtWJkfjzZuF3zHHcHFhUxY+EoLOqLPmqqy5U2Q0ECpRKoJ8oBRJJlZGYmSkmFs+4yY3x+eInep
I48XIz9ykSDBihpsrFRg69oJq4s7WHewMyOAWYEd3j7XVEkYVNIErxNkVLj+GOGMyR3sN3yRLlAA
k9NlxGVU2t89G2IwS/kbhhrDn4k/oMiOG20LINwlgaKeGdalggmB2x4XfMniCRxSrJEkTR5Gaghz
8eUy4kMgmhftjwY1dNwysU4+RDkLPg4k9fh5uPS7l4zFLFMtHTnUmNBKh2CNC8u/4zY++Y8lRZME
9SRRy0MAxN+VlwmLLxp6sRVhwHTnwHS+m9Ixy5JZoyckuXfGoReRXxmjIjc19ylwQdtyBHOHzcat
kzsSPNTdMZSk2LkyPtiMZNsmUtjTK2LBOiLuOdGd2zcnJTbgfNAQRJGrrPjlamQipRyBE0csHjqF
e6uRCi1MeYqWURvNkiU7JIHxhVtfbS2p4rVavb3OntodCOWnXTpRF/QFXIrHzpYGyXxNxjyMR4X7
aC1JH3UIRQioRVhr9nj5dtnV0WjNGKzQ84GEVr8MtX4DGk29Qc3aJZYFGLGBJekxMmSottApMaFK
6a2q2uz/AOzf/wD3aU9yccTWIYWQm1+Q0vQOg50RVqWrKwmw1NMpQgcAvQViArVtp7ZtqyFGP+ci
Nl5hla9/V9vkG2eSMMkkPI0Vtr0oNQNXq9YxJoROwMTUIjXiNCI0IzXiNLHSxiggoRrSRdxi27Kc
LtWQBLi5UQ3CEuQUuykFzZoyiQ3Fd1P39qrK1eOMUEWpFHbCe5Y2KOp7qaWwLsSGBFSKGVO6OVG8
Uoa4a5KrRbQ2ppLDyuT5ewflXLZKgtMwjjExrA22KeXD2bFjoQwihxTZWNAcr5Jt0Dy7zu2RLLt3
yDMkHxnCZ4tuwITc8MDFXy3w/BvW57W6wP3xXr2SRmdfuPNRmsRN8b3WDDz2yMbx7phpu2YkYgj3
4kQ/HmjXJydxw8Uzb3GKmyd1yWbbd3zpV+OxwPjYuM6FvEoyMjyGeO3ewX8iYiRY41ieOR3yp1Vt
2jUPvCuAu6TPDteOsmNjY8LKTccqDqalzMWFvyldfIAIZEKgdpUAUY2oqbF2dWiTGnhDjKnxHDTs
yFc3LFR7s3akolRWIrkwjxo8fJm3fChHybdTK6xMVw8s4kuPumFlkl0UFJA0BpZpIjDkgiPItUWQ
DSOCcmHyVFjsHZ5ex8OK6RGhiLdJIYA3fKwVUDOSchgq4/jjiEIZPxsRpcWJccQxi4pFrcc/wjO3
BnZmLVj4vnK4sQAjVXaIUmOfyN02HGz8XZ9v27bcpfk2xkZe24+9rN8ax8GpQIJWRWXtKGOS1I9R
kEf5AJ8vyLCJI6CG4taVkFLArlVV48eXGcWjYyYsTgxPdowyJEVrtAGUZY48cs0XjYBBZJF7kzZc
tY/j6ZSVhPNNj/KNrEDA81NC9dv3SW7UIYKQKlksEUGiTTAsVcqHCstiESTtZWcrMbC5jx32pszG
27azgDZ9lkyMvcc2bBgkyZ552Dmpc7GMkLkGWaKzdwozzKIpLoWDNjwWHaqqqiuQrImCrBGZXYhV
jmxjPkbTt+Q7bZk5+Ttnx1tseTfMGITTZGXmZeZDiRfv8jsmRvbRPvOehbKmlmjETLsimMLaVFUK
qy94miDSAhVyssAPI85SIKLACWUiPY9wjYbhj+Wtm2jcZY9gjkwsnc5IYcJb0DXazB/8kGZCxc0W
C1GJ3LvNMkSJA82Vc5WeARFLOYxZXlRK3GV8Zo4J8lokyIZMDPxJExM6Dxbm6zbhBimWNYkWmYAZ
r40pjMYRmvXdankUVNkqC2bA5OKiZE7pkvk5cojgitXb3ZC5GGsSoJaIrcsn8fGJJKC1Y0RnmT8v
FHxhTJNuNhDc1Lg4s7LjZ0DxbmhZJY5RwHQgES7YqsuZl4xhminXSbbMSVjHueNQlwpHkEoWFGoZ
kkRaLDyqf8vGV8TAyGEm6YYx8/FyKZQwl22A0X3DFSLcsWQixGltJsLFmqbZZUKZOfC0W8x9sGTF
PUuHjy0MbLhpc9oxHPDMNT6O54pmj/HjnEkDRuxY1zKB3SvymFCaNg4jYeBu7D3p4Is3du+bMERl
6Um45KKu7yCpd2R1wt1yMXJytylaVM7FeliLiDMeIrn47qvjmE+ErVnYywgKRUmYMiiiq3b2ljQp
mCq33tGkndt0jY2byPFzrmdeeg168FuR0tXKhbg9zpz0Jo1arV78PUgkUZWYNEhowNQhavAaEBrw
CoisVHLazTOa7mNXq5ruruFdxq5NSLOVTbIVKwxpw20vV65Ua2f/AGb/AP8AvVG1mkFn4lNjG1yt
gt6HD7crFTQ5EdF51Pjh0MZuIjXjtXZakQk+GUGDE749s2qSaVeyCJ5b8Htbi9tBo6B1yvjLvLl4
suJPVqL110UE1HgyMI9rvWDszuybTcy7QEDYuKA2CghXZ8mVW2LNFHZ88UNp3Co9lzyYthkqLZ8S
OhNt2Mcjc8SAneEWVt0y5WmyJpm5Cd/GTkQGseYML3rpRcivIpNxYGmapD2STLYwSBgCoBtVtCRW
UtOO+PGl70tzq/MkVJJanezeZFPdLZIwKVQrecA400jnZ+0tHbtGrZECNJ8i25JV3ndsmc7dv2TO
vxnCM8W3YEL8F+GIgNJD3x7pteyxVj5sDZF7110Q/cXvWRjpJG7LEuBlhcnN3OcZP7pO7bqcn8jb
NsGbD+zYleGK/a1SJdjDKyOTgZCypmhdrKyR9iGaLzSvhPiy5O5xKzbnPO7424ZJTbcXHoPECJI7
RZTrLdJG/MlLR5GUGjXkwxhKqm3KyxlVVzdTV2sr1ypo0cTYglcjIgcBHB2+ZzkYeNBBhZvaiOj0
Cyl5oo03De4qih/HhGPJkZOXhT4j18UGRPkbucbByoskOhUPRiZDHklTFOKhyQBHkrZG72mSNRDt
5kx22sdrxZeIYcqOWpcgRlfyZj+DkvS7c0Zh2ubuTGjCSRYru6XqINdFrIyY8aHcdzbJkJJrHxWc
xoqKByBK1j4+Rkvtu0ywzrKoresCHcIsX4wJs/DeBI90x3zcU/EJcWfMgOJkFVYEFDFKaRq880Zk
mcqrc791dhDSdimCBFZREBHjQxCwFd1ZEvjUbhIWWZmX8gClmhapiqUpBoaSSjEyI9+nkyNqz45Y
PkmNlZOCppDQalsaYXpFIPOrClAFWBopztRUU4/xrGgLGwl+5spvtyMnOjmnfIGH8ZM67ZvWXCuG
6pbcpWWJ/tqKUdnKRcf75GkUjtXtjYAwtJKGdAykESyBVUHJkFkEEX5c+X8eyDkZJyEr40MrFyro
Ez54582eeLHizc+TPO17ZEpBBrfceKGWOfGR9rxBlTQwxxoRYSNMglmVEjyP8WVnWoK85VQAzqgy
clIj3YeTt0mNLFkY+3bj+Ptix/ifL8mHHgnkfuV70hqM0jEDtSsnGLRwYYo9yo2XC+HPl2D5MuQ0
WOqUSqiEflPueO0eUdqy8ifEiEdPH5hkYOPZpmdUkyEfCkMUUc4aLdH8lYiySUpYLk7li41NvkMo
hyPOzCNyIccxkJJi48CnGWCOJm7adIJzkMYTGwaMkCt5yjNOBcnkNjxAF8kyzRYzu0mVnYrQZEeQ
vPR0SRX23sBy8/EGPn4mQvEyqwn2uBycrMwhDlY840lggmDbXJDUzMDDky9v/FkYtn44OThZNFci
JXGDmV+PuOHUW7QMysriaCGcNgTQ0c+eAx5OPJr7VJFHKs21/ZJhwpUa7jCF3aYNHmYk6S4GO47N
wx1G4RAI8cg05aGractdyw5cd0WDME+3kFcVlqbH7FItqrshgmi7oxH342fBkYuRC8EvBAfNHIht
guVXFMWQh2OFqg+OSynL+ObzEJtrzVcQLiVuMYE0Z7omP23NdKlkMjJH3HFspnQiWIkx+geDrp00
NGuVXtXK3twnroeWvTh5aW4L0TXOudc6vV6vpfQaDivwdeHqTWz/AOzf/wD3qBsZvuTjg5szAKgq
9C9AV2mragEgwMR2stRoSZomEbBQU8RoQwtRxsVS4gjGDjZuUYtqIVQsaXPpW9C1fKcRibkHuFjS
Ruxj2+eY7f8AF0C+HBgp8zAaocrIZ5MbPlC7W949rxVkWCBWLBQ+biJUm9YSh/ksHad13CRpsndT
I0Mqx/4IEE+3yHEOaYZodw8WbkrEEx53VwwfEyBebFIqGc0ztdiQA0jFJCKBuDzqVAwV+5IG7XBF
u9rd5NXFMFJmTuWFwjQzLFJ+XBX5kZLZiXbK7y0k9KAoYBquDS4rvSYyCvxVNJBcbbE0NLkBVl3n
HipNx3ecjbt7mMXxnCVoMDCxm47VarcEZAf9dbxtKZTZWNteCcHMV0qSWOOpMyJK/NZkAy8mLbfj
OfntkfFV24Y74r7mQhbeJO/dNgjIwmiIBsS7rEH3P74JJxU6SCDAhxCXyII1k3fE7Gydxyym05GR
Q2+GAR4qRqkjyvOY2dcbFkWTCC1PAyLkSuI4d3hxmh3/AAXfD8bgrDfyIBLlwwnCz8bOoLcjtBDL
VlOlqtRAImwA1P8AlwFMjHAlzsQ1Pl+MJuO6zsdrzZqxNvaBsvExpyYMfFA2WVWf48s0+B8VwkyM
z4rteXUOwQ40GS6YWWJBTRq47HjqPKsUyajzaabyxtizoyuLLY1m4MDGDGhSVUjjrymgXNDvrsvX
joJSIbbhusGGu47rPmyM1qxcRmoJ2gdIo0jEcb5EuBjLhwM7GpmkV2Wa2Xt4zEz9u3SXcvjuJ+Dh
5j9mPNtW7ytl4b4DlVdSGQxSmgQyFpCsfaY0jCs0F08xkMaMI2iZoEylJBDBmCVkB3VY442MyhpY
i9KrLI5DM3clA30zowyd+04j/G87HnnldQmXb8tDQ50lxUjqtLOzUJBQINXrusQb0erydrmQEAg0
xFovulychYpsLeEysg5CDPTtVd9WNNxLp2yQRzS7osKSKCaF6u6lAbxx3WEF3CiON8dHp3CKxbId
VWNUSXMklwNyTMys3IikwfPI0UjxFnkagVjTPzWycja9tjhAQASP4Ipp55GKgNtmGuNjyFgELKDN
308iY4yc5naOAlrCjKKwMzGlzd3wsPMfZsLc8xsbClhzu21TuY8n5FA80ONIcjARuSNSNStTNyjZ
I42mtT5IBmzwD4ZJSqKoknSMRY8U0G0RRLkQ7C+bk4uyyYmLNhwsp71fHXDZszwykwlBNmO6PkzQ
YOL5sild8cbnuE+Lj97BsZ4sd4BjZUEuP4hAZjHKwmifKihWNy9NKrRCLuE/kLqQtZ2UMfHZi7It
hjQmeeJExsfZRHmwZOFFjSHnWRtsMjNPlYghy8ec65G34uQRi52NR3ExMk0UnFc1kbdDM3k3LFbH
zoMg6e0+2Y0rSYudEseU8DPlQSqmO98hyRCxFSSRTUNrkhP7hmY4x83FyRU2DBMRFnwkZ6iRJI34
DzEuBG1TRzKpxNsmZMXcsehu08NDcMHJD4CNTNnQqu4QBlZWHPQ8LAMNw26TClRkmhMCW3D7QevB
hS44RmHe6/nQTwtC+qsyMxEqYylXwpQkmMXQOis1lepIwRlwFlRWydvxJAaIuAbGaYkxp3FIwoSM
98WGJJele59AnhvqefF0o8qvVxXWjpyt04Lczwc+A1yrpQ4Pf2vpfgvxGjw30NbP/s3/AP8Ad0jP
cnHAbPzkdrKEQ120EoIKdbURzRbn7VoObkBqhW7yYYlgyNrkVjt0oGLh5HmO3TStsrx4j5Txl/oL
USoqWeKGn3jEjc7jPIk8G9Z+PuGBNhS0g54+PB+JjYOcKj2h3kG3QBRHEiPlYsIbecAU++r4n3nc
5EP7vkGbGmEyxbSixDHhWdsw1jY24PINnntHsO3LHDt+LCSiE1vmQIMOQs745JMyqzcwcPMNTwA1
FMDV70RyYO1Ru0VK6uG6KgE2RAY2R+4IDYkGuQJNM4tObOIpXVj2lGYskSyRlLAlRRau+9IrGlRj
XmgjCZVzFKpOLjl6gw6igij9O3oYzIiS7hhKm7vhS5rWxoUEs9Nghq/GgCLEqxiwGxyRNt+6pE+F
+y+HOQAyZriTcNsQxYrTFhNOIQJEyMbEixjJJuuHBX7t5qih3DMlg2lEMmHDi4+NkJkwtGy00TlV
gKxjHKyiaOYnHNN50qfMxkTJ3HGkkZWJZFVdoz5sPLaRVO4bnhYS7nucm4jZs2Tb5sY/ayK1KDa5
FGWJabPwko7pjX/Mynr/AO1ko4U8lZkWxYywLuOSyfHsl2OIMdyAQAL5GVJI2PjRxJapYI5V20Pg
ZjbhgWVPyDunxqKV8zbMeXH/ACFhlWRHDxK1f5IqiygajyLUuVUUwLfkc2cFZY1mgw5jNDa0kacu
zmFpUuZpY4V3DfXjTIyZJnVJHaHBjiRCwHjnZo4BEuHgy58kGNBjqxoq1swJFBBvkokxnhlXctki
zot7/esbJ+Mxbm5ZQRv8efJnNiz40R7XRg0ZilqMqaR7Lj5CW8qWEEAcWNBgGzttWV8eKWKHvU1N
EJFmjmjryKiyxrIOyYVjGQ48hUtyAVw1SqGT9rGWdughxkiVVi+W48UWQhpbUp5ntau0EIvIoKsw
pi1F3FBWaivdT8kjUinblD9sUUuKI8HEwoV2XEx9wy5Ym8MzTSykA1kR98SoodDGHZIO+RGBSJ6g
AGNH2qL3pmAE0jSPFGI0kdpTjblkwZWRusMDfHcbDymycWKWKSZ2JNb5kuqbRheeRQLCwG757ySg
sDtW0PmMMpY5JciYNDGxqbIihqSeXIaGBYwbKMnLVF2mTDfEycaPBl2xst02aJVwt6aKLCXcCVup
GVGJYdrcpkp9rRtSGvJYTTL2PPU2WqUfPOYoUjrkKkyE79yhx0ggigbbfj/x/DhxdxxY8FpN2xgs
kMcskucIjCJIH/IMjY0IasyGF08bLTPjY6tlozwxR5EccQfIyNixkC4+btskOXBJDNns5bImYVHk
TKMaOWuxIorAA1vWT5JlFy3IbFiWCT4yNtu4RImTO076XqfboJWSXPxTBkxZA1ZVan22IuH3GGSH
Px5muCOGbBxp6WLOw6x8+GYggjlpJFFKJtqUGRcrHaDdTYLg5YaHMhUSxpIMshZtrwshb7rgVjbv
izAEMCFamwIwytuETRZ0ErAg8EuNBNRx8qJhnJ3SYWNkh9tyIBFlZkAGZjZFSYC9zy5mO0edjOQQ
RwyIsiTYjh8mCeCCTOmdSePHnaGTMUEcGNL43X/jzLErNhZTRHkadA9OpIniqMiHMkX8bMY3GQ9m
Vb1DGBUcZdsbDHae0A8IFuC+hq1Wo6jW3AeE89bUNDw216aHhNex1tXPQa+1e2l6vV9LjgvWz/7N
/wD/AHtI2s0i9rcSfqx0CwpCzkRWAitQipUNPCTSYgA8QBZOfaKXonXbHnMbxxufFGK7UpQFFz6t
qJAqSeGMS7rjRN+5PK0T7vOH26eVPwcKFfyttxXXcxMVm3zJG4bJPlxSYGTHLhbNNLUE2Ht2KN9x
5Ky83cI2Mu4sxgSeaLwRtHk4CLHJkOj4u/ShdmeZ4tkiheLCxolTFxozx77uMWFBkSeV8fkAfuda
5isLIdzPjEVFKQb10CkyEo8ZTIU0xF5pDIuIlw78meu69MwsZPukhD1iwd8X4sZf8WFWUCzLzlhB
BQgxRFVMxpojLS4wUCKUriYrNJtuKUQC30BngAl3/bomk+RvIXn37KeDZ90th7DgtUW340LbztML
1ly4eIIpBKlFbY8cIcC8YafJmGzzZUrr9ghBlmiRlUrapdz24LDnyoy4W8StDs2NFLFj48VR5uLL
TtFbLzEaFZ54Xg3qAKc/BZUkgnVolYPhI6BVFNjKy7lNFK+Ztz42NhbZPuAn2WWKXGwMXDeXNzJJ
YtnnywitDmRRhjseXuORAsO5ufwcg0NrxzS7ZgrSY+MtfatNPCgyfkGNGfFvW51ibFhY9CwGVBO7
+KXIfdQwZ48nMaKNIolaXHMeRHLTEKrTsw8KZUe25KwwTZkEohw0mi3P4nhd267XCkcWTXIh4Vau
546jyAQk1JLSzVHN2Vhy9jRSgyDcGDQyLKnmhDZO4xxVue8A1LM0jRRNIceFIQ0hagDeKSVKwtsm
ynhgCo5x4qXO2oHP3TbfxsWVMaVYklqLIfbc7b92myBuOHHuEe5bnuO1x7J8nzd1k8UYX5VLJjzY
q5Bi+2RSDGYprGNr0Yw1OggqJopoMYlVKg07i7MVDFSTYUWLVLjwy1HixRBIwRkZEUcjR3SO5qNA
oNZayxvtWLm/k7RlTzpv20JmQKea9e6lBorXeVJmNCVq8gNMwsGsilaa1HkJTcZDdkOTs809YmJ+
HifGdpyMKty3ZcMEA12gVIf8f4LNX4LqVV4j4nc42II1CgUZFvftWeY3giEazz2G3brFDOThLLt+
xQ5+bj7ZDgLm/I5ZIbdwIKjM2kZDYWJJixKrGt5yTDC5eRtu2oSg5bAZAkmjxYoII8vPC0EkyCiK
itIqUcuEz7nhYmRjwTTQtEI8h4MSDHiz55sDNzstMlY4op5cUSxCt1i/D3GdQVQ0rWDSVLOFEuY8
rRQAECiwWkkSebJjgGYdvjz22rZYMbGlz22ltx3KTMXc8x0b8jk8kjDDh8lSYEMqsJ8dcjJiUS5M
SJYtHjRReXsyQ6QN45JXwhEVmqVEeQK3dFgMQNuxiIMNIyvagJvRNZuQsEDuXdBYY8Rnnxo0x4sf
ZYciKOBoCdLa2uJsCJ182ZhDGzIMlbcEmJjSBcHLxqTcmjqKaKYW4Z8SCdPBl4aw7jC9AgjXI2/G
nqXasqER7nk4xjzMPKV8BCZI8iNl3BlMuJgZy/gZ2EYd3IqKeGYGnx4JAMGaClzZohDPFMDVtHRH
B29UcZeVAY8nHnBwcVqk2+QMMvOxU/KxMh/xWDDJyomjyoJDoaYAh0DM8Ic7ysa5voRF2XKjEUnB
jsMiCCWy42SkieOeMw5SS0y3EsdZ8R7dzjLxwZS9sjh3gjpVJOHjADnfS/CeE111Nchpfh6Ua6V1
06Uete/BbW3HbgvzvwjU1ar6DW1Xq1X050atps/+zf8A/wB7WT7k4hW2qZo1hsqY9fj8kgJL44Wl
iN5lpowobtrsuVQ1tbKs0rJj4g5D1CQKeeGMTbtiwkbq0rLLvU7ft2XIke0YcIM22QN+8QSsc3dX
kOJnyxRbXiw0W2kPJu0EDx7jPM4feJ3sq1HBtyRS5k8+LI0xx0gy3eTa8l3j2XHZxtWCrJFHGtgP
UmyMeAbvucWZLlFfLDyQdfZlqElXXOxgcjGBEcxU9/LuFy1zMoI7nUiXsjxmXIhmlS5ktTT8hIxo
NzWW1RTlHBuH/TG1wwuHH2gfcSHpYnoKVCODSrIx2t0XKUADjHAzKqruOE7T74EXI3HNlYYe+5eM
nxuCSTH2XBgCY8EZ0jlaOrl4poBLHu+0YiVjZOO09NI0YWYkps+RJFmSjbVgnxciPNm8eFt7LFkQ
bjnZU0W25zzQ7PhxyCOFT1EUayS9geOWGSOVI3JwnlRJomdMnbzEe21LI8Rwtw85lyoIBk747oMf
dc8YODhY6TbXjR1HkkKmJHG0qSGDboIDHEIozHhY6bnvGOsG5/G5xFnUK3HOGDi5e8blkviCYQ5E
iuTMXrFyI8WXafkpyMmraZOXFjKmLJlSNGAHV4GV0cNAhrxt2xxMJfsjHKpMQ32ndIkgy82B4937
ciDatm/zZ2K+DLFKkgK08AJEkkdRzhqWWvJekksVlIoPUefOka5IAy9xJp3ZzjwGUxxBRKLpjRu9
SRolbdhpGscKpW5bvkySL8eyp6bYDEZPDimPblmxcWXOWfGxccwvj7IZP+XC+agy6M8uzpt3zc5D
Pt+PkVvsePtWNCXlb7ZAymNopTSPcFiVSIENEUMWU6UCrqejqTX3WR2NIxJMiRjMz5Su1Hyzd33O
gSQaZykLk7tmeb49ntCkyvJDmYkuHkqeYANLpOVVI2WYnsWr3qZ54jBJ5YhdaisxlHaR902T5JXw
n3Fs7cFyZagusPyWaJ3BtTsKJfJawjBcmslzGcexCm9GjYVkT9gx4SKmmEY22bb3ZNl29J8X47Pu
OXt23/tC5O74MOPfvbtGqrarAVmbXFltDteNjpHAJDDF4xPkRpWTmljFiknpUmQFG0PhTybphQx5
GDlzyvsm1xzz5W3YrxtvkiQLnPPkS4smRNBtaM2Y8cmVDHMp37FEmLgu0+Cj0ZAKnzUShDNkGOFI
wFpmVBF25FbY2LHnRwQZGauJJCcbOyoVztxEk+5ZUzP5HljVLxxYRZduxFWKRoAXlBMsTeQP5HbM
LxHCljgw8oQsk0TmRvyXUSYCeTzyJhxmOPJaF4Y5HCoQLUxo1veV5JVFyxsNkxDSYi5SbZ+Pj4uV
Iss/GRWTtsMzCfPw6gzMbIrpwOiyLLtkRHl3HDWDc8SYddBrNBFMGxcvFqLc47ghl1eKKQS7RGSv
7jiNBu8bjsx8hZsCdAmfkQD/AIWck21TIBuOZiGHcMSY8rEBhLgwOOzOxlj3COkdZFOntLgwuf8A
7DGqHPglpgrLLg4kqnapVBys+ExzYGUY0kWhmSRVFkQTCrUeVZ0hkyvQxEYtkSNLLwI7RvJZl2yU
R5Iw2AaNHLRSY65UuUxO2ySRjGLYkuM0Rx40YKpBwsUkiwGnOh6Xvre/GeuvvRN66110Pp8uEV00
5a3o21Gl+G/CNfetnH+Tf/8A3tU+5enEK2FrzCA3EBoQ2rxWDzY6NJIt0gNNjKT4sRaPjNdjGtmx
mM24T+ab0ZJI4lk3TEjP7iJWTJ3aZ2wMyZItkxYwzbZEx3HGu247h5XfIKpjYlS/jLCu5v45cjPd
HinQZJwHlbIwA8cuQuRJDuDS423zl12nxmDb8aBRhYgYcq5+rzqSeCIZPyDEhrJ+QZkomynYySt3
XJMfKEV3UTQjkKsOcGXIlXglBiK0Vlq0opvIR4pGZIuwNNJCDI1d7GrtQY0ZCK8jV5mrFf8A4wz0
7Y85FMWXBJTLcBSSMdDQgRKsluwkopWtoggOMOnEbANNCifvWF35G8zmpX3nJXH2vIkih2iISnBw
y/oRSMpa5O/7bJPJPt8GBkRuHRxcbNPGuRLNAifJNw27KbCGdHDmRvEmyRK2bAjKI4i9OjIfap1a
8eTG5y8KLIEO0xqDgwgDHZRkzLjCexLSrQdy0W2ZU7Y2HgYjT7ljLCTn5QkhjZsctaDb0SoA/b3e
Kml5pMoPyOIkRZLQTtlxRRQ77tsz/JspZji4xmli+PdyZnxfLqPA3HFrJxfDkHyY82PIZMf3mzWM
mNhrEzEACRWEoBWXD5lpUryvZ86OJUy8J44poTQcWsk4aGfGJhWSoISq/I9ozZhhbRM+CJXjcgGi
oNPBzEzxmPIVgslLJQkozADJzu6rs1RYEkwhiEae+NjyykNJK+JtawoMORc14cWGSKbKhl/E32UT
pveNSnJtj73PFjY2HlTOkUgeHGj8cmBjE5bKyygZeNixY+1z43yvZ5FlxoN1GXtmLt8LZEcs6lZA
6NG0UxFRvcRvYMoelsHBaBxMASFIcdrzNMginMrZCLIs0SlokiijkaSNnbvJMoMblhMvfH+Vj40W
07quRkJMkifLLtnJQNA8+lbjMLQskeOAWNj35UJ8McrRtFmxkwuAJpO6oSAJM1sU7ZuMmSnxjJXI
3EILbpJEmfJlold0mTSgxQAMwji7lzJe98dgYR1vWROEEERYyyiMY2GmSuPsj488G27vuE+xxzYk
G5ZEMGGQysW7QHDBACAAKHUiuyYOInpUSOsnOVVeWXJaKBYwzKgly4w02HhSbcndt8mNfIg23ZcW
CDK7tulzN5lkg7XRo2cRS5CQpFkSkTZH3RsrxzxCaDbWMeVLKuOxefIqDHjSlFiBTsEURmaPZvxl
i2bY8eebI2nDkiZj25DMIopESi7SzQRt3pt0WPFhRNM0mQlTY4CkSZQMMxdVa0ODHiTJnR3yIWVk
hFxmwiGRMvJI2zIVoZmkeTHSVImnjkwc5pWl7Vq9Zk4ggkcyOgsII2mmxcYpF+2Zca4kZQH05dtg
kaOXPxjBkxZAtodZ8PHnBwszGKbm8ZinhmHBPjw5Ctg5OMY9zCsrK4twTbfiTB9szccx7vNAY8rE
ylydsjlqNtwgePKxMqpdphJDZ+LWPukctJIkgqSJJVfA7R58zHEWbjSjrrNjQzKcXKx6TPK0jo6k
KwlxYJUO2yEy5Gcjxz4EwxzL3JmoZMqZRitjSShoZEq3EOsKlISbnh26VBIytjS7Rk/kQh2uIwTk
YeLkEwqoy4kiO5iBajjQJjQeVlUIKP0XtQo8R0tbX3tRNX1Iq1W9Ea+3LQcN9fbgOhrnwEabP/s3
/wD97VDZpBZuEV8eW+XdBXcKANSzsSSLnyGiua1DFzCY8QAqkYEWOHZphCvBapJoo6O54IJ3FnEe
TuOQhw8qVE2yPsaLCR5NxxFafffE/wC8PIkUWbkp+RLIvlQRxYecYpZQ0Ef7YsRZmj8W9zR/teTO
g+PYoEe2RKTjwMqRxxi/D19SWaKFcn5BBGcne82WpMhnLS00lM16Y816DlDrEzdjLRFY+XJAYM7Y
ZxHgfEp6fZdjsuwQvX8byab4znk/xzc6/je8kfxre6PxrfqPxrfxR+N7+Kb4/vt5Pj+9BBaPDq2m
PlvGY+2QBx5VHKNWcAWpDc7Kw8eryRxh8zFjiffoYnl3TcJxJib1PUGxRWh2vHjcxRFvVidiMuEz
w7lsuZkRxSDFEm5xgY0G4zJJtVo9ziOJizb3NHHl5OTkrtee2JkQzKFUt3ACeO1qtQNqaKKSm8+O
s2RuBCZuPEo3aKs3fUljjjyJ2O0EpHHh7biSbjLIyY5zEAxsCDIyp8yXH24sLPHSjtV5Z3lxRkiU
x3rtW24Yq5W3tIEGXvJyMDGl2GbGk+PYhg2Nji7iSLMCajSQNNi4uQs/xvbpQkkcEHlyNwMEEOPH
RIocheiitTIDUmNA1TbYGqWCaI3IqDOlhrDk88OVDFi1mCJq2GR3Eyr4wt13KDOXLnxZ8MI6yAin
UENjWInkiKZCsGyVQTZbSlAWbHxQDcih0UmNk/JyTgwxYqZOQmCYdwjimf8AHjyMrJhjOLlz4Y3D
EmzRj4TQwiJlWMykOewLLIanUToNniChsnAdooZ4xteHiZeBvu2rLkmDPhPxrHgbLyMYZSsHDIUq
KUikkvSNX2mnTvTKH3Y3eqzsCXZiUiAMjfbCoZll7qjIWMDkTQAFGp8WF8nFn2/BfYCj4fyCGKTb
ENKeQapsoIIkaWbuVaZ0WM5CBO8Mv48biSJnmjBCSse2U9kIzNuMYgjxcb47gY64nyHJy8fDyY5n
ZYO9bqtJPZUlsveWqSF3nx3eN1PKeZUEKGZpJBGiYORlw7bg50OSc3Nlm+P9j7ZvvYmA0jNS86BF
cqsKJAoNRJrJyvAoyFWPK3ClhklZUADzKtbdJiZOXu+2xS5GLmzSvg7amdkJtmGscG4tirnZhyDb
yIY/GX3C9LIJFgbsXDcT5qRiPTe4nxNx3JEkZQLKKFMwVZo5JosCGP8Ab/jmyY7VLiR4wl3CWRZp
o4gy5WXI5gwoI4pJKxcaOGKwlaRWKMYcaOXy58+Pix4yft6oXkSMpEZajiBRVyIqZlLnsyCWliZC
t54vJHFluAYPMPEjnyzwFZFcb1mB3AuWaw2XEJbxZbtjRWxpQBJpy9IgGp9viahkZeEIMiKdeF44
3E+1RMzTbliVBueLNXtrJFFKrbdLCybm8TpLHIOB4o5BNs0ZIl3LCrH3fGmMuLjzgwZuIsO6I1Nh
YmQHxc3FqDc3BTIicUamxYZg2LlREZrxmOaKUaPHHIJNuCn8vKx6hyIZxRsayMWCdZdsk7jLuSAP
gSgwJFKYsmx7lothmjFgmmjiBMaUVC0ORyCfDxxkZ2LgZ00E2HmR5EaoLGMUYxW5PBDFuHZlYeEr
NDtilYzfQ+lbgOvvVtPfh9+AV1o0foeXCeC+tqNdOIV1rprs/wDs3/8A97gf7l4VUk/HsEolr0kV
hI9Oe49xBXyNQBFdhNJjM9RwQpTSKBanmijps7FWp90eILl7lO0mHmTSxbYYWjxMSJnycMFt8iBk
zZ5YFyNwnF/IibZNPUERiqFdvCqA64y57wLt+SBBtcaxrtcIEcSRIIowatofobVkbhiY9ZXyCRqn
ypJS0lM96Z+bPei1L9xljKkdH5RUNIx/jXq0dGNqKsK51Y12k0EauxqCS0oyKC5dBc2h+bQOYK8m
UKwc/OaKcDxpjY7Uu2wuMjapYhYitskJZUImWK4W7UkRYpEIq22fFgXI3KCEru7u3n3RzLtuXLJj
bHixpBgY8DWF/oCbCXdsLDqXfSzb7Bv2QMbGvkrDEgTOaOSbF3bLjVcSLM3vGihiIZ8dQytsMwkw
0ellZY0RHq8a00sCA5+Cgyt3xYjFv+NCXbddyMXx9bw423YNT7hihwc3IGJg4yMqRhc7PhgMO0TZ
YfCGGYpYlEwV3tJ5CakEjEZcqp+TGyh+0blitjZvxzCxMybC2DAxm6VuG3Q5yQ7nuGIY9xx52UvY
vYy7hDEBjyyy9ikBSNDwXogMCiqWEcoysIJU2PjR4a5LgjPxzh480GRMFjjGVJnhsGzYu448Mi7s
ezGxnkeeKXvoiitMgNT43bXkkYxIzNjYwQALTSWEcTBMDF8z2C1C64eJi4xysieBPz8jb4Xj/Dxh
BhY+NDibSZPxZrglmDGQtQd1PcwpLNQ71ORGk0QLxq3iniydnxxkbZuO24EjOjjI+IoZN1mxo8uO
QSLJGUqCYgrIpMkjCPGyQ0Mq3YXNSylWUgmRwqk9yyRf44Y2iUWaJZRYOpq9Gtxi7li20CTYpycj
JxIcmHcMb8LNV6nZymKkwJUJHKxCo5sS1oI5e7ttUYszWA5tJNZpZNsxZJSoy227DbCxN+3MzsWp
mLU0ctomLI7MGBvU0ZlEOPJG8kgVUQzuzLGkmPlzQbOc/wAzbszS4oSxzZcZMzcMrLZph3d5pWBI
NNKq0WSVYi5HSsrJiCvkSztFjqlEhBkZIRcKPAm25sX9slwppHj2XZYIosmOLFV97jMSSGVSrFHz
8eCLI3GacpiTyLixRSQ500kRxARGBz5CvkOMk2Nt7NPhKKFdKyWkePGw58rB2T4/eV8P8CpsyWZW
kZVgTziaUxUmK80giUKTyABqbISFCrzVFFHECwrIkvK8tqglp8iMrHjs7SYqSpkRlI0y1KmIinmX
ICQRFFZ8YSCOdFkIadPCMqKZZUHKCJppceMQQQZGX4sbKyWiOvL1GCsMjb0kpsrKwqhyIZ14DXSs
jDxsijg5mOw3RomjmhlGrokgfbWjZM+aJop4Zhwz7bhThNrzcR4t5ljYPhZ6SbdMhTPy4i37dmrL
g5MFJlyxtHmxtQZW0YK1Sbfjs3/Px6jzY3CkMK9psDHkq2djLFnwSHqKuamw8eesnZw75ODLDXim
IkTLWmJB7o6/xUwXtVQxZ2K8eLkvjTZkXjbDmBEe9yY4m+UolP8AIsuYZE5lOx44zxgYbJGkaxro
fo/bQ1fg965jQ1aueh15jS1cq9zR1I0Nc+Dlwc/QOnLW2nvbQVs/+zf/AP3uCM3BFjpaliZjtOyy
SmDGsojSMO1OaCEkRgUEvSw3PZBDU26KxO9IHGXkzU+RlgzTffjb4cIYGbGZM3fYVmm3jMAOZmzv
Jgt2RRSNDGMRYYkQpjLuEkK4GUhg2uJVG1wLUGPFApRCdBwDU2A3f5Q8Uku77jKRuOaDj/IdyhMP
zGUCP5hETD8m22WotywZaEsRoEGratLElT71t8NTfKEs+6ZGTTyG7SXouaL0Xom9GudAGiAlXqUW
jtVqtSC0QoNaoZVDKuG4lTCB/wCKD5McH8iEV+WlHLr8t6/KejkOa8715XouxrGeRTlE0CaSV0MW
e1sjtMm3D/kKtpFjBqLFFSoEolnrEwmyoxtuGI4ceGAcF6vV/TFFgKn3rb4C+/ZU8kWDvO5DE+PY
0Jw4YYl3vOix8XN3TLycyPFnnHxqHb4HmeKOPK2yPMmlVXXcJfFuUskmRWwCRc58HOAVNyNNFn1L
HuCx5CdjFJMh4Pj5ZcLb8PGTywxyDMjZ3gkyGhTb4FQ40pypsXEECbluCY2BjYkPjUl+wNjY6ySz
RCNu0KGyEpmDhgWCCxbJmr5FABNtsoxc2J0YmVFoEEZOHBkplfG5MbITd5tvK/JJ8tMLAix0ljLJ
GbLVtfcHlQFOt2PlYolhlBYFdi7YOQhZ2VBHkKK61HkZEKYmXNl5ZgiZJtngx8veBFjyw5AYUwpl
rJgdJ8THWNUWjimw/GgqGGbNkjjSJEQyy7op/B2yZWy94hTsg3qaJNtzcjMpMTHpdvyYcqZWNd16
7b0qMxcOtKPsWpXtGudgAPjyVBjMibpsql9lnwMBmYWzPisWTNvGPj4bwzB1kjIMMtjG/cQbUCCL
FayH7zJN2q8hkaGJaFlDkmpZMlAmYDSzRMA4Wr1MvfHNiZOQdl8m3GJmki+U4TY+atLV+1Swu0YA
YAiNAq2kKxx3IHNzYR3JbGlyo9s23Iil2jAy33qSVYU3LJXJzGBoKQD3ErGFJIUoXNLyqSUAANkO
WWJMhpzFsm4ZMrLueN5Nt2TBx4N/hx9vim3EzGNXYHtspa5lTzqXcLAoqSeOBvMirlZ9qEUkxSMI
GlC1hZGLNmbxtuLljDkyw2NhDMyU2/DRDmfhyZeZ+dC8awmTKgiXI3HImpUkdo43jPcXeZRiR40T
ZRmlD5JN6eRUrMyI3TCykG4Ov3gc8qXsRI8ifB+M7ZOKTHfaDl5nlUyRd3alAokcnfkPDEsKc2p5
ESnzFFT5iNUeXMGXPNHKZzLJ5JI2MjvP4RBjwyqeyMCS6sQwlxI3HZPjF5Y54JMnOwXXe4wizoyN
kxTpl5LSy+XwTS4qyJs+IQ2PO8eW+BjyQ2AHoe/oEAiba43cZuVjtHPDLxsiPU20QvI025Ypg3PF
mPK2jAMsm2gFMzKxmhyYMheF443EuzRkpPumLUe5YOWJNtC0j5GOWyklBx1kBWTHpMt0SOaOQaSR
Ry0cAo/5OVCYsvHlq1XqbGhnU4uVCy56oysrjQ2I3bY3B8+RGTlSmvOaE9iNxcKzFjrGEMmbi/jv
wfH8qHu334/l7RNGqVLkLQmcjFxMnKO34Eu2ov2jg56cqtoOI8uLprejwnXloeC2ltBXvw3o+vfQ
UTxnUVs/+3f/AP3dbGlBUvG10x5XOHsWblSZXwrLwlxNlxMaooxRK9rECiwJAFc6RGYpikVkZckb
S4STTpi7eivuWDitLuUksfnMtTRXCqHkUM0jpcvh5E0a4E3jj2uAIu2RLUcUcSeNL+mODeZjDguS
W4ASKWSga/IkFR5+VHUPyDcoi/yTcZKbd8qSmy2ejNXlNLKVPlLKSb8zVjXjJrwtX47EriOax9vL
NmYPbIy2eY/aBVqIoMAmgq9HU8cUZkdl7Hw8KWaHKN5NASAxudqQtOUrHKkOJIwSZRh4iOYYUhXg
v6s08MCZHyDb4Uk3vdJWOx7pkJFsG3xpFDFCk+TDAMj5BD34jbxnM2xYBXd58LEhwm74PCopg4MP
5Me7FSKyZPJl7rifjxwZf482ZumPg4sXyqOWeXfsTsY7vnrhbPhXSGGFZNyw4I2G45Yx9pgRLOax
40cZa4uNFmCbLqLavDkNnMtJmoCMrGI7oZZsOZYAyly4UUfD3PjRuDFLSxyCnkIG5qcnB8gA2qdM
nAspoctMjIhx4js0O4zY+Lj4yaBe0g62oi1XtXc17g0VFKpBnWbuzY5GSLDeVQbEHy4oakkda7Z5
BCMeJ5d9eOmyCwzcRsxk2kJgxzFGsDRFTICkQuI2VA7F6xoo74xjZCotJH4I8rccOWDM2x8vIx9m
3WeQp2ZKWWof8cbyLaT/ACAqwVVuHLrQfIIXKYEZJNNIj0MHCE2QRBmmXyz3Rxu2zd67TNBhRB0k
Td/jq7nk7lt0OBBFMGEkV6jkZTHKWoMQIZb13Uyq9SxDGfEm8wkeTyElQkatHkYyqI8aQu8cseRA
JNM0zQvty5hm2rNGXj/KsCbIxVbkpvRHctlVACQqKWYRqHBIjBC3qVrCR+yHHUxQCSw+NSRtibs0
Ue3DoVFLNHcutGglyBankC1d8h/siXLyH7dt3F5oly4FG0/GcGHHysyDbE33c03KowiGVSytGAJJ
xAcbHuEHIm1ZM0VnmlmaKBUpnVBkZDRx7PLhTY2RixYEm2ZM8o2LDhTE3GJUhfdgkeb2zxxzxLAZ
u6RoMYskCkqqoBDzkgJTzR5rxZkyyxYiQtNlqonzrVLJNNSYxV7EmV+xDIrYWzfksm3ZkeFBu264
k0CxSzy4+OkJCG2Q5jhxoBEsmZEpmzAakyHIZlrHlV2yUlLR+ZgfLaSJIhFI6vIY2mWXtAliVzmI
tNnqKfcDT5pcO0lS5MjmQu5iaZVleRme9wSTjGXzJBJEmJucBYbjLLD6B9C3AedPt4Dx5s8TxTRT
DjyMLGyKOFm4xXc2iMU8M40IBEu1xMwys7EMGXj5A4pduw5qTB3DGcbrGGEUUxeMK3fkohGK5mSQ
GKaZBHnQseulhUmJBI/gzICuf2tHJHIKZEcPt5VhlZMTR5EMo03LZIsxptk3GNm2zOWmx5Uoqa7T
QW5MMgHY12UqcaeOeB07Gi2yeVG2+SMw4mGgURmot2mx8KTDSd02nM8uJsu3Y48vapYk8Nr178B1
POr0eC+pq2ntwe5096NW0todfbhNc9OfB7X0531FHhOvvxcq2f8A2b8jtvyYWQ9JtTml2yBQuPix
1nyGR9pEUkkeNjoMJxFlb8nk2xJIrswALci/OlU0hiUnc8PHSXM76m3CEAyLLUq9wxkVEdpWpABX
Z3VgYEzSRY0SFY41Pp+2vTi3eLyYcoKvwqBRQkEWN6vXcaV6NXoAmuxqw8aSRWwmuMI0uAaXAoYJ
sMCottvWPtirW646rjSYeX3zJJGUal50sRkafHeKgOVDjtVuKK6bZkG8tAVbk3XYYC5XGJrwBREz
Iox0mGBhvjGr+scnGDZG8rG02Vm5Yxdoy8mHF2bGhnVEWndYxk79gQv+4btnPBsMkj4+FBjVEe1i
B27vtWI7YDRGsHbp8uLcMXJ22bC3SPOgyD2RQEvLvMYk24yG+duMufh4nyFWxoYECNkxRGXcXMqQ
5OSYY4MRhNJPUkZiVsjvxo9zjD4eA+WkkTQ40CiSJ1CU8cN77cpilgVfy3JaRibQ9vkUiRjEuNO8
gnyJo5fPES2TCyTLZ/ikxOODQNT54V8fBIcHgIq2lxXOuddppb0OddKtUxEaukzQtJJBjuDUZMeD
SjmpPfYUyq6iNcMQzxS1JtuTNBJtWfj7kyyYsns63XFS9SumOFy2Zo5qws6FI0zu58jd8gNB+CMp
/wAiOHE3TLxXxR55VKAm5HcCQqmghBe9CFiJYZmMuPIgdlByAzxx79NgyM0OfjpLaRbROxNbjgR5
K7MI8WOCWOeHftn/AHWXK2mLbcSKVakiDhXZDFLegQKVhSECpiHgxZJFkdD5SwavyCI1yySsndTM
145ASSKzAQsm4Zhl+L5ZSHcVklwe14nUkUjXoWphesl5I3hTuHeteaMVJlwoFyI5ZCfLPe1FhU2f
NhtPvkuYBlwGvNGadDLkRwRpRsavUkoWizTMvbGqI2W5xoHjOBCsO2/C8Zcf8/GwK+Q7zDnu0jqQ
lwpZS3c0bBmdCe15UQTZpJWJ5CqhQZCzDNjTM3SLFyoF/Kgkxsd8sY21YUESzz7Zk5WfNlCSNZTn
zxtQDeKDtkVvGFcgr3G5cs0TGIYxUTTZSqcjOp5pZaWHn2UFAp5iI4Tjz4uyCGRMSL8WtxyIGGNH
D2OWZgKJFZMkBGRmGiEZZJFRZMmWRMbElmLYiohw71HAInaJA0sqMRkJGz5LuxeVq7XNeOuxRRMY
JaOi8dZERV1vSSG03jagharEVtGM7Ph5EUuVlfHsHLGTm5eyyYm54WatqtVtLV09X30YBg23+Oo8
6aExTRTLxuiSLLtMRYzbliGHc8WYixGk+2QSt5dww6x87GyB78LRxPUmzr5GytwxWjnxJalawWB7
tfvHcwjkjWhlyxVFLHKNGRXD4EVy2dBUedBIQbi16lwIHb/nY5izseSvbnXOmVHp8HDev2zb6O2Y
FSbPgvUuzMY5tqOQMLb5UG47Lm5a/H8nGONkx4rrl5MX5UEe6Z74/wAdijMa42OHdnPOvc+gdOde
2ltPejoeG1daFuLrparVar6GhR6cR4L8qPWuWht6B4bVbW+tqtps/wDs3df/ALeLCzpqi+O7vIIf
iWWwHxnbY63raNrjjxEfHlh3FezIzJoqbe5ZsHK26J8GOxgaeNDdWU5p713Fi0+bkuIjNLK+NI1S
wyor5LhvOZCpkFImRJUG0SvUWDjxVYDjFctDw2q3oOgdd9wJMfK4BYUGF2c2PPgFQwPLUG0yPSbI
RSbK1Y21CGP9uBobaKG3gUMAUYII6fIw40bccmpJ5pFhkSBNwwJngyYu88wY2rqcr9PtQo6X15Vy
45V7NvkN3odaPX41jXxBEBRhUAgs2PAIV9XJzsbGaff4SkWTu2TkLsEjUduwiQAoJtWTu2Dj1k7/
AJTuu17rmvi7Ji49KiJqguzLet327JlycDEnwpNvngmxvk2VhxY+Pg4+Edzl7cPaWP5llkSaBopf
jciChkYGNIs+5ZRTborCd45GOPKywymni3KA5O6ZqtBsWVOY4YIqSGNBdWqWZMepnYRLjNPIREuP
DYJH/kMiyqsWaJKh8NXooblVSpIYSDho1b1iHGyfjizNK2bkQxnOzssYs0ONGmZA9B1ar1er1ejY
ASo1G9doNBQK510rIXIYxyZquss5ymAakbsGbEGqDGLmaUSB+yloXajaraRY4hy+0du5JC2R8jdE
rFnLLyNRyGOTcZPNSTSxVFl3EWReoMntaWdMkuszDGfMyzlbbDHUSd84UFu24Ci4oWoBfITamJoK
15ceM1kY7pW7xrI21Zr7Y5GNl47Fp0ilmV4I7zZ2As67RFLiSbZkw5GJuu1R7rBlfHYNpxkyYw7K
kihihimBAag11zJgseLjsZZ5QkmS/axdmMEfldW8C9zsGSzGpR3x4HaV2jJiTPMihd7aM7j5QKGX
ElHcUFfnTvTDNloY+cR+BMa/bkoYEFCCKBsIXNXoqMib8HGr9vxjR22KvwGWjDmrXmzY6O4yClm/
JZexERWynVEVLsFkyyGwNyw8mD5NnwZWQ3KnBcMrGOMMoK9mPjoJ5JcmOMF5ZykSoGkVBlyTRRbD
nRvHm4sIbbRmtLs+2pLNk40TQDeCIvy/ynLC2TnTAR98zwoIYWuoVriVrtCg75hGJEmCpPngBsmW
Urj3IS1WACB5mzTJizblMhwNtgE+Psm34kWTlJH4R45XxmjDe7SqqyZUkpTCTubwx0UklKxQKHWN
3edFD5qLT55JaaR6Ic147ntAoyRpSyGSkmZ5pZDHPuCfjiXOXx/lz2E07nGjkQR7XjiKfbkir8CA
j9jkrJ27Mx6wpFQ4mdhJNi7/AI8keYuPuNT4WPtuWd43F32aPLyht+UHmtXL1TxOiusm29gjy58d
YpY5l4DwT4OLkUcDMxim6PEYp4ZhpkbdjTG+44Ygz8efj98nbMTIaSDccYvkY/emTOqrFh5BbGyU
Mburf4Zn78qEJmQtrzqSCKQthzRMMuWIRzwzDpUkEUpbGyYmTOIZWRxRr21ZEajgxBoJs7GyN62r
Eyaytx3DIGzbfirD537fRHD7ehbgPB7a8+A9eeh1N9eVc+C2g1sNemp1PF720Pp7R/szc7Ehyn34
rWXvWZHRz82asli9eCMhdh/Nw4FSPAkxJJFTIjgy5d63HdotuwxNB+DOyw7WipLtuOKjxkioYmRP
SYaRqcYAT4HlDbBGxg2XFiK4WMtLHGlHn9La3Fn4aZcG47Jl4ZItwKOb8MELSybVsePDAsUSiwoa
S5WPCH3nDFPuWezSDLmhdceJlhncQ7XnSBNqxRIsUSUw7hvezwRyZeMRSmxiPc2V10A0PCeIcznm
0B5tS9a99gMUO2nJgFRo01RwpH6NqtTMqibOxYoZN/kRp4d5nXH2IqMPAxsNAABU+XjY65XyEKC+
9bmMf43HeHFxoKvwjqxNbzl5S5Wxd8kkh7RuMfkwtqV/wd6YLt+3OEyTnKi5lpsnGZY54PBCoywK
XNtT5EMojmgSpt3ghoS5m5LjDGxo/wAm5WZaM4pZAallWVUhjqLP/FkyIsOdJYjDUcsaFFUhzCzP
jBaEbgn8kUsk9ZQnK5e8TQxzZU+UNnORDKkR7xkSChmxikONkARTIy52REWy5JTFluwLKUDVHCEJ
jFEFavyq7aKWWmZo5vHMaYIlRTyAy48/mKrXi7U7btj2SPt+y9Xo1PkZ8aRZByIZsXzodmxsfbcX
IeXInLI3jBoxA0+KLiSaGocwGocq5iz3XHSUds+ZjSYmFkrPHLNHEInSRbAH3JtWQwA/MIqJ+8Ek
UzWqWs/aosmp9ky0l2zByMOo1iqVFepcYw0rhlzYsacbWsuBlbPn42bFuWOmTjZHxoYSiePy/ZKC
GjMctK9TwySSY0HZUn3FIQ0hAZkQl2x6cOaQsReiQKjYRZdwKbKyCmapaJcZpWi2+AUsES0BajyA
FEUb3K1PEzLE8sS/kSU2RKVgUIjUoq1dpJYWJXlIgJhCRyoj5TxqsaCpJiTFGEBHOCdJGJU1MhtG
OxIVNnlVBJkM5SLn3BQn5GQ08+RjZc88GVDPg5Kz4uDl+LDw4MeDcJjt+VJuk2REZI0dwxoOvbLD
kzSwYQgHZFCGZspVmQwRLMQZCrTZyR0+TPOVxuYQWNgHyABBiRzQbV+SMzOTKlzEwJpcfbtlxMeL
eo/xsjIz8iOGSDwzxCKIvmrUzOxilmv2TF7KpeYCpM1Ep85nrumehFSoKYqoEoZsmd8dsmNkgjMa
YsVttj8UcSQOWXB2ncsiptjngZdqMkLbLkRCPKndYd4MLDcMaVQsaVElz+Z5TPgZEuZBjyQZe2YI
wFypdtzxvuBnJmYG1thyZy7fNHtrSrlYm5q9cj9K6K6y7f20mZkY9RSpKnHenjSQT7TCT5dyw6g3
LGmN7isjAxsgdm4YQx9yx5jrbUVLBBOJ9pYGQ5MAxtxfsSXDyhJtwAaPJxmGcjiNea5bqY5Y5RoQ
CJsKGRfHm44XcI6RlkDIj0dvVG/KyoWiyIZh04ToyIxkwIjKZMuNhkw3BDDj66nht9B76W0saNzV
uI+mdb6X0HMGuuh0IoV7nrw9ePrVq2j/AGb34JdzWTEiMmY7F8qRyrTVCrymKbLxIWnzZDDtsskc
3x7FkG34aYcfLQGr1c0CaDsKLsRfQeqPobVLFHMmT8Tw5Wb4bk9x+HZ14/hs1R/EkWpviUbrL8Oz
BT/Fd2Wj8b3YHH+JblIds+P4uDo+Xix026RMFz9wyVngyWcNgY9EdsSYma9QbRjRtHjQRnhyYI5Z
N/2042Vk45jbHP8AkyWvJoNLUfRvUA7pt2cBr86HWgLttOExw4sONKtb0elTbztsDTb9JMGx93zU
wdljjkh2/Ehkq1SOka5fyHCxzLuW45zpsWRNUG04OP6I6tJZd43eOCXZll3PPOy4dt/eTDkhATG3
idJ44sezRx2rIUhRa8Xc6We4VhTThKD5cjRYwiIMtd8woSzCvPMKGVIKGY1DMoZgpcvnHnAH9xZ6
kGHKTjxChg96OuUxXHye6VXjj7pnAgy8pofjsIU/FU/Li2yKNJNsiKS40qul1d4GxJBnMtLNHIPx
17YNxIWKUMXyoo6TLhelcNRtRCGuy1DlRNBuc8jKv5l6SSQuckXjLFi0nnWP/C2PEghikaW/LQmp
5RFFj7nNiybdmHPOOE3LHh+O4cGZ8w7hW17bmZsLF4ZQFajHUmIjV/yYahzVNJlrcSsGxsuWGWbP
jy3wJomhd+YejKpLqGE2KyHvaJhuUiUN0Q0jiUMBcqQQOTG1L9xEuYHRRU8UbxvhxyQrhfg5Px7e
YJ8rIAkim+Mfjh8hDOsgkpkZCktwr0rVKpuwCRwwoqPKIqlzmYR9wR5UUSbjEtGXNnp8eeIJIHTu
p2UriLdrEAMAGyIVp8uC354ps+Q0cuc0ciY15ZDVzXOudXNBmoSOKE8lLlzA/mSUMwUJ4GqaIs0O
XEgSZHqSfvLQzxtHmi8sjMmPiMilPtCXB8aCXKArteQhQtNLzy8TJjxdozJoakiOXDtOw50lbJgK
mRlRo2Od8zMOE5WRnujciY2K9qKMiNZPyIIqMmVOUwy1PjpEC8ENZG5KtNNk5BTCQEKBRKrSeWYx
5CHO3z8cw4Yibb9r+PZTvgbZLg7m6KEi+QyYxXLk3HNyJ5MvMUTtM2VJJLAiwYyRrRf7MzMjYy55
NM80lCEXCgAyotSu8ccIGRDhl4SkKB8cDJkSHLjmxdgl3LIy/jy44bBWIriK1bVl434e7TQy5LxE
MmQprcdwjkldO4xOUqfLPfivmu+Q2Vkw/G48uDFj/Ey5cuBY1ZIJp8rKyoVG5bnuj4G7wzrHgYkc
2Tt6uceabHMORFMPpXRHWbbwKXMyMaopo504ba8qnwcWcfgZmMV3R4zFkQTCp8LGyB+Pn4hh3KF6
HMcRAIytrxcin2zMw1xt1yIax87GyRPg401SYE8QXNnjKfh5FGTLxxHlQScDxxyhtvVKE2bBUWbj
y11D4UDFVzoGjz8eRhzFHgvo8UbmXHmu2RPAEnjahb0DqfX530Og4Te9HhGl71ehw9aPLhvp1o0e
A866ehe2lq9tAatV62f/AGbtFkjdIsPJnYbNH2wbTFG3jSyxoultRYcY9S3EPUHqmpMzGiqXeIUb
8zcJmfEznWZdtRY5u5hi7jMkOywJUWLjxNpbjZA1btE+bj5u3yxh0EMkj9730HDarVarcBrBUNlb
sw8goUo5npAvfNiII8bjJArJ3jFgD77Pk0m27lmtj7IiUMbHBq1HkMrdsHFXJ37LlC4G5bjJh/H4
YCkccacFtLVarVagObqCu84uAtbDO2IZvke2xxbruCbjKmNI0W7JHFlotgnOpYg0KG5xJEERyIFE
2UzqkcTATJYTrfzJX5EQoTwGlkjavtNALftooKMYrsNdjig8ygTyChlEU24xR0NzmlcvDHUZfIEB
hhUSqQJQa7q7rgNYMsTUcPuleNGEm3q1GTIxykyyKq9xaKGjBCaMM0ZizRRIIU3DECg3c0k8KM+W
skqCGVUnSKsnxtF4bR3XtAkaIRQCop1UQZvkHKjRrdp7BcYSiHDIg2LMx0xdy3eGCH8ZHSDFx8VN
72zcJ87K+NNFhLkgEMjUUqXGjevFPDUWbYxZINRzc0meMw5+cSJGd5xGmQN6wwse6YUheGCcPhzB
jBPIcDFaNTajTdGBNBQByppPBOWSi6VIscq5G2jHyNk31DuTMDWR8aiZ8qVvylkN5IyCktJLzVwa
+255sxWspsYFZMyULtzOY8WCMWtUqiSOATRo78jJCtflstNkzNTMxoKxpYJmpcKc0NvahhRihjYq
148MVbFpYIWVvxwb4tEYhrsxDXgwzRwYrfgg02FMKfGkUWYUWa8bRhoPDa3OSJJB4JYTDmKxMiKJ
MoCnd5KRVAMyCocPIyazcDOjnXPWSptufNyV2DLwsfbmifC32eXHmbdM/LSaZDFE0cUMchIAucgS
SLjRQriJjdhLRRtNnKtZO5qKvlZBjw41IW1M6pTZFzuOHJjY+ytP+LArPuD7a+e+L8fxseLaM/Gi
k3XIx5KyZ5xjE8sCRQwgeFikq1Bh9pmk5S5aipMqR6KM5CBauBSP5JJ/Kk8kMeYxlZmfCmyXk2TK
aMfH4YI8LCxjDkpAcjAzV26XcN3TMiycjNYxqz05AUmiwA3TcBMY0IpCgDtFOcPDUT4GzNMj4F4o
JpdsD/izxvm5EMksOPmxy98ce3bTLgBoMfKME2RjvDkxTVPjR5CurwyR5boAysvHb1nRHE+2L3DM
y8YxTxTJwHhZVdZdqh7vJueIINzxZqFTYuPPRw8zHMe5LcWZbcc+LBkLPspBiytzw2x9yxZy6K4k
22OwfcMMpkYGYfBlRFc5Qysri2r48LqMTIgIzZI6inilEuPDJX4k8LLmurKysOC2vIiXFhlQ488S
jMeIRZMM1Ec/fgNWq1W9K1uI0dBRq2p5aHrpy4hRHHyo2q/oe2vWrW0PAOetqty0NbR/szgPzbDg
HBf6TnXtwDS/Fb1SQBJn4kQO9wESz7tKwwsmQ+Hb4gc9uwR7pOY9lQtHhY0YFh6+TjpkxbntMGO8
qdjhqU34eXo7Svdmbo/dINFtTnltEJmzwLLVtXnhjqbeMYO27bvlKuz5Oa8Gw4cNKious2ZjY4yv
kHarSbjuQx/j8jHH27Cxl4xxQ/qzoy+JuatJFtjZEkLYqRzZ2MjZTutZ582esgqDFzpaTaMt6y0W
HK21UeKHaXDbjKdviXcM6ST9yzA0m65jn933CsDcMDMwotrzfycnaMNcfHTJORNtEccONlTyzSYG
fEsW4CZ3kkQrkwtQkjJfKiFdmRMFxIUEnbKyYdgPMtCaYUMwrSZqEpmCy5IpclTSzC/cK7rhXID9
kglwzGy5KgHIlc+WYVHkLdljlBimx2i3BTQyIXDSR1M+M65DJEkbiFMbJUPMSqiSTIMYRaGNmIgM
zmJYwqAiPRjYZMvlmw0VkSVjUEYRJceCUJuYWLCyfzNyZFNZaRbni4vw6LFzPk2PhYL4z5EsaTI1
Fb0+PG4bEkjK5csRizAwiyxcTh6haJ6KwOs2F4xD54Djbgkhxc6GedxYluftzta1NUkoQZWSj0ZY
QRPFRyMe6ZsZabFjJ2/5AMfclyIpBk/HsedtyjzkyVaWOpIw1LKylJhRyUUNnFyuJkzVHiQRC1d1
iSBUmVClPnMaeaV6sTSY8r0uA5oYkC0ExVrzKtLM7Mykq3cDer11pBciyjITtY1audRp3uygrYqQ
7ihPKK8yNRgglp8KQV43Sky5UqLKikq1TQJJUzz45VTTOiiOKfIrHwYoBG1zlSRBNr+J+aLb8CLb
M6RkWPyS4EZlkmZGVFkSFBGjFYkdgkbKQAKlnZcmfcQBLuTyH8fJmMWLFFVgA0qLUyZEcOzT4+Qc
9YjuWfDJPDs+x5yw7XtuOJM6NFgy/k2JiY2TuMubuG1qRChLquPLI/7c6KU8ju8cdTZqgPPLLQS9
WADyhRip+XFjSy4dLBEy4W37hLHhfHlC7XtmOmZMmLinOz8QYz7vkNFJJIQkcqkKtg6qHWR1gR0U
teiwA3LcTK0UfcVsglkLNt2IpkTZWzI/jea2GuUV87AMsmPPhtBkQ5UcmDJE8WdHIWx5sMqcXPSd
WhU2Wo8ySEf45o5MaTGEBIqDIjnBq/rniZFYZG2oznMysQxTwzL6POpsHFnr8DMgP7k8ZjmhlWXH
glD4OTCybl2FHWRTxuiOMnZMeSmO6YDxb5jMVdXWfEx51XCyscrnqQMeCajNlwGPJgkrpqwDB8GB
qC58FR50bD/FJT4a+R58mAx5MElWPoy4WO4WDMgCZppJYno171bgtV+HqTp11GvXS1GraHQ6c9CK
9jXTW9/QOhq2hr36Veve9CjVvQtVq56ng2j/AG5v/d09uEajgvpf6kejdRT5USs28qS824zKdryH
cJtOOUzcc1FLvGWj7Q07RbZhRBERB9HvGAczGy8LKUkFSDavIa76767676767676767q7q7q7q2I
j8nOa5Bruq9C5r4htJB0ly8WEzb3jd4z99ygmyz5dR7Ht0dIiRrq7pGuRveLEJ9x3HJeLaMvIrG2
fEhKqqg+lfgmy8aBY9zmnZcPLyBvW3LC23JnxxncvIZM0KWk3HJGJ8dQVDi4mKNy3nHx43ysyWXC
wsjNk2rEh22GbOwVG7Za5+XNBjkQ4DGhskQH7FjV8WwYYcLeYE/bttiTJwd9w4MPFxMVc3F3HaMX
bkTFfMxpPj0W35e5GOPEw8Kdc3J2Zkiw4Mn8rNxfwo4sifNl7ZsYLmY7UksbVYGu0GjEprwAG06g
ZGStJuFimcrBMkUMgGkkU1KrPHJGI4MDInlS5poopQYjA6zLM0rXd2dKikWV5vJjkRSys7NKYImM
uWbIGIruYlZZXeNPKs0aqyDsQmhat0m8UFu5sCENUcN5V5aAkF81cdpt6yJh8bKHC3PNh2/Hk23H
y1h2zGw03ZMls3I2nKxcdMkVdGDRqwkwVq+VAYs9TUeWDUWVQyb1A+KVKxO8THGyfKjjMyZIkwM0
ZAZhZgakdUWY9w/HhIGGsqT4HjKYbgz4aSOrIgnhgnrF3efAzos/FnXI2zDmrPxYosiaGSAsEkEv
mRsbb2kMUSRqTYXFpMuGOpNwY080klBWakwpmpcOFKiWG8vcAzMTajQrHW7lvuyI7i2uOt3c3Z18
sVrGhasNOd/vyU7ZNAL12BK7nQiVWDYcUtSY8sRiypUpJklG4L9sQmnqDb40KiweWOMeWadljWNt
q3CCXF3d4s+RZMi6SXEsgSVIZsmhHHRv24+eGWXPRRkbsopp8qelwbmONUB7QGmsMJY89twSWHMz
XlODssc0dYnx0ZOS20Q4+PDJFJFlTS/veXnTT18iyA0m344leOMRKhWOjMtLkmJHy6aSR6Cfd9qi
bMjiBy5Z5I2Eckm1zM7bLmyLj/HsOGDEbHjiyNz8edPKxkycyWYTN4U/PU0mR5STaoe00FiLkgUW
osBW57mZCiE0o7BLMXO3YHdUGzy5MUBz8UzR4ucnlytsMU0U6Vk4Xe2Nnd7T48U6FMvBPbBmVDmc
5tvKMsrxvFe+PmxzGfDu3ek7LkSwOCrD1zVjxFVYZG2I7vmZWIYsiGZeHlwvHG4n2uJ38m4Ygg3P
FmDxQTq+3SROm4yQtFLHKuh4PcgMMjbMaUvgZ2G+NvMpqDMx50ZEcNtaqVy9xxVE+35R8OZCUzOa
lWGrRxvTbeFo5GXAyZkElSYsUjdubjtHnwswIItRo631ZVanwYSQmdERnRqUZHo3ueenvxctLanQ
8HOrVe2pr3rnrbTlbUjU8R4r1auXpW19+E2raP8AZm/93QDgGntxWoepbS1W9EcVqd0jU7hhqW3F
nMcm5ZMcm2NOTBteLR3LFxj+Tn5CxbXny0NsgZhHGOMepbi3XbxlR5uKA/4zmmhkWu1q7TVmqzVY
1Y1Y1z4BFIawVlhfM/TagDUcMjnZfi0srPl4OGsu+47PHPvuVIuyZEoj2nb41RERdSQBPueJCcje
shgmPmZrwbI14cbHgX0raWq1MQom3jEQLJu2cMbZceMxxRxBedbv3IoxN4zF2TapYM3c9gycrMx5
Y8HEG7ymDLyZEZp4DJiRw5CwYW04wORtwMHx6fNkTa8bFjkiTzQ4UGMgZpS5IGFkyQDIypsoYmbJ
irmZKZ0e3ZMGPFubpnQYmdiY2JveXkbjDg7XBJi/I+yA40ONk4u5NjwzY3x7zwPs0W2Zm4YcuXhb
fsT4+du+3wR4m14+RPkZuLPt8eLntkH8kKRLEwBDApemgjNCORCJ8hKTcApiz1NLlKaWVWruFBwD
mwsxw8cxxv4ZEycNXQQ5WJPuE0mcbSXSCXuhRIhIzSMBcjtpInVYE8KSSBho1bjP5ZseBlbG+zGh
BKihXabLI5kl8Jixcafvy9pyMh48zHjgTc03HdJcLGkrc8RtzxMD4pLjZu/QYm3z4zZE0SzIxKg1
JixSU2JNGRlSxGLNRqjyr1jZXhylyA7grURm8n48xkfGzAfJnwHI3D/kzMkqYr+VBHIkkmR5mfIl
Q4zl2MyOwjLBo1mjxsubBmX5HiPDkTtkTEXrJxCpLAnH6FlUTZyLUuTLJQR3KYMrUuNjxV51UNK7
Vc1jUGuZY+xtOlYwspPNDcSp2MdMVbJSGxyI+x6FJaOK9TJ3w0BWMndIX7siZbS1c1AxkqXChZnS
WFmkWePCyGxyXRFOS8pjxVvEFLNGFVg1m8a1+OY1z5CIMDHQQg9tSZkSVPugWpNxllb8fJmMWHDH
QXk0qLUEcuWdwMuLkZbxvt2xiNWfZp87NTY5Io8HBx4IZ5RhZOVvOP4ROeybKcRxQyI2bIJsvbbm
Zsjm2RT5BJ+9qCAUSAI3E0uacrEyEw8nLb8afGk+KwYO41M8McM+XjGBd1z8eH/kZEixiJch27mN
jmFRhqO5oeyANJGArWaSPskJokAbluZkKR9xVFRZZS5wNuLHC2jJmXGxzFgk2MuMrs0w7XxcvbZ8
TcYsk1k4sWQiT5GEysrLkYMUzSySII55cenjxs2PJwsnHMeZjZdLLk4lPHjZsZllxAIXiEOVHKbf
Q24iARkbakjnKysQxZEEycXPTlrPh4+RRwczGpNyeIpJi5aybYUdc/Ix2imjnUg6ng9ptvx5TLtk
kMsGfNAYNwRykkcgn23GlDQ7hiCPc0kCwQPXmyojHkQyHnwTYePNT4+VFX5rxV3YuSrYUsb/AJks
TI6SD0WVWqTCRizZkIGSlKysK58R0NHoaGh0I4ba+/QnW1EVava3H720OnTS3DbUadNbVagCOC3F
tH+zN/7o1t6wq2g4uXHah09AyxLTbjjhv3iSRnG4zIu1StRhw4Xn3fCiYbjmzsuBuU6x7PjAR40E
env6d/XNb3shyBJBkRPHmyxsM7AehJsxqRtuLY0e2SNNhbH425MSa51aghNYWE0j/hZjSQYLkbfs
2PuEa/DsOjsvx/FMOXtUEkT77mtHs07rHgYcaKqoOCXLxoRNuJ7PycmYY+yzuuPteJCw5cdvQny8
fHiO55OQY9tyMisfDgxtL1ekYWlEbqwRRvMuSm9DPzsts/EXElyWybeZmqxvhpOlYW0l2xcXExo2
S1GeXKEUUeLDmn7mjKr+qvz8ZGfmEZy8oeOaLM7ZmOXkpt+HEI8hFiXFzGxqyslEbE3HJZs6VctN
sycZMXfZky49sscLfSiv+DjmPNWLbdxn2g52PDskOBn71gS5MO0bUzVu2L+3Db5MrMR52hZZ4WFg
QUvRx0oeZKXLkWo88GlygajnUj8li3epoEGmjndovIpaLECuk7h4p0PksYojKyRpGLmuvBmS+GHm
7hVFF1mVBYVJlSsY7yVLPEglUSHHepJpJEBDLjuuLLmbvM2PsKk7buWVHiRTbLhZNHBx9vgydv3h
8jN2mbChjylaleN6aNWqTBjYmPLhKZ9jFmA1Hlc13KeF8fMZqE6NQa6zYsWRCrtE0cisJD3qclys
kMpZWZa8Boo1IGFOqMrynGliyUmBanlC1LEsjPlhA0jyGPFmlK4cMYiaO86swN6uavQNQG0fdaja
RGupvV6Q9sXdQNjIPIhq1ybJHV6lXyRWrHQPJIbmojepF7XqFeyIG0mULtblUS+NJ7+S6uJsRlpD
QgR6gPiZpJHrHgMbL2sAtpA4aSFRGVSNciTcIoVl3Yu3/MnMeBGCsaqC6KFM05yMh4Z8uDETB2EX
bcoMnIyY9tyZcXA+PYkKL4cHLyciCKGPNnhTJy5zJk5DStiYESx9nadzn8GGiyscc9kRkY12kkLa
iVURh8hcXKOOy4cMgx/iGRPiLG+HPlJ+RNHjS7XmZU2QxDKwvp291ZLK75OWMepp5JmvQlyCEilk
eFFWnn73vW57l3UqEkBYlkleU7fg3ONtuSVxp3MkmRKw0dUdZMeSJZIYJai3CbEdWVgyI6vBPhNj
5MWSrKrCXb3jdZZIZMbMjnrN2qHJqLMy9vcQRyVHmK9HGmxGAxc1VlyMYqysv07AEZO3xyscjLwq
x8zHyE4ba20NOiSLJtkRpDn4zLnY2RU22JcZOViiGeOca2q3BNhY8pkwMvGEMgjlXMyEK5WOxkxY
JQdreKoczLjdpcLIPjyYguci0rK45asAwkwoWq2dj0uZBIHw4ZSz5WMizxmrg1b0XjjcNjWCGdET
KikPUaGrehbT24etWocd+VddDy0t6B4TR4vauVDXlwivejeto/2Zv/d+iB4BpfUcN+C+sk8UVPvG
ClPuUzxiPdslTs/lcYG3Yytue34tDdnyCsW75QGzoyxYeNFQsB/QZcPHmp9i2qWn+H7DJT/Ctsqb
4XAoPxSIA/DstqPw3dhT/D95Wn+K7wof49uyh9ozkoQZsRGVmquMVfb2adTFgLNjYuxMUTBw0CgK
OCSRIxJucfbJm5M7Q4eXO8GzY6tHFHGPStVqto8iRrkbxjRoTuO4R420wRNHFHGvBap8vHx6n3Rw
J8WfMptvwoJZ8wOMnblkMuNI8b7eBM7wRK+RK52XNE0EWakEfbPmKPCFYRMuZH3rDmWLGKYNt+GW
lkixGO5SjK/aJZJxBiYtZTGZgyCSSTlPlz5U0GNLEtlLwh6kgjWjkR9kOTLBXe8rpvLRx5T/AJk+
PnYskW5E5W4Kihctgm9ZGyY+SmJixYWRvmzZOYNr2tRj7sr4GRjTZE0K5NqWWNqCghoVauxlZZp0
pM0VHmA0mQK84IE16LoamyrDyKS8cMlPEI44shg5tXaKAr7TRYCt2n7mgQs6QgRYoJCKWp2VqyhI
0wxpCqRSwlcbIdhAyqwVVWEOrRiNDCrrC2RA2YmS+XFuuCYsLcIt03gwoRn7Wu6jF+Lw7fJvD9mX
Fh7h+MuSlwUanhRw+AL/APLhMefUeYLJk0uZ9sUv+aaRVyMOUTIYo0yF7O6dyMhogC06qhynWdpU
CmWbJaHDjiqfEFx5ex50Qs7OYcSWSlgx4aadjTGsbqH+6eO1E13V3UGCxd1LJYyjuBak5tI9gHos
KjksZ1s0A7pZG53oG9RHnMnZJjr2xk3NA2OWtmiQu8hqw75ecVRR97ubtkDne1AlTPi3rFmKPItx
C4YIL0qxxiaTEu2fFEJt2LEtmzUmCppIkQdyClZ5DJk9s+dgRRYmxPMo3ITNmeJsyDC+K4WPFiYM
OHnShFjwt+kWHct2O45OI7NjM6lSRJUx/GlSRpnkfuPyHMCyYwUgLegtAVLN2LDLjTI2JkYzJteV
lxYux4uNEm44irumPBuGXjydplSORIpGwpIG8EpIvK3MykRxyBWyMXyTTYEkcWNiBzL2Y7RL3AAv
R6bnuQalW9KFjV3aRtvwCTg7XmO88a/iXA4sjEjnHlbHPjytuONlw5SVkYPc2NndzVNBFOuVt+RD
WHu4LPHHMku3ZWC2PnYucoXJwqKQZYiyiafHeMw5SyH6ggEZO2wzHy5+CMfOx8hPRFHR8TGkLYOT
Gz5zLT42LNGk2Vi1DkxTejPiwZAfEzMU+fFdiMuMRbilguPOr7eQU8sLDKdlVMeQ+XJhqPKglq2t
6kjSUDEWMGfJgLHFyQ0ORAq5qikdJBqOmh4GijcDG7CJclGXIjbg50eE6e2h15Cr6Hg50dBpzr26
HXlx25UdbURw20twXvpz4BfTaP8AZm/9317erfiuADLEFk3FFT9yyJ1MOdKy7Z2vFhY0DPmYiF97
u5O5ZEcOzzNS4EBcAAcI+st6BUET7Qk00OPHAtqyMdJ483Hz8bJ27E3O0byIsrzOmTPnRPl7bDkb
XH8eidkRI14CQok3DHQybnkyBI8mRodqlIhxMeAelfS9X0ycyDHL7tkZQTaMzJrHxYcdaLJ3Wq2j
OqCfco4zNlzzCLFmnKR7fgBPkOTlyw7LkSOUhw5c/dMvPptuy1O54aPiIe1nUq2FlPjTYqGUxZay
x2iYNCzUEKtk4gepmeBnkysqsOCKQIsaL5GB/NyRTsrNkblHDS4WTksmK0bvAqFgBQgjkE8EqUuP
Jd0ZKNlpXuAymmDAJLIBFu2aEjcCQbhj9kERk3blbb4PyNx3TZMWTHimwlwIp8hGh3JDDBueIyxy
zSRrlQsSVYFKMIoGZaXKkWo8xTQyb0Mt+0TAnzfb5Lh5UdY5bAyc+/l5KlmWNMifvfDDFXVUjjSy
ywJbIZsWEjtSGYyHxDuhVaVeZx4wMcRESRKwESBWiFZMP2QP+TLBN+LnZ28ZrxfHy34G85keLDLs
mNMuRjEQw/EcqKfesbDwTiNlZC+dQftanx43D4IFXy4qjzgKTLU0xjkMMyIkrpKEjeM5KeYv5AWj
RqyRjoIcV5aVFRbVNkxxVJLJIYseSUx4kUIeZjQVnafFyIAzVjtyL81e9PC9/wAaey4c5o48hU4z
ihiPS4z1JhMWTCkVmx5DXgluY5RVnFX71gHZRa5BoG9A85UEoc2Gso74Mde1GN6T9VrxCo18cYFT
j7OtY8ZczEgzxiRcTJ7xDIEebcljZ9xeSv8AlSlcJaSFEBZFoSO1R5IlyNxw48fF2KefwZygbnND
+Vi7f8RbGjjwsVcloo8aLE+V7XPHmTZGdlvNO67zmrjYWyYTyAo9ghdMgLjjLxVyEjRI4+tbgTLu
OBF99GwDeWUY5yozDsyZMuNscGPGXgCHdJUVpldr1kxCZIshu6aJJkLNFUOd5i+Sgqd5DG6qkbR9
isVaFmCl3lJAvXIDc9zLFULUpWNXdpW2/b6xsOeNsPKZ3knmkHHJFHMgin26pcNJxFvSRyq8cgyc
WLJVZ8jApHSRbVl7dBk0Js/apYMmHITM27Hy6XLysAyYsOQHyLlZJsUtHBkoHmxqSRJF9H29A8XI
jJ2+Ccls7BXHzoJ16+oyI1T7ZE5Jz8VI5MGao5cuCoMyCc24raSwxSiTAyYWG4Ix/DFhk5WOUycW
cttyKWSUGKVxUkmMzLHlJS5q3VlYHglxYJQMfKiL5S2ECuFlmiWOeOTU+hemiiceBkoyyR1HNG4r
npy9O2h66X061z19zVtTw305aE6W1N9bcFuG1W5mjz4do/2Zv/d4vfjtoPTvqeVNmYqFs2Mk5eb5
mx8qR4doiDLg4q0+TiwB98gqSfcZ0XbMuUpteKrRwxRDgtVvoLUPrCqngmxopwsIUcEuXjw025yy
VJJLLUW3ZEkcG2Y0VKAo9O2ryRpWRvEEbMu7ZzY2y4sQVFRWZUWfOxMbHk+S4eXDBuOcmds+7x7l
DPlwY4l3KaVJ8/GSSCbPlpMXKjMuVJO+N8dUsFx8aPL+Rxq+Pse4Z8uLteJgRb5uGZHPIodNzxDD
KSZFrZM7uSNypikFlyIwJt4hNeLKylx4MSFMvxGVWQOcuMlGiJyJFxo5Nwmlkw8dVTEeOSu2wjKi
pkDoYwgR2IkJWmQyEIO0QRB1xCW8cqAuqnFTHyIMmHIw2TMQiTNjiVt1UxYeUqHLzFmhO1S7jl7j
hT4s7bW/km27MiHxGLcJG3D4pnY8uTssIixs+WSZp2jZZY3Frho1rtKkSyLQyiCuSDS5FecGhKK8
3PzV5hW4ZJAw4kkOOI/Mh72XkJnKIJyZBFJKq7Tjds0KFAxVcGaWWHvssYkB77gdql8lbyZeQlSb
ui006ZNCZi0LkzKYJcj9xxDHtGT+VukwWJIdvxcpJcDGxI8zZd3myNx2yDAwsXKlnYy9jdyMHhR6
fCSjHlxEZkiVHnKaXLFDIBozLUuTUOKAfZpFRZ8x5Cqs5jxEQPPYLJ9kwtXxGKGWbcMeKfEbGfyR
YioogjFLGq6WrlVqtVtLa20Kg0I0owxmvxLk4korskWg4rb9mz8uLNxcnDn7qvV6QhqcgA0nNo+a
wR9zubmpheGxqIGJMlLxKxVpUCPnZTQmOMSIFVQDGhTzymPac2Wt5wJcGLbMub8ZViXc8jHXKx9t
+KTYkOFsuPJJm42PiQwblgTwblnJl7rmJm5eFt2LDhQZO94GMX3vJynTankZOwVZaaf8fI71lzJZ
zNMXVRl5QWPMLTZGEO2Em1PKspwNl3Fp8TYI4h4Ikjl3yNFOV31J5DUKkm9Fga3GeOOCTfs11wfy
JXLNjSRwQyxyd0Ub97JArsscbzM+PHFFGrNMLKu5boWKoWLN4wSznbtuN9q2+b8zOxokjHavpzYb
xvnwwbrAsK7NjbLvO45rwy+dXxcjDbHy4ckUQCH2xoZcfckcsqus2BkY0kG44uZTY2RiCOTnBlRZ
AkxmQw5QZuM6H0zVqOhrqMjCgyAVzcJMbcIZiCCOC3D01nxMfIH4ORASqM8eTkwvDnY81e/FepIo
5Q+2tE0e5SoxgwctTBnQBNyS5xcaamXMiIOLI7PlR1GmK1CXKjqPKglJ4CAwlwo3JGVAA0Dqi5Ea
x5aPXI8V+A869miRwkTxrG8hZZEara24D6XPQ1bW1Gr8jyrrxG+ltDqatXTS1CraX4bchXI1y4Ca
2j9eb/3OK3H19aTKiQjdcdjkuWgixs2WNsHzBceFVabGgE+7Y8IG4Zc5bBz8iWHZ4UMWLjwj6q3A
PSHpHitoNZZ4oVk3RS2TmSMY8aZ6XalYJDDEPTvwT5+Fj0d1y8kjZ8mcxYOHCNGZVG47pgLGRkxS
bV8YzsVt/wAFhJE80ZxMbb8mHH+OhjFiYmMMveURhtWXnMkOHhx5XyFO6PaNx3BsXBxcNFNmaeEJ
Lm7S8+77lgHLy4pMxPuikkFjjzNBLj50DRLm5Erx4BdkEUS3JqSZ2aHHC034/cYFV9zzY5J1wZHW
bFiGRLHkqUjWWKaVI2kKsqzRlUR5KWG1NDG1S4EDUuHKtJiqrdgBUFS8ANY8UuPLkFEWXGCs+OqV
jYUTpHnnAkyNxzc8xwHH29cRMV8PDCwDbsdq+O48ePDus0eNiQYy5mJNs2NhZu9Q92FtW35Ek24Q
ybbUWYsiKyOCtdlGMV94ryyivygKGSrUJaacATTeWXCBEKpaJFsB0mBCCUrLFloirOtJIrLj5cc8
2LM5kP8AkQRHHhfdHanmyGAmKiQiQLBEKlco0e7SQnBnGY008EDfkrJFBnXkyJMjcF2/Mix4Ruce
buZQWlgXcsmHZYsB9w2Xds7Kk2rGxsGDJeaWR3hYSxOGRWD4kTFsVwS+THQyp5Ww8dYkBvWRlJEJ
ZXkaHFeSu+KEPKWJcmkWR40xzbaop4cjfMzc4MHBEsjdtqIoji9q8wvb0QL12QLk4bxHH+ZujQ95
FCSu+oyAH6lqSozY/agBF7VJzggj7nJuZj/jdObwXglSSY4+FuEgi2RmEG2YkNFAE88MabqzbgkO
z7jhY20bZjzRPHHhzrPjtHk7w2Nlbx8uyMmKPct2FPvG4xCL5PKBkZOdlyYXxprQYuNBHIVYSNIE
Z1NI6sZMiIPJmqtS55rNmlmOP3tKoCCbIQtjfHWKbfG+Ks2/4Hilyt2lWLDRKaYCo4iTyFHow55e
DFlxZexw+GF58aacQTVif4TGxuzhj+oochGABFwBuW5lzGhYntiVmaRtv2+sSFseTCy37J8mSfiP
Hl4UWSN7l3SbHXEydox4c/ccfNimSVcnBWUw5zI45iszBgzEbIz9pfFzcfKTN23HywmXm7dKYsXN
EqTRGDMBqSGOUd0+JUciSL9VPiwzp4MvDEG5Ru6srDiFcuD3NiH2+JjlYcveM2fHaLPxpa5Ecbxo
4k21o6TcMjHpJMXLR9u7T+Tl4pD4mWxgyoXeXHciCZEaSMVH5VVcywR0ccDxpJX4qoZS6VHEkRjm
mLLLE/CfQMMTMUlUyP4mV0ccB6Xq3GRryHB7Crmhpyq1c9T1r29Ac6Ao0ODnVzXOudX4CdNo/wBm
b/3Ppi6AzZUMQk3B0aR8yVlwJ5JI8FIhDjxQiSaOISbvhIf3KSZVi3fKWHZ1VY8PHSuQ/o1vqCQo
m3XHjqfLy3VCZaj2+Z1iw4IzpbW9X4bcDuiLkb5ixmP91zax9nx4iFAq1WqaeKBG35Jyu17lnqib
TtkMk0+YXiRBLEc3GnxWw8n4tuYgyszdocZmxtw3FoodvwY8v5GvdFs+47g2Lg4uItqtVq3GN3xZ
VklyV+M4gaeKPybtidpUFlIVa2bIRpo2pGhKEximdFEX2RFi5y92ZXGbl5S48sEdS5bRhs3KYTSy
uMLPEantlBVUWJl7hawNHnRU0fICaCiirAyIUpHYDthenw4XGWMfDrJ3LKesTbu4xeCFJs2OJcKB
1J7RR50uSykzJKMXOx44tzf9xkWFAsg8W+7vskuauz7djR428j8PMVMkQJkRubXq1dtFebRIaEZF
Zks8SQnvOHC0p8YMypVq+22btsythw5plmAkp45QI4XasOHxqptW8TN3gFU8wI7yzOypSLJWTIb4
2xq6pEsaumMhkDmVI4xMckAxTKwX7Gl3LLkh2YJix7tlxJDFCqpnYUWXkP8AHcBK3zbcnLmwNiH4
0uzMlS4+ZCTMor7GrtEWTH+jIzBGE7naLEVRNM1wHcx4bNSQRR0AKAvXxhY1iz/E2MvjEzUdDRrp
o0iIGzeay5DUrM0jTsjLOpoEGrcAoChUkH5E/wAbwtyjw97wJYWeCNxLiFa7XUzT0H70c/bGxIjF
OfuvQoqWhAEaCp/0RC9SNYJADlxFVR83HSnzpSjSZMi+MxJt2dGc5u0Jj5j47vO+RpIiWaBCyQpW
4BjFFtT9+LA2OJMh2hGRapciR6eRin5Sxh80mjLM1GNmoxUYe6oseJEyckKNv2jCy4Nt2hMbHzdz
GPKhxoCuWJaRJpBHHHHXlXuJuLykylLeZpKvIG3GJXixcp41OXHOsUjO6ygJEn3BCZibDctxaQiP
n9sKO7Stt+32rZdvnOfuGNCoAVF9aeGOeLO2zLx8rD/IgiCyQNBlJKJoIp1vlYBhminSmVXXM2ua
CsLdWKFYMqKfbZ8J8XdFlEmD2GDMmx2R0kR8Ud8eT91/TP0E2PDMgwMjHaLciHWRH9MW0vykgjlM
+2ACOTPxRj7jBKoIIr34XVJFn2sUc3KwjBk4+Ss2Djynw52NRz8eQnFBo5ckRGPjTlkyowGhdvLk
wLHkwyHg61LDFKDC6xsY1dZJe8Tx0CCPSvXIhokags0YSZnIZTrauXEehOnt7UTQr2oegK9z6IGp
o8FqvV6610omr8zXOtn/ANmb/wBz1PercDzQx0dwxBWbNmY0Ibd8hX215lj23ERUghSmZVEu54sN
NvAmqT9ymeLapVaPbcZHCqBY+tbi6624x61tB9DLnY0VS5+Y5kaF5I8XKeo9sgUKqoPSvV+CfNxc
en3jJySu0T5NQY0MAvQOmRuOJjh9w3LLKbIllzsLGTx52Uh/b8UBNwy0x9tx4lAAHzDHxyzTENDn
7fj/AB7ZcdN6XB2PbsysXBxcRdbU7pGvlycw4GJBjRb5BHJhx4ksD5OPPMMhXilexqN2jfAyhkQR
xMRN5QqpKVd1x41ypsuX9kCsNtJHiOOSjMzfbQYFwCTiR+CEyFAIwDFKDQtQagaOs+VHE7QSdx3O
aKfJ3TEggm3rOWLyS5gx8ZYk2uWGBcvesCB8HPx8iYxZZOanmyvETI90KiM0AooWFJvkoxcHcsf8
x8rGRdo26SN962jETExSsm34u35Mmfm40+31FkxyA86IoiitTQrImJBIuUjDFXFlUor3oWpyAmNJ
NeSJ7wSBScaOQR9kKwyrjoXnlhnxjkwx4LNGu1whkxYEqXDgkH4EAUbTCjoHikcgK+N46mhaKLl2
/imQMFBSSamyxcbhEKfNeYwbzucEeDmtj52bvOFFBjIi4+4KqM9jUhqaOJ6mwYDQxm8+Rl8o4nlZ
fHjLFI8jjGVyqqg0tSCtpxsoj5RLvgjwonjpudGibAugp3LtFkKSchVHeWoRu1eJRQhuRBHXayUp
lRg8jiKaV6Vgw0FA2o5KwZOybvi5GFvWfBNEaIogGsjBVqjDxxPJcQ86kls0nUGlNJyU8zytN+iF
O2OT9OKq3fDhd1kxVlIijjZpO5JsiA40BkGTmduPHLIDFHkhPDlEyIfDEO4L2rJMyySmaFKky47T
Z6ipNyW5yZ5KMc7UMdaCAV22o9q1cOQR59wjlxZdox8bcpNr2mPGrNznxqzQZ5UxFkWKCGEFhRen
u4jiKiSUKPEJGmdUjZvBiu7OWhmidoy1Y7NKuEY2EoEld1l3LczJUSNVggkdpnwNuCjCgMM2HmSB
ZppJj9A8UbtNAktPGySNeIw5LInJllwnifGzklOmbtyzhHmhaLNW+XtsUxglysQh8TcEkizMBsPc
IMsSxRzKfyMQwzxTr6h4rVb0GRHDbY6MdwngaPIgl4OXpMocSYGO5WLcMSot0iLAhxccbKGE+1wM
4yc/DaDNxsipIo5Q23MlDLyYRbAyS0uVjgS4uQPwmjaQE0jOCctY6Rlca+5FS46PUsUikyS0chEC
sri2h4eWh15V4I+9zOp8wBDKwq1W5nkLEI2fjKYsiCWjXLTpR4elctfajoaPoc6A5i1GuvGdParc
G0f7M3/u+hbgtpJPDHR3XCB/OklWNd3npNtkCQbXiQgRpTOqiTcMSOn3UFPJueQse15L1+3QsERE
Fe2nOuvqW/p8uRDCH3FnLzymoISQm2FhFjwwj12dEGRvUEYI3fPOPsuPHSqqDR5I4xPu8MYRtz3E
GHa8NRl58wfb4yFzI0C4mVkLDjRQCsvOgxQZ9z3RodhwI03rb5Nsz9nx4crOx/hmPA+HhY2FFwZO
bHAUw5J3nzIMek3PcMw5i5YkyMvNlxIYMpcXdsAPGvUix2jMMEuLlMigisvNDgbe6OmOkEkk0ipj
S5DRsscpgwpZJUxscDxYoKiJQJENCRC0SoAO00T2131d+53Ns7Pm74O0xT4sOQJtwg26guRmpIYo
qWGSUY7s0Jjy8gx5v4kaYBnMOHDBSTZcbNmwwvjbrM2SmTG0MuTjdsWPLJHNgTyK+1ZCjwTRvDFI
z4m5SYS7hmpmYOLjxRQbms679v8AtOTk4+2bMVxMmQ4OSsyvoaIpFIyZcgCsaUIq5Isk1M6OseBC
XnSVpIYolUBsdleMgJFKHElfitJXgUr47BkJVQbG9SOFqJLjJMxcQ90md5AhkACysyQGSQPADRxI
RUeFD2iDHgLZ0IqGdJI4Z45DFn5MS5AOVJJHuMdSZssZObC1SSl6MnaIoWmYssKxYrOVRVGltAKW
toyYmxfk2ZjpixSKzk1LIxJkZqJa3bclRQApUUB3JoAsxRSvMFT3BlIrtBpSUNwCspFLMLAg1fkU
EuTtuKkcbpyOtqKqwyMEmu8wIJebG6g1GCWEgdANGQuTzqViGwo7t+MvkkKKZ2N25LB2hJJh44sN
I6UIoM8d2y3jyHz1p9xtU26Rl23JmozZklfj5DlcKMUsKJVqNhXljqWZ4lbubEwWieKAS4hgSPIG
1fG82VdpwcfFO7TmGN2kmKxItE2otT86/wAcI/KQ15zJTHmoNNlRxB5ZcySPEjjjmCgBGWV4lQo5
WWKZXXc9y8rQJclVjqWVp327bwtbFhsc3PxokKqqDU+sdJI0lSRGgp4JMesbJBWOVZRk4cORSZU+
GyOrqanx4shJI3xaieaIXxs5J8Z43jzpYBPtsE9Y24SRm9TYgJTLKH29I/QkBqn2xHdsjMxagzsa
dOXrTYmNOG23Jx6i3B1WGeKcVeuVe2psRPtmNK3fuGI0Gbjzsb1Nt8ErGLcMSmycOelE0ax5gNNj
Y8tGHJRe6NmE0yBciJjy1OjorhsNCjJkKFyQqJKsgPAfRZVYNjik80arkIzB0JhVWyPkO6YOQn7d
jgSbcVEW4TQNFLHKpNe519q5104zodb1y4LkUdPcmr8NtCKtoK6abRbyZv8A3PSfIijJ3PHDHKkl
ZIc+V4sRohFgwRHtWiwAfNxI6/dI5G/J3WaSPAy2MO040dJjwxjjGg+ut6tvVeSOMPucF3nzpajM
MhXAypmiwMeI/QZG4YeOp3PNyGTaJ5zj4OLj8E+ZBAMnfHYjCzpTEdvxj+Pn5Y8G24YOVn5ATaY5
HjijjFZWZj4qyZ2buLYmxwRMAAK+X7SM3ChgCpsW4fnYWrMqLJmS5IOXh4Ldu5Z0ePtOJEoAAydu
M8mFtMWIm/bxkxvKveNyxTjzsO4KSpwNxV8ZBlZUcCRRrO5EQkkFRyyCoX7mimTvL8pJgoksVEoK
xreWVwlRzzqE3AhnzYwN13+bGpN93phh/LZicoQSwbfuX/I8GTONxk2yEY8UuUcjBmgqHyIRDHKW
CRwwkxySZ+HHRy8Z6x8aOaOS4adEZWnnZcZSaw8mTFix90SVlkjNEAhsRO/P8ztj5naY9ymStvmU
ZWXNHDj7Bt+YMXethYZG5bbHDjY+b5FB7haoiFl3CaQ5kWQ4pMykyhS5Qpcq1DIDUJ1K5mTJMqC6
QSKsiZKdxyYAuPNNkSO2VBltmpRzJbjMNeSCRlyQ0jlO27CMZUbDJ26Sd5IyEWRcaJT3HFlEzJAU
pkf8jNZXTGeYKgWRSvhUZsPlLAiUCeWbFgesuSOMQwtK3axqPHSOgKtwXFBlpawdph8HyHY3SbHj
Ck9JkkR0iYAxGvFRCgFWVS1GoFCrHmE5OQnMG1BgwK8whrsAox0F7XjIEnOppHjyNryzkLN5Qx5U
eAC1TY0cqz4ksBVyYUPcZZRGuK1oqAvR5BQC0ovLguFc5MIpsmANPPFMRNCtSbgrQJlxQq+6i77n
evzMiSic6Svxp2oYKUuLCtBAKIAougDZCLUrypHhA5qYb+bLfFlyM3Jmgz5GGQ8k2BLl5A+M5qRY
WyYmNHFNjKu5TpJly5aBny41ps5ryZs11kBEs6WaQFoYFFTP2OV7KkVXZNvxJKGK2LL2qI1aJ6mi
71U2qWFQhylmb8Z0eONVSeYzvt229oxEbGmwsx+yaZ5m+lIBDxSYxfFhyispaWLKuzqrq2PPhHGy
4slaIBEuHNA6+DMr8mXGrwIFjidCk8GWpTK2+sbLhyleNJAUnxjDMky+jb0D6ZAIyMGGc+PNw0x9
wjkZWVhVuA8HTQ6PFG4bb3SknyIHjzonq2vtodJsOCUqmZiCDNil0lhjlVttZC088awtCQmVKo7s
eYfhKlMJVMcjO/5HaylWBHCYoyzYhu7PjCHJMoVlYVl7nDjl98nuu/ZQOJvuE1HP2dhHDjzRyY8s
fDemVTU+MxVE3n8fCzZ3zAb0amx4p1fDysNsbcEkoVarUdeeltOh4LcIFW056+2hFDQ6HXka60BX
vW0D/Jm/9zh5U+REpfdI0eKeSdhjZMkyYKxtHixI2kmXjxV+4o7S5O5NLDiZhZdrgaRcaBAABVvT
5eoP6USAJtwxoic3LmEjwWjhzJBBtMKUkaRjS1e3AKvxPJHEs+944Yx71njH2bDiZURBozKon3SC
Mz5uXPTY8CVGcp6G1RuTm4eOhTNzUgwIISABpNkQwDIz8id4tq7yqqq6WpgGG7becHM2jcf2/OBB
FZu6Y+IfyMjOMe35WQuPh4+NFqORMsfbPNs7ZGbnYuRn7hiDIhVWV+1Yzt+X4ciJg1LDBWZFJG5e
ZkJMMnYIx5MryKGDfgh2WIos6IJJN+x48hJFlUOQD3PW8zGJewoy4+XkSbf8fyMtvwtvjSeN4nl3
XLyIvwvHIuG00MqZPZjSPJTwR4saSZEs8MMc1TQxMkWG0haLOxmjaZlxshjkNDiytPCcZ1lSRIcP
NaUfnYz4eZM7Ai06yvJk4jxvkoI2x5fyIWLX2zOxmxt3xf3mty2Z5sTZdvw2fcvHt2ZHMhHK06tJ
IsZFBDRjoKRQMgoSyClymFDLpcla86NRkR6iaFQMo2EiqqzkgNGApW8kUTgRRsAkjLHjOalxccn8
dWH4OKxycCMo+FLGsWXJG0UzGA97FIvBJO+Qs2NKjhi7pNtyTM0QgXsCLnZXiSONppEh7VVAKPIC
QW8sdGRaDdzFnotyhDOIge3A3GNoPke9RNJjt3A1kH72ZgLk1N2RQF2NY0gmhZSpjUu2dKI0Bscd
xPARYxKWbLmWOhkEl0Xxq5UsFYYygR+wAbM+O4eNBg/IIonwXYIBkIaDBgXUGuehUMs+KQgkEKGQ
scU/4B0FkDdUHOSO0+RJJG5myyA2Yx8eWa/FmahhLQwoRS48S0I1FWWi6KTKlhkdzZOS+LUquuJt
bJmDHTJfPy4k3PKyZ1tMrRR/h5UuLt3xvLSHb9hxp6TasPbcuYRxSyZmMI/3t4ElzcieSR8hzNKq
oZ7iB8h3fIMWRKjMZJgojRHP5EsbtF5WbFkaNYzeNB3O92ZRNSxKmk48RzNyYyNGMhI5CUypJJH2
3be07NgzfnZ2JDSqiL9AfQmxmV3GNuCSGXEEOX2qrBhk4STtFmSRyDnRrJw45wmVJCzYbo4ljyHl
KuGknw1kxoZzBuDqQQRLio5TJeNgQRXvraj9LbSaCKZBgzwsu5tGUlikGl6Oo4OurxRvU2CtyuRE
Y8twkM8co5iuuttOdTY0E4MWXiiHPSQ3BB6TYMErvDn4qrNj9yZE61HlQSU8MbUkDxhVUukk4IyY
CJd5xozHvWOximimGhANSwJII8VYFyt0lCySMza9xFRZckZh3rLBi3PGmq4Iq+nSmcRrt25ZuLmy
MZd609+0NG2GJhFJkY9bbt/5EG7bj/yYt4yUqLeomMeTBKKvR0vROl+O+nuNbV14OtX068B0tptP
+zN/7tFlFNkxAybooeGabIZsHIllgwUgK48ato80SVLuCRsmXLJLLjzzTHa4XkTFgSgAv9At6t/p
5czGgqTccmSpmJEPc5j2yaQxYsEP0M2fjQs26Z2aY9nmneDCxccay5EMVSbhI6yOZaEsdR4OdOiY
WBhI2bJKseDPPUOPFBq8kcYn3cvKm2SzOiLGnBarVv8AtxzcKXKUHafkmLBsz5O7biuD8exscRxp
EnFuAc4hx558mXZ9rxS+QJjuWJLCCe9elbNmGaKGdlpchJBJFG9TyACeBGOCZVZHVGdYruDGm67k
kyEGtsnyIWiz3NHcsiOPFxp90mO2NuWTL9gGHGIc04eMwSbKkyYFhSFlkjGTIIYcmONZsxUhhMgZ
YYZJIkEIXIkx3SfH7ZxJOmFLLEPyucMmK+Rk4aFMfBx2BYXeFGaTFikd8eVBAJgMqNScnCKR4CMk
0jAvYEbXnxwy5GdB4tj+PDDG/bKiSZu3QR7Zt2JmZGXumJLtoxBlZUZn8bCeI0ChqwrtrtFFBXjF
FDVpRXfMKE8ooZRFLmV+Zehl1+WDUU4RlyRS5agflyLS5DmhOLnIAoML5WOsjpHKyyRLKJ8SVnUZ
gYIoZI0SmtX6pJ5RGJpWmlxcbxx2sKkYEOyBO81eWu0UsZNeJ6iSzoLtjwRRQfIdsxfy4FHaayP9
uSljAnc2bN5JKxZvFLkITUQEaTSGSSsSbxyTx80tFHLIXesKUOjL2sEIix/9YHKZJDkfFp90aDem
z7zL3RvOWPfXnQVAzeLzRmhIhJcdwvWZieYEMpxz/wAdFBBfyzHmyip474+T2pKrRqBJGCJQT/kN
CdmkynmxEjVpcbbp1zp893x8zcYRFg7eXxMPBw2kn/4+65mZNOXyIJJoMfaM2PE274soUbBjxzS4
sGPJlJEkOPvm1Twfy4Y277lvs+9NjmTHgmlkeoJEjGRlGWu12rsgAgw4EYi0srqRIkSLAyiWJDJN
JMppXsCrygyIJHkJEMISgwVaZlRdw3Jskxx9pQfjmbtnWKF8ZNsyQz4OXIYZZnlPo39Q6X1yMVZi
swczYkuO8MrI0UySiREkUJPg1BPHOlSxRyr48nAZWxc6KSGfGjxsjsUY8cpeSLJA8+AYMyKYsqsD
DNjVBlRT/XsquJNrWvys3HMOZjzrwdOLlq8aOJcEBSPDULzhhnRXlmSJMnfMaJX+R5XfjfJTWNkx
ZUdc6mx4Z1/EycYRbgoKsrLTxxyLkYEsEUG4Lkv+85MM+Nv2LKY8rFlP46A71kyxy4O2QOkm24b1
Nj5mE2HuySgMrDrU+RDjrnbsHDuWN9LV2NRUjQE0HrC3VoKxcjHyozyoSRksLLnZ205GHi5AxKyM
rHfcRl4xr8rHNCZDQlTtgUofJjGTc90lasmKWNr0BegzoYtxyYqg3oVFlQTUeO1WoUatVtLUKtqT
XtfT319qtVtCa2g/5Nw3E/uUUmRM64E7vDhJGyQRobcrWp8iCOjuEF58rKV3x8yYpti9y4kAIAHp
j0AOH39QesNR68k0UStu8LNJLnPQGG7R4mfOINuxoaAt9BJLHEs++Y6nxbzmrj7LhxFEVF0JAEu4
wIZJsyUCaFDjxZ2UYdmFpMvb8JPyNwyxDtsYpI0jWhRNqy95CkbXnZbxQRQrwChpJLFGN036HCTc
fJJJ8IixMiUAAejDuEHduez5Wcj/AB8bdiZEAkjnibHmYWOHkNjzwypLGhsQt6kklkyLY8MIihZ4
wVbJyseITTZ0gxvxIzmYON+Z/kiO27r+FS4udulRLGqSytishhwIHzc/cmj2TxvPChkysXsAiK0j
yQnz+NcbGxyuNjzYoXHhyxjYmSGhx2vLjLJAMGSBcXZZYZpG8VRpi5LDP8NT7nK2NDvxNRbpF5H3
EJlHOkdhLkyp3xNFkjyu6BlGMwpI0YrjOrK0omxMyHx/IY23OPbMcSYUuDjwNm7HHmrtu2xYI3T4
pLmSrs8MWDHg7h+Zue1Lg42A2XmS5b5GAYcppl/LiuJ4jXchqwI7a7RXYK8S14RXiNdkgq0wN5xX
knFeeahkyCvynr8s1+XahnEV+4Gv3A0dxNhuJo7iRUu5sxTlFus/3YGMWblRqV+0IZe0eNKZFaip
FAItOXBjdg0ZLTL1g3XMhg3DeZ87PxiTEam5zB1mXIcY8B61esWQSxZ8vbqDasWUSw5816JoVDIY
5GQSmcjx4xPYDQdfzdklhfbt/kj/AAnH2KilTGpoqVIZlBbmXN+4LSsGAFZ+IkqQITFNJ3LjJeUC
9LyHXGXAxZlGBhrS42OKSNA2dm4+HDg4XmyMieHdsrMd8NMcQ4+Dtq5Mci7ZlbhmZu15ORNJsEkE
GFsuIYl2+BpvBHCZZIPHj7vjRR5W7SvlZOdFI+8by5CLesPCecY+3NjrEWiIdJKly4+wzrY5KCvy
1o5a1+YKbKVgZkYvJGwjmiQDIShMppZRRigdrhhcUDTOqrum4tOY4yxeyqXklbEwXx62/EbAyM7b
ZNt3nB+QY5k9S3pX4bVNBHMiyyYzTYvjrtN8fOWSvbIwizQ5v36T4Fmxs/yNkYEczS/k4rxZWNnI
8GTi0qwMIsmSJI5Y5VnxY5qXJmx6Vgy+vfgOh9KfBhlbtzMNYM2KWgQw9C3AQGEu3g1PPJCuTuC5
AkxnJKMKCVg5s2G2DucGWOlHpUsMUqtgzwsNw8RR0kW5vu2DJizzOZHDUsrKcLfoDDnZa5Obj+OW
AQdlRob5O2LO0OdlYMmRvsKQ5WbPlOsjCvICBSqTSQk1Di9yzYdqkgKkgjS9qjmeNpNxypUEjgrk
5BixQmOi7jjlp/x5c5tkxiYtngjZcSFaEUfZ8Z2yTcZPkmzZW35G07VNuWVv3djyXJOPis0L4xNN
AwoowoM6GDdsiOoN1x5Qrow4OVWojW1Wq1WrpoRqdLUOWgOpOl6NbR/tzI41zbV0pp4Uo50LNl5m
ZE6x7jNUm2LMyYOPHQVRr7+pbT3/AKGPoGYKJt2xImkyM+dCcaMxpnS1HtMVkRI1+gm3DGhY7nn5
oTZ5ZpMfDxsZdZsiCAPnzzDIyIY6XPyZXx9nypmg27Bw1m3KJR4dxzVx9txICABwZO8QwUuHuW4n
G2/CxRx5GdjYom3aWVs7NEhHx/ddyb+I4kmNtGy4m0xehk7ljYxWLccuTHxoMZs/MyZJcjDy90xp
caXGG8YvkjB5VsebcKxIeRzHebIikgUw9seLBPuE7wrKsdEtIykkTCsoq7MTf49uLTJu28SY2QsO
5q6YStLH5FT8dpqJwseslzPT4CSpkvBAuDFjELBK9IO0BjYNehYDnUkaSpFGsSbzj5suRiYkizDB
jnhn2zOiqDbsg022J3/gwxhIoyYkhVWgUtN4iUYqfOhkVB5I52B7ArtmSR1GXIxswwLue4N2L8g2
hsfDzRPuLBETEePJM8EIjkw482FNrgwZd22KTcn2ja1wcfcdjy5cp9qx0wsTDy5MzdttTBxNrXNz
pdxeXbZMRczMhkz/AAyiSawzICfyIqE8JoPGR3JXKvtv2iu0UVFdgrsFeJaMYpY+TRXrcIDHKk6j
FCtk5CIsaGncKtu5kPe0q2YntM3+OJZnDyjvTGTnAbyYiK+QEVV3nGxoc+AHxmpec2KrebckY0TV
6HXGjEMW4R3F6vSi5gRYIs+KxvQPIc6xQywqzEYtuwDlNGXytgw94Y7jg5kAf9KL3KSaS7oygUFJ
JEYrx/eslpEsQTesqJjHjm6Yw5gABGLBXJRc5libcZAseVmSB59wCmCXcp4/jTwQ4WzYpix9oxGy
c3b8RIExoUTbfxsbL3jKTHhl3zHeHb82eKAykvPO8tSPIT4cjMEUb4y7nlxY6gktFGztiomPjvmg
VNuDOTPO1FJmrxGhEteNa7BRUUVSu1a7VrsFdlFWrulFDJdSMylygabKRBn7k+S0UZYhRGkjvkSY
O1XGNFl4c2N54cf5FhTYbYWRLFmY2VurQxuskenX6S3C6q62mwi8KyU0UeTUebNhujrIssEUoVsj
CqKVJUrJxYspFmyMAq0cqZW0KxxM+eNpsSGcuZ4pI2EjRZi9zKrqcaWF4cmOU+oeC3AdeXAeGbHi
nQYuTA8e481dG9Q1vE4Eb/qVmUrkI4MCuGhkQq7Kdl3cytV+XPR0R6kwHD/nvC3+HJi3PbpMGTqS
KSNrZWK+O+1yywYUU8Mte7+OJd1nXKkEF1MYFdi2MVWZKxFSSo8YCi3jHcslS44YS4dPjMKKMKNx
QJruNJKy00xkLMaSR1O2bm0rWrtPaqljjTbftO271HFKmXuLd2RM8rILvt+Rjw0+NDJUm3mpMBqk
wDTYrrRicVHPkQmDeZVpN2xWCZuM9d6GhzpvtAdG0FX0Ol6tVtOdX56W1FXq9E67R/szspDuE+dl
pKqbjM0m2rO8eDjR0EQHg9qtpagNLfQjTlwD6G3Db1ulTZ2LCGz8mUyIKjeZmXbJpjDiwQ/QWqWW
OFMjeo0Ig3jNTH2bCgpFVF0JAE+4wRB3z51bM26F2G45qwbNGAz42IrZs+QEwZZRDDHCvBl7vi41
CLc9zOHteJhihw2rI3bFgrIn3PJpZsYZEe0blmvi4WLiLVvQyczHxVOVuG5DH2vFg1k27GyZBjRQ
w5Ee8yZ8yAtnYpxsg1DK0MmHkLkRLagxAk3BXaPbMgqI4zHJt6GocAEzthYcYSfcZY9oGPW57dLG
sGRJjzPi5u8NibXnyPFEGMauDeeejjjtbyRnJzZ0MOM0Z/FWsODKiK9xLSIpEkbV5Fv5o1oZERry
xmiwALYshEC3TkvbGa8KFnjEgbGW4RQOSCV4i7NEiyIJEmKqqm4eOfsAKrDLYXqZO+LMjMOTsk/l
wbsyY2RJjnOzsjITDzMaGDOyfy3j7WjJL57ILYfZlVLhYqUcSHPhTbYMFsvYxmviYMeFDP8AFTLN
Pjg4uH8cnhzd9wTLBsW0MF3zBmGTteyxNhzxZf5mbtS4mFt8OflZG7Y82Am3xZeVDmZ34mRjbZkZ
EJ2ecV+yZFfsuRQ2TJNQbBgpBvUMWHJhGJgBUrFRJ2ksbCV/FESHj7Cz5kndJWG3fEzhDjG7x93c
sm5x4+RPnZO4Q3EV6ZiciUiGNSJ4ZFKPWFD5JcqS9RESxSoUe1YEPc2RJ3MhE8LqVbouHF5JGnCy
ZH2rhm6A07hc349lQzYO8zRJgnpjsO+RCHmcQQQy/wCSQtGSahXuVvvqJwqXrqCixnHU+OeS5UBR
jJ3tNNGpE0MkeKZUndu1cKRIcrK3bCgjxN6yMVYt4zVz87eJ85cXP3OOMLdsrNTDSTcdwlOFDMsJ
CimmQVkTJ3RS2rdtz8CsHkeOFycWFoQRIaEQvZRTMgpp0FGc+VJu+RJGeMNIcaRZBjzxyBXibzLA
zZaIXyIwSEy4yn5K15VNfaaZbVkiVqhXuYBY1Z5cqTbtr7Tsm15MGbueJDIIZjfdtrXIgn2zJxo8
bc993HF2TcUycb1TqfVkx5IG7Yc1SyyU+NkYBx8mOdWUMJsOWNoM1HNFVIkxZ8RsbMiyRPAk6PDl
bZWNmY+bFlbc6CLMikA/JxBBkxZCy48coEsuNSsrr6R+mZI2ptvKs2ZPC8M8U6dNPfi6jfISgJ5k
0pse248zRgSQyU2LIDtW6yI3CwDVJt690xlkjmh8c0aq5LKRmJOU2/e4sfGO84jrHn7iwzdxyrs7
MYT3I6URbS9WHdseYt8vC8zSQ5OFOjkgqDRxlNNggiTAqTEZaaNhR5aMauagyJIX/c8y7ZuU5i3b
ISKfNyZQ2TMVTFls69rY8cfY7WbbMkz49zoVU0+NCQ+3xtUu3OKfEIZsUivxZBQjyBSz5sdRZuSz
QpI81tb1bhNGuenWra3onTnpeto/2ZeLCuesSKfqbaX0Gl9Bwj6e3pCpJ4YQ25h2cZstLNAHTF3D
IrH2zEgoAAfQT7jBCfz9wz6j2UzSw4mNj8EuTBAG3CWap4wEO5kP+y7hmPBg4uOs+dFDV8/LEG2Q
JSqqDgyt0w8UX3TdKw9tx8UajVnVR+6q7zpkLTZeOtLseTlvBi4+OPRmmihV9zyMxsXZxZVVV1Ul
Tm7guPG28zhosjIyZt0wBPDbtYix2XNMUkufFjqIc3OfESLEKTRuJcSF6kxygztxZXg2yaVsXGnh
YdiCePKynycR4H2vK/DnxnyXBYCmYdsUlpVczyZucIjAqxAyARrK7wJJOB5J6yIpJlXbnxVcwRUs
2OwfIxFpFRlCSCrmIvkQ2/J7V/LnNDNkFR5kRoOpodoqaZCfDHHK0vYyZcqs4DKHHd5ZiIGcuVDP
p8mxRBl/FsisdrjJl8arKqs0hkZZLOvktdlL5Mzrj5BhORlmVMOeOKPOn/IaOaFo5MkSbkSoGC5k
ysq0cOEitBuLpi1iY0TQ5fZCy7Phio4Ulymw4excCLMXJ2+DDik3LHyt0SbHCmeInzQV5oa/IhFL
uIEW6Wz2wMM40ToFimfvYWoDufKcNJht3x/61Ykm4rb1PdMhD4ZudvZEy7i26TQyZSH/AB0hvkzy
d8kD9j7hFQBYxqMeCQ3qGTsfPi7gil2NseAm9QSFHz4bHtLUiDGhJu0knfjYP+sCpIr5Xx/ZMp5N
z2looW/SWsiWlXLm8kgPMHywAXpnEK9pWWXk8Uw7xzMi3WRzCmLs2XOk8bwBC6peWRThkmGXGgR8
kGWPIjRGbyTMzViYsjGPFWJvGafGDrLt5Zoo4EDzC0YEqJfv/GVZtz3FcOLyvO6IGaLHjjBKivOp
abcVQfnySHLN4ciImWPGyGysfAzxkY+xbnUXxvcXg/Yspopfiuc0T/F89pYvik7yn4vuK5Emx7sq
jClDBHVsTEaBG2+KRZ8LJipwCE/xSf5cuTbtr7W/b83DkwZJ1hdmczQrMsM7Bs/B74XxZ9nMORFl
R4mWso+it6eRjFykkeSA8mIZcOOSocxlasjDhyAuRkYjI6SLWVgJMYs54nvyy9ujySN0nxZp8bFz
kRcvbKEePmBMp42IDBsZkaHKWQ8B9I8FuG3pMqsJ8GNyHzsOos2CU2q2ho6npnYq5WPkRNFLQqDt
mSTHe7RMtBmQrlg1t+TkyxJuMIqbdMKBkzcN6OkkiQpFibnuEeXssZkyMKXHXapRj5m+4cQw9kxM
TJhh27DhO5bhFhQySNI9Y0gSQYoIfFApse1MpGgYqcTd8nGbL36HPZjDMVxcoSZ8Q2+JNzxzS5GJ
JRhgkqTb1YS7YRU2G0Y1BruFD/VFjvKcfDwxDmTxogjZzZkLDuraMmGAJJHINDXKhU8McsTqUKK8
skePEitDCaWONNL1bS1Wq1W0Ndaty4CdLcqtp72raP8AZm/9yh9P7DhHL6u3qWpmVBNuUcaiXMnE
n40IQbhOItqhBjjjjHr2qeeKBZN3UsmPuOUI9pwoyqqo0NgJ9wghDtn5CudvgqOfc8ox7NA0o/Ex
hJuDtT4c+RUeJjxjhy9xxsUeXddwrF2jGhYC1W4cjOxseP8AOzcp3hx8WlzZZxFsJmqOKOJPRd0R
cjcgax8DIyahhjgj4s3FbIXF2RhPv25DBgSRZo95wTBKFLiMrEdp/HliU0GQxkVkZcOIsk+ZudY8
MGOqrKjNck48XkkkfNlztrTKxpIWik2TMJJx5GV4vtjfHxlyM6eQphpFH5EEhMQLGIm8dpGfxxSZ
DnuDU+DJPXgmiKY/bM3hxmEUrUTkIFntSlHosqATIa74mowxkSyyQsclKjMmRUkTRSEEHEBXHqNG
Jhg7ATQGnyvG8uPss5x8+FzbIK9pglkMeBEghR1k7goJFyReWaKJcrejAH3bPleLOzoBDvGeTFvm
VaDe2kSJWFO88lY+TLFHPJI5xMueKKbIyDMN3i7BnvDkZXyMvDte4CFM+fHyg+NtkJeCMyri4xDY
WMsa4uCGxsHDOQyIE3DO/DkxZfMuVOzDt5SIVVW7ULEnGk7Jc0kRk8wCaP8Ahhd+7Fwf04mPJky5
u2bimNFj5QkUfbUZ/wAx510K2nhxMY+bIfuZzy6GI+aHExfG88ne9CktPDi4/a+RJ3vR/wBGCR4x
Ujhcv4xuuPLjbtuGOmKf0oS9Rp2QyXU3rBYEQR3kype+Yt3Rj7lVuwwMJalSwxsjKmm2uZZcH5DL
GziF3SWL8aJlnyRHhx2TGhWuzHcvjY6ouNJ3KipAj5QoZkqtDkTMTIA5mEdLIHRJVVWneSsjKigG
S7fkojSVjwJAJ86yuznKWDJXJTaIyr7dNG+07RgipcDEYSxYsVQz4SzLnYhI3jACvu0N/wB+wiZf
kGIrxfIMQyY++4jn91wJEzoosiXJOEseBuChMHIjmjEZJytuiyBuODPjNtUSBNh26Z8nN7LazQpM
qzSQHOxGeJXm26ts3Sd2xMoTL6NvpJ8dJ1SZoyY5cIlYMyI/mbc0GRDkoyq6vjTYpgyo5hU0MUyW
y9vaHIiyEmhjnSXEzdukw92gyayNtF8fcFkoxTY9Q5UcpngjmTvnxSkiSL9IeP24psHFlHhz8YRb
lExBDDi3naDPToytHBdMSKLyMoFZEYIcWNrVtWe+LkZGJHlR5UJimuVqDds2Ctuy/wA3G3SRVO4b
zLOMTbHnxo8ueAqYcipMqR8T47Mqy7pu6YiyyvK+u15coUwJIkuJYPj02OwLRkVaudLIyn47vGNi
Z3yLcMDPXAw9qEGZHFHPh5cmJkbtHuGPK28Sq2RuEWRAQRrY0aimBVctYlbOmcxQtOxhVQ+OGD4r
CmikUxzTRVBvUi1DuOLNQIarHS9Mkb0saJViaJA4b0K56nhGg0sK51araDTaP9mb/wBwejb17elb
1Rw29c2FSZkCE5mfOZo8Xvjed1TbJJGixceGr/QdKyN3wcdfz9wzGg2ZpBDjQwcEuRBCG3CaU5Ed
l/d8cGLD3XNOLtmHi0+ZjR0MnNnZdtjJVEWhpfXJ3LGx6ORu24tibLiQUFCi3AzKi5O9YcT23fcE
Ee3YAbJ3LMGNscQqNEjT0SQoyt5xoqXCzc4QY8eOlqtpbiVkK7hPtUku57jgebNlXMWbuglYc9oy
zBPG/cDNHEs+895wtikneeEoFQsEaPtl3GNDPkicQwJCkWRIK3zESZS5VtqzVzMGcLRORltFjRwi
OaUK73pXjvKQjxlpCMWZmTCQUsaLXKrDQ9pFc6NiHiQB/wBEMqSKVRqlKwCJUyI5sDDpI54JH7ck
4+NCzv5HrxTKMZXR0ljYKl65CiQazYhkY5DQ5GDN58ZIyCyyLPNKyPGJyI5SaXsemvff0yGgxJ7m
DZNwynh+OThf42paP48gaHbsCM/Z20OVXr7gY+5qc2HM1cX7078l2RsjGjWPHMrwRq7x+GNkmwmn
ePGtJI2esG7nyRwIBDldgGTIwpT5MfKbshJrFTvldVlif9WFH3yzOXe5EOEP8WzTxw5E08MMU2XF
O4NwekXOU6Yz9rysETneQ6Y7ESZDERnXGJEmQ/Yh63o/6Nv5xqOUkZbL2X4xAw3bYo8WE1i/9iSY
ibPW0gNY8nZLIyxRkm+M/fAL3KEVC/hlkKEA44rDwn/H3QHGIz7UM8MV3Chn0M9RUU6R0MxKOVH3
HJiavyUpZl7DIFBk7q8iUuUsYfPsMrdRErbqHEMZlaBFgWLGyswwfHpJ4dp2PGikfb8YPvuHEZ2x
FhrG3DIwjLm5LpPkzzAEUsth2P32m7may42Ky0Skas9SzRxrlb2bySSTNA7RuzNE2375DIrzx2y3
LQRDKxh8eXceydCknAyqy+ObGr5Bt8OfjY+4YuLi7ZkK0WPOJko8Z9S3oyRJKhaTELQENDOsyz7c
VfHz1kesnCjyCubJBKCCOomwWjfHzVejzrN2qNxBPl45LYW5ws2btlL+HnAT5GLSsjq+KUMeSPrb
ccuPDNRwpoCme0VRzRSjS3LTd9mEwUvj1iYUGVSxGNZ4QFyAoYmlgwsfGbcssptzwzrLtsTM+2EK
ozMYgzZOTt+Ak2aqhVzF7cgEg4+X5GmxMjGEvkL6AEnbNmEiZO7eJ4f3XGgj3y5BhnR4L00F6fG5
NjsKKMNO9hQnJoyIRAFnm3FkONO5uJu2mbuOLBFIsmPGhY30AuSLCGNBFt88cc0EqlWywDFKspbG
JD4SkPgEU2NKpSbKiK7tmKF3nJBG+Gv3w0d7epN2yWqLKmeaM3TQ0BoNfeudE1z4L1y0veuunOia
2j/Zm/8Ac4+nEPT9+MVbjHAPpZcrHhEm5Ts0wcrC2ICuFuOSIdvxo2AA4x6U2TjwCffkd/xNyznx
9pxYR01NgJdxx4mkbPnMh23GLbjuebUXx4ymOHFwo/3CN6WLLnMWLHG3Fl7ri4tB903I4+zYsTgA
DgmyseAS735nG37jksJtp22MS7luC42z48ZVVUell7jBjxiPcdzGLhw4w9Lc81Y8PB3rMiyJNxhy
Quys4vgvJveITQ+4KxVsDcppIUgfLfFgixhjTVMC6yY04PNTlToixyRYtCYMFQFTZhvm2thz/H8y
PGdMZstklaNJn8lFYKEcBoriK3lgB/KjWjmIK/NWjnqK/clobitHclr90UV+6xkruMbEZiGvy0oZ
MJCzRACVaLKQniUBq7+QSKz48LU7eJI5HZS16eJDSt467+4XrureYDj5nxrJ8mKhrJLR1HkIGM7S
CFg1d3aTdiyBhl7OrHAmTHhkngIQoVaaOMbhvMEUW0bplZU/dehRoAki9OpvksqZEk5LNlYrq+RF
ICqsAVhhggWKo2klWCJ4kx5FjmmyYVTcpsyPMiLeKYMZZn7pcJueU/dKTWOPHBiNds2PxzRL4cai
f8WIP8GBhSZk+f8AG5Hxo9ukxaItTfph/WdMaO57kmQqVaTrasZAiRSrkK6draYqBVEi5CEEG1Ef
4Nv/AECppCmX8d3vGycPeM+EYhHOE9uQ3WVRNi++NH3y5oJhvWG/bPEh8pmJynAekDGKHHlG57bm
ReLc8pMnKsjAwxtX40VHEWjiNX48wrw5VePMFFM2rZ1dubfszKEeRRhlowvTQ00H3na3NYitBHsW
x/myY+240AeTHiE26SsGz9wyZncyF4ZZGzoVMOLlMjZEn4+JC8wZTPaJC5N0Z076D9tM9Zm7RQ1P
kzZDBa5Ci16wIciR8fb4Vlw/jzrDh7FPjyth48sGPjphwyO0jcU+LDkLvewyYWXhpLjNi5RkOPkL
Omt+C30FuIgENHLiErHkrDk3bKw4clFly8BopoplkRJF8WTgHGyYslKyMWOelyJsdwysJ8VJqmhU
yjMmgORgcoN1AZsYq0OartLEkq/8jGMU0cy/R247624pESRDgBW/KyYWTIhc8+DO22DLQbZk4zRZ
AlTOuqyXJ1RmRsHMGRGnYTmPhwy5uI+HlyZUuPlJ8nmAyJzPLSntYb1mYeGDi5lT47xNj4uRktjY
0W2Pn7pk7jJGuDh4uRnZORUaB5MeAYtfisyGAXbGNNimnwgakwCC2M4oxuKe40DMK7mc6LLIASTo
aU2rb8kYc2W2PkumB4cSZrR7fNFkQhVWr3oqppoIjRxIzT7epqXbrB8RRQxENNh2poCC0YFYWDJN
IqhRXWrUa5Vyo6X1NtOlcqvR0vper1eutbR/szf+57esPT9zrb0Pf6W2kkiRg7pCzTHIdg+2xsIt
wmdNtjCKiIPoGdUGTvGFAhzd0yqh2R1MEEOPHrJPDEHz53aWAs0m6Y8Cqm554xdmx4VlyIMYfm5O
WItvkao4Y4hwlgoyt8woAg3Lcaw9qxsUDhlz8SI/nbhlt+0IKl3XExwse5bgcTacbHAAA9LKzIcW
Ly5m4jD2+LGq3p525LG2Fl4ePvm9QPkvt+d+Dm5OZtuft88eyYUM/wCTNFkwNjyw4jTFJ8TGONLG
8QNA15XI8slsrIeJII5hIpUL5EWmyIxW5bwuNAu45uZWFgSxzHImNGWY0WkNfdVq7BRQV2iu2u0V
2iu0V2iu0UVFdortFdgrxqa8KGhGBQEgoPMKWeYV+XMCua4oZ9qGetDPShmKa/LSvyhRyAa/IFvy
BTZBrfcczL8eyPHmx2s8AnTJxp4DhyzNkB5I2SdZaBFNVrV9pqXExpak2WJq/Y2A/jrAYWDFho79
oEgYGcgfkTivyGZDDK9S4KPTYSinw2pIpvDjxSpH4IQvbjLRy4UEu6RrW2QybnWbtX4hyDA9LYJk
gKl+eGAKzk7ZQCzSkCo2KyZeP5ZJnu1PyjxRaDYZ44Z5siKOOfPizMhql5Rw0aUFmyZPBBhTlJMi
OzPa8Sd75svZHizGOXKTlUMZd82URx4cvZLkpZ6P+jbx9i1LZszY9lxZMrKwceeIixjv+QeuO1ml
jMcuHH44oz5VPIoxUl1WJGIkiszqHBWZBuGwRQ/g/I8bDhxtqAzZMWSPKzBOPy3lkinlaaF3Z4yZ
VWvNHXljryR0XjruSi6XMkYoyR3LqaWzMJGK7JsMkzQYkWGuVv0s7c2JYWCgEsq00i2du4StCJZM
mV2xgsIlmVlRXSGi1ZOdDjjL3KbIIUmu0CmaooJZmwtlAqHa5DNuuwwI20ys7FF8vngEbTOw9CSK
OSt22xMuIPIxwsw5FQTLOlvrpsZkYGHNSOeTHYhXWbDlx2xs9ZTWRgBmgzyDTKGVoZcIY86TrJHH
KrLLhAYrCnbFzihzMArNiZoP5OGIZ4p1lxjS5XYfb+iTYSSFmysVcfNhnPB2JW+YzQFmvw7ezjJM
GSo3FZSVlkvli0nBOO7GrbYJMjHiWfGaMnPzMzI2nChZmY6YObIJY+4LiYP5L5+0RwCWZYXEkTEp
GabFiYvt6Md1xxBLoKtVqANWpULF1KmoULHx9zZ9lbJx8mOGCaSJoN4mSod0xpaUqw51c1ztRSNq
bGiJXFjNZWLjY0WMuFkhY1QWOnKrVbU31OlqtrYaWrlY110HOgK2j/Zmf9yhparaW9e3pjhGo4b8
R9AkKJ91xYS2Tm5K3xYTHLlSLFt0t4ceGBfoWmiQSbtIyvhbhlPibZjYrAAakhRJuGOp782enO3w
Ft1y5hFs2RkRY+Bi4yS7jGtSRZ2TUO3Y8YACjheSOMZO9QLKuLu24VjYEGOdAdHdUXI3bHjSNtwz
THgY2IMjex5U2/PzWxtux8cenPPFjxyZeVmDE2tInVVUcdtZp4YElyp8sfmPMMf4bitX7Ftfi3XZ
jtudtk8UJh2n8zCbKg78mJZalyzJRFjs2cCVkFCQCvOoqXOiiVMoyyNmyGjPM1Fms0pkD7cJIoto
dZUeKJDlwAflowkzkRhJkFYc5smTLmysMYYy81Js2aHIyMXLgxcE5GbLuP5mDNi4U2RjYs2VPkbl
DlYUe3Q5GdFk5c+NlTQ5cGLhZM+a+VnS4cq5EpiG4RmhmIKGXCaWeAgGItdasKtRUGuwV20VNdjV
Z6tJR8ooo8mJBI0GTjv3Irm8kjMe4SK8ZYr2gCQhFkvRlW/dcq4vJkyKTMSO2ct9wYxMw7EWOIRE
3jUd0YMkyd5lWxyVFNmLT7lGtPuaktmZLk/mvX4rNSY8YfYc3Hgj3rKxpcZ8GVdxANZVvxyTc/44
MtfJj4aXkY9zVGxbGN7ipeSwC0W07ek6fINlefEhwkxlawqY/wCLHUuCKxo7VlzGWUXvAwyIJFIa
FRFHNIZJL1iyeaBlKtCoijnlMkgNRET45FiQfxtvv4xep3KZXx7fWgmzN47YmNLcZLUCQ08XlfJb
sSJ+x8te2aO7NkgpiobnFkHj7bFsAHO2zKyYMfcju2VkJJmvknIaHIlkw8HKnTBwsrNxoPydxibK
k3GKXKbcVnyRuCzyrmw5TjcI5bZyyAZaziXKjl/KmV/3CNidyxZwk/x/ZWmEeQu2RZO55eXPFGq1
LmY0Z/JjIWQsWKmSaXyPuPlZpVJBAyIY1ye7ExEhWSbvMsyRLl7wzUWZ2C2ruAod7nD2iSWtv2pi
MWFcWaDI8NbqkmakkTZcWFmplxepvO1+SgZnbCzhkrFNHMh+kPpZGKJaSYSj/NglJEkXKwkmEOTk
YpjljlWfGhyVVsrBaKWKZayMQSmPKZDyNS4jxP3Y2cssOVjArDIIcyfHVoI5VjzDGXjimRY58WoZ
kmU8Z+ivwe+g4LXqbEhmXxZ2O0efE1XB1+TzMI+FGZGg3WTIx8pjMs0XY2Vzj4EbvxsLEfJm3fLj
WPGwhtuFiZ+xblWf8cx40eCVGgw5pmGx4wEG2Qrkw9q1t7FIcvOaZsnHjnj3DY3jSCaWRlXOipcx
xW5ba24K+xbgGnxp8dqw9obLhyNqkiEOMXafb54V26OLu3JF8wQ0lwNnj8ubNloMjK3jMyB3NcSG
la4iyp4jDvUgqHccaUjtYKrMWyIlMM+PI+Zl4+DJmZbznZYmFEVarVarcBvXOufDarcFqJ1FDptA
/wAmZ/3PoOdW4rUOM/U+02bjwA5+RMXVKSdGKYOfkVDgY0TfRTbjEhkl3PNjg2tfIkECHWWeKEDc
myC4ZaO6RwCRN1zaxtgjFA4eHH+bkSumBLI0eNBGeKWeKBZd6nkKbXn5LQYeLjJwT5WPjq++LNIN
uzctQcLb1n3yWSYbPk5mRj4ONjeozKi5W8PIYNrM7RQxxL6ebu0OPUuW0rQbZm55x8THxU03vbE3
HDnLRuu/puvxzEwdyXIGP2xbhimCU8wjmNsfchIhy5Gp5WAjiaZlW1XRabLhUuciVfyoFrNhnji2
jDXJSbByUy5sKPNwsOFNom3D47kZkuzxS5cW4bO2xrHss+44uNjZ2NvGX8cXxbDtUmXt297XlYeW
+wYkq7NtWW+bvuyrjYOPsCviDbNyTfc744gi2nas3O2/M2zO27P3H45NPHBu0SR5MaQuzZ2Q2FLn
4+Tm4GR5crGwnwNtw5JZNwaXCy5cfKgx8fLfJH5MYZZY20tVq7aIoEg52N4Mzacl3jDCgwqXFile
SKN1VUCyIjIDIWQyJLJI5KIbkxsJJH7/ADrX5C3bKWpMmJwc6Jabc4wDuZNNmZL15c1qMeQ4/GWl
x4lrtUUSBX5EVeSR6TE3GQbHsoVd4wcdcZc6DIcuqrMSyohaeRrtD98Xj8EIpRdkkAknTtkqXrEt
l2nPihj3recaLDiyzlC5rJNoMGpoSJcyTww1atvm7JZYO6XPm7QdMSXxSvEHfOmtTG+mBL2ySxHy
ZShcbbv0A2qYA5Xx7AxQ+VDHPjmh/wBp/wBV6x7+OVy7mste+HAj7pC4kdSVOC3egVg0+dJDmfGJ
Q2OQCIsXG7pNnwpZ59hwzI3x6GadvjIlnzfizOJvi00q5nxVjjt8Xy2iyviuS8OZ8VyRjz7BluJ/
jGY6x/FZJpsj4lEZIviaiWP4vjRSJ+NjpuOWcvKQNHW65mechsSYzYEo8KNkAyRGQlBJSMETHhRp
nREnVR3iQR1mbvFEZ8ibIYLerBaJJOJts+ScLbIonx9gM6x4qYm3tHExJvpNjtFmzxv5MXKjyoeM
nh3Lb2xpXVkbFzlcRTRzJ9dNAk6rO+OzQvAYpllEkSSVJDLivBnK5IBEuJJAcXNjnq1SwxzL/nwj
FLHMs+LHMY8uWF8jAhnqVsvEeEpKyZcU1GHKxTBlRT1NihqXKMbcj6Vq5+tbht6EuNjzA4mVAUz+
2lZXX5QoDcW3SlZHFZCXEoLY/BjXKR5y42Js2BW7bk+bNQJFYwZ5sRbJ3coZIzPEqrUGdOkCyxvS
yNGY+2WX9mRc3ezHExRDXx2eLHztyjK5smeMxt12bFhO2A4gbcYmd5MWOaXKeaKbPWCmmkZ1yAQH
Qtt8X4+DkSFV1jIFKFemxpBUELGWTyKBuGTiq2W05KO7/pWDGfLkhhSFKAo2rlVjrYUa6DQkaHgO
p1FdK2j/AGZn/c9uP309xxHW9deEcFvp5J4YhNuMgp/O9efD7kj3HJqPbIQVAUemeKbLhhL7nJNG
uBk5Tx7XgxEAAaXAqbPhjpnzslA2Fj1NnZLJHtuTkPFtWFAX3DGjKtuWYuPtsEbKiIOLI3DEx6fc
9wzFh2USNBi4+OODI3XCx6XP3LPqDZbNNn4OCs+652TJBskskkGLBAOAcV9Mvd8bHAw87cTj48WO
vpzTwwLuO+yz4+JhZ+dJhbTi4q62q1fNdkc1ts2ThZMAfOkjx3wcrOxFmhZTG6qDUCdtB1UIBIVk
eSmgnC5U0KiSdyAs0iw7blyH45sGLFt2RtMUW9SY+O8Px7Fjxdu3dMKXL/IgCYMuPC255GNlRRbp
jBE3RW3LM37HMG172kOPm7o2RO/yC0eHusuM+dvL5Swb4DFDmTJl5O7xyRYWX4ceXdh+ZJvmAFzZ
YRuMmJHk5WHh4pXZcSAT7xjQzbds+xYeHhZ3x2HJ3PdtjRMbB2Pc8jBkin2XP3HAmnyJFwpsDbYs
3KeXIONMkyOCdLVvOMWh27cpkdNyBobiKGeKO4Cv3ECv3AV+4mv3Br/nSGvzJzRyck15ss0WyjRW
Y14WNDGShjxCuxFpbGjYUZI1BzIRX56McvJyMWLEeXOScZv5WFt2PFGVC00iLWHuCxDdNxinjxtv
/Hl8dkDd0ca2krHJD5d+8VCAKhctPOveka9zZFvOvTa9pGYm/wDxrHbHihVHcVl/9dGKRK4ZMmUy
yV7RX7lLGOYsZTotyYwwim7vKdEJDQuJI5ZfKu3D/HbnPdcnZN4ysTKzdzzHgPQ/9tx9yqWZpFiM
ydsluca98OOghx1HZJkp2S7cT5w6yCTEimkw5VxIhuDSGLJnjrL3bNwQ3yrdFzcX5E5OHuIkaTOh
UybjjRmfecNKm3TBijyfleyQHO+T7RDA/wAl2lZf3bb3M25Y0BG+YzSS79iQyzfI1Dbru8mXLAci
0UDrT9gE+388GFlKSLKsk0ixq7BBBN2f5Hdog0cuZCq7lk5LyUFtRa1QYs+Q2Hs8UVbPtBzxLtka
mBHjhZ2bgnj8sfaZ0fvxZYJo54vTIDDOxDt82RDPjNhZ4pHSRPpLegdHRXX/ADYZaNZKhyA5YKwm
xLVDNLEI5Y5VysKPJqLLmxSCCCARJjvEYcpHqSOOVbZGCUkgyY8raWFJmI9A5WHXZjZirkT4zEQz
p45cRYpklB/ot6kijlD4TxtvRmmxGVlPDC3bLe4lXky3g4MPrh44nzd33JhwYI546/bl5ARdsxop
Mm1h2fckYYr51KzxtW1yiRJttVzmlIchfIrYUqyh8MBtxORJi/jq4eJ1GHhEiRou75BtyYmWYhRj
qJB3ZAEeFmlvPj4suRQ2rMKqkSU0PbViKg3CaKsXNwJTPipPDNDlFhjIEIEaYGJLktFEkQeSOMS7
uqtjZUeQnXS9Emr8B6k6X0Gho89LUwq3L3Gm0f7Mz/ueieAV7cHtXL1BwctB6jOqCbc0QeXOnr8X
CiaN815P20MkUEMKfQvJHGJdxxkQTZ2QV2tPJFFHCmrzwx0+fOzZEbNSzjtftyJcfbJWoDb8MtuU
8wG3ZE9QYeNjpxEgDK3bGgfybvuRh2aBSFCjWSWKJcnfcVK/H3bcKx9sxcYZO8Y0FZGTumZLFsav
LHDHEPRtrlZcGIjvue51ibdj4vq7tv8ABgCVtx3R9v8AjsEdKqqvFLEk0e6bU215nxnckw8v5Ptf
dKgLRb3hENCbuzoiYuLlZbY+1wgy52NjmLbcjKrO/GXPeB9yysbZ/wAdINrhBh3GTGOUcnIkkzJD
EhBRoZbY8EcYyGTx4kjhibU6QqfBCB4oBXghYtjgFo+YRSvYUiWQ37FYwz9rZccZbIUQNj7YUYY9
5buAn5KucnzNj7hlQY8OdkJPm7m0y4+5YvhxMlMvP3KKF8Pb9rw8bE3TYZZc/d9oeCXJyYMnCwvy
5Yo8hGNZCGWBsYK6IBQUUFFdtdtBatQFcqAFC1EgV5EsZ4qOZAKfL7EgyZso5G4vFNLg5qYW0wTZ
+TvG1SxZG14MTYkfxM4Lrt8e7Qy7dPtko2LA7YsXJwMh/MFxEinWGJIot9n8WZhbh+ZWTkuUg5nK
URoKjPYsv+WGslvFBjm0uLJ5FjQRCRu6dRWzZkf4++7tjYmDiytIzVmcsdEMkYKLWbD45a9sGHyS
GYCXPh7XOmDD3yNMBLnw2OkMZkdFVFaPxpt3+sEVPb8n43jwlsiKOWA8qb/u5KdsmOlZE3fNJ/ki
tWPcSZLigb1mpdcEdkcJ7ZNzkyIpcdZndAkcgZqNzU2HjTrDhjtMkUEeVucNGcLWdvWHinN3PKzX
VWJAlatt21Lxfjxg3YTSyANHI8csc0pXCEdXjRXl8iySRIcieFTOiAmR7BlmihkeWRnndHfHxEy9
4kkJJYxWyoGASlV3OHsxNQ7fMkO2w4gWHIxokdu5+KT/AI888BelkbAnR1kT05I0lR4jts+VhtgP
g57Q0jq6/VnXqHhlgb/DmJFO8bVLiq7dpEkeQCZI45VMOTgHGyoclanxUnpMqbGYFWEuM0bY+Wk1
ZGJj5KrFm7eVXFzKXNKE4rRGHLSQzYyysuRJE4KsPf0T65o0emp1NDVlVl3fA/GmaFGUqV4EH3Qm
6lb08fIix1wCPMZpcXId2dtcEfY0piiigZxgIF1FxQQBgB3wyZEEq7o80eTt2cMiOCdFKla8gBny
cOTZxFnZEA2bPJhwt0iVcHcEfe5M3IK7PkyKdlmFY2DhYj7juLZE7SNM2346RQ3qXGhmE22VLgSL
TY7KTGwMOZkwGHcvyUk7KkcJIu6iKNt3nYSZMxoNW0oVh1Oh166nU6W15UQDXbXSutbR/szP+5oO
M+parfQD0uQE25YsQ/MzsoPhQl1Z4pDhzyVBiQQIAANLevLPFFHNuGSCMTLeTGxIscakhQ+bAB5s
qVpJcVC75slQ40bVHt/YGzcPGQPueWy7VAXVVUcUkscS5G9xrS4e5ZhxNrxMRQLDWTLxYjLueRLQ
2zKmeOLDwYsnelQNHn7g+PssKskaRj1JJI4kn3WbJrG2WAyKqqtW9LLzcbChzd9ytyh274+7DFxM
fEi4edc9d42lNzw27ong3DIz0aKeDcMtVeLGilebC2VVM0kWPGXnnkXKw8GpsyfLMeLFhx4sK40c
GYUeN0lGZCUkjyUK3ikoypHQncnygUUElBfGC6pUubjgwzJLC7rQSiURjKSVmjuZQ1G7UhZKxpIe
3LzkjbB29YaypRBHjY4gj7CSRzKrewplW1r0YFNKhWpDNI0W45EAxctZFyvx86fJ2fDeL9p3PynD
jxsibIjxpTmIA2Qj0MuK5yAgTKWQtnRoWmlSKHKfIORnNjyMMxMbBnm3CbcmzMGbF22efCwYsvIz
N72047bPhwHEj254svJxot3wcfEbYjL8c88u3plZb7hsixYmLiRmCLFxsTeMmSGGLbJ8aNNyykmA
3WNY5JGleaDulkxxC+HPub4mZFJNFi4kcCzjshhf/Jn3LCsluyDCfvjSO8uXL3zRH/JjSFMrMYhR
zyF5nFwYIod+2bGaZFVMiTkc42hxLLC0hMmTGJoDQBNQKMfHLEswE+Oy9pVSzKox4CTdbTwOpVqw
YuxGm/y5Njj4H+sdMgt+TtO65mBlZm6Z8sBqU2zP98eS4hgHM4p7ohyGMtqns6imXyQPF2wQsCYm
h/I28pk5WR8Vx5Kj291rwIiWWNmm7gJFkTLlwsWHcN6edf1FVY1tmBH2qII58ZjKSiCV8qSSjHJI
WiKJ3NSu60XNsnKDUQakdVGA0bKndjvN/jfyxhMveUSpZpZmtVgKjd1dsEZBwtnSQbVsmRDnZ0ca
tHDFD6LorrA7K2TiidcXKbb5xY+pkY8WTEsjYMs2IcF8XJbDZSrL6FvpulTYpLLJHkgNLiMrB1li
SVZkKKssuOI5Y5lycC7Y+f8AdTosi+CfBOPkxZC5GLHPUeXJC4Nxl7VG7Lmso7JoK78TPTvyMUq0
U6NBNjVDlRS8PLjPo2+ilijmj3HY5sZvyCG8IkDoyEC9LB/jxkuoQWlS7Siz64ptPmC0/BEWTGwp
jlMMYskcQiGo0WwIjDtj5U8LSQY86z49jLBYdjClLhkkk88zuWLSAPI7Mzt47nQCsWLyzxr2py06
URenx4XEm2RNUu0yiosHwpJE18n7aQllZHLRxntx8R5pIoliW1XFG/DfU8Bq2nLS9HTlato/2Zn/
AHOE+gPTHENOnrTZcEFfusuRUuHLIbxYtCHPnONtuNDSoiesSAMr5RtuPLL83i7ofmuMxx992+dZ
d0jjpvzMh8XbY46jghi4JJo4gdwEtSNIRHNlM0qQlocbIkCxYWMZM92K4uRMMfFhx14rVPm4+PTb
tl5Zj2fImlgxMbHGrFVGVvGHjgTbrmtj7QqVNl42MMnd5WAw87MbG2fFhKqqj1CbDM3aCAxYmXm1
DBHAnpswVc/fFjWHbM3dpcLa8XD9TyxoPmuFiqcDOeKVtxaQrkNlTK0O1yz7kvbNkR92XlSlEhIC
508cu27rFkThgaDViZb47T5azGTGgmDbY7FcHIeljMVNItgwIaJnGTDNEe4ioQnglHcx7gJQwoSI
K+5qkMymPu7XiDx+dIqwI44WXIjVI5w1S/IHqbc88MmXkKIdzzErD3KdpixagaWSg1Ai6hS7BbRu
8bz5srxw9niytqxNwbcvjwjoYWdn7bsyQYU2bsk0M88Sbzt+3pj7NNnbBK+QvZvOBj4cmxS53xub
Lm25Z90gzNsyNlqTY0yhtmDnfuO6bLEcHbtvjTC/Y4U3zJghkx/j+Nj4kG5+HwrmYzRrntFl527x
vFDvOTFG+TlTTvkqEd0WiGWNW7gj/wCN8tSYJEaQhe35TmTYsmDJJJFl+QCHkuX98Ea9z5UnfLhy
dk8/bEh51HyfDjHl+2ZYOeSpscffII4vkHyQyzQTGSVzc55/xK5GKTWK9xlw+OXCh73yJO7TFk7W
z4ex8CDubJk7mrGk7Wz4e1saIyyzuETrRe+LgfoFTgGf4/BA0ssccsLizT8svEDBs5iZQaxJeyeZ
O2TJbw42K3fjisb9Uj/5Y4lMW640ksXx4GCdsqFQzBCclWMkvc8rdsmXviQiaWbIYRtWPiSStBts
MZAtU0CEzMfB/kBxy4JnVImk8odl7ZnygIoXmoQxKv2GNNrUl4Icasi06S7guLHPlyzMATXbaj1x
cCfJZNrhgjxthbM2/aPyIpXHcsU8hk9KeIyLDMJVz8GLNi23PkwpQQw9PNwosyHDyPx5J1G2PjZj
4bqysPqjwz48cwSdlJjkxjFNHMhsQ8EsRTslqPJAbIx4shTLk7eY5I5ErIwg5hzSrSRxyoRkYDRT
RzJPjRThocjbq8mPmEZU+IPBHIYss902LFMVmmxwrq6/U9OA0eH3rpWRt2FkjI+Moal2zOhaKNY5
AmLLUcficKpqZBWUvbLrB/uz1tJqBcxxll27FKxhrA8d6VgKaBwyZKu023M0TRWMGOWaBCzxoO6d
QkYKNWQgTQBCvaa2WHuk4b6daKhhlY2TE0mM8reJUWOMuybYaiiSIE0TparaWo0BVq5acrVy1Oho
1aver1tH+zM/7mluK3DbS3D7aj6OXIhhDbvE5lhz8g9uPirGMmUQbTCGSGKP6BmVR8i+SLKWYsVi
kaocaFI4sshvjmVt2S2XEIzpapsyKESTZU9M4RXjySsYxY6/DyJU/K2vCAk3HJePbIxQAA4mZVGR
vODAGn3bNXH2SGMxxxxLrJLHEs+9A0MPdsyTF2vBxFl3HGiOTnZDtHt+RkPDt+PE2luG9X4snMix
qMmfuYwdux8OP1Nx3rFwYny9w3KfbtgjgpVVF4bcfzqPOORtP7lueblfBc+PLxNlbb58vZ9nw4lT
cctxgZRWbDnVkVMZGEeQJICXZSpwd6mx6xsuDIQNUcvYy5cZp8zk80shYh3UK9R9itZFGflRSKCC
Ycl4aj3AsxHbTr5A8aqI2UVPIbvmxxRzz5ORWDhxRomPj3lCZE7spSTZo5DPseYSkGRjzQY2J2LZ
aa1pJRGFVmH3CrGj3qwkRqBUDwSMW8prAyxDDlZf5LwBDDHFhHeN28S7f8fSJNr3fK2/99leKOLZ
GwhFny4xYzwBcDKgjkz82GRY9xgMYz44p58t5VXOyIkbLBaeWSeNYkV5YzKq4aIPEtpYlYqVQSTU
8kllYeKLydv4ZajhC5w1qLb/ACOyZuPjMEyoMZUvnOCXPZjQES4MZ7E53UHvzEZoKUfc48MWK1mx
eeQKcAK+FAZccf5GrP8A0n/SaRijZMXniAGPATfQGxZRkQELBCTcmlNWE8GND4I5H7mFf/owOaVk
kifaM7Pws7Myd0OOeYyB/wAyRlhXcEuKXkyATLnSd8uE/bMy9rY/2iT/AGYrsKnyI4K+PrHmZOTt
ySQH8gJ+TH2bjuKQJl52VkslhUcZlZcQoY3kjpMm5WQGjKAHYCjazYsJjix71NjvE3juSt4lASM9
pFgKLVk5cGOs+5zOuO/lQxFGJC1FDLO+HsyIYsF4jgY+CI8nIgxo9zx5UfFyY8uDIh8qY83lT0po
mLRSrKu5bXFuOPgbpHhLDu2BKkcscq+hfTcsBcyLEyFyEZZcCXEyHwWFiPr5IklQPNhtJAb4+Uk+
k+MstF1JvNihJI5VfDkgfHy0nJqfHiyE78jBMcsUyS4bxPj5aTllVhlYPJXliVIvvTKgyKaPIxKi
miyEfGlhMWXHI1vTOlqPocuM8J4b00aNU2HjTrmbOYElbIFQPmBs25k1U2bcsVhi6xL3SY6f5oVK
x6nivo0cb1HlZuFIJcPOaDFeBNvhlkAxrHdZkCe+TyYm9L1dO1dqgWPGFGrVzNewFxQrrVgRmQmM
hJJTiYPiPK2l9L1ejoa5Ub3NDQ8J617aWo1tH+zM/wC39EPTt6bukYl3bESmk3Gej4MakkkaoMKU
lcWFT9Dlbrh4Y3n5U+VGY7BWhjoyHuvptWVHBkbblY244pkRQ+f/AJJhM7ReWUGLHWQHJkdsXDjZ
c9WC4OZkNj4ePj+g00KVLvHcyYm5Z4xttx4JeDJzsXGptyz8potmeVkhw8QSbmS8srTSRbfkSVDi
Qw+rfSWaKFJd0my5MPZIlpVVR6eRmw443ffphJhfHZ8owY8UC8FuK1W0y86HFRcPM3STDx8bEMro
o3bMGZlYuwGSWdMWFG2eKLHRGnbP2kT0yz4smNkxMsuMrVLjshgyJsZ9v3yKelcGlkoMKDXFrLzs
YJkc4uSaaCVK7SaiwJrQYscTPKKSSMrIe5i4jDzvKJ44lE+bLHLt8uTmYs7ZCHEgyMdUjyO+OYER
KwqSNJk/bZoRJHvC0sO6sh2zPL4eNJjx25fdbuq63Koa7Uq5FBy1O6IfyWBLJKkk0pfIlmkgysWe
NsrMzJYNu3ifBbDzocuOQMUx0laQogVGsoa7xZBZmIcNgRFlieFYJshqD87GgDRW9PjRSVJiQSNH
hBSMGFmQFWdyH7uUmTGg2rPiOTmTLHj7jJmRPGT2ZSFHym+7bX55v+OMVix90qMJBIOx8KPyT5D9
80P6sIXnWmlATJzSMjHJYms+m5RmvbFYlMsnu1xFYLlg92gH24hPdlMQtCv/APW28/aDWQB5vjqw
NkkC04UTZXLLyn7pAPNj2IN6wwyYrElo2KNMLtO3ixQe+LbUDDdMJpE+P5K7U2X8gw4cafdFQS7i
k0eXkPKyoe6KGMmAwRgTIa8iGrKaswoyMaVkKweIxRyhQR3J39xlymLoXJZVvepJUjXM3mneSZuy
lIWpAcmLD2eSSsTY2lGw7VImRPChZESNHVXXHYozA7VmAgiceGTkfTlhYtuDQZUeXsuLLnTY+1Yx
2/ccbKz/AE932z8lcTIjz8eJ5NsfGyTgMCCPXPp24OdEBg8UuIWSLMWLIIbnToki2kxQcdWMOVc5
OGkoizJIJAwIIBEuJLA2NmpOcjESaosqSJ6kxgWaKOZ5S4aOXJxh48fLAypIWkhhyFWSfFEciSr6
FtLfQniI0PBejYh8CN6TZUC521SCSXBaJhtszAbZkV+2ZVYsWPkfHTEb9tqCk1tu1ZMzYeEsI4jw
4G2x5OPkRtBKnaKzJfxyu67fJtuytAcWUxld4wjCMWF58nco2iyaxk7n8XlyIYlij0tyq1Wq1cqv
oQCBGi0RRo0dSdAdDoa9+Cx4bcO0X8mZ/wByuX0/P1TyqXNx4hLlbg7FVjaGfGBixs7IYYkdhpf6
HO3rBw63bcUzcnzMCWZuEGx2zecmBmk82CmFOUVsKJfHnTIJdqw6Em5ZJi2qMBVVRxz7ljxSfmbl
niLaC7LDEnByAyd5x4ADu2ecfYcKOnycTFDZuXLSxq8ke3zSmHHhhHoDiJVRl70oaLasvIljjSJO
C3HNPFCMrPdkaPO3atu2XDwR6rMFGTucrTYu2RwyVuG8YmCkMG6b0+JBj4UMmUTJkZEG3I+Jl5s2
HsuO43+DG25324FM3a5IqhzJYqV4chJ8Tm8bocLd8jGrEzYMlAaJIEbFqkMnjjeWnn8Z7kehjKuS
xsA6uPIbRqsSGfvIwles7IRBuDv5Nu2dDGskeLDitdu9bdxpQCLqE+5VX7Y0cEE8la4Dq1XFdwpp
Y670I8iCnyIWbyNTyOpaeJqdh4z5YxjyPKr45CrFHIdx2eSJ5cfsONlz4Uu37vHmx9lqcd8aRrGF
A7bFaSVkqKZ2AkBpZ42PIKXW3eDXmJppCoWOPvll7B5Hs+Qi0+el2myZKES32/b/AMqtywBjwjw+
NCgSchsiRu5sAkZG6KVmA5QjxY8cva+4R2fFXwwH9UfTB/2YsYmnTGgRN72zGxc7Go1nnm3+ulUs
ZJVx45lEsR5ClUs2RN+PFcT4560OYTtghDLkQHka/wD9fb+gFZQPk26XccXccufePx/bL/7Mv+yB
+2TMiCTwRmSRnEbZMfjm9sb/ACwZz3kxTeHEkkjbNy1VNvyIMzI/ExWi37a0xs1oJkXGxJHQYOTf
JikhfHV5IhAgVULUfyoxHlLYSq1Fb0JmjMWSrUXV1ZOQS1A2pmArM3eGGsjKnyWVL0Aq0WNYu3Sz
vj7ZCi7Xt0c0WHDlRyM8vZFMXbTIiaRSsWbjYe9NgzAxzR4hYR+nnYGPnQ7d8dyIFG35edu2ft8b
PiZXmXhPCK3Tbm7kaLcocctjCDKk251ZWX6c+jNisrpLFlBZJMdr30fGKN3R5K3nx6Pgy4z+Vt7Q
zRzx1kYkc9JlSYpdIp47TYNRSxyrNCkys0kFHC7HBjaRcsBjjyQCLLilMmMyUmSob1b8FvV5nW2p
4bmpoIJNsE+452RDg/KJANh+RyUPjGZdfjuHEY/j+3/kptfx2IKu1x1JkM4Opo8ZrbM98Gf5BuUM
oE01ZEs0lDAwzg7flHGjG5yVlZAyV2nx4u4b+gkzfEKjslbTjO8oq2nt72twWq3LgOlqtRFWq1tD
qdDw8+K9EVtH+zM/7nq8vRFdfRtrLkQwiTc3JKZMpRMGFo13LJobdG1KAo4r8IqTIx4hLvu2x1J8
mhqP5KO5N921wc/AsMrGIyt1ijeP9xypPkeHDgxMOfHt+O80kGPkYWMceNn/ACVQJiZs9Y+Fj4/o
T5mNjiXe2Mn4O6ZkePs+DAqqqC2s2TBjibdzKse35WW0G3YeKJdzhQytmyCJO4x4BZo444x6l9c3
dcbEUY24bm+PiQY/qOyIs25PMZs2LvxNkmnZESNPVyMqLHjC5W6jHxocWKaeGBZd4ys2TA2KHHkk
ljiT/kZVZu4Y+3JgbVI81JKkaZGDizybg+LkxPt2X4cra4ZRPi5GK8GdYmKKUTYzLUcksD4G+I9I
6sARXKzuVD5LSOk69s0wSkY5Ma4QUK/+Y4k14kjMeTKfJmKceXB2lzkFRdIo82ebGBBgUqg7aL2K
ut/KtCVTQcX8gFDLxmrzpdsl6WSR6VbMzGmCyLDiwvUgx0IMTF4LibFygwkWURwKC6hmGOoPbW8b
WJ45YGUrJJjybVv6zUGDo6yqZMiRaYh1DUyyV5HEj5EckMZmEEaogjjIdTL5fsUPkxoJNyU002VJ
XhBIAAknijrJ3QRV8fzY0xN6ninx5MTJXcwCVzLRJWCvadxXvhjBZ8hrUORaMTxTtYnqn+vB6xuU
ZfkcCR7nvEm4bli3saz/ANb8lsaxoxWZMZJcCbvSZOx/fFSwy5jLLt81myY+16xowzZ81zgS9r5K
dr1//r7d0BrIN5PjqxGZgtpAO/M5ZEv+wGsxfJDt8VSOWfOXuU9Ntv2SMXkw2/yYYBTIxjImFjfi
rDvO4PjTSSS5ODtP7o+LsWBjKdvw+zdtrxZs/wCMYcQaPb8RVyfimFIcv4xn44mR0r8buryyRFZV
Yc1MWSKWcNQcVkZ0MIzNznySFJKgCi4FY+HPlNg7NGhXbgW28YLx74fHLFMk8VTQ+QQTeQaSf8ab
d5cva9x+NZsy5iMgzvVycdopIJ1nTJh8bY8/k9C2p51uWHJjyq650QKrFBPJtjqyuvFb1LeidbaT
4qTUmR9zeXEKOsi1LCktCZ4TJi1FlXM+FJG+PmpM1OiSKYsnEeGaOdJcWSJ4MlJgeYeGTHP/AB8x
Hxp4KhmYPfEzD5MnFq2PkrfIxTDNHMvEfpOul6tr7cRp445BAi4zNkztRYn0Tw24DV6vy7EoxRGp
YlipOwAwu9fgZDVHteT3HZsx3TYpKi2SJGSNUW1CrVarVa9W52rt1563rnVtTaj1rlXXQURwir8H
tfmRVtNo/wBuZ/3P6A7pGH3KIN/z8mjBDHUMuTKo20y0kEMY9EaSyRwLkb/hRGX5Jkmp91z5qPe9
faKLEVdifIFozCllrE3d4BDvuE4+S7li5EbdeJELHYtuixKQ52Q6bZHcKqjikljiWbe8aN+/eM1Y
NjjtFBDCNTyrI3bAxwu4Z+aItnkEt8bDibcZZqkj5w+d6h2uEMqqo9S2uRmRY58+fupwdpx8RtBx
W1vWTukUZzspIaSPdd3XC23DwV1HEODlWXuhSXG2oJMSAM3e8bHeDbdw3JoMeHGjfM7nixLNuG7M
sm27UmNoSAIycyTIi88MO0XyMqG+MmxZ0z5eI8b5mz8lefGaLKjyBPh08bJWHueRiHD3HHylDUwJ
CxzvTYoFMJZFjznjdUeYK3jH5mN35eYhqHJlxziSf8pMwMcgfkS+VI0keTsu9WJCLI1CIigQK7UZ
7yK5ikmSNIYhJNN40MjGCC0SB3bxV4hQVhXc0VeRiCCS13VFjjIdQSRdu0t5EFGeMVuUGJOr4TKJ
IitbXvz47QTpKihO5uqp3qkRkLozMEK0zSlAIkqTcYUptwlembKkoQpQAFNLGgOWSuHkncJ3wMtc
0bKu5Vg7VFBFvxiho5mNPLNIsZzizQ0LRwH/ACQ4aWkZu5gKxu7tkBWQ9VP+LB6bbjLk5TYOI0eZ
tsGDmY4tEaz/APbMpAUFmy5BDCahkMcmQokijUu2ZKIoSaRirG08AUkkrjwMxZkYqzET4469pGNg
dAOWWD3Yubn7dmy5e8z45NZ3++X9dQ/5Ea0MBoDyYx6QAQQZCdk8BKyRxvbIyCqYWY2U0CoIsvYT
l57vh7XDNu2XJT7jlmspZchFnyIji7tlSBN+mDQbriT1JjYuQMv4njyVuGLnYC+SSGTHylyGI5tl
CET7rK9Es5VKuFpI5p2xNlC1hbJNKm07SMPGnxIp3YDGkYBlxmO3ZmmRGVKOsiU6q674MvEyW8T7
PsWUWT1SARJG+PJG8eRG0TRvBN5BrbjNiM3Dm2zJ7hIgjjx4ociXbpY3SRPrOfBPBHOiyS4jNC0R
gnSdaZQylZYT/gyo7zYgkgx8sR5csEoYEVNhK8keZ92RiJKY8p0epsZJGjyHDyYsb00bqYc4rTYw
NRZYNS4vJMrtPqGr6c9eXEfRt6PLW/oX4+vCQDXaoOh56e/F2121arUdDparVaulFqLWoGuVWo8A
Olxf3Ne3pbR/szP+4eAcduEae/Der8BKqJN0xIyZsnJj/HSOknEg/DmmcYeMKChR6m9ySx4Ilmli
7gBfuosqV3M1FgKLmi5oc6BAoEk3IoPTIHWfCApkA4YYJJn2H42MVYsWGKhbg9qZlRZt4wYa/cM7
LqHaJ5xDg40J4GdUE+9Qqxh3ncKg2vFhkm3DEgoy5+SO3EgMYzsgQbdjQ6j0rcE+ZjY9T5eTlPj7
TFZUVFt6eRmQY4lkyMlX3FzJgfH0NKqqvq5GRDjRs2Zu1YuLDiQ5ufjYUM2Xuu6zYGz4+LUuVFGx
gnnZEjiTL3HIzpdv22DAj0kJzJAoUaCc23PMlSePbjurbrtaYbZOBDOuVt02OYM546KQzrPilSrP
G2DvhFPnY8UccyuJMxWEsbNkP+PFjwtHIkgjSsydJpGyIsQRY4ggGBHFHNkOgxsqGBVlwCSMaR7P
GSxBjk8pmkVaWVAYpsYkzxKzTMwESpLDAQCiGrgV3CmmRQ2fjrX7hEabNavzJ7jKmr8mU1+RMxDz
Me2UnxmvEteNa7FFFQRm4TxtLEVXZM54IMbKSZe9a8qCmy4lp9xjFNnStTSZL0Ib0saLVxZp41L5
y9+dPlQM+EJ9v2rb5Yo4/j346xYkCx4748Q/NhDb2VyZsDDdMns7jkAPBGS8kr3eE/ZMoiULQ6yS
iAZIo9f/ANOCP8eJkNjTSfIcGOPI3aTN3GA/4jWb/vkQPFBH21lTGWTTBlEkUcQirJmMsvXTbpbF
YAJc+buc6bfLzkiIlyFC4m39AeWURWywwSbjatzSOPO3D/dMfuuaxlJfKLdxPLGazrATlZDfdmi4
BrGkURzQJWLgRYmQIzGmROuJiZGZJPN3dtRZCSskoFSCQNivS5KuYlukOXJA2Lvf3d2JlQ7jgIk2
1oFycjDE8eTE4oLzsBQLMcTZ5Za27bl802wRQR4AyQTNIVp1V1gZlO4Ya5ePtmY2RDpF/im03bZc
fdGy9vSFJ9gcT4OX+VD6FqtwSRrKn+WGX/FlQ2YtDMJR6UkaSpJHJtE6uiCeKOJMWSbbpIpElT0T
9Oyq6rDLiO8STVBk950kgNRZHc0mKQ4mjmUrkYLwzxzpUkMUoUzYxPgyoy0+DUciSrJGsiXlxCrJ
KkmGVVXlhpZ4MoHz45V8fJHZkYxhnjmU8J4rVyr2q/Bbj68Z4baW4ydDqeI0NOWp4ud6tp21ahpc
V1rnpeulXq9GjoToKvV6N9Dpy1PDar8FtNot5Mz/ALh9IcHLhtVuIm1SZmNEJcrKdFjmVhkYnmEW
4ZDx4CIqIqL6dtXkjjG87rt5xYcseIG4JqwFFxRaiSdACQFFAi9hRYUGqwNZGKHqSMqajhkkrB25
8ibbtjw8VeEkAPuOFGW3SWdk2/PzY4tsgjfhnyoccSbvkzNHteXlVHiYOIH3CMkw5c6rJiY5GNmZ
Qgw8bHXQcQ0txPLHEP3XJyCm0kuqhF9OWeGFRmZWYcnPxdtjx8XdN7OFt2Ngr62ZuSwGHbHkm3n5
FJt+bN8xSRcfYpMmhNiY48eVPUMEUAkkSJJMvL3ubDw4MOHSeV5pIo0iTgkiilrFCKu47XFky5+N
tZhEDPHnbTHJTR5GK8WbHKJcVWqSIoTG6x4+RMp27OhnjZjSQOHjh7UzcgGiqxx7fiJCEyBPlS7g
8bQ4xSsfCkkkVMdaCfdaUmXGMkakxRwo4DyoJB4hSuHP4zsw7EL5cS02dejPM1HytRiU0I1FAVyr
uUEyxgHIhFHKx0H5cQVMpXWLMSUjOhZvzILiWM0GBpgGGTgd1TRzYEkE+azx5WS4He1CNaAAoGiw
AfKgSmypPDt5bcGwYMk5x2M5M8GzrlJibbFjTZBjgrL3nFECbhubRwbTnGSPHSOPepo4cza2gY5M
/bLG/ecaMo971jWaPKWj0gTufJk75YWE2OR90y9kGH/p2fDjyZcvacLKgbDixjClozWZzyIJrzZ0
vZGdcMv583u8B1hLeSQsImJLaROUdO2RZZfIuBagKyltWLk5mFuUm47qMdmZmz/9koNwKv4IC3mg
tSEhgi+QsWZ/8mKt2Zoy9NIwixs3Imfa5BNg/IsqOItmY6hvPlt+L4iI3NMO2Y3hmSONKMlGQVJl
xRjI3tkqTKyZ2hcOYJDPHn4InjnjCHF2+fKbb9mUHFxlhyMZsdKyJvK1+DIiZxDKJY9wifFyI5El
Ssr7VOrqrqobFldPxciGVJo/VyIfKscjI0qLOgvMIZllXhtwzQJPGok2zIjeOETwnCMMkuJJDNHO
nDb6U8cuM0bd0OYI8iSJvapoI5lWWbGZ44p0/wA+OWx7nGzVlNEAiWCWEQZCyiTGkx2gnWYWvUmK
8TQZSzVNBHOmVguixZk0B8UM4TIkSpIFdVyGiIIIOvt6h4jR0PB7UdTr76ij6FqHTgtr09G1Wuav
V6Ol9L1fU0dOlcqNEUKvy9W3Barc/awFbR/szP8AuWq3ojjGlqtVtZJY4xNnSoyidligw4Qpyp0O
EJQkUcY9UkLWTvG3Y1ZPy6IVk/JdymqXLyJSXuMa/cGoyWoyXoNerc+5BRkWu8mrE19ooMToKFWo
bLl5ybf8OnesX4vjRLjYmNiJqTyfMxUpt3MpGPn58Me2R99gOEkAT7thQE5OfnCHY4e558PDT82f
IpsYLQy37Rt8k9RQxQr9BPPFjxzbpkTPBs7u6Rxp6jMqiXdJJWaCLHbcM7My22/45EhAAHp30llS
JGmys5stU2fa8v5nOyd2fvRzduztufadzlx5MR8aSF3SNW+RY7E4ufvEkEEWPHpkTOWggSCPiBtU
ytLD+05E2RNjxriJtWfkSTYwmrN2p46SaaIkszSRT+JT2HHlaM4ecmSmXuEWJHNm5xjgxYsWLDxf
PJl/pnTHx4sQTtKSuQgUWEwSSOVBQiUyECpwLgAKY3au2JVORElNkyMWZmIUCiwFNPGKbNjDZGVL
j1K2QuNFFPNiYWFlTCPbs2DIzJIy0f5uU2H8e3LIxYdommxcH4/IuPJsbbQMLa1jyW2aePKyNvne
bIXIjkky5Mcx5MUgzccZEMOMI8dF7aDrRyowYsozyz5U6ZG54OVE8m1Nk4e2bPL+JDsC47Pt+IIY
Gh8I3KJWmypXmypyI8jI/wCTi7thxxjccQh97iji3FBmtt+EcZMkFIoGIkmQIvaAMa6qj+aA0T4o
Dc1gS9rtD/mypAyYi2g23OGHLmfIcCCCLPkyZ1P2e+TzyoYux8qMSwnXDiEUaSCZZ4zHJpt8NyJg
z5kXjmodY0LtGnZGqMi4FDkMuxX4/DA2ZyIzESPK3C/kmS8ECd77jNc4DdwIswvdgy44vUIBOJHf
JxZV8rxx9kUI8eDFucmPu0ObiznJFY+UkpSN3JItkKHikYMUzolim3iMVLn5Mp7JHpMZqWMLRljh
m22dVaV0jG5QK8+zbSuXDtm3Lg4skEMrcV6lVoZLJIkOeNnykdZEyB3QRm8elqyoDIq9tseQ47jm
PVniIaNliqZGFGzrHKkqell4yZUUckmHNG6RjJgOCYnkikhnSddLfQW4OehoD0MjFSYiXuAaXEZW
V1ogMGjfHqKZJVkx7NLHFlMmVNjMCrCsnFWelyXgbIxVmqHMZGrIxkmpMmSF+RGTgwz0+NLiTRZg
lDRTQmOfHyQYpoGinSX0+evtxe1Hh58Rq/r31NcvV6UK6irVYVarUBRHAaNWr3o9OmnO1uVqNW19
iNOfByFG9c+ECj02j/Zmf9z0TxAUKtXPV3SMSbnArH8rIRMfGgryTkjBkkkjxYI6FDjOltCQKfMx
ErJ+Rbbj1l/Lcl6yNzzcglyavfTnRBrHjPjNxTFRSuljItFi1KhJEViO0V1oarV6gKB9gyoGhz4h
LDoSBRyMcCTd2ZXh3XPjj2eJpViiXiZ40qXcO4Lj7nnpBgxQmTcYFNtzyT4MHEr8vNyqTbY2KKqD
0B6E2RDAJt4bIkg2iTIaHHhgHqZO5xxVLjzTLLmdsa4U+fJiYWPiLpf1MjOihaPClyGeSHHj3ncp
NxTZvicaLHGkSZWJBlw7vs022vt29z7ZPv294WXsG0b3lyYK9xXTJnMdY+OIV9IS8tzzpY5Y9sl3
ebdNu/b5XwhmPk4c+M8OaRTQxzKweKly3hkys6TOlxYJEMMT5cqSdgwkSczRNmSxlCreTtjXLVne
NTHkQyyKCFvYNKq0+WLtPI1HnRZQDkxAeWVosRjmVg4uXLkx/Hc9MvK+JrkyfsfnOBtWKMeHBx4n
8cKS/KGiXbZ8SAbfssKwQQ5eJ+PiZ2PIItzxzLuWQmVWAIo5pPAzTY8LjJ2rFdc3YXmrddsN5mkw
qny5Vh8Mrbfg4jT4m0bfkQyYWwyYmSvx4O2LtkAESQRtNueJBlZm6RsMnes+QKpK+WGOhIO+Xtmi
/bYnKbMhGDs+IssiR+PN3CLGkxchZhmyO5QWbJHMimYJHiMFkdP8mc9mNRsUd3/wknxYw/w7Pgx5
FbhsuFmY5xUx40H2mpueVlSdoxnuMuHxzVjxeWXJcKsMhR8+LuSo0Lu3bBArlWyoxNDQFYENPP8A
5slR4cAc+dsy/Zj7jl7duLb3uEkBJNbgB3wN3LGBDHI5d8aTxzToFkhTukMoac8jGbOsYjMNzJ9y
Rw5GV+Z8fmD7b8mMEmO2BjvJDHjQq8wNPkqoyN2hSsfNkklkjdZ0gY0mOKLRRiTcEv35eQYtmyJB
HgTJhLmMpbaDuL/HNvyMaVrg+h7XOK+9bmEztr3jJSOL5BhZeGi9qcE8QjkY+CXGlaGT1pVEDK3g
MinGlkRoWjdZE9LPwY8uLFnMDY8pvNA+A0bSd0GQk68V/pOvHNDHOvkkxKKPAIpo500mxhIYsg3l
hSVXWSFI1kxagyYsheVPHHIpiyMMhsXPjMmTglJEkVkR17J8IxTRzIwDCfBV2jmlxZL42XQnmx6a
KKdBK8BUhlo+odLcHvb1j0q9X9YcHtrbiGt69+lX4jRo8BHO1HloeIngtrz0txbT/szP+39ESq1J
n40ZyMjKNN4scS7zBAJN5lmMu/Y8C4HyeGUrNEyix1GqSxvI8bxl3RBk79tuOMj5ZKam3/c5aO75
5qTPy5KLsaJq+nOgpNLCTSYxc4vx3NyKx/jMMce94mNhgxSUIJSRjgUFjWu4ngGq6K1q2rczjSYW
Sk8G4TYuM7brKyTQZ+TUG2RrIuPjqRxS5McbHcczJLbWuQxiw8VH3CWRv2/IyG8+3YdX3LLMG3Y8
RAAH0Es0UKS71NMINpyZXggix4/Uys/GxQRn56xzYeO7yhpMbaWaQAKOAei7oiyZeTmnGwocYblv
uPiIz5m85OFt8GEmssUc0fyb41PhrtmRJh5m0bpiblDpk5AhXGxzH6s2JjzlsjEwMfM2cbrPueNt
/ifGEkWbs9gDNjsuZFIskTCghqPNWCHDzYMmPIcys7HIdJgqssLV2gA2BPhldFjiEk6KHyXajc0Z
I0LZSls45ONTbXNk4mPs2XkYWz/H0hqDaMLGkyBjY6ZOfiY8H79hmFN4Jjx9yzIF/MzDLJkzSSbj
LK0UmFHAPLCWX7gsct7/AG/mBWjyoJKny/Cn7y7O+8Z3fH8gxJXdIZEzdgxmSHZEyoMbZIRUG342
OzrAj7lOIcNvl+CmPj/Ltzx6ydyzJZxNP3w7xnw1D8kUjF3XEkAWGSlVBUkxqOVbCdrrlPGcjcJ/
Bn4S5S4sARMwWSPke/y46fc08nfkd5TIcAGRu9zUKGST7bTp2VByj2vcExG3D5Fgw4+PmSZKDpej
92VK5aRH7XzY/LBasSMRQyOWe9RMJYpYzHJt8XLIl73rFkBGVD45Y0Z3ciGG96d+7DwTzWsu3i2P
EgmzWRGXNiWHLz/1RBvyNxcrHp3eXHgsiJMRkTACQXqUkRQt2uoR1xEh7tpwsyU7ltuTjszolTbn
jRVNu0r00k0pSAmlg7JMlVs+dCqtlZEhh2zJmrD2FXOP8aylQbNh4tRbeyz4WPBbB2zGxoYYooUJ
ub+iVVlnyMHb8jE27E/Bxxlbbmwi0PBJGsqPC0qYTrkx4eQZV9UgEBfEykwtGWgkdGx3VldfS3Tb
1yosHI8oglXIWfHlwpI28rQZAmHAfpPbQ8RCsGhlxCAs1QZKy6yRpIvfLjlHV1fFAZ4wGxs25q9Z
GIxqHMDGTFlx2x8qOfSbEbvhy1dqeNHpsKWJ4M8MGx2WkyYpCYZYWhyUm+n66H0Dwc/U9zxjQ8Aq
3OiatfhPCdToK60dOtHW3I6e/v6VxR6itp/2Zn/brl6A060XVal3LCiqf5JiR1J8ql7/AOXxipvm
Mlpt8z8xzPHjBtxl7pMiWUtI7Vc6K5UnNyGrF3/PxqwvlkDqfkeEKg3vAmo52L2btve4TPiytCx+
RZ4E+dkzkmi1MaOgq1BSaSB2OPs2bOZNnkx5F+M7Rt+K+HjQyVNNDAm77njZh7rUWNX0Bq9XHAop
RRqzCobl9pnyZJv2HD8MeNBGeJiFE27YkdSjMzFx9qihp54MYeXKyVjwYY1O5YsZWDcMowYWNAfo
Z8nHxkl3iWeSPZpJ6ihihX1Jp4oEbKzc2imBt5lfIlrHjzc6PEwsbETW3p5e4Y+JUeLlZpnysPBj
yN3yd0Xb9sz8p8TEhw4dbaMqsN/+LGFsOefEn2je4NyjnnSBMeBg16v6mZnQ4ceNhT5M2VG82Pjb
TM+Tu+O7YK7Pnvj5kEeScvbJselZ1PYY6ljEixvLA+HurkwCOKIHkpoGiVpshI6ad3q4oSlmjyBk
Zf4mUM2b4iMl8XaIBDiQY8Ays3Fjycjc2gyJ91lyxkyT5NIkPeoWwdaZ70IQakKwjCvJPHGmTNlQ
MI8LNbHQyeeN2kUtLJNUmOwpchxGqCR1RCgiSosmfHly99yJ4tu3eFMddzm8xzJHZ5TIXnTufHw3
bK2THmqXY8pHyNryYlZCKKi/MGDPzsesf5JMtQb3gT1BLGVMhraokeXM7Rj5G5mFoWNspmLzHsx8
Nrqv+MBvvlP3SB2xjXtgoFRZbS5Z/wAqcl2za0yU3r43itj/AI8UMdqNJzyW5tWO4dBjH8nKcAGr
VBJ2Pl4/keUiCE9ajbtbLj8sWBDYZEnc469YNv6i9Zt/FibvkbXuT/KC0LuZG3HrAoWPIAnx9MBu
5cx/FBer+SCJe53cOYxdgpVHGSNy+MTgQfKM+HG2qSbIlpYmNR4pNLBGtSZUEQlypZajimnxcDZf
LJF8cyUiTYcTFEOFJBkpM8ZeRpDkghc4nHlTIKsTR1v6G77Qm5Nt7HGlztsx8xsCXwnhyo3tnQ13
DJihlWaPhPAeCSNZFUd6ofIIZCC4bGdWDLw24d329pFxMn8uopI8uLIx3w3jf8qsfJE30I+imxby
FlyKiySr6EAiSCWNoMlJqliSUT4zgQ5b4yI6yLWRjRZCI+VhNLjQ5SxZjo9T48cwE82KVZWFSY8c
pYZOIFaLKTvyMQvDj5KrPNjkEMOA1y0txW4L8fXXrx+3XXpqeG3Dbi6aA13VfnVqNW9DnXPQmr0T
R51e2l+E6n0DV+EdNp/2Zn/b4rGuQqTKx46m3zBjqX5Qi1kfJ8hql3XJlLZMrUXY8ANvSua72Fd7
VBmvBU2RLO/e1LMwoSXq96uKNWorWPjTTvh/E8qQY/xnboRFhYkNW5fIksBJ+V8YwshZ8TP3tIaz
cufKYvV6LVcVer1e9XAq9XrutSPQLNT2VTO7lbXxZmil2jOTJxsuPxzcEs8UQydxnCw4udK8eJiY
1S7mBX4+fknw4GIjbk0ojwcqUwwQwLxihrbiny8fHD7pk5TQbM8pihihX1CQon3KWRhhxx1+Rm5j
CeHFyMXaO6QAADhtVqtVqtwMyoJs6adsbAhxazN8AaDCy5Y8bATcIo40jWraDgIrffjgnDZU2HPs
eX+7Y/oX4c3cY8asHb5O7QGxlnjjXccjPzJsH47GuPMIy34cTS5STiVJ+RIdshPuxNznxKw91x8k
CQWfKVaaV3rkA2QtS5+PENsnztx3FfjuBGohx0g/fcRYYt4zIw080riyMzeRY8UQyHtInzkiOLlt
OJWe3ezp50SPzNuuRlwJDjossCTTI8Uv4hTDKdogDlsfmYYPF+GwqPDKUzRrXjhBGOXAwsktHirH
RliBElq8hBDXksTSOO/vo9rVPtuHIcnalum0zXmx3hZ1WxStu3A40Ue7SQxbRvE2Q247jnsuTtkU
rxxcsxRWYeeKwWXNbsWokMkqOGadCkqqWMlo4ja8xvKOm0Z0Pg3vdsXGwoMn8iMGjUPOcnmagfsd
u0B27mOl6hYtHkuWk1xX70mcRRmhQ/0bd+oVm/6tq2+HKzTjQNHnQDHys+i/bi4z/dPH4pa29CZt
wJMorDbvix+VY8hbIW6tEySLjvCskG1Dc9z3z4uRCMZRTzY8VNnOaTGzck42xF2i+O5CRTfGooMf
bIpEkjmaIySNIdSBZEV4sAsyQSmROE8e4RHtxpxMmVF2yY0jMvD2/jZALbZnyA4sqkMPSvrPF3qy
jJiv5xBL5Ua+KykMPS3TAaJoJ3yFjkiy4cuKXb5IpEzlxsoyfTddT6Nqnx0mDsVCTSY1AgjSfHWW
kneNvafDV6P5GK2PlxTmiAafDeGRJ8fLW8+EY5ElVlVg0M2KYMiOda61NgRu0eTNC346tSZKknHk
jePISQ8NtT9Bz4DR1twW9AcFuEjW/CDVjry1PWgNOehq1HU6c+A6Granj56Wq1Wo9NpP+TM/7msu
Vjwib5Bgx1L8pNS/J8phLveTJT50rFpZGq5+pBoOaW7VsuyRbo+XsGLiZP8AE9kG1Y2DiYi6syqN
3kx8jG+M5cUmxS534sMktO/dXcK7qvpzuTz7xXcKLWAvSJI0nYFHeFqxNKpNAUvI4GbkIQFkxKFZ
O44+MEzcrMaLaE7zLh4qJl5c8jbdHNI82Dhh8nOyKj2tTSqqL6Y4psvGgR9z3HJEGykGKKOFPVyd
xhhP4mVmUk0arLDj4wtm7u2HgY2EnqWq1ZeZDiRrDmbg02RjbdFLPuG6S/j7dtKY2Dk5eQqqi0OO
9ZGbiYw344W8SfDsPdMXN9TP3AxHB28QtpapskI0WMSz42O7lFZd7bGfJ/GLy52Bzz9tOOq9ISk0
U2KbkNG2HPlMqNenlCBcd2XcvJNl5O35CZGHhPiyy73kSRF3KszLUqCdIEix1WQXaY9zNMC8RkqX
bhSO0LLuJsN0ArLnfKXEg8EfesmW00NbjlQDGyIVRfjm15RcWqytUkMRAVUrvBplBost2kiC5GS4
h8syw5MkziBmdfGxAFqvapJ40ByJGr/IasKXlRmiUzTYkoaLDJfGQ1+1F4/BIsnxPETH2rf5FTCf
dmfKLsYp1IEjd7xKWl3FT3VjgLFG5WTPT7sOO8kj979S/PIvW3bPFLB8h+OxCOOBYaFMeUBBmOkE
fe5KtUqlGvV6jQu5kSKsuPnarUemIhVcle5CdB/o28/cOmbfwYe9SbduY+S4ckM0rTSbhR/0q1m3
FAaHXETxY+Z/khrCcpPOfFjxt2u9u8L2rmxZH5ewZC4GRvW9YeJgpj5uRUGxszQ/Hpoocf47DFHH
ivjZkczR1mSMZcSMx5MEnli4DU3+Obc1ONNKe0ghh6ZAYffizEJNEPIDHIsqcE0SzR+MZuLt8rOk
DNiTetkI0TSouTH3GSo3SaMd2I/UeiQCMzCfCeDKDi0GXDlYku2zQypuEOPkEt9CT6tuB1V1kjlx
6jkMCxyJKmjokgCyYoR1dZIkkXJwWirG3L7gQRUmOklJkTRtLjvG0GXHManxA5jyir6MiPTYk6MM
nHmHbkQsDBkqGlgpWDDg9+G3Lp6HsdOdW4TV6uODnob6Hh9+C/pAUBwCrUeWnO5Ne/AbCr6e9q9r
Ver6nU1arURrbS1jQrmKvV62n/bmcsy4tn7xDi1k77lyVNmyOWlc0ST9csJsSAbk1se5/t43Ddnz
odlmGTso6VNm4kAk3lZKJzJmiwpxjtNLiymbvVmuOZq1Xruosaub352UBO40IGkCwxqPLarliqUF
vSgAAUq1gSxxPt+4tlltxgZ/FueWyYsMCS7gqlYtwyxBg48Am3HGhB/c8sw7bjRsOnqjgyMyOFnn
zc+HG2iJZlVV9bIyooAGz84quHgoqZGUZtzjgbG2cmX18rdJGkiw4MUZu+SvkYuyPknLzsbATB2x
5D6fy3a58nP+LfHP24WHqZe4vJNgbfHhLp8q32fbIMH5q+RWEcZ4KJtW57lLNKuxJFDt23Jhx7zt
U2RlZ8MmZlPtEXiyMOfFeLNNZCoyY3+tA7nGwkhE7+OLJWNJcfHUyK3dl99BquDQSMhcbH7rQCpj
GG8kQWTMxoTLuQINyegixRMmGndLkylExw/hUBa3qVihxZzPixR42OZWuJ1ryLTSLdZYVqQ3a/Oe
de1Zfun/AOTXhkFIgA5CnyEUu8khCAU8saCXcIkOXuE0K40+XnyHZ8548X4+2S4+KTBpPjU6g7bk
QFsbtm2ybcUGXmZs8mRtcEOaE7V3BwICeWMLLOPJjDm0g7EvzcebHVfDj0v6xzyb1tW4QS43yTdc
fHwo8hZiG5SGyQfrJ0QCGGDIIyMtLjTFj7VyZjJNEwnx2HaSajUyPlzCGLCk8sUilGq/+Db/ANQr
M54+Nt0Wfnx7VgRxbliDEytw/Qf9IoqJsWGMyS5LBI4x5ISLGAFpc8mw6q3fFBIppHhSaXa03rIm
+JQQxYfx5Px2wWxcuKUx1JIX0tUsYljzZGheH7MjhljEkaBcjG2qRkqEmKTgt6GZEXjw5gjZSMrB
hjycOVGUbOiLorQbji4k7P6B4PbQj8SSaIkrIFIKSICcVvSZVdciCXBnhyBGWWKeLIx5cCeN486L
HyHD/UniPBNjMrISZYJ0mFW0kxyrw5Ak0ycCOWoJsnDkhyIp1plV18UmIGjhzAmS8LjmJIo5V7ps
WkkSRdHgjemky8SNfxctRPIhaIkpOCat6Aq3AeA8tPe+hrppbX3t9J047W4DVtD0Jok6Xq41Iq3B
fS+vWrURVqtqdOeh0vpflV69tp/2Zv8A3Nyyfx8aaVnaUkn65VZ2GKmNHLK0h0BtWKZGr4aynbsn
b8gNny5UTNlgtibltsy7h8hw4Cd1zMmbdowJIn+zuq9M1qLEV3rRYmgspC45usKCgUUNKa5mghpU
oCgKApV5PIKjWSZsbbw+Gv7dip+Vlyt+3eQ2ggSTc0JXFz8owYWNj1f6EsqiTdsRGLblllNsxhSo
qL6ss0cKtl5WY0eBj4yiefKZvw9tUybhu5w8OHEh9bJyYsaO+Xuwydy2/aIfxM/eMjC23Gw13Dc3
7sDa1x24bVbhny3mbHxkgXitwmwGRm5OfLh4cOHDp8m+RR7bi7Bi7hvGDvWwS7S2x7zPtbYmZj5k
H7wu75W0Y2HFi1JOmPHus2bvS4WypI2+YMONHLhOYs3ZblllgbBhkyKgxo8dDGAMzP8AzMjNyS8q
5E6NjbxlwGD5DC1QbliTASXoPXkruBokUyhqlxvKIsNO/thhUqctv8kzyMQJTk5EiJZe1lrJQSw4
8GPAz7krVFuCSHtKgxSFYokKMsFppEWSd+0dqNXjWOmk7jHGil5lWpJGeiVQTZ0cdPmyO0G3ZuSY
fiuU8G74mZjD4NjYw26R8dYsDLx8Zm3fD7o93wHpVx56zdlgmrFhWDH+STqs8G6SZmRKzlcoH8L2
I7IIT3JjRXnlbukNY7Vkse8Un+yHnk+2FsEmTDvPxzJgmihETipj/ig0gj7nzXc0IpLxEyQmGTuS
GTuyPIIDjTVhiWNsiIhuw1jp2LkytLLjSmKTKS61/wDo28/eKzCRj4W8fhbom+bc8WflnLydw/Qf
9VYz2kxscJPM/c+O/bJlJ45tvj7pcm0sIrD++IxhBukM8h+OxzwZpYLUs3eOLNxhMuFOZcHiT/Hk
borY8sq+aKGQSpwnUcE0ISTGm8qIiq2LI6PpbQ2pL481v23Ly4GkGPOuRHxniZVdYWbHkniZGhkS
KmUOqucZr+lNCk8brPt02NOsBkjjmjnhl2/IjZc6CGZom9K3rH05seOanEkbQ5AYkazY6TBJ5IGB
BEsSSrLhT4r4m4LMp0mxiR5Y2oJkYVRTRzKbGpIJEaKdJNfbI24MBmOsZhkiF45aHmx6jkSReM6e
/Dy4enpdaNe1HiOluA2Gp4BoNLannoxFE0SaBNDS1W4bW1tr0q9Xq+nuaNda66muVq9qtVq2n/Zm
/wDc+QzUTUn6vorGosWaZpfjm4xRy4s8J47VjyCESyvK3BivM9bD8mTaa3H5LLlyz5k0j+ejITRI
NLdGzXR41vfyG170Bz7blUQUO2u8Cu81cmgKVCaWPmFFcqFKhJskYkmaQpEAu2RLM8cOZOsWFFG8
uTjwh87KmpNteWooYoV+hknhirJ3NVkTHzc6LHwY4W9ZmVRNubM6beXaTOsYsFmkytyWNsbaHllA
VR62Vuaq340OPJkbvk7i23fH/EPsjXI3DI3CTA2+LCj471fQsFDZcu4zY+PHjRenJIkSSS5O7y4+
NBjR6ZGRHjxS/HW+QZeDhxYOJLEkqfIPj8mBWFv2dtz/ABr47gZyKqquRkQ40Rin3Z8SKlYKcrwm
HLzMrKpbCtzgjzJsHHiw4Qy9u7bv3tIVxIe1TTR0YzRQ1Yios3KhqHf8pBF8gxXEeXjzAHl3Gldi
A0UIcyZVNNKq4dwZVMUmCOQlkNZeRP40d56A5DDdwNvYUY8mBcdMvIlUCEuGYQnHJMPdSqoZ0uxK
RrJkM1EgCTMAa+TO+LtEjVh/HsHHS2NjVNumLEd4SXcN0xMX8JCHcpjqKkRYxBjKWUeMY26iKhuM
TNubRZWdj7fFjP22M3+SCJe+SZrmBrSOBEGPM1CQi5FmQHnH/t23bsmeSXHlx2xc3HOLvmdizYpU
+Qk2ZQwWCJR4Y6VABYVawAAFhRFWsNCvOy12gg48RJw4K8Q7PxOf4x8WLC8LKQazADjQ7Om5ZUfx
rASHKgfGnz/9X/6SaUnulcolBqzVDJhr48aEh6IsdvJ8kGQrN5IYpNnnjbJmj8kcEhdeLL+1MXxw
7liuWh4cn7RJGkse0SNEx/wT0fTyIRNEsjCp4/LG6/kwwTCaPgliWVO0ZmPt2UyvkBsSVWV11NH0
ZYlmTHlYmeIxHHl7DIiuqSNA/pZeMmVCGbAnxcgRVLFHMksEm3yxSw50MczY7eqfoDxMqus2I8VY
2aBqaZVYGOXGaKaOZaytvjmEWVkYdRyRypU0KTKpysV2hDVBliQ1NjpKUyWjbrrJHHKi4s+Mgnxs
gfmeGlONkUs9m4DwdeO9X+itrbW9/QI4AKsNLV01sKNMaN9AKAq1HpV6vw2N7aWq+t6Ol9BqfQ2n
/bkbg2Rn7t+SZCakP0WxQYE2ZkfHdqmaDDxccVNiY0wzPiuHPWV8Uzoql23NhJRl1BFIqscnFMWP
wxkpG5PjaU3vdWuK8xoTXoPTrJMjKwK9ooc6VQaCUENWq1AUq0sfKwFXq16C3pIgKeZEH+SVuyOB
cLDyNyyMPDiw4J8/Hhoy7nlNHtWOHAAHoD0LgVk5uPjxjPzZZk2mV6jhijXUH1Mjc4Im/CyMt3yM
LBTwZma7viYEZObubYmDjYaVb1ZZ4oFeXL3Cn3CLETE27N3BsTBxsOOaaKCN3zd4nxMSDDh9KSWO
GNpMre3hhigj9OWaKFIxkb1UUSRR6SzRwxxo+6TKAo0ZFdfk/wASOO2zZ+RtckO+bfLhwYmRuc1h
kSE6ACtxx4sXHzMPOw4cT4vJuNbw2Vt8+47Tnw42Nt0ayZWyyytLtmXEbSoRKRQeM12K1NFRjtXj
r7lqLccyGovkEoqLfcJxDKmWXkCJCnkZQiiXtd4pIr90vkyJHmmXAnFRYjq93JJavODUZCiVXCCW
VqWTsJy+1elSzBQzFjJkKD5JMo4XxnKyo8Ta8TErdDEpfdM0Rv5p6g8TNiIJ9xyPEgusZ8imu6Fy
ojIaM3jhE2T2gLvGUmPm7fmNlRyMwaE9wgXtp2uUB7sn9LHmASct+1cdmkgXDlJjwgDhJEMTe5Ui
hZ+/L5DTlR0twcuO2vtpcnSwFOFYQSzY8mH8jMZzpxm5mXitLGQVivWOvc5ZZB7ikHlgnskcbdrZ
ShJtvFhHF2vu0Es0PxqHIjyCBaX/ABTcTqHXeWy1zttz0yH4XTvTHYtDvkx26bGycfcsPHkLJ6k6
iGbHYwysfx55laCUEEcGSGifc8byVi5CZMMV8ObS/pdNMiEuIZhKuRB4jjy9wkjWVI5Ghb0twwVy
48TIONLi5DXmhSZMhcjb5opIM2KOSTFb+gnXJwEkMGc+IysrrrLjEmLKBY1JFHMpxJcQ4+dFNoQL
S4ksLloMwJNLjEMGDxpIpE2KY3SRdc3GgePIxsXwiaeGXb/kJZoge1JEk9DlwW5Hh9iKtrb0Tp78
XtwWo1arUBehRGpsdDevdmtRN6vV+a8zauVGr8d+E869ueh0tpeulE+htP8Asl8ODn7rPDm1lY80
FXv9FCXR/jL5M8DIUbUU0aOJdp2+asn4ngSDI+IZqGbZNyhIxZo3zMiJsE8ES9zgDu5mAn7lPK4N
FRRSrsKxpATkRgE1cihIRS5JFLkKaDA0BSpXIVegDQU0kTGiUjDzM1Q4kklGSKIRwSTyYkkO3Y6x
Z+VWPgYuPxn02ZUWTdlAlh3DJlxNrggWOOOMcA9PJzYMZSM/PKQY2FH58nLaHEihbJ3IMcbbHZwA
BwW9LKz1iMzrGGny92GDsuNjqAAMnJjxkGNNu7JGka8d6GmVlwYkKY+VvUqIkaejbTNzIsKCHEm3
OblbSWRIo0L71Kiqi0KvoQCPk+yx4Jxs1cTLGauZFHEsSWq1cq+1qmwseaoYREmRjQSNvEmPPhYu
DBtuFibXDnS7rsv7dBHss5w8jaIDT7HJUu3ZURPmSvM1eRTQCGjDTRkV2mld0MeVPDiw/IJFqPfM
JxhzRSA9MqdcePFiVpBKaDivMtnswLqD3N2KsiBjMajvRIAlnp5FUTTyOuHseZn1t+0Y2DU+dBjm
fcMiRUZZRNDKlC4iy8j8eDaceUY808UVfk4ismdC1M/MShK8x7J3yAkO5b4sGTjJlHGxUx48kdsW
ObSTAIpPOG10fzRdjsYsRwRhwdyIFJoC1DKyMcbjkbhnSQqVyxc0KtxXppEUq6tpcUSS8ZPZe9Fr
UCDXLg68F6lNYuQssnYwGRBHI7YjCkUwxYkxGRMvY96xT92QxMnK2WO+OAdmLBNZpJY4227Lg/N7
eUiB1xmJj1vpI/ZHuG87hPknMzYh8f8AkUuXj8M+67dtp3X5FiZuDg/I8bHnyPlEEc2371t+4D03
RZE7GljQrmY+JIXSImCXgIBGOPx5SkmHkMsWXBiTOG9EajTIjaJ1dJY542xzBMsySxLKkMrxtb0t
zwBPHjSNWJleUTQxzxzRTbbPjZMG4QK74jAgj0bejf6HIxoclf8AkbYYJ4shNZYUmUSS4tKystTY
gc4+XkwUjpItTQLIiyyxjslx6imjmWnxyHhyPJRHKbJWKs/eFdpWAlXLV4V2XH/C2XdfwgrR5MLT
CGhYjnxX4D9D7Vb0jRF+McB5UTV+XKiabnoNEFtTRFGrcXLQ6ka3o1aj6e0/7N12dV3XI3DbMEbx
vIzfoh1xoWc/HceWBHbubjlyIoqkz8hjus6LE8gcyRFODFH3E1jr3LPH2OjWruruFNLavKKDrfxs
yEEGgprtOkMhUoRZaANAUqE0qKgkmpI5Z28cePUk80tRiRjtm0NDHFBFCPQt6ORumPDR3Dc82odo
DyQ4OJAfQHoSzwwKcvKzDDt+PAZM4yGHBJkmngxURszcTi4ePiJ6l9HkSNMjNmyA27YkNYmyzTyx
QxwrWfuUWGmPgyzzgAD0bVm58GHHjYU+4yhQBwDitWfucGEMLb5p3sBq7rGsjZG7zxxrGvDn7hFh
RwYrRP8AIcbBz8v4JBmRbZR1FCQ3MqCp5RkNhIdwy5Io5VjSOIZLwznOd1gw8IZeVue1YwT+PZLQ
ZO1ETz7NjmpdjYVJt2TGSZYyHF+6NqxsZJGy2MzmIiippWdDDuubEcXcJcnIxt1wlpMmKQK5Fd4o
PXcCQwovegTcBUqScsZZglWnmn2n41DAzz42GuZu8ssfmJD5CCaNlWvPzM733CdpsnGcJFu8gMkM
cZpMjCSmdKLq4OSorYuyXJlPamdubY1YsxljyGlIh5yZPJ7ElIHaKDESFe0cKr3F1ChwoKW/OAr3
4XZVEsrkJjKB3KoaQV4EZVm8dNCSLzKVmDU0JBWZhQII099LjutUqkF8TIjyIotxkqxBrsBp8SIt
kY7NRRlMNo4prHRR3wyGy46AxblDJNBseLmxuvMMbn9GXw50ckmJBNtWZmbvske4RQ474s9HW9b3
+2x5+3bfs+aE+P7THR2XbWGO+24UluVtbehlqyl2ET5I8byIk8WPKXXgyYfNEw/Mx8afsrKgMgxp
xPH6zXxJWVZElWTCmilSWOWFZlhlYH0tz29SIJiwxsgZCTwRTxyRZG25WLlQZ8F5MN1ZWX17cB+g
ZQwnwJ8eXFzosjgIBDwSQNDOk4qWNJkONJhjGzY8g06LIvY+EDGk4jyvuqSJJFEjwlshVTPn8+Tp
tEySbQ62O25z4k8M/mWRIseUOhPq9fSRQz5W6PjboCDpbQ6nQ8FtfY6gUBqTV9CdDRFWq1KL1YAH
Q6Wo1arctbaGjRtp731NHnpaiNbcO0/7PlvyHLyt0Z2Y/RIrO2wfH3chVReGbIggVt+xBWRv2G1f
ueC640u3qZJNknXdI0jzYGWQZOM0DaY7WIbmJbNOe6ulFqLm6xTvQ2+Y1j7O8h3Db54caSNu5FjF
RzRLSz47LkRRk9DjvdUoc6VTQ+0MzOW8cVRPPO+DgZFoNpgjcYGJ5CST61qyM6CBn3ifIX9szMxs
bZ8LHoch9AzKok3NpGh2xmaTLggRIsnKaKCKFcjcm8kG3AygAD1srPgx6nm8hgi3bdHwdrxsPXLz
yK2/bGHqbhukOIdv26aR/QHBn7ksJ2/a2il1Z1RciWbcZ8fHjx4uHP3H8U4G29rfM9yy8bJ+I7LG
cIKFFHgJqaR5GzD+RNFEkMdGwGKPIbVyFdik54mbF2vEl8+57f8AkQNsOFFjZOCwysn4/PjLLs0D
CXZXUZMM0OP3SpXm5KyGiqkeO4RPx8QoaBdDFumbEYvkMgqHe8KSknier13V5ABLMXMsvbWVnsy/
EsPCXaM3dY0WV2mkNiPGL+IEhe0AWM0njjgieVpxO6Ntk7NDiRLU0UKjvgSgQ0QYKFycgZOVmbj4
Mzb4clYIgozLCOENfwGUJDGle5r2160ptXnjZdwzAkuOxOTfkOg61bRpVFZMhdW8UwMsQEmYor/P
LSyZSo/f3JPkNH2TpUeSLKy1K0vk7gjX4PZgfMKn/TgpHkZiokafI8eKOY6GiKZFYTQs0QjcY9Yx
IeQ3kxpuwSNGlYedjpKMgtSt3Lkcjwtftw9s3HAljkEke6Y0chxJvLBwbjtUuXuONEu3bjpv+x5m
ZuW35HnxvUiAimgJjfHLQyZCMrRyLJHwTL+PNkxoj4spRsmNoJUdZE1N+O1W0ZVdYWbHlljSVFaT
b51YMJ4VlWGZifRNbjheJoMp1khmSePIx48iIxZO35EE8WVERJhMrrIvAfV5+ka5+hkYSSnHzHWS
3BPiLIY8hw1XrIwlkEWZPjlWV1IvU0DXYxTVfIxSjo4IU1uGDIY8rFOK2mJkvjTbliCIKeex7tDj
jeTgZUmHkZ2DHiZ4kjDK4YhQMrGLdeAanS3oWtpt8TTZvy3tTecCTy4mpq2nKuXB7cBGoFCuYrnR
0NdaNEmr6W5qLaHS3Iej7mr0aIoi1Ac6NXo6c+D24Np/2b//AO9wCiPShxJpjLtxiSPFlkZNna2x
fH4QxsBoKNgM7ecXCjz/AJXmztLl5EpLsauauauaDNd8X/j2KPFGmZBNE0UlIbMW5i97M6LgTNWN
sjy1D8ZyIw2PsGPR3jaccZHyjcXG27g2Y+54xx5aWlOjrZoX7Wia9Ri9BSaeMKsuXeoIWkbZ9pXG
T0bcNqtU2bjQ0++tKFxdwzDDs2LEURY1+hNhU+5Qo8eJmZTHwYkYlyMw42HHA2Xm42Gls3czBjQ4
6+s7qi5e7GRhlzNk7fsABREjWiQBl58kxxMFIX1trbh3Lc5I12/avG/qZ+dP37ftkWGNWZVGXNJl
Pi4yY6cO4bkY22/bVxjuW4R4EG27SWmihihXinlYHNyE2zE2rCOLBfTKJZVAVeC9fMt73XbzH8yz
8iD41+A65OPBlw7m+BgY2Jtp3fK3TYTgrPsU/jyNqhknl2OVaOHkCoI5WlycpWl74mrxg0YqMdq7
aR5EZd0z4mT5DOKhznyEaQ1+KY45cZSdq78eHuBq4oUTRcCvIK8lblMe3EW0ndzV2KhwBI3ePxog
EhC0wQD4tBCybkVTDk3QwkSEqMd5CkaRjivR0hheeWT413RPhx4u4Yg/zW0tUkioPK7tJI0FS5Nq
MjVe5VWYrFFCF8zhYXNT3sI5WUxyimKtUsckJTJlWlkDMs7V3AAMp1se+p+4UmZNgbnj/KsaSPPz
nzZve9cqNdKFFQakxlNQoYi/6oluM2J5E2aHLjjwcqHKjQdq5f8A19TrmxebG2mYSY+RH5YcKUiW
jwbhCGjxJe+KrUA2JlcvUyoTLG3/AC8cn8mCKQSxn/izcBAYRAo3hdlxpxPGD+FN600KTJjytfIx
0yI8WeTFnqaESCGUn0mVWXNxDjnGymx5UdZEnx48iNhNgzY2TFmRMj4bxyJIv9ElhjmUSz4VI6SL
q8aSKPJjUrK61JFHKpxsnDkxsuLJWpoUlHkmx3MAtDkiQ8wN52+ExkWNAEnb0hyvj7iBaje9Iwej
lTeLIRe3ZsxcjM33NSTPg2OE1PDk7Y2Jmw5acNjRsKN9baHg51k7hibFgbjkPmvsoIweH29C1Wq1
Wq1WvVqtVqI4LURVqtQHP2vqaPEaGnKieA6H1tp/2b//AO9oi9xYKD3Ve/oKhYwYzAw3UEIRiYbZ
MsO1YsNAKo0ly4IhmfIsKE5nyPLlbIy8jJPHBmtHGw7kTJeM5XhygMKQmPbLnG+O5U1L8b8QaP4r
iEfIcKAzfIN7yBLIXYzgU0jmo4HkCRLHWZG8+GwsRSmvaUXA5HFdnaCIWyMyDFEs82S8EDO20bMm
Kvqkqol3DFiB3medWwdwzDDsmNE8caRr9EKyMuOCl/Nzqx8TFwo5M8zGHBBICRrk7m7nF25ivThv
6WbuONhplbo+VNi7NJltj4kOMujyJGkmZNuWRh4MOJHwXq/CSAMzdWll23a48NeK9X4GYKJsx9xO
FhQ4cepIUZWS+RWHiCBeHM3B5JcDATETMy4sSLBwpZ5+A6zzlKREx0wlbcczXHPll1vV63LLfCwd
nyM7eN53P4cIxjmfDm2j5NDMflGeMXe/j2RNkyM8bDd3njTZdoQQ7xsrST5Hx6PHjTbpMvMydkiD
S7LIKfByoqJmQiYV3xsbC1hToKw2AgwIQK7ltlysMi8wLTZJoS5goZGaK/KzqOXnGvzs+huGcKjy
J5Ej3HOShu+dQ3vOFfvmYK/fcqhvuVQ33Ip96nkXC3rPwcl99zN0jk21ZMtYwKAtq8qJQljNA31t
VqArZnSPOJrf8jGbd8I3N6dwitKKQGWpO6GWad5WNWJpIGYrZajxwNAebKrU5506BwwMYlhKUAai
mMZhCyqWRZI3IoUOoF6yF/xZqO0i4sQpkKnW2o0Ze6nh+2NilSMAIpojDtmPnEbh8mfbc2LccTcM
fjiti7jUqGN45BLHarasoZYSYJNMyNbYbntt6h/42TL/AMacWx55I1kTGkZX4MmLyIzfkwFiQyxZ
MOLI0b+ieHIh8ggnEyZWMmTHhZTI9Tw+QQzd/pSRpKmTjyRPg57xSCxGRAmQnjyMTIhnjnSSN8V4
pY5U1P0tuHrXLU6uiSLLBkYT42VFkrp70+OUaKcPp7ZO3rIyZfhcMrKQCDjPA7LDkqrzY533P81O
jI1KQB8ZlvnZkPgycaNXMn2skjFf8qmDeJ45tvKZeZjZm4QA7rgSrMVhyttyxnrJNHGU3DCLkJbJ
TMIypMhFyMlC2J8gEaKysvtrbQAmjl4mId2mlychF57K18IdK9zp14+nBarUK7hXdV+AjS+vWhyo
8B4emlteVW0FW05acq9tTbUaCjptP+zf/wD3vTw8KbLkT40iQptoiaLCLCPZ5mpNkF8eCPGSpJ4o
qyt5xoayvkcKrl7zm5IJJ9OBxU3645ew4284sAb5flKs++71l14J5KWGFKM6LTTO1E31jylWOTId
62phnbZmQmKUUtKatemFm2tVasncbAAkoOewbT4YvTkliiE27Ykajcs7ID7Xk5UkW248dKqqKt9H
NkwQK2TkZtY23QRDIy44aSHJyqihjiXKy4cVIjmboMbFgxY/WJAGVvYeZcPNky8TakiGs+RFjoRl
70+PBHAnpO6ouVmZGfJg7fDhR+nJLHEkkuTuuRjY0OLFqSAMrI81YmKUNW1vWXnZGXNhYEGFHkZM
ePHjY0mdPy4TpNOVaGARVuMj52TFEkUemS7BERY04c1TnSQ7fhQPW5bNibgu/wC2Zu2tse6Jj5+B
mYmXBUx8s80xiEZZU35HSP45g/iVk7PizZO4w4BdfjEzwSbS8r5GyAM+ySVLtuXFR8qHEhOSduhM
xA7Rucwgx1ZHmUR0VjJKxGjHGKEaX8cNGKK8kcSo6hMZY0Ve2K/gjNfjIabES34dHF7RFisTsuBj
YuFvDYsj9ouRR6K9m/KiJ74XEgQSQNZ6aeNCkivXsrBhcip9y3RIkx8mbK28Hx1lt2wflIFjecVI
st0htG+MklGJVqPFekjVBoo50w530ZQweKWMlQXkhkQxtLHUDqJciaIRpKwSBm7hyrIf/HHmKc1Y
+4TEX4rjgZAROHddpXIU4fcYPke2RZ23fEOybL48+G7Y8nlhygVkw7RNw5ahWxnJWnUOqlo6BBHE
eGWNZY8d/NEqllxpTImRCZBBMsycEo/Fnc/i5Kn8WXLx/OmLkeZPWnjeN45FkTMxPyEwctpRU8LN
UMwlFvRyMdMiPJxnZtszworIx0yIyZsaXHyEnSSF4HhmSZOPlR4z1q2p4D6uVgMWxs9Wc0dZYUlp
Z3hYEEVJGkizJm47rOhNZGMJaGR2PNs0PmkwsDNjT4nF3S7JJFkbTtvg3Hf4uzc42KtKxYxSWPjE
0X7e34/x+OVssvNCfFEUWOMrkyzYOVt+djSRjctrmY7lhxrLvnkrKyGmk02jcIMjHAUjywkvmYsd
Nu+CtNveIKk301LvGY9FmkLR8uj7DKTFp7adB14La20B0NGrVah1q1EWo620PLgOhAuaNX1vV+fA
eEjW2hNex1Oh02n/AGb/AP8AvekBz+LwoQUBp9vhd44o4loui0+Sgrdd6yYJMzdsjJd5ZH9aM2IA
anw1crhoGXCxUVpI0LysaNz6Gw5XgzPkuF48ilNqBoVMtjgR+Z2Qq4Wtg2s5WQ5B9GTLxojk7xHG
iz7pOU2Z2lg27Fgrp9MzKol3GaWoNtkvLNFjos2VnHFxI8dCQBPujSvg7SY39fMzsfDjlzc/cMvH
2ONzHDFENcrMjxlgxcjcZY40iQVer8Q0nnix0kfI3KbFxYsWP05544I/Fkbm8caRJrcAZM/nGLjs
TwMQqzZMu6S4mJDhwzzxwR48cu6zAADVmCri/L9lyHjljlSSZ5GhhWFdzzxhwbXgnFgtVtIf803D
nZX48eFijGi1ycSDLh3L4m2HWJlz4Mu1/J8PLqB1SKGNi1Of3DdMYWNzTbfjNKjgCVI1Cwy502Xs
OJJjNsLpA+3xvn5m3RY0OyKUxjlQrW4tLnhdpyy/7RmijtWatft+bb8PNB/DzaOLlivBlXihmabK
EzzGHIowzCgMkVfKoPlChkZS0cvJasWeaWsHPz020CiKPKpZ3sjJIDkgE5akHIWllhWon++d2Ezs
yRhpbwD7I1DGQmpO0LgD/BW4G0GOiMQZZKaOcqkFw+CVGKiI7jnqosGl/wArc11AucpUMaedA3f3
GJfJ4IWpvNjnAJeO1Tr/AIsrb/LLBj7g6OrqxIAuLSTBSsndTOq1PNYdgZ0TsFxXcL6YzLDJA4eE
2ZcPbU2XcuPITviwHs88YkiWQlKtVtZEEiK7R1cEVMBHkYxKH0jpkgxSZEZkTyiuRqVTBICCNXjW
RIVusAMsWNIQcmNo3ikSWPhHFbVgcSUWIzMRpKw8pMmOp4CTDMJV9HKxvMk0Hc+155cVPAk8bCbF
nxcqPKimgeNoJ0nX6I/R5WKJ0hzJMSRWV11ZVdeyXGpJEkGFjCd86JVl3J4lONuzwtHIsiMiPVp4
5XjjnXbIxFJlxQyTQYOQzfKsZo888jiRpNCcUqceMxx5UpB2jcFwTj5UGVGcUl5s3Fw8KDDzdyz8
jGKtJu6Rxy5LSkM1GrUaVmUx5bgBg4ANdt6tVjUWHkTVHsuRaTZX7Jto3FJNlhlifRU7qk3XFjnF
iPRtw8tLanS+tqPoWuLUdb87ac6PD76Gr1erCrcJq+m0/wC3f/8A3vRBAoOBW1bo+PNi5KZCdKky
I46GTkStlPBCub8nhjSfJmyJPSsaCE0VtwQObQsDHOR3q5KsTf0Y5Cj5CruezzKUkFLQ6OAUxCyT
HGZ327Y58lsbHixYuAUzKoO4Y3dkZ+X5kh3CUw7PDGIceCAXocFvo8ndEiYYE+XSrBjo+f5jjbcQ
VUAZedj4aePcNyfHxcfGX0LcRIUbl8lxomx9um3B8bb4MZNDpnbnHj1i7a8kigKOO2uVlx4yImVu
UkUUcS8Nqtw5mbFiRwYj5bAAC+pqeUznGg8rcEkiRIZMnd5IYI4I5pY4Y0Eu7ZKIqLwfIvkMW0Ll
/EQuDDPn4sm0b3iZkfyHcpMGHboNtTMq2mU7dqIsaX0GkkiRR7fHJkycJFbv8cx84blhZm35PxXe
l3WOtxyhi4u1Ypx8XG/RodVlZRu2JLnYvx3bnxlysbKmyoPjc+2Tnb9znkwvjJGB+x4SSYu14+Tl
T/GE8EXxhPD+0wPmN8WiMeN8YllTM2aPFkg2Jsncsf49PNn5eyDGSD43LkINgyGycjYJYFxvj+Rl
Rz7JPDMnxvJSKPGjRxfSRzU7hFkyVGNCsjr24t54DETRNLI6lZnWSWWWU+SQhBZUftafJh7VzGmn
wOWMDW4m0OLGJHsBoguch+d+aN3odFFzO3atQNcMLHRQAJX7mx3p1FJAil0VhOzKu33Ed6yD/hh7
JJYIY4Y/kuNEYGFwDI6oA6YswBkyrV5g4TIVxc1F2OykkAg0KtW0bv4aBBG5Rd4xpPJDxv3QZFLG
PNhSM8PDMgWbEcqKyIfNCXLQqwZfTIBGKTE8qCCXFZlogMIm8EvBlxMyyEzRSL+THBMs0ZBwZuM1
biZVdYmbGkrLgeKTHnjyIqnhbuhmWZPRy8djWRACdu3FcpalhSUMk0GTi5UeSk+O4fHyUyF+kOp9
A8U+PHOg/IwJIMiOddelNjuXafJw8bc9xjG1bnnrlJjocY4e4SK8OZFIa8PbJhTGdM7BWGInlvG1
Jl4s+NJG2AzI4IIHZDHlliGJrHyZ8d8D5QVbAljkGwtId53qR2zEkWdJY2idCDXiLV468QpoitHl
SuVrC3DEiWSdcqeHasmY4+2YsIAAFGrmrVcGkR5HzfxcDb9xe52XIafDr316cHtrY6Guep4raHgv
wGrVara9Nb8RrpVtTVtTbS1bR/s3/wD97043KNte65Eb9k0yZWft+GuT8qcLk5c+S/pKpY7T8dmz
ak+HwmsD47gYiz/H9tmqb4hikZvxrOx6kx5Yjjp3NDHcyw9pQ2L1fS1W4/jGUC294bY+UDStXeKU
3Gx7TJnZMG14cAAAGhZVORn4uPUm7qrq+6ZLR7VNKI9vxYwqqo4B9CODJ3HGxwY83ObHxYcdcnNW
Efiz5dRRpChKqMjcmkfH2pFf1jWTlQYse45e7bmdv2COGIAKNb1n7mScHbQgt6eXmLjiDGnzJURY
19PMzVhTF29jLwXqedsh4I/ORyGs00cESpk7zNFFHCkjpGkwl3qaKGOGMcDMFXP2nd98l23bRi4+
6bLi7jHvGDuG0tLvWTu67LtOBtuPV6JCjHBkbhnZtxzQAo4tw+XY8GPm7tnb7l/H/ise1ZGTkw4s
Maz7rkVjfp0PCWXFyWx8d3yWSSXNTkjFEhAd4caCArIVDMWMWBBFKjlaZu6tyw4XTY1AmikDRZWE
mS0CiFWkBGXhrlDGiXGjyHWWt6zopAvSmPaPKoDXmPZjgQxBYngRx90VPBEaXHEgZEjbxKZfLNG2
QqCQdMPBmzXz/jUghfGjggw1tjWrcT9m2i596JCISSfeB7M40WwWRizUj9rNZhzpRczv2ilbtJIZ
dNwH27cf8ZrIv48iaXGzsP5ZivBue6HPY0rutKMiGiTf2pVLEQKiogUvOSxtEA409tp3QwnLTvxs
R7ScVqzY/vxXLQ5do2N4cvhmj8kfe3aCCKUePIxyYpfUy4mZVMeVBaRCjiRJ4VmTGmaROA9uNOP+
NPMpxpWEc0eO7QScN/RmhWZMeZr1PG+FLHIksdTRMrRSrKno5OOYpMrH8Jw8yPKjrIx1nS0uNPj5
CTpPjFmx8lZh654b/QsiusuNLjtBlpJwKxRt63GTHxsvObIeCPyNIrJEjEna8kSGKaeF45FkAJU4
Wd+VhyKyHLyH7ZAtnCE3tRblnMTUUMJixYsGKfK/Dniwc2GLb/j8yQT7llnKnrDbFyVysSTDyMNo
hHDFjtPjbNDkLLik1JjkFoyK5ioMiaB4fkWWlRfIoWrH3HFyT5YS8j9on+Q5McyfI89ZtlbKyH3X
efxHyt6y8tpphKnx9SMTW3pAV0NqtoaPoEVyq+h0voedEUb6D0ySdDV9LcBq9XraR/k3/wD971Nv
mbHfL33IlqSaSU+nFA8rbH8blRwqqOA86yNuxMkZnxKB6y9iz8QRyeWOVe1woNFCDbS/ETWBknHy
fkEAycVx2uCajidztfx7My2wsGDBhkljipZ4mjO5s7JkbplRNteVLHDs+DFUcEMRufobVb0ZsiKA
HIzc1sbb4IHlyoIaZsrLMWJBHpl50GIghzdzqCCLHj9YkAbl8kggXG2vJ3YYmDjYkfA7Ki5edkZb
YG3JirxX4cvN7RjYTZFABQPRvpkZ5eTDwREvDLMZ3jj8rAADXLzIMOHHgm3h1AUMyos00u5zwQJB
Hw58zZ2UiLGmmTjQZUW9fDpMR9o3nK2tsDdMTcIqnJmkHIcG5ZbImFiJiQcW87xBtWNHs8O9RzfH
s/DrbvleVhD5DvGHmbNsE275hrHP3cbKrj8SMCNUL4BOXuFY/Ti3fI7YYYPxtvChRrlZAxoRvOSr
zbpF+BD0GmRKql5FkMUfcCiujaMAQuLCRBjI0jxIVRFQFQ9dlsmvj7J4HZQudlRSVjEeAddyNbeQ
A4pRU8lzpcikPeijnkPYa473BFiSFVmLNpjteiCCoudy/TtvNRUyjxeFJIxhQlWQqb2Kdwm7JzRi
UyeOJwYkBTsiqOG58a3ng7ncTRiB1ZY20FjWz7h3hP8AGnHkxeWDDkBaVO+NSZ8OGTyx8LKI8jHJ
RqylLRzDyxRuJI+I8S/8bIyYyRjyBHrIjKtHIsiayRrIiDzR4shdIicebJxxPHizNIvrZMBkGPOJ
kPOnLbbMCCKljaN45FkT0Dapk/CeeKTBmxp0yIqyIFmX/NFLi5SZMeRjeQ4+R5D61vQt65sRPi9t
QZRABBGk8a5EO7bC+IsTvHJ3eRZIxSyvBLC8WXjPM6Tw7nE1JIGrsGXizwjtlUBpIbl0K0ayh9wP
MNWTM0ccL9rRSMtd19YZ27ocafIOHBK6Zc82OsE5veOWnw1IlwiC8LrXSveHKnx3ly55ZRmZIKqs
ow4gB+95SxjukUi1YWM2RLi4648NdfRAq1AWq2hq/PUnQcPOjc6np6Jq1dKvxc9b+jtP+zf/AP3v
ULsR6aqSdo+NzZgwtpwcEejLEsqbpgtt+bIAyRvapGvw3q4ouKMhruvS3J2/PeTDh2HK3DNk+H4m
2SY247DjS5G75bRw4+fJSbaUkiw40YxRE0Kv69uC2luKSSOJfzhknH2sA/ZGj5s0jx4ChqZlUTbn
LkNhbUsR9fLzsbEibP3fd127ZI8SO1uGaWOFJXydylxMOLFThHDe1ZWd3DEwi1Wt6bFUXJy8nKlw
sGLEQcM0zzusYcoixprnZ8GFFiYOXl5AFqNgMvIl3CXExYsWPh3DK/Hh2vB/Eg4Dat8+MRZiLJuW
y52371BnbfjxGNBwZM648G3Y7ySDiNb5subm7zDjJIWRWXePiGNljN2rcNvn2b5UY44Z4Z0gFp/Q
nkKjcZhhbftmN+NhmsX/AFcM8vjSF13LPb78rgzcfzwTYE3fvmDlR5GKh8VqkbsWadmOPESALUlO
OfLSdu2NG7XaxHulR88wHnFlS4zbnvu6SRLjyQ40QIjFbk334ptCjB0dhGhN+CB7MbKGbuN9UYqw
swnfgRu1mHcBZVzXLx7abC9TN/iGaEmVS4msAL3x+WXbnJGrHwo9RwxJOUVWFDrJF5BJH2VfxPG5
Z15gUrsjbVnHcGxJC8Gl9baOrQyAhli/xZcNop+HIj8kUjFo1YOtY3+KWL/DP6k8ImjxZjLHkReJ
oJPJHX/Vn4MuNxUt5FdUyYcaUmsqJ7wzJPHxH0MiMxvHIsiOiSJEz7bN10lRoXR1kSr634GVXXt/
Hd0m2nJiljmjrJxlyEImWfFyUyUyMcSiDJLN9F19U8QqfF7jFNIjJIkg56Y08Ri3vEiwdwjVfHOy
LRPkfY1eLH3XEMqDuV486USw7g0BURZ0WXhtaRO0mJyGhC1k/dkohCqOecAIY+gq9A0W54anuSTZ
sHacbKyI8RslJFnglw3hyBSI008m2uTNtsiVPgpUuMyEgiiOdBiC+Y7QoDfuKqLE7HiKsfFy9Inl
V9DRFW1Aq3Fer6GraE63ojW3BbQ8VuLaf9m//wDvagE1Dh5Mxm2/JgaHbpJKi2mFaOHjgZEuDFTt
3t6mDt2Rmy7b8XxcWlUKPQJVRNuWLEBNuOQ25bXF+KWZCORaxBNFhRcUZK7yasTSxuxxti3PKON8
LnIi2rGgqPBwojJjzY2Zv2H4ZMP47uEww8OPFi+stwZO4lViwsmekjjiSfcYkMeJPO8cUcYrMz4c
RBjZ26nHx4caP1ndY1z/AJCqnH2F5miijiThysyLFREytxlihSFOIcDOqLk5RnrFwgp9I1LNHCjv
NuUmNixY0XDNM8jdoqONY013HcUwhg7a0slEgDNyZMw4eKuOnDLKkMeFG+Zk8fyTJ2TKPw/ZJ8YW
4WC7tmW1twZE6Y8MUkkUGJjDGh0ycXHyo9x+KtHX7jue05exb1Duj8bMEWFWY5h/L3Om5Lij/BrL
KkSSfOyuTJ8ixsnbPhWNkJLi3YcDyIlRMrPu58ggW0TkKMyUhFUExJ2rUzlA3NaXmZm7nqBu5CLN
Ie2sXnmCkT7ZbMdyt+OoAFbif8+P/ox72yQbc6NX0hTuZh3Bh2nVRchQomUq/BAxKTuRWV/p248r
cpUumRgmdcOLcnSeKeJx1g/7snWiexATcnuXROh5maIVDK0Zga8dFgKwt5TbM6TdZZ5Nszp1yNLc
GWoWTDYhMy6LlXCqe4UdRSqI5se8UlZYMZyI/LFFIJY/UmH488kayxwytjZFqkRZEx5Cp4ARiZKn
8afKhcmGZJklBxJhYj1nBxZQQRPCk8eJPJjS6SI2M6OkiehLEssa2RkebaMlWVlrJxlnVxKkmJmL
lJkY6zrBkN3/AEB1PqW45oFkr745I5g2ubgwZq7iuXhTGdyUkUHbN1kwMjF+QbdkHL22LJWVSp2/
OxVxXjkL5GSriSJKaFqMSgxR+XcJFq1l3EgIvKIadKUGmmaJ/KzjGcqskXeFmeOPZsIzyQ48MI0m
xYJhuMCY08sAWmHOxvDiyzFNvlZoNhWTHzdtzMV8HCkmlhiEUeh4OWhq1W0vpfQ0a96toaOlqtVt
bUeC3AeA1047cfPXnptN/Jv/AP7tY2JPlPsPxNcnM3HYNjwpop8HtmQyzrGBVrDdZJe/1I4XkbbP
i004xMLGw4/QkniiEm5SvS4WZOYMLGx9GRXHyjbY/CJDRlNi5NczSwTPWPsmfPWPsJ/M3DZvim2x
pu+04ob5JuEpH8lzaX4tu0xxvieJC26bfivh7VjRLH9ealzseMtj5eYYcTGgGTlw4y//AGO4NBix
QmnkSNJ9ykzGwtojgPr5274WFGz527vibRhYooVbgzM9IBjYcuXLGiRpw24ZZUiTImed8bDER9PJ
yY8eNEydzeOJI14Z5XdiRGYYVhTXcd0XHO3bSYJdMzMaVsLEEKcWU7Z+UiBF4bVNLHDH2yZ+9Lft
4NzyJr4uNFiw8e7zd8u0xSZMnDu+04Gfj7FHJBvfGbzyO6xpssbSmpOUeOLQ6TzJBDufyjIzZsj4
5DvT7nt+57Xk7N8jkxYsZ4nx+CaMmTepHhhG6ZmRlxX7cpj2MsktQQC9KtzM3c8Ld0VSN2RaQP2y
WF2cvLt4vkgc0lVBlzQpG2S0+MrEi9bgf+TALwABFYd6kWNCgLlQEVTzyEsa9qx0pm+6Ze5LaqCS
oCLOvcuT/o22r83t44nUQ4GPHj4u+48Uu32pftzEbyIgucl6FQG66O/YGFiy9wlUq2JNcqwau29T
7YcqaHaGXGwdtKPqaGk8YkijcpPIgdMP/JjYbFV4cpGaOU+SNGDoyh1w2IQf4cj1HRXXFZlrcMYy
xYOQssVTxFxBMs0dHSeFZ4or5EONKWEg/FkIV1hZsWb1mVWWJjiyVl4i5UWBlSg0edMj4kisrr6G
TjpkRoUyUxZpNsyBpk4wnWSOXzYOdHmRzwJOsU7xv9CfobcDosilREyzMtddN4wfyUyPjGZEc/b5
8GWrmsDdcvBfP3J8uYMwbbsiXJxseVcpZsNZafanFJuGI77UqvlThDTEVnP3TMLRXtV6H3EOquxL
FP8ATCCojeyyyctsxVx8XQ1zrIxcbJXO+Pyo0uHLDIm3ymTbsAPFDhY8NWplVxHjwxVarcZ6UL0d
baXq9XOttCdeVrc70aPBarcFuC1W056HgNe3HavbnW0/7N3wnn3yHBiSTHSWKLZtpzNwfNhniz9h
wUhly8XGZsloEEeHmZJb47HJFuGG2Hkeiqknbfj+Xmnbtlw8BeXAdSQBPuWNDSzZ+XUe1wh1REGk
2VBDRzM2dztKzje/j0UIEUAqKPbajl2mKotwmJXG+RZlH4xOKxfje2MItq2iEIUjBlkNXvo8MclK
qqOC/wBVNMkK+TMy3hw0iMkiRqc+TLfG2uKNwANM7coMNY8bcdzaCCGBOC/pSypEm575llsTY5ck
oiovCTas7ceysLbmkIAA9LIyo8dT5MibGxVgHoX1zM5MVcfDlzHVQo4cjJYyMRiR40AhXXO3GUy7
dtseGumblMzYeGsRq3DueY0Me34KYcXHkMdxy1ijU8GZlx4kO14rKOOaVYYsdJN3y1VUXh3cySQ4
Pw/Hxs3imdmZEVF3mVhjY8KwQVkG0KCyaMquuRixSZoAAzMeDIg3nHniyvhGblcO6/Jxtmeu5Yu5
pImAcor2DIkdqTyioVCwGmbtjNQPZyv3ZD3fQcyVJQDtO2/7hWPteVmR7lsLxPlQrGAOdqzueTho
PC55q3PITnpjpdnN9OTqw7TSJ3MbKKQ8pV7W0x05u3Nea5a9kO26SNZcuV4m2r5TjfjbpvYy4+pY
f8yF+2TkoYktUT9juLFBc5DXZD3R1NGGpSUZHAPKoZkjyNqyoGqGEpGRY8U0dQyiWJP8ObKfFPw2
qD/FNB/hmqc+HInjMkcEolj9TLRgI3WRMkHByVZWWpV/GlBDDXJj7JJ+Sho54oWMEuTAuRFizlj6
B4poVmjxpWDVnYgnXBzPMuhU4jBgw9DLxROFKZ8OHkzY0umXirkLKknmw8tMpJoUmSKd4H+gvR+l
sCJQY6BaOkkSQNbyT5+Jgwy7tDDW6/gnJ1iTyPtm5ZG3t8d3G8GRu03ng+Q5UIzTh7hBgJKlPNNR
lKooMs2fA0BvV6iHaJB20OZZStRWVciZL7Njtk5FrcV6zsCLMEG2xxUFCijxdTz1tpahVqtoTpbQ
aCjRIo1fg5aWq1AVa1cqvV+O+vuavXU6nlQrlpz161y12n/Zm7LLJuxxcfEQ42flNibTkRGfYYMf
b5d4aQ/hZ+XWLtkMBAAo18ohIl4wpJwNlzcxtu+NYmJSgKOKXIhhB3J5SMLKnaDDx4BoSFE+5QRH
t3DKqDbMeKgAoqSNJFyfi2HM+N8W2yOoNr2iCldYwZHapMaGYoiov9DkmjiH5hyqgwFWgoUZWcI6
iwsrIaONI0p5EjWfdMrNlwNlhxm9ZiFG4/IMTFVsfN3yTbtqxdvj4iQBl5kkrYeAEHp5OUIikUk0
sGPHjp6edukcNYe3MxsBxTznuJTBixscxDQm1ZmXPkNh4UOHHplZV3wsQheLJyI8aLb4WyJOPdcx
4I8DCjw4OAkAQ+TdM/UcPybNYjZ8H8LD4egxB+TlcLyRRiQnshi8YZlRYG/cNy0yrlNZXEce2p3y
afIsCWPKxYfPlazF1icMwjy48Bo8jDypcqVlUrIwI8cEBup65DUaU2LN9jG5vpjpd+77sle0bYv3
gC+N2Lj754xt+Uwee9e+WQcnG+3FJq9EB0YWIFyLIt7mlNqyE0gWwY3NA2qde5BSKWawRaU2O5D/
AA7Z+oWp0BEcKS46YC9pRkpbipTbMRe6ae4j1B741+1GPccc17gXqXHPfB3GQ8hu8UjJsUeQmUOj
m7cWVdGxR4pc1W8cqLPDiyeSGgODKUhchfJFFIssc0YmixJTJC3+DI9WI/jzzQrNHt0pieiFYREw
Sa9Qh/HmiH4eTkQ+aOCbvGVCWqGVZo/WnhMggmEq1nYrBsPKGRHRFwScNgQR6GVC154Idzxtuz5Y
5NMjGWamEndiZiZImiSZI5ZMVva/p2q3D7fR9aMRiZeyRZsj7N1y5pTBkSQt2pKDGklEFTQJU97M
2xfMJdihg3Lb/kRyt2THfE8G5YkEcCh4qkQinRK+SYMEWMYBXhN1Q2mfveBC8iq5ZlkChSW2SAR4
lE1fQ6HU+ver6ngtzo1fUDi9+IV04L6k0dLcB0PU0aNWo8O0/wCzIkWfdczNwIMOXcsWJTl5+TUO
JkyQRYePDRHB8ljjbF4AL0q3rB2fMzG2/wCM4uNSIqLrfQlQJdxx0LHcchottjUoiRjSSaKIHcGk
K4WTOYcWCEf029hPuLWiwciRo0WNJszHgp48vNEGHBAp0ys6OARYuXuEkMEUCWq3q5WZBipu27bl
K2DsUmQI40jTieRY1ycqTLfDwUgHoW1ycwhoYpMho4kiTgHESAM7c3mm2/bvAvFlZDrX+PCixsZl
fToJJn3A42NFixaZeUVrExlk4yQBKZNwzERY04p548eLbIZJ5OHc8h8ifHx48aHhvpPMkEOywPum
bxZ8rLHBEsMXAzKo+T5OBkY5wN+2iPavlkkdfIM9H2fYNxkz4tJfun13CQduIhSDSdBmbhbgnfxx
Q4b5ONuuzv4MDb5MQ5gPkChVyDzha0lvukbuc86UEkLZXBVtFHjjvzzD/wAXbByHKsfe5sOLfvke
RuCxrIsq3sKyT/yITaC+iNY5KWOOl6c3NX0t3okZMjGw1Q3EqdjwJYO1zpnm+PtpHcDXdyjyUhiB
5P8AorIH/MjSxH3Uy9rUaxzU7dqUjdrMPuUcyPJLJGyNDJ3jJZ1bapH8mVMjRRyCROKeMSxLI3Yy
h1wWPjX/AA5nCQCMY9jQgwZFf6cuaMSx48hdPUyoPNFjTeaLccdzUEyzw1PCJkxpjIus8QlRf+ZB
hzmRciNgY5FlSRWxJwQR62RGyNHIsqVl4zY8uNkJkJTKrKpbEfqPQyInjlzcSLcYNtz5GOmTj+ZZ
YnM2BnplxvGkqB3w25EfU29WWFiO5JhvG2iJnR42BIPnElFHKlIyLEVAbkg+ND92fK8g+PRzSTxZ
mGs+7ZOJhzx7hDKkG6bd3Nl4u8ZH4+MknlwzWQBKrYcgrAxysQkkQiSRztWJ+TkogRTWdnQ4cGB8
hGTPqdLae+nvVvQ563q9GjVtbGiOA0KOnWra24TRrnR4CKtqdOdW0tR0tpbUcxXttP8AszMyZ9xj
x87IaDa4YqVVUcGTm42Km4fKZnbIz8nJNr0DarXoJesHZc3LO3/GMXHpI0jXhlnhiD507sMOeWoM
THxxpyqbNx4qM+dkNHtyUiIg/p2TuePCTiZedUGLj46u6xq+TlSyRYKJrLJHErbjNnyYW0w4jes7
pGu4/JoIwu3SbxJt+042CnHkZEcCu+VlTY2KsC+nkZfkrGxjOFVVXhvV+CSRI0ytwyd2rA27Hwhx
Tz9hSOPDjhieRrVapJEiRvNuMsUMcKaZeUUONjDINuG+m45Nxh4wgj4BrOG3PMACjg3DMXEx9sxJ
IYvQ3zIk3HOxMaPFx+LG/wCTlcO6Y0mXgfHdiyX3dkVl3b4pDPWZ+67dP8e+QYcmMCCKj++er0TT
/wCacaZEvih2yLnwZkDZEMSLBFnypISwOfLZssAXkbuZTZ527Y6JrHS7BvuyVs1QJ3O5uazT/wAb
bf8ASLktFHZoEvIv/wBgKFTG8yf66NXsSA6GyJfQmr0jWNgCzXN6vopsZIw9MQovz0zv+tth+80T
yyMeSaDCzJwjSmQ3rK5ZTmwU2OQtjpjr92QD3VelPdH+lFNmdO5lPbPmzyQjB3KfJiQsK2mUtj6X
4fGPPhuXgP8Ahz8xC0Ucgkj4coGNsiPyxRyeSPKiMkMEomilJhn9WU/jT0jfg5uk8ZV43WRNcoGC
SdCHRg6lTiTMoZYj+M/ruGxJQQQQGEkUmFPFKsq06K6o7Y0noGpFfEn3PAXKj2vdVnGmTjeVZIpX
kws5MpXVXUeTCZSGHBf1rfSSRLJW5Yj5WG8zK0kIApWZSMgSAcgoUT9RWRJ2vBv88MWBGMvcN5jE
OYrFWkImXZsHLkyt1kfNd9mylA8aUTE1DJ8cXjFnCLQypI3i+R58cU+7Z09M7ufj+3s0n0FtbaWq
2oq3Bf0DXtwHnqdLcRq9e2p4BR4Te+lq2m/knx40zALainkRBlb/ALfjjN+VZElT5M87aKL0sbE4
ey5uU23/ABzFxqRFQcDMqibcoYyDnZNQ7ZAlKioNCyqJtxRD4s3JqDAx4R6l/oLfRCpp0hCvuGc8
WJFG5IFTblGGTBeeRURBpl7nFBUO27hmTpFFEOMcI0zdwjxhmbpkzzYWyT5JjijiTjysxYBFHkZc
sECQJ6TuqLkzmZYMRpyOQ9LJyYsaKQ5u65GJhwYcXFPP21HGsCQo2Q+k0scMcaz7kyIqLpk5BQxQ
nIZUVF4smfxJhY/fJx7nlslYeMMaDQ6O4RcNG3LM1toNL1vW5Db8L4vt7xw8W5TskWPCIIeGeQQx
bTEVg0zdvxM6LdPiuRiVtvyrK26WLLgnxsdSsdXrJk7IsRLvpuDgnFj7IOBZ0afIuke7PlwRYBkI
iXvyHPahq1zMt4jRNKOyKpV7471CvZFpnf6NuFseM9rOlxl5S49Bu/PUm16c3k6C9XNXpL2kFXq9
DRBcnnR5G9X5XNXpeQkv3Ver1m/9bbD/AJKsLYwHh2/Z8WODe9phigNZptlE30Yd8WkY7IpR3xe1
QG9TGyC1Ke5JFtJkQrJW3bZjRri7YrUUXHyNbaHTN/x0h8WZnIWgicSx4topOC1MoZcQsqKPDPUI
MORJGsiYshZPUljWWPDkftzMVcvHwZ3li0FsaXUgEY//ABpoj+LK6h0gkKNNEsqY8rN6duBlDLEz
YslOiutpMSaORJUqRFkSOR8Z/QIBAJwpNz2/nt+d3pbTKxvMHEpGFmrkAgMO1sIqyuvpX+ptp7Mi
uvyLaTGysyMzpKzIy6AkUJVIEvagFzOby0pIbKYtLW3YzZOTDEsMftesvbcLLGV8WkWsjAy8YkEc
Oz7Ucx4oUhRpYkrLzo4MXD+SSyzg34PfU8NtOultCKtxE8Fr1a2pq/LU8N65cduC3Bej00NHpVud
HrXttP8AszJ4RmB0Ik3HER8veMTFTI+WKEy90y8prk8AFzte1ZOXJh7DiYzqFUcEuVBDRzZ5iuDN
KYseGIVfSXIhhBzJpSmBLIYoIoh/T5J4YhJO2QMbbo0UAAT5cMAjGZmNBjxQDSWVIlfPy898Ta8f
HNH0BQ0vpkZMOMmVu0mdWFj5E823bPjYPo5WcFOPiPkPHGkS+lLNHCk2QWrHwyzeiNM3OjxI4cfK
3DJhhjhTiyJzGIolhVVfJlAA0ycqLGSPFly5tL1lZPiCRM8kcaxpxO6xookyJkRUXUHXInXHh2zG
keThz5Xz8mONYo6twW0lkSGPIDbzuqKqLfixO7MzuLcn8sioFXU18k2ba8iH4guc+VrkPebHTsiv
RNgP80/TgYgDDs+V5FYZaRTViIvZD2h8g2o1Ct3H3Uws0a97yNdqQ14j5pDzvV6z2/x4Iti3qR5Q
Xx8zLnjU/mgGm5L1dup0AuZ5PEkbiWI8jV9LiNIZyZZF0OiLczzeMsO5dcz/AK22H/JemNPlnHj2
7c8bLxt83CIYxrP/AOyOnOozZpl7ZIlLu9JzojtNqgB8mRcvUBuko+2dwtY+7YvbiqjJmQl4IJBL
EeKWMSxli2IrK64N4xlqUk4p/wDFNPGZI4ZBLHlKwRWDrkgwy+oay1ZCrK65S/jzKyutOiyJjSMr
a5MHmjAGXBizGRcmEyLjzCaPJhLVDKJU4D6B0miWaPHlcGpollRHlxpEdZFqSJJUikaB/QdFdYXe
CXNxTiPiZdxbTKxRNUsDythZ6ZAI5OsmI0ciyJ6HXjI9K/DbiOjKrLvmytiPSuVqysCpGsIvLIby
VGLyTG8lfGca76+1MquM7ZcOfHkjaNtYp5oW/etwI/c8vukyJpa2PCafIAsNeVdfQPDbhvV9bDXl
VxRNc6NW0twHQ8FqvV+C+nLU10r29tTpbh2n/Zvue8e9ybxmOvnl7pJ5ZeEC9YW2ZeY+3/GMeCo4
4411aREEm4oKC5+TUW3wIQqrwSZcUZ7svIqLb4loBVHrW/oJIUS58gcYLvSgKss0UKtlz5dY+3wx
HS9Ze6wxNDg5WWyRpGmttLa24b1n7v4ZWbIYYm0y5VQwxQJxkgDLze8YmEzkAAelkZCQI7uz42KU
Pp5u5pEcLbnyyqKi8NqyMgQiCDxUsj5cigKKzM2PFXFxJJX1yMgQqkb98MSQprara5E3lfGh8a8R
YAEndMtQFGprdM04sW2YIxMfivpveYrx/H8JoMarUBVtd0yHix8THXGx+E2AwR+Rm8Pyram3LO+P
bH+zQ6SOI0hTu1y3tFhrd6trlMVhyHlWM5E2PUvyEeXFf/BjMScj/YTUY7Y1NmyQVkgXtSiaVrMQ
oJNzfTPNYg/4214Qy8ptswTHuWGuFlQc8ypTaNf9jn7qNRqAJ5fJJiSdryDWMXOZLXQxOJYyLGrE
1yjSR+98STvRxZr0Ky/+rth/yimHJsZcjHgw5YAt7VuH/YB5X5X5zjuTGWwJuQbHIADi9Y4sJxcV
AfvkX/j5K3TbNuylrbg8WExFsb/HJbjVQuThntWY+HLkQSJgyExUTwSIsseLIzRk+DJNiMU9jOiu
uKzKPU61CfBK6K64kjY2RpkRM4gmWaPWYeCXIHjZWDCZTA6srrMGgkVldfWyIPKuNP5lrIh8yQSt
EysGFSxLMkErRt6EsQljikCmeBsKXHySp0yMfy1LG8hw81ZxapIZMZ4Zo5k+i6aW+idEkTePjzwU
QQaDkA9pqxrGH+Um5qH/AGObtGvc+zQeHD1tVtcjadvyX3jYvxQylTwYG2z5kmHiR4sXpW47VbS1
W0NHiNuA9OHnwdTXPU0eHlp76npXWjXtzq9cxV9Litp/Xv8A/wC9xKjOdm+PzTSRQRQroSAJM3Hj
H5OXNQ28yNHDFGNXljjo5QcrBlzFMeNPSt/R2KqJMyMhMbInljgiiokKJ89u5cAz0qqijSfJgx1O
Xl7g2JtuPjrwW9LM3XCwqkyM3c2xsdRHhbVFj+jJIka5GTNM+NghT6NtMrLEVASmTGxUg9G+h5DO
zyzYe3uXAAFHhycnwpjYxRpGbLdEVFrM3JYWwdvaFtcnJWBQsoaCERLx5kzBcWIMePcMlicHFXFg
4JpUhj26CTLyKvx5mR4kjgGfmKoVavV+DH/5mfxbnP4cbBgEGNrajyrBX8nP1zSXOOA76Tv3S4ye
OLgzmJL4SyQz7OvZnbKIslIzHjYa8skWa1y3KvfIUur2Vb6WJpgCOmuef8mMLQbVlpiZRzsTs+Qb
wmTu+GbzVOf8MQvNKLNfkq9xypQkZoGxjcSxWsetMREjsXasSUK8q6RrzzJtIZDHI/3Lpli2Jtp/
zA8r1AyrFEo7XQOK3Af8hT9mi2ZTZE66SDviHOgOxCO5BSkgyuAs1jjxc45srG/DgmWaLI/xzcea
GVW+zIzIjLj48omgkPgzL8Un+HKni8sePL5Y8r/HJWQCjAhl9TJiMscEomj3HFM8WDlfkw6Sg48g
ZWXRlVhF/jbGZseSkb8SYgEJ3YsvX18iJleKRZUrKgZhjz9lAgipoVmSCZ0PoZGOJlicZUXa2I8M
7RHSeDvqaEvJh5yzLUsEkUkGRHOtHj9vWtVqt6PLhyviibq25/Et128rtk5qTAyEr7lMTWGit2mt
vx3lmhTxxa34SARvOzR5ULo0bVHG8rbd8eJEUMcScVtCKsK6170NRVtTR0BNGuWoNtCNLUdL+gNO
nAeD25UaNHg6+n1O0/7N/wD/AHuHbtrlzJNv2bEw0A0lyIYQc9pCsM8ghxIYQOB8mJD5suelwlNK
qqPp7fVT5QhUJlZNY+PHjpUubCjnDnyZY4o4xoSAMrdkvh7U7uiKi6X4jVuCSSOJMrfc3Lkj2/Ew
ahwMvLaONI19CedIFYvmPj4wiX058ppGgiMhhgjgjt6UsqRLmZDZC4uCIzx5OSIRi4xjqV2ymVVR
b1lZ8882BgRYUQoaZOQIVAkieCFYl4551gjSF2IUKOLKyPDHt2LZ6HBODu2aAFF+K1SyLGmU4etk
xfHDwDTdMh44MaBceDinH5m6cO4zeLGw4Bj41W0L3MEYjiqWQRxxoO6hwN/lmL2rPzcdF3OeJocg
2xo17Ysj9EAu5NyaU3ViSb6JyqCbvlkBDXr3zueRHyj61Lis1Q7bFC+ElzWUbY+OP87fetqWyJNI
zvc6Ykva8i1GtZk12vXKhyMLiWLtPc7iKNmLHlV6xJO9GHayi7Z//V23/f7XrNEhxsDMhnikkRRe
tyP+ZDeK+kfMyA3q9Rnmkf8Amc80IDOvbJGO58gcmJONjZOV+fEpYbH5FlmjEkWNJ5IOJ0DpEpkw
oJPJFjf458mHzQYsvmg0toamjEsWLIZIZP8ADO6B1w2bx2rHJik9WW+NkVlq235qusiUQDUZOPLr
lQ+WNgubjYs/mjkjWVMaR0aSNZUx3aNuO3oSqceQEMKyYSDBOIxpLEsqQysjehkweUKYs6CAnEdJ
HxX0yMZZalhfIOFnd7VNjsWhmWUVauevtXLiv6VvXbvKpg7hPNu+Jh4kIkDmXbjPHlYcGLh6jr8d
xGLegdd22aLOSHY8+SXb9px8Nat6p9C9q61arUQRRrodOdDnVqNHh5H0joeDlqdL1zNXtpeuRq1H
prfXaf8AZv4P77qkbPW1bPNmTYeBDiRsyIHz4wR+fkFMCOyoqDQUzKgbLHaseZkVHiRJXQcIH01v
RtQ4L+hLLHEply8hkxY1FTTxwKcjIyhDvOy4z/uMPbFmQy6ZGVFjhjnbm+HgwYi1fhHBbgzd/wAP
HI2nNz6jyJHXD22DGPoz5SxmJMiaaGBIV0t6BIAnyJJajhOWqIqL6A0nyFhVmyMqbEw48ZOPJyBA
uLjMhnmeV4okiUkAS5U25SYmFj4cWuRkpjoO+KsfH8XoEgCSTubHi8acUjrGiK2ZMABw7pmMBhYi
YkHGTWZO0kjQtLkKoVeG9qxv+Xn34siURQ7bCVXhmP5W5a5TlIfH/k0yLu+N97aDSZu2PGT/ADsh
r5DDlS5WBFL5Zu7wuO0TC8Mf2xk0aFlpxZtMlvHEjFGkHclRrdsr7stRyhUESMaaVawP9F6zDbGx
RfIxZfuK/fmS2BrnoDYxP5YpX8UZJJIFc6tWLJ2P2juy5AzaXrHfxySqCIlAGTJ5Idt5T2pgbQoH
GBs75UmT8eysaEGtyH+SL/RbQt44onMkNXoHmAvcTQNTi5xx9zL3JGndiYmJBWLt2PNDBCuNkVF/
iy9b6W0/1Z8R8eRlf45KT/Bm209rUdDaHJljEseM5ZJQIcnpWUjFI5FkT1JI1lTGY2yIVnh2/IeG
TSSISJjSs40vTj8TIyF/HmVgy5MJdYJfKmTCZkxpzMnpHgsCFJxZdMiLwNDMsR0mhWZIJmDa24cn
HdS6Q7jjYuRZlkfCa9xWTjeYSxjIbBzmar1NDcwzh9TwnW/1NqOmPkSY0u4blk50BxN0d22ncCo2
LKr9gBqX4ys1Znx3cMavx5r/AB+ac41dPTN6t6J4iLVaraCutWq1qI0vV6vXWrcJo1z4L+hz4OVE
1e+orloOuttDpfU6W12n/Zv3/uEC1AXOwbWsyK2Lio2VkS1+C8pjx4ouC16kmjjH5cs1fhNI8cUc
fpH6wcVuOSaOIfmvlGDb0SandEWXNlmM/wCPtsG773lbnKkjoUzJ0Oz7/JFLJueQ6YW3T3ACj1LV
lZEWLCczc9zlMW07RQwcvc6iijhTS+g4crN7BBA+U0aLGnpOyok0wmSHHfKoAAVfjtplZniqOKTK
fHx4caPjyclcdcfHZXyZ37sfHWBJHSNHOVuzRRRwx6zzpjp/qONC6+jkuZpMdBNJx5U/mkxofDHw
ZmSmLBtuKzP6GZKVUWii2uALHVqtVqtVq3CZo4caBYIeLcG72ijCR8E0ixRbShMesh8uZigO9HkG
f7IU8cXBkkE4guDIgO4tzzVU5kwXula5/UjG1E0Bczy/55eYpBdsp++XpWM/khsQbiNG+7MqOQxt
kZ2MEinfIzMHlj1nH/i4YvOrMr35SMWc1e2lrVhowjzUYroaFRIXe1w6kOdResaTyRZEnjiP/V24
/wDIuaJ5LOsKfHGRtu7hU/b5tzH3Y47sSlF2zHrDezkWN6BrmEoGnHdHj8kQEmL7YYN1ghk2VhLh
ZEAZb1l/ZXXW1Aa56nxZTWWaMSw4cvlx82MmKN1kj1Ok8fkjgl8sUv8AhlniEsONKZYah/wT+rlB
kKuHXcMYd2JOZk0yI2BilWaPR0EiYzEVjscaap18EisGXKjZGilSaPjPG8ayJA5jaja0sQxzjzMr
aTQiVIJiWq3HkRPjyZeNDuOPh5SzUhlxHBBFZGOstSwflVhZ5cmpYQ9QzFvRtwCj61zwe1W4r6+2
nYpN+M6m/BbU1016cdqtpbS3CTpauVdNDrejy09qPD76Hg51z4LnWxq/D0q1Gr6e+tqsNNp/2fKt
pf8AcGgmWvG99t2iXIfGxZY448WNAABwz5CwgtmTtHiKoVVUegPob/VZGe4EeJkyVHGka2qbMWMr
jz5D5PyTadvl3jdsjc8jS9QsVf4/H5sL1XdI1m3ZJzi7T3TyZ00rYe2RY+oq1W4SbDJzlkrHwzLS
qFHDfhmmSFJnDVDiM8mtqtxdKyM3vrGgkyHjjSNOPIyEx0ggYnIn7Kx4BEuRkw40UUeVukqRpEms
88ePGqstQRO7ehlTmJGhJIAUW4suRguHDc6Xq9MwVU8ufnAADjlkWJPDLkSTqcrKVQo4L1esf/lZ
t6vwsQox18+Rw7zIWSGMRRaOwRYu5cWFPHHWXIUiCWkHDM5MkzNjYEhcnM33JxY8bcTmS5EgackU
vVuRJpSFVmLNC3kgpm8URNXvWI/bIyXfIctNH92ZW27bLntnfGFWCKWOSPDT/j2rcDbHw/8Adix9
z9w7suPtk97DSJDI7ERqQJI2BRr11oXrDi7VD/fmR0dBpiR9seYhK2tjbf8A9iiCayYmlg2Xedww
nn+QZmTEK3Prhyf42FmQdqyOXZWKs1itILlpAJiLGk50B2JCf8hjNNtuRkZfx1Wiw8k/8fGk8sDo
JEwnZoNBwMoYYg74MJyYof8ADm1h/wCKTiQeHJdA64rkqP8ADmVkxmSOGVZovVhPgmIDAhsXIUhl
0t+LNpasqHuqRVzcfEyPPGQCEviy018OX29bIhE0eNOZBTKrq0fjMExLaZGOJlxsgyaX43VsGXOx
fPFh5iZSXbDa4IrJxxKJYkyaw8vnUsCyiKZi2h4rf0H34ueh4rejbjAq2gq1W4TauWnOraX1PBbl
oatVhqfR5jQ1ztoRp717W0I1voeHaT/k3GaH8uXEjyFg2bFQRwxxjgZkWmyASkc8leMGrDgH9Bt6
Yp3SNZN52mKsr5ftcNJ8yeWTcN1lQ7NucOYbVNk48Af8nNXHw8fHHyHeV2/HdmLk30tVqiUlvjoW
DZLepmZyYtRpm7sA+BtMf4eVuZihigj0toOF5FjXIyGyax8RbekKysyPHBVy+NilfSvTMEXIyxMM
bCfIAAA4551hTGx3L5OQIVxcYpWfuONgRY2BPnSjgmmjgjRWYwo08noSOsaBygxoTFHxSyLEiLJL
IFCjgz5zkSYuOmNBxWq1TzrIzA4eNt0QVL1fQ67jO0cONAsEPFmPZMRLR8BNhj3ydwq+mazMHCvk
aN2y5OLd2q+rN2qLtHJGsiS7R3HdthjnXHwvFK4P5fKu4IJBZjWU3ZFWG/Mqe/Me5q1BipVu5IyT
LhjuywK+PyJ+GzADMmgM2LYY963LlBhA+SNRFFezSoJYyBegKxIu1HIJiaxzY7GrVDH5JHIVLkFl
EkbL2kVyrHTySObKAHV1Kw7d/wBi16I5RCxwvyZ5TjBFrcxyjYrEbOuS4SI6YzeSGoRTSEyuQ1Dk
V/U55obGGYu2Ab1g5kWKsG5Ys74f2MKX/Fmcbf4c4/4c3OBQAgjLHik4LaZMZeKJxLHkXhly4zJD
FIJY6X/j5fq5UJlix5xPFlxsyYkqqdHVXTHfsfU3xsjIU48qsGEsSypjTMSyq6wM0D+vkQsTFKsy
VNEs0YVnME/kGmRj+WsfI8w4yqsFY4Eudhv34eZHmRffhMCGFqkj7qkibIGJlkVzqWJZVjlZW9Dp
x+3CfWvofojwcqHoGraWGvter1cCr2q9E0atwX4TRJ4+d7a+x4TV65URwGrcHXTppaumltDbQ6bT
/syYEXNtwvMiDzyyD8UuVjRfQPBbjvr0+ltwXoVI6RLm/Ktpxaz/AJlnTVNm5OQxJNBitJKUafMl
nfat2mwJcLdRnQwyY7Rj5jt/kxd523KHyPcMXNzGq1WrtNKhI2LaZM/JRFjT0pZoYFnz5cs422Qx
pNuM0r4u2rGeMaz5EcCSPLPPjYiQtQoejk5YhpB2y4+MkHpzTxQLO7O+NhyMfQyJ0gjx4JHeaZYU
x4T37juS4YwdtdpeCWVIY0VpGiDZknTiGgpm/KngXzy30vwzz+VsaExJwZ2UuNDtmGY19DMmYtiR
rK8zjJyVUKvFjn8zO45CZplAVeDOl8cO3QiPH1j/AMmRhgsalcRRqWjxYlCR6DTLP+JFU5QFOwUT
Orvi/dNyOSayntJLzCi7ZT90vvC3ZIxAp3LtfSNC7iwLL2TbcL5IqHLnxTufyHd5YIceWLHgFoq3
Owj20Xlla5qJuWXH2SVBGZJJGCrXQsBLGwsaxY+yN27mqM88yKze9YkXajG5RrHLQCHb+WSKvTze
BcNvLAUKmWRFbcheNP8Ar4pPjyX7pK98V+yVxZpW8cIqA98AqMXbu71BqJyJNwy8jHOPK0gi7y/+
vNrMDCJWDLxZyEwZS/kYqlcnHwHJgkQSJhOxio620i/wzsodcVmUY/8AhnrIh80WPL5ovQtwsBjZ
NSw9kuNOZU0yITKuNOJ4tJYlljxXLLAxx5qy4C4gmE0eTjrkR4k5kXhv6UoaFlZXWsiATKQ89Y84
mTTIxy7QTiZeG+kkaSpHK2LLk4pxHx8iPKiKthlWV1qeDy0mMNwkEqYkR3zB7gmNnwRyPExHAeHn
x8/R6eifozV/UtVtfe3F7V1r2vR9Q8XXgPCaFHX24elGudWvXOr6WNW1Om1f7Mv/ALdtCQo/IiLM
J3kTFVSoAHr3/oE+TjYyZfzDbIKy/mW5zVkZmVktwX1DWMe7ZMWJHn5cMfcSVkYV3E6KpNeKvCe3
aNoydyycTEx8OH0svdlx2Xb8jMrJzMTb4xBl7q2Pjw48fBbiyMtIaCy5EuNix466j0J8ly0UYBgg
SFOG3DlZKwB3n7sfEEb+hPPHBHBDJJJLKkKQRPI+fuf3bbtS4h4JJEiSNGyXHfnygAD0MqV6dASL
Aag1fTMZ3rFjVjwSOkaYf/2uQPQyJ0x4lSWRst/DBt8IA4tynaOHEgGPj8U8njixUAfhy2M2QoCr
plS+KF1MOHFGI46y275HHfk8Ap/vycVbn/4ZJCxbnNkDMw5HWlj5La8rd7qe7HDBIydOtOhaEjXD
TtVWPflL9m2f7xXjQBkStxULixoFQ1ufTbjZr0aBIMyiSIg1ixeON27m0iexzIwGx4/JLIbLpe1O
PLEQQYU8kjntWr1km+Jgf9kUVpoRMcFM3EdsuWSiA1bl/qiu0CgRplJZ7X0BIIPkXLe7XrEf/IRY
qe2PGa6jnUSXyPxIJ59vwcV22/DgRM0WjHOioYYRIj4iARgntXEvFLcQbhUhMGZw3rLQmNHDpkAx
tmAhQQy03+DJ9E8EsSzR4shK5MPniErClYMNJkMEoIYaZcTAyomXj4s/mjqUHHkVldcqFrwyrNH6
xFxc4smmTAz1fyiGVZk0ngYtBOsy8c8CTxwTENNFLhTRSpNG6yYrxyJKlYWIxj3jOn/cM9ZYWwtx
yMWbFnGfhq5gPEeC3pnj5+hyq/B1+ktw258PSrngsatpbQ6nW2po6e2h4jwX0HDaraEVcVeutGhw
Woa302r/AGZY/wCUXUU2YWbxPKkWPFENB/QLVb0xpue4R7di5fy7dcmpZpZmq/pX1vSmlFyi1DDe
sLZsjcZcbGhxYeAcORkwY0btuufLj4MGKuVussk2HtISTW2p4cnMIMMMk7wwxwppf0CQBLPJMYlM
xiiSJfSkyQ7iN1aDGjgX0JZkhSCGSeR3VFhV8lszPysifA26HCXgd1RY0fMdy2dIqqq8YqeZYY17
8eLFgMMfFkTrDGImZgoUcG5S5GZkRRrEg4yalnEsmJjmFJS2RkKqouvOraRhMvP1twZUl5sVCkPB
NII4sGMtNrMfNlOfLmUTWLeSTEu+tqGge0eMpSAN21lMpgzMbGMeFAvhYhYZW7YmrGa8WUe1D0rG
TvlV+58hOyWlUsz2RLGsl74m2D/J728gyXMSy5X5WGDYXrc/1YAslXomomNNjXyJDYajlUiiWOCL
xox7m1iasqPtfEiCq7XYW0mJ/HwP+0BerXEFhPtmyYwh3f4/jfjmtxH+HAQNHIbmQeSG9X0xL+KU
lpKRu1mF2nPbDjt2zdCnKdsqHHn23dsTzxS2kIWaLCfvxqb/ABZfFepP8WblkxSbghMCOsiZUXmg
xpvNBer8OMPE5AKwWWsNioqaITRY0pkj9XKV0dWDLOvgyMY/jyW0YBhCxhk1UnEyMlTjyqyspAYR
O2NNUobDmBDDS/qOiusTtG2k8TI7EgRyJKlcxU8LBoZlmTjyscZCQSedCpwJUdJFkjkxmikSVcLJ
7cfd8xJMwyLA2TitFXxOdpMVlVgpbHNweHkdTpbgtwj6/lXvwc/TNdatrajoLVy0vpbQnloTQ09j
V6Ol6vQ1twnX3OhGvTg5351bgFdKsa9q60RrtP8Asmly8qXEgm7VjVatw8/SH0lvX+aykY16vV9e
XqCozzxse42/a582TGxosWLitozoivuyTyQbLB58zOx8GPsyt5rFxYMSL0iQBkZplrHwzIFVVHpE
hRJMcgRRPlqqqi6X4zYCTKfIMStO8USQx+hNNHBHDG+QzMqguuTU80+5nGxosZOB2VFQNnPLK2S8
cSRp6F7CNhlSQA5E3ESFEk16xoTEnBuWcMaPasH8aPQcWXIzNjRLLJmylUwo1PHueUcXFwcUYmNw
Xq9MQqqDI3DmPWLH44dHYKuJzTCBYVmydkUw/HxI0KJbgyJOyGRe2AchueTK+Qc+fGWffcTIRZEX
FjJMGW1HnWKbPkk+U6Y6+OFTZs1a61hpdnbub2ymIxdqFCpGYLnQ5uRUsAx8ehyrcjeXB/1Gjops
ac3PBGakNl9tQbFlWVGbsX3rnUv+nB/7QoGppjEdrzoszBzcpIMU9NyH/H24gQk84m+6dCklAXKg
Rpkr2y9KvUH3xZTXdeRbrJfyZsEkkOzYecpCSxpEhRMf/Hk1lp3wo4dOLNjL4/25WNhyeXHwSUWo
7w5fFlXQAhhkqVrIYRyaTf4Mj0bcBAIhJildA6LG0seNP5otJ4RKkEpddJ4VnjxZfKmPfFmqaJZk
xpWYFVZYm/El9eaFZVglLDSRTjOV8LKysuksLRvHIkq8eVjmQROmXFE82PKCrCSF4JIZ1kX5BjBp
YpDCscckFbDHFiaEAjukx3DBhphopiytyyjlbPvJy14z9CeK3odNDRq3Hb0hw3q9A3o0eDnbgIq1
HloTobcHPiOp1tw8uM62rpRr2tR0tXKr1tR++bcVikAsPrB69uHlpum74+2x7vueTuE3BbU+jahS
C5+PbVFJhoiovoZOWIEx03DMMMMWOmZvDNLh7Uwf05HSNcjJaesXDYjhHFLKkSSSeURY75TBQBb0
ZZViSVzmokMmVSqqD0JJEiSGOTKckAZM8cqquVuJhhjgj4GIVR350ksnc0USxL6M7mWSSzsoCrxZ
DeaSBPK+l9JpVhj2+KTNn4BwZM648SxzFVVUV+7IkVQq8UYOduluIVlNcYwDScBNgAZpemuaxKZf
2Y8aBEq/my5by5tX4Mn75D9+VWXt3lfL2iQRrtWXi5E6mPHg/wBMzd8lYqhVzV++kXueUgaSL5Ib
Uq+KGhWYbY+1j/HFE0skGy4UMW9bZFiVnc5ALCtyP+fBH/HNHSNbnvHfILHgUBUNpErlwID2y37t
ZB/gwf8AtAclFGMPPhR523vLm5WSa3D/AK+Cf8XKw5HMTuS1Yyd0kh55K98Ve2GTeQ3kFRG8R/W9
hkQFlYZmLLFiSeSDIPjytMX7DpfS2uF/iZf8WbKPFm1mq3iRxInCQCMQlNEQViOzQ1IiyR4kjFfT
tpkwiRIpBLHloyFysbggjTJRo2R1kTTKRo3mjXLgxJzLHWTExqGZJo54UnjxZnP0E8RNQyrKlHnR
P4UhvivcEaSxvE6SLKnHkwujFY82HHm8LVLE8bZsUWfgvDKXx1mLY2Ti40ePuyxgbrhtMQCD34rK
ysAGJzZFwNq/EXMm2zZpsWfhPFfjt9Bbgtw24zqavwHhNW0tVuC+oq+luA9at6HK+hrnbjtVtDVv
QOhtpz0Ou0/7JoAmb6vufoB69uHcs5cHFycqXKmn66DhPpCsPHeSXDjEWLxySxQrPNk5aY23Qwrm
Z2PhRKdx3w4mFjYUWtvQnnjgSaSWZsbCEbenPkxwK9+6HGZ29LJyo8dWSQvHjNO3oySJEkUb5clb
luKQHFwZs6REVF4CQBz3B55rVBCIY/RyZhEh/wCJBjQmJOLLnMMYiICIqLwZEjZ+bHGI4+AanlTS
+WXFhMa5bMUw0DHi3PKONjYGKMTF43cEwp44+DJeyYygvrH/AJstgZc2p5BHFjJ4ocNSRRGl9Ivv
y8Y979dNxat2+454BjjY+I0RRHZHlDvhNYkfMkk1EaWC+TKbnTOP+HbBaDFlEGTHMkqfMdxjgwPN
5pb6bgf+Th/9ajXOhaONZiJnAZNUF2y5e1cOSpFsSNUFzNKIlYd6av8A6cI/8oEUDao2/wCUtmpx
GakXtbPt+Lgfo0UB47EHFQLETcqO5TyNhWMOyLIXtlFYxuMiTsbLyGBwd5yciJOdbFM3bmp348bi
SOpf8WTxXqb/ABZeWndFkr+RiQSiaEjli3jYcWSPFJWUhK94jydMkGKQEEeqx/GybVCq482OTDJq
P+JNoQCIb4s+QjQzKwYVIDjTXBGVAZBjzieP15I2ilVldadFkWK8DrfEeraSI2O6Osi8c6PjySqs
8ONKUNfiFF24bbHJvE0M+YHtQLqrgRSYWTHkwkChHIpxs6F5/mW5o0Uc82OuFvmbFWLlwZaGwoc6
saALEqw4TVvqOWluD34vfX3twWvrarV01twkcuPlROp1FH1bcNtbnS3CdDpY1YVfQ0Ratq/2Zf8A
2vrhoPVPDu2B+djZmPPhSu19L0PXUXODjwiICw4SQBLusJkO2rPKO1RmbwC+Fs8hkHL0b65OUIAB
NNPjYyQL6N6FZGT4aaySY2GI29LKzSjqFgkgx2t6LMqLGkmZJatz3aJH2vaJey2g17gAzNnPNKIV
x4PEOG3A7qi44MrQA5E3E7qirJyxoTEvBueU6Lt+L+PBwW4Mt3kaFBJISACxndVCLbiiK7huWltb
aZEnjijj/wAvDM5MkKFI9MiQRRQIIMfBUmOsk+WfKNokUIlHS1OwRID48TEXtgjtdzXybcZMbIw9
yycwORPUadqSCxgXulc3aweI0q+OHRGszG1HXPICbcLY16OTlR1k4OTkz+MJmHTOP/JxOWNpGpJz
ZeXOsOTujkFm0FkSaUu8chRiO9KvpGO1ciXvlw3urizaSf6sL/s9DencrJs+SZo2iAEjdzZ4vi4H
TRDZsiL/ADNZUpGs2SnbMBcsO1MgXQVjm0mVzkmxyRg7Fjxr+ziOdsZMNLA1gXWKsmMywQS+WHiy
4jLj48gmgxLxvhjxS1lDxScUiCRMRz46MYBxpfLDTKGGKzL600SzxYsjMuVB5oz/AMvHhl80dWqW
JZY8aRgdMmATRYsxmihP401MoZYWMEtZAbGdHWRKOtvTA/HfSWJJY4mvSMcd9XRsZ1ZXXjYNhS5E
KtWPNFIu5ZsMGJiyzGMN5DJHzDAgN4pMiOeKOD5HuMVP8rSfF/dIcKHMGQuThbFjZWFlY8eLPhY8
8E+Xk4+BG+4bjkr4mwUzfkZZP3LcEbA3tnfl9X7cuC3BbhtrbS1Wq3oW0vzOtuG/AaPDzvVq9r6E
8PL0vfpw89D00Nc7e1WNAGjXOtq/2ZX/AG9L/WDQetbgYhV+Q7ts0sVEk/QCkuT8XxyZOHIy44Ki
bP3GWHGihOTlQ4qCXP3p8LbsXBTW/o5OZ21BDJO8MMcCaGr+hNkMzAPE0GMkIHEdegky58qRe2Ks
XFTHW/ou6osYkzX6Dcd4DPtGyphrwmnkbNkmmjxIsaBo/QOsl8vInPmdVCi3FKfypoQMiXgzMqPE
g2zFeWf0MnIXHiCNdECJlyXrCUO3Dat0yGgxcDFGJjccxD5GMCy6A6TOY4ooz3azDyz5rHxooVWI
UYf+SmBkyraW1ySezK5RKAADasmUJFvWJhSpFhQ4+QsZRyR5px2y469qVGefi/5MjXauVAXZxdL6
e24GsIf8WNA7MyWPbTG+4k0KzDfIxVvhnlXO5YRRu5YjnUEnikkAYe6Lc5ktl9qw5bq4s1ILnJlC
R8jUTlJJLFfamF4cP/sgCgvJUByhtaLMyzppm88XAPAAHDm7aZA7osdO6WQ/cV7ogaxR3Szr905/
xQyJ2gLkRSoJI8Z++FT487TG/wAc3HhnxTZP+KXK/wAWRUiCSPCkLR8F9Jv8ORWSjMkEirPpkqVI
IYH1cgGJwQaltjTyjwTa5ETMIZlmj0yVMEmREuRBiTmaOsiATxY05kB5hC2JL67KGWJjE+k8PlRG
WdYXaN9DTBsZgQw4mAYIWw5BfFydyycbOkiZkR4z3H9U0YQJlFDMTHFlQRKbGrkUs92k32PIx8H4
/irj53g21nylKR7vmRK8ru7OFpAJVKdw23e2xHV0kW30x4T6duLpwHh50NbUdPa+h4SdLcNtDxX9
DnxHT39DnXKutWFEaWrah/kyx/yuAa2+mv8ARW0ztxxMCPevkn58Zl5E/Qiof17AlotXkSJXzpMs
YO1JCLADL3lFlxdmmknAAHqZOaWrGx2nVVVF1tVtL8HSpMiSdox3mCFYE4BwySJEhbIzyUkmrHx4
caP0WdEVQ2Y5Kom5bvlTZW17RDt44pZGzHZ4MSLHgfu9LIlIprYWNiwGGPiy5nQSRlQiKiaswVZD
Lueaiqi34zYB51esSEopIAPkmKKqLw+0JO4blxuwVVLGFQFXQUKmZZJcUdw0YhRijuoXlzazXbsA
WKPEW6Vbgaz5T/fl1mZaYsUu8PId3J3GLDgEczO35H/yzFtIV7IyaU2dgoY0dIxYY0pkLizabh/t
xOWMrWL2NZ2YYWjctnFqBrIN58QgQSCxjW5zJeddKvzxZfJGy2YHxxyOWe+kTsjsO5aQBVyJPJJp
iSd8bcjenW2Ni8skV0pCPy9gw4fxsnDx8qHKxzj5GYP+NgfqoaILIeR0UB0xV7V61GebL2PiDl2B
452YrhO4zNimWTbTzMR8eRmfZVtMn/HLqeCe8c8sfkjj/wCVhYkvlx6kvBm8N6ljEseK7PDWRGyP
FIJY6IBGOTFJw24LcBAIgJikdFkTGLduO5B1kH48gIIogMIHME2UrQSqwdayYmBjkWVJYkmjxpGR
uO3Dy1liEqQSEnSeEvSlMyGCV76WBFjjNcEcUkaSpE7QPkY/4cuSIAGFSLyMhNOlfcyrDHE37I7j
D+HzZGSPi8OVNNgjFysNpGxst9qxzPLEshkuPISOZEZo1mg9uBumXgvt26Y24J6FvpOfrWq2ltL8
d9DVuC+tqPAa5Vfny06VbT24DwHW2nL0+etjqRbW+u1f7Mv/ALXLgHrn0xoPTvwcrbr8owsKmmyN
yyJypk+iFJ1+M/lrrnbxBin9uyc5YYo4I58mDGSSXN3OXD23Fwh6jyJGuRl+cY2G0lDkNbcbMFV5
pMoxA5QjjSNeO+mTlRYySRCR1jfcAqqo9EkKB3ZzyzQ48eZmybsNs2yPAi4ppXyZP8WJBDG0z6D0
JZFiSBGUQA5EvFLIkUaN4lxYPEvBu+U4XBxVxYvQypDLJAgnlrMYvWKqu3Fu+YcbH2/EXExePMJY
RjvyuAkKEJaNFCrpmN9v2xRYSnw3qNTLlZrHxIvanAeVYoLVjgtNW7wPIk0bA7uZQ2G0i1G7HIJq
WPyGQ3Y1zJkB7DrlMUix37JZVrlSDubcT/yYOUJqRWcptp80IP5tDlUp/wAqSeNpF7gziKJmLE6c
6xpeyVlBOZLZa9r6YkncnZ9+VJ2R21xpPHLIvJBdpnsmMbZA6ipSVn+H73BLhZGZDjx5U5yMjLF8
bbxeXpogu2RL42kGnWoz2s9lQUpsclfvg+2CJrRyLG2PhT4v5e32/FjZlysj7JJ4hLDiSGXHqaMS
xYshkg0tpfSePyw40glg5Q50A8OXWVD5oMWbzwcTDw5RrIi8keDKO/TKjYqjiROMcWTGzpFKsseX
GbS/5YoZVmj0ZVYQsYH0yoPNHBKuRDjscabQj8WS9ZUBmTFyPPH6R4Zoi1QyiVNJonDfjtmRIXto
QCB3YrdeOaFZo4ZGdMnujlZaZb1NGC3aWp0tUMojaP8APyY8zE3zbsTa9zhxDkQyZG490kbSuyhW
HkZO0uj2luogjYmOMX3FVMkiWqOWSJ9p+QR5Io8Z1toNfauXqW1tpevfiJ4LUBRFc651arUdOetq
6VfW/p3o17ae/pc+HnpzvXKjfS1c6sdOVhfS1E1zrav9mX/2+G5q9X+kH0nyzd3x4yyWxpirZUPY
Qt6CXopTC3p2oailWviTRvJk5UOMkWdnbpNDgwQmszeI4ZMfZ5ppwABxDimmSBJpZZpMbD7T6cs0
cKSEz0kDZdAAD0cvOMNdiYhXEbJk4L8RIUFXz3yMmLFjkkys6ts25cSPhvU8z5EiJDiQRxtkyemo
/KmyCZ5FUKvER+XPGDkTcGZk+CLa8aRn9DJnEESxyKERUVmCqO+QKoReE2AhT9x3Wr6E0NLaBgZM
VCkXBlsSkY7sjWMebLzWJRQFXKkMcOPF4oSBLl8OU5SEDw4+IvbBU7hIVgjjxsqKGRcGNWhhQ95o
N/jOkYu0Uvko8qvUYJbJk75b0p8kNR2Vcw92Sgsm3Yy5OT+Lj9m5Y6YuXj2/Jonk/N5f1Y8heLNk
JY3q9Xo0ouUBCZIYS+9Cr1A/jk5VPJ5JOCF/LEi9oEnkmxueQOq2vIl8t9riMp8hasnnj7cbZDiz
EVEOUz98kLd8HTRBdpRy0mHdDbtS4CI6HFxszb4skbrkvFgZsxzMiPyQ40nlgxT2S1aov8eUeK9Q
f48nNi74chgyA3FRd0GZpbgyI/LDBJ5YqzQcXIUhl0i/wT+t/wBeek/4mQQcefWeISpjzGVdJv8A
jTZMInixcjzxUyqywM0T1kxtG8UiSx+pbWZGjZXV1xYUcY+7B9z3LdJMOR95yvy8DPhzoNGUMq3x
m4uVZGOswjdMhJYXhYjnlLYMnepbmwN9vy8uKeWSXctnwcTEjx2ijkkfGMDzKe3EsWaMFo3LK0LS
ATTRH8/KWmyRIWUMrxcypFbf8hycJcPfdty0XtdKvamUJFi7njZUunL1Of0l762oCra2q1W4bak8
Bq3B7ni9+PlVuG2vt14OdDSwo6c668F6voaN62r/AGZf/b4r/WW9R3SNPkmZBl7giO5wdljTHn/H
kBXtPdoReiLHhtVtRQGoHO9q2EblBHg7VIXVVVZsiKBTl7hukmFtuNhj1cnJGOo808mNiJjj08jJ
THVu4GHGkmbhvQ4J8x3ctFhDHwyG4baDS1EhR3PlyZOZj4qpG+UMTFXv4sieSWSGKLFhjLZz2AHo
ip38rzSLjQY8Rjj4syR2MqFVRFjUauwRQj7jkAADjJCgyhnxomjFZTeR8QeWTi3jJMWPtuJ+Jh8N
6vWRL4onX7enDfumxlKxjSeTxQ4sXihQ+XMpv82Uzdq4ins4Zm7p81v8SgKtN99brnNiY2Rv2JK+
GyrixfoahbtNGnbxwYj2kkH3U7+KHTCe6lfvyX7RMb5Q5DCy/wAXI/eMAR7ruwy90w/13pj9p5tK
CZUURpmx3XnV9cOLucvZ8yO68GNH3ycqmTxvV6GmEzd+S5SPH/Vj8p78w1qZ/wDlhwxaY2kAU5A/
wYRImazx9anfxw3rDa0jCzVEBUT+UaR/dTn7iLiJS2P+DlSZ+Ng5Rjg26ciCTyw44Ecsg8ebpmAp
x2rL/wAb9aiQg4TloazUvGjrImp1UeHJqeHzRbTI3h0yYjJHFIJY/VliWWPGlMizwiaOFhPHA7cG
QjRtG6yJTorrjM0bzq0EwIYVPCJVx5zKtWGHLyNW4+tW47DHfIzJIMbfd6gyI5N1mmx4GE6QZ82D
kQfJMgtDNHkRUyh1j7oG48iJ7Dsy4pYzG8yhliJBliVwQUO3QhIztsudEZsh2SVu0Y0Me3SKZAiP
FUs0YfbsA52Vj4GPjHK2/Fy1zthyYBOwQYudkYjw7hsubUm0s65TANzqHOyoKxPl5WMfL9uUZe4R
ZmRscbPncHv6Vq99DxWo6Gude3DarGrcPLiNc9DxHXnVtSa9uG2tq9rD0+dc66a8tLcPPQCueu1f
7Mv/ALd+A/VD18rLx8SLe9+/Pgx8eSd4sPC2iDP3HK3KVV7Q6g10I0Zb6jgtQU0ErtFHRVvVrVgY
r5eVgYUWDjVmbokJi22fMkRFjXW1W9HIzApjilyHggjgW3p5WX4aY+B8bDKv6ApnVFaXIz2MggGL
iJjj0rgAs2Y+TuEcVY2J5RDE00nFkZDu8MMeOhJz5AAB6WROIkgi8SRL55OKeZYI4x+NFjQlF4Nw
mM5x4BDHw31mcyywKuQ9MwRQXZUUInCSAMcHP3D0JLy5WOfLNwTSCON1IQCw0n/zZMjiOPCQrDLI
I48NGWHMa6KoVeA1B988n35eiFZG3zByMhTteZj5qIUhQWhtcrJfJfk3U5jjuRiry81UXOa92OmN
IElsO5nMuR+rKNdamxvMmPtywNhD7RUvKIfqijvNK1zYSJIpRr0TSrcxIIoyeYs6yIUe3KhWPH44
w335kfcmg0xYwkbqJY4QRJCbS3q9NyyNuzLT9qlWYMZheHEv5sR+5EWzZT9ze6t2s4DLUjeODGft
lYWaoh97H7xWEB2beLJs/OLtrD+wTnxzZikwqwZadA6Yj90PANJ4xLFhymTHywY5OUOdRF6xro3F
kxGSKGUTRVlgYeZyOqjwZHrTp4puoyo2Rpx3xxSLLHqtsWU6ZETOsbrPFC3gloVkRsjxyLIjKrrA
Whb12VWGcZ4IcoFZhWGpFSQLIEDCDZt7bHy+R0nTyQpvW5x1tO6yZb8UsRikktkY0iMhngZiJJUq
3nOS642PhK0WFlvc4MHnyN3yLQxMWimj745I2atuxvx8IDQXvuewYeeNx2vK298bFkyXliy9vkeV
5CQbaC9YmPkO+1beMOGjXvXvparV00t63Kx9ECrUKJ0vV6udbVbW2lqI09tL17689ba8tefFy9A8
PSjrajXKjp7cHKxrav8AZmf9uhpf6G3o29fMy4cODdt2n3LJwsV8lJJsLZFmlny5vtjDMSVanW9A
6stW0tVqCGghoLTCiK7L0UVaL2pBJI3xzZV27GmmigSTKy8+TD22DF9efNucfGfIKKqLpb0Z8zuc
I8FY+KsI9GfIjx0EcubRlbKGPjxY6el0EkjZTZW4l48bbYldFOY3Fk5Dl8fHTHSV2zHREjT0mYKs
CmeTILTSABRwkgVH/wAqaP8A5WRfg3HL/Hh2zGUD0MvI8EXY/cihFrJvLJigTScW65Pjiwsf8fHt
qaOrsFVWMeNBH4ouCX/LkRASTnQkAYgLVmHuochlEyyjoAZMvhnfxxYqFIMcd09SOEjgngghzZA4
3lv8U7BYO77L9qxsRLL+pAAZG735VE3kgWyK7FmJtooLMVJji5SQi+VUaizsDTEduDbw2rINoIxe
RiEBNRPY5sfPnphx9zytpG1jmR6c6xo+95GsL0LOkiFHvQqCPySSHtWI2Lp2zxf7bUBTj/ky7fFI
6oUIp/0YhtkQP2SMQis3cTQrGbugQXbLfmCQWPctKe1L9yoLsn/HqXdJsOT425lxwA1Oviz8iPyQ
xETQYTlodB/iy+Aawjw5kieSOXukxEcSIayx4pOOICGesiFZ4dtmZ4tJ4/LHBL5ovVZQy4shU0n/
ABZ/+rPrJGJExpGB0kP402REZUx5PImlvxZqnhWVYJjIuh9M6OiuPk+2eGePtLIsaq0i1tmKJg6m
OT4tuZysU3WODdMGaTe9taCTYJIzuFiTJueBFLLvO1w03yPZgMXLx8yKrGnBxJMiFZxnY9sQZWSp
2iRZp82YTZGZJ+Oqz+WXa4fHDlFsrJfHgx5Y2Cvs/wAfycnMf8rFyoxmjJiXJUc9MnGhyoZPLtWf
m535VQQNIcXY5JseWNo5ERnO1bJLknD2/Gw10H0J0tx24+ul6B5VbW1cuG/Dbh9/THCdDoNL8Bon
W3BfiOnvbg2r/Zl/9uhx34B6wq3CPTPKvlW4T5OZt21NJWTvEePGqlizhQTekVTXvfkwsdDXQhe6
uw0FoCgKYhQpLEiwaUCixalUk/FdiVRnbxDjVDtuRmvHGka29V2VFycoyrj4TyEAAenLkyZDRjuq
CBIE9HJzFgpMftpfyM5o40jX0r2GXkCapJpMgYmAsFffnSgADhysl1ONjrAk80k8scSxJ6cxbKmn
lEMePD4k4L6ZTvNJkljSRrGms0yQR48c+XkgADjLBQJmkOHjtDFUjrGjGUxIixpwsyqsA/My/QzL
uJAHn4CwVYnKwQIUitpmPdUUIsd5MskAYYaR3YImIlo+HLPcZGEceGpGPW6SFYJSSTuE2Kg3ZtzX
LyFMBItkt2xHkXPdHM3ZAavWG9ZjhYzXPTDTudX7mdOyfCF8oUrWMrIAuWZMjB5Y96yzbGhF5pm+
6r2MiiSM3BAJMaCKIm5q9qYCWNlKtWPGI42NzUZscyPXDi7Vc3YGxYB0iP8Ak9xUh/5G1YqZuZFh
4oX5DtcWFMw5YptkQJ3yTp3xnXFftlRbPM3dLUB74RUx7YICWigNpJ4i7ZG2vmN8bibFIYKM8kIC
CMX7HjtHl6ZQIjBBHBbTNHZo/wDiy8C8YqRBImI7ePiylbxo4kSskfi5mrf4Mrjtx5KMKR1kSeFZ
o4j54sWViNciItUMqzJToHTFdlMyjHlBDLTosiwOyNWRG3dHIkq+vl4seXBu+C2BmJkntR1Ztrx4
9zxMr4HuSyqu57FmbNuGDvMOb8d2ww/KI9nfZsWTx5OJ8rTb8vdMlsjP/FfLjbaPAcDcsHa2m+W5
zVP8g3OY7Z8myYXxshFGXhxvWL8dkzmysbF2pMFkGZnGaatvSSbK3XIiwcfZ5B4MjJRF2fAzt2fa
cgTYeVt8s+fgKJZ6FjRFtPk+zyTGLb5GbadjCAAAbv8AHxlz4nxvtaOJIk0JFW9M8ft6Jq/Ber1a
9W4LcBNXq54jpzq1W4fbitf0DparcB4OVdKPFbT2o6CiKvptX+zM/wC39Ly4Rp19aaeGBN5+W3GE
cZzm575RSOnfiPOiCDoRegSpV7m1WpmCh5O4iYrTSM4C1DjyTPtfxUQDIyM3JOHtkeP68sqRJLPP
M+Lhdh9NmVFllOYIlOUscccS8NuCfMd5AuLt6x4r5MgAA4RQ4dw3AEiKaVsbESBZGfLlVVReHKym
Q42MsC5GQ7SY2OmNF6eTKyLDEmPFApyJuLJnEEaKMSDFhKLresuZ8mfHgXHh9DKYzy4yjJl0yG80
2OPLJxblMAuDC0UPoQkSTYgLcOW11lAabWIebMmkEcWGpEOa5EcaCNMpiQAAOFf8uXmn/EoCrXas
+XmbUyLnbIZI8fbWw3ylIIHPLa8h6wEPDlveQnTEUmTNTkb2oc6jXwwobNkj7dvF8mmHKfD/ACET
DTFhxFtj1m/9bGF52N2q9QtWXH2vhx9zzNpy0ia1Zkdnxo++SRrDnqQJI2UqYk8jmyJojfZF/s9w
akY/kfF9zTE3tZeXybMjkBrG/wCxiR2Cvd507Jb6IxVpH7Y9MR7SW+7KaxxWHetRzO0sBH5O3SJF
Om64LM6iSPDctjyjsysr7H0YBlwyezgvpIiyR4Ds2PlxeSHyAZBOj2hyuC2uP/hmqeFMiHbZ3eHS
aMSxY0hkj9XrUZ/HnrKjdTKC6o6yLrIPx5QQRWXGxEbpPFAxgl0yIjKkEvlSpL40gIPFb0vkG1pn
4c8MmPLc0kskbDddx78zPzcqoZmSWH5IIocubbkwq2eeL9rRtjxsRs/90nzQyTd1E6/G8HdEw8TP
xypx9ww5IZJ91knw4vPn7fuLPsW0TYMm+LNk5k6riw4m25W5Z+1bbBtmF8gXJw1PyRszHjjSJNBz
o06K6xYeJCeE8NvTvoKtx9fTAoCj6FqtwHQ6EVbgty1tXLh9unBar8POjofSPHar0aOu1f7Mv/t1
7UKHp39G30ArdN5xtuTdN2ys56i5mPGWNJH7jxA2ogMOhoim6gUJiAZhZmZ6JtQuSqVt21ZO4S4s
W1bGibe+e0UMUCetkZK46gSzzQYyRaW9KWaOFJGacRQSZlKAq+gzKivJkZ7F48RYMOzcduAms7PM
zYeE1QQiGOWQ5DxxrEnDlZRjOJiiBcqdlrFxRjp6csixR4sbM07eeRQFHCzKiwD8iVP+RNwZ0/am
345RfQypzDH4nJRVRakkWNAGWNEVF4WYIuOjZOVV6vqdcuUxwuhhxUQRpwRWlycW7ter1ky+KHFi
8UOZd2AsIrzZVIRLl8LMFXCQiKb78umYKuIO2LK++sk3dj3bnkqrZJt3ysS1Ylyst++9Gol8cEg8
kN9MWMvJK12vWQ//ABdtH+ZRW27PjRQbxtGO2PmMPxoBaHlWf/1sTnkHVSQZ4xLHGoijJJN9QbGR
BJHDH4o2PcdY2NZkdmxIwFka5oUl+5P1kfcBU4/yz7dHK8OXnQoWLEHnFyyX+xAbHLUMldahXukl
XvTSNu1gt5MhryxMVduTLIy5GdlTRx4WY88SsDW0u74EP+PKykLwOBkYuPJ5IdG/x5el+FP8OfTx
m+PL5YKyIvNDjS+WDS1HXMUgKwZayR+Ll6y/4J/WyIvLHjy+WOgRjTC8EurAMISYX0AGNNkwCVMe
bzR6To0bo6yIyhljJx5PXZQy7zjlMrVTTKVOuLuBh2qTIdm2WcxP5DkqeXB8Vmab4uzqXgmyMU4O
4uBtjJK1mJzlZMmZo4oMDHtBtO042CJ5kgi3rcJcmbCxExouCZ5FignWdOnDarcZ9G3AeM6Gr8I4
TXWhqTfS9G2nPTlwHQ9dTVtLa2GvtwcqtwW1toeCxo8F9SODnR06Uep67V+vK/7Y0GluH2+j6env
PyJMYT5EkzyHnQJBSZmo+gDTrcKjNTuFpRc2ABFyUCgm9WpFrb9umzJIo8mGDFwvGPXycrxiFJct
4okiT0551gWVlDQ4jyyejNPHAvgkzWklLVBjJCvEOG9Ze4SZkuDgKojijiWeZneKJYU4cvKMIxcX
wjJyfCMbH8Xqqfy58ibxR40PiSr8M5ORNkuSY0WNNZ5hDHjRDLm9B2VFDs7YsHhj0ntPNjXml4s+
axw4THF6D/5ckWly+DJkEUTKYcSKMRx6Tjz5VQny5WVL4oMWLwwSOEjw0Kw8OY3+JVCrAO/ItWaT
4u3tXNZVxtw3fMx48XIkaWRy8uQ3bExo86hASPKFpTUSd8krVEayE7Jaxl8UJJoVkNbF2wf5FNjh
ZUeRj7znQ4uAmU8uJCT2XrcW/wAGH/2KNe1RG6ytyNX0vV6iPKRrC/ADYsqyJyRL89E/Up++/O9Z
BHf3KSrhFkUV/wDIG2TKQWpQHjIINjWEnNWvLOvZLQqB7wk3Iq/cpU/lT4qPJt20YQaP49Ekkcax
pkf457VhjxnGHjk0y0LQRuJE0HBnKRGGDLmKVrFKpPpH/gy+IgEYxKGpolmi2+Zni0kjWWPFkYrx
245P8EtwaljWVIz5Vx5Sw1miEqwSmRakjWRMeRiJ1eKUMrCvZb40tqliEqQyH6D5Tt7FX/VqDcEE
HQFyhFY94sFWKtL2ux12vdJMFozFkLPA7KMvMxoR2PW27pPiZEm5YuVnZMpy8qDvbIFwu9eIYWBj
NfTlr0qePxHHyoclLVbhtRrlbU8J4vfW3GdbUBVuC+o4rUavXPS1W1PBy1OvXQ6deA624TwH0Dyq
1W4DodCbV1r3FbV/sy/+39Pf12dUXe/knfTMzHtqRSNQSKRu4H0IUd6yIpVq1KbVDCXDxypG0DvR
RlKresHbWmbE2aORIceKBfWtU+Yt4MSWcoiRrf08jLSMvJ4mxsPxt6OVnJCY8Wzl5sxookiTS3ok
gDMzsjNbFwEaKwAy8h0bHx1gTS2uVlCGsXFEJnnWJceFgfUyXZ2RI4IscHIl4RWXO0SKq4eNiQsi
6kgCeWXIliiWKP0MmTzy40flfSaVYYrSJDGgROGR1jSFGnm9CRwiY32QYcZWLglHmyj/AJMvR2Cr
hI3bPJ4ocSMpBL/ny6yyGoCw4W+/KkbtTEUiCm/y5hIs/blYmfsGDNWLj91MvZWXfsY1Gpd3b78w
XHviKACblDZs5LiFDJJK1hegazDbH2zoKORkQVnxZWa744gxRZV717tyP+HBv5uw12NRVhXaaUBF
kF10Jq+i/arjuTS41hv2yk31Q/cP1e96yDZ4MornqvcrNyvzblkP+s3qM2bLXtkvUY8cANZa3ANA
1EloByoVAbxs3/JnYLkYuRDEW+Rr5MPLizIcyMvjxP5Ipf8AHlzt4srS16xvsPE6h0wjaJlDBi0I
0y1YxI6yJR4coeNxYisv/jZdwRpk3hlBFqt6jIGWBmBrJiY07eRUcSJrMhRkcOtTqVKlZY4mMMuk
kaypjyFWqeIkRv5F9Z0SRN/2eTbsng66qxXTyP2AEn43s8Ixvkfx8QURY6RyyRNsTHdNszSFnV3S
o8ux2/xO2O5lk2XAjxcZmVVzc9d1yLcRo8wITGoJtxW4T6FvQtz9G1e9e9tbcHLS9HS2lxRNX1tp
bjOtr686Op9Hrp14hoSTV6vpy0NXvXTS+gAvyrav9mX/ANvQaD6e3pTzRQR738ikzAOdBSaWM00Q
IkgZT2PQFIbHqOG1bXsmZuEmXnbPtMe3Jj7gdw2fFhdMKEzZW2bngx43x3MyY/w2hlbbBkHC2bwy
42AFUeuzKi5OWZKgwlehy4L6X4sjLbvQkNi4iYy+jNmPI4SDBjWKbOZVVF9K9SSpEu45ZzBhYBCC
wrKyDEuLiiAW4cnJEIxcYx1POkEePC7t6mROIIsSBokyWORKAFHC7rGmKrSuhOVkcGZOhGFjeIeh
lzmJIou5gABpIRPkY3+eXizZAxxI2WP0Molzk2YAWGl6dgq4v2xYSnx6ZpLBQFXLPkmdxGmEjdlR
nyZPCeQxgGrNP+FVCqeQwwXG47nHjVDu8JgynXw4BHgJLyZXOJjWKOZNTjvg6kjxxUTTr5IcNO0O
e40KzD/i23/WKMsKAyBmazU1iFu0u5n7cAUBpfQgmr8iiGjEoVVVq8RpY2DZHeUxy3Y4s2nOgpan
dY0P3pR0Tm//AMh0tU6WbIwIpnjjaFOYF+c3LIf9VXrIHkghTySStoR3w0i9zFu0zr2yrWK33yt/
ycsuy7XNZI2JHx1XCdawvtGbGXx8n/NiRuJI9Jv8eRxk+LMrLjHfgyM+PVgaxrxycTKHXDJCVNEs
0W3TMU0ZVdcRmUetkqwCOJEpwcaZCMebgH+CXRLwS5ERljx5fImk8bOsUnkWpV8LK6SD1snFgy4d
z+HzRCSN4n1HOooi70ASXRo22Db/AMzMRFRSAR8h+Nh6dGRtMbJmxZBvMM9Y+TtjlsR5VxZlxoky
mSvjG+zDJ3nPkypo4kiXitwcqtXTgOtqt6l/oLcPSr8RHBejxE1b0LaGra+3CaHHbTob1bW1HW2n
XS1W066WrlW1D/Jlj/l8F9Brb6O2tuHcNxx8CLdd3yNwkAJKoajiBoIBXiJpYRXiS0uBDJU+HLDS
ParaXoAmoonlbG2TD22HdPkuRloBekWS8W2Zk1bXjx7bNDhJ3WvWXkY0Jix8nLMMEcK+vJIsaTZB
yDjYvaPTPSWd8kxR91RQpCnoO6orSZGc7vDgxw4jyP6mTkpjrl5U2RNg7cmLpkZCwLiY7JxZWUMd
cbFKNLKkSQQtky+oxCiAHJlyJfEmPD404pP+XNlSMTHGsSa5U/hTFhM8voO6xosju2PCIY9JpVij
KMsMaLGnDJIsaRReaX0cf/LPD/lyeDMJcZRsigKtGoP8uTWPeTIzLyFQAJXCR4qFYuHLkKQQp44p
QHyazJOyCFBHFuMMn5WQD2eeVtxx5JFhw2Y1J90DGk+yEmksVgjvPK12uBpE3KQ9q6KGNTQd6wAR
i5NTo8KoQQKbpCQZNzNzgD/HrJ3FcSFgzfa1M5FElm0Iua5GioNCMGjClCMLU8LSmGNkVo2v2NXa
1Rghz+tegrKIrHhbImg+N7f2b1s37c3vlC2SxudIjesWIozm7VEQGlTskxEvK5+7K5gcqxRd5kPk
yYVWsLFxo4ztG3ymONIkonx53I1iWC4N1j0yI/JDA/lh4sxSYlZXWePyxYcn/Jq1ZQEZDAjinPhm
0zQcfJBDDTKBjIII9S+iEQT0yh1CXrHkLDV0DrBIQ1SxrIkEhZchTBIpDLpKvheLwPXyjdY46xt+
l/E2rd8XcodDx24Dp8g2P9xjO1Or/hrd8NlDRlaVmWMGxgIM0jGaX4vhiHF13DZMDcK+QbOu2zaG
JwtJJJG0O7Z0Rx9wGa2P8eQrBBHAnpHjNX4PY0fRt6/OrVbU1z4zwHhsa50aPB0q9X4T6HOjpfjN
X4DXOjrbh9riiNetbV/sy/8At8Y4R9Pu2/Y+CuZmzZkyRMaSACj40MM0JbHwxO+btPix9SL1k4Kt
Rx3jjvciHtXbtpy9zlwcPCwIt3wd3bIGLklodpdBFJix1kboFr41gvmTyzRxLk57znH2uMAAAevL
kRxBnmnmxsVMcVbitwMyqss0mXSwDIVVVF9DIyocdFglzWmyChx8UR+rlZwVpZHjycTAhxRU8ywp
j47NJw5OQsC42MylmVFiR8yT1ZycmUBUWEGaXiypmUAR4WLiRMo1kdY0byZcscaRJ6GTJ5pcWMtr
epP8+Tj3mm4suQPJhRssfoZUhSJrY2LjR+OHgg/y5Ef+XL0y5TFBjReGGdxHFAgjhxR5Zqy3LMOQ
4Z7yZFQDvyKn/wAmTUkCZOVlYWLCNywcSEPAI8fHUpjrzCr3PIbsajbn2iOjoajFOpahEK7QKJol
fxAvaVAlQGZyoAF+bHljm53E/wCTBFoatqpKtL2s3gaM9rx0GBoA1z060WUU0tqMjmgZDV5hSSmp
8kRVEzMjdtB46HaabHhNAmu41KvkG15i4e5R/J9trfN4wczbyy1mRucg0dEPazNYaA2OWLnEFkve
nAaEVjco8g9tZZHZjbjhkSb9kNW17q2U1Zgsg503+PNH+PO1xj45+JlBGKSFtWahilRxIlOoZcRm
8fFJGsseK7NFUsSTR7dIyjQgEYxMcnrZEPmjx5fJHU0XkQsWpWDrWS/4sWR8neN4fkcEtQTJPFU6
sjBldYmONNoQDW75iYO2TPJNLAksT4qy48+PMuRB6NuJ/BJFk/HGRmxpMapoMeWsuKM00Vi2OsGP
tmKcjJx4Vgh4N7wDn4GN8c3LIkwPiuFAN02yLI239vzPJh/FtxyKxviODGMbBxcUfQdKF6tR6W4D
wEanQ8N/StpcUTVzV6v9IaPDbjPF09M8NuLnp78tTQo6bV/syx/y9bajQfRXq/Fl7jh4a7r8pmnH
c8rw4wqSWGGny5JmfFkRYHRmwQYzG4kgyBGJ67atUsiRLk56S1A+LGu3bSuQu5/IzNHiZ2RiSP8A
MM+aI7hO8kjAtPmtINr23I3HJjaDbsfxZWW8OOkQ+gycnxUkc2S8UMcK+nLKkKSl8gQ48k5AAHoZ
OZ4jFi83mfJbHgjgT1MjJmmYsJDh4cOJFUkqRLDG8j6W1yMlYFxsZgxIAs2fIAAPUyp2iTGgEEcz
eaUAAcMsixR40bEqTl5PBmZF3w8bwp6GXMY0x8dZODJmEELBoYI0WNOGWRYo1jZ5PQvTjy5c48ku
t6yZTHFyx8bGj7IdJP8ANmVkEvNmydkMEYiiqAmbL4L6Yx8kzt2piA+CsYeScC5WJYjuWRGku6zR
yJlG0LGwU/dGnbKTRNKfulJICsaWMVZVostXvQ50aNO7NQsXchSgsBRqQlUx5VVc0GRsZSIavoa6
nKhWSGOV1QXWObI8bpm2I3GwXLdx3StRIQUkZNO1qu1BzTwJMEjEalVB7ENFGFJK6kSg69DegRcQ
LJD0q1doNdgrsWivcLc660474AOyGo/1MO1oxbHylLQyj7MOKVM+KJ7bPjOcunQOmK4eHNBMOSwM
euSCknHIRFmVmxLJDtkl4dJe2HJ45D4ck6ZymGZHEiaZSMVjkWRPWk/wy6TxsrRSBHhEYPy7fYsi
d5mdooZJF+MSZSQDnVgRGTDPPCsqY0rSJp8jx5JsDxOGiAVfOFOxSq+3/QnnUmHiSUmz7fHI+Bgy
VN8b2yVsLZ8LCarcPPWwv9Ca5cHPS1Wo0dDofWPDy0v6HvwHhtr7nU6H07ae/CdfY17aHnpb0rel
tR/yZf8A2vrBz0nyIcdcv5H5MncvkjoJMqaVog0rokeOk+e7GDElnqGGBUyGkaaOWOEfnZQpd2yw
mNlrKQoqaRY1bLkDZWW+Q1JI0bTZU8+ncRXeaDmuZra/j7TKubj48W0CHJcAKPoMjLAWDGkyCiKi
+iNMjJjgU9zPDinyejNkySmKCHDj/wA2a6RpGvpsyqJZZM2pHlmfExIsSKmcIsanLbhyclMdMXFY
PTM2bIqqi+pJIsSYsbSNlT+GPGhMUfFyy5sqRjUUSRR65c/YuFF5W4bVbSR1jRmkkliiSJNWJmyo
LTTcIrIlV5sVGCegxCriAmPGHe50to/+XLyv8kukjhEwkIivWMTJKLT5tZEnihw0MeODQ4Jn7Isa
Pxw5ZAiVe1Z38cWLGY4O5UpjXy7J+/aQ3iXudZxZwbNLZRzNdt6HaGdGsrgU2RavOTXeGA7Cf8go
Bq/U0haBo+0HIXvkWhplMRBGbUXkYw8ouDoWMZUuGXMymRHkeSlhlceGQV+WAIwbHmwF2d7KOdCm
NqyHMcav3RWoqKQmnS1dxWgXQLKjaWtRF6WTtUEuz96Gd+2PHkkdNCL03IrbuEfbUjcxV7HIW0rf
aJF7oWQfh4YHiw44psRERBpCSmSVBGOgfHwmLY2kqCSPHfvi4spWaGNxJHUXdh7ppPH5YcaQyQ24
p4hNFjSGSGpYxLFt8pR9Yf8ABPw29F0Dpjsw0tU8fjbcdyTH26V2ZhesIf4/juZ250e740EgIK5E
QmTHl8qZIeN0YOtOium6YU+DkSkCm/X8fzjC82diQY+NlR5Mf9DPojr6h0I1Pp34LUeEDkeeluDr
wn07Xo+tejwGulc+M6kVzoDjNdKNWOntfQ6G+m1fry/+1xWq30rMqLunyzHx6ztyys17640kcEc+
S8xwsXynLnkkkwopIhuGRHKRrFIY3gn8iyjvGbK49CDGlyJcDY8PbU3LJlnOBhT5U+FhxYcHFb0S
QBk5hrHxGkIAA0v6ArJy/Ef8iSw4wT0XdI1Ej7hRbGwYRinJcAAenNNFAjd2WZnlyZcTEixY6Zgo
CnMk5cOTkLAmLjP3VNK2XJHGkaeqw/LmJCjHVsiXiyJWLMY8WDEiccEjiNAr5MyKqL6GXOjVh47R
LrkSiKOVTDBHGsacM8whjWEs2p4swllyWEcESCOPV3CJiKfHjf5JaNZl5GAAGXL4oARiYeHH44Sa
yiZJNBwZNnap/vyayz3sKyj3MTyzEjkkngiGcwihqWVWbuNfc1WRA0rV3XqJXDWjWg8bVkwrGyE0
y/5VykV5stxJFlTJKRkT46SCrliLjS1Zn+oQrUpImQWTgIoILnxhcwQSYq27o9whjjWSCaszGCDE
mMoNwQCCWBJSMihzbLF5cX/WOnsaB7ltyiPOftRVlcKucho5kYoO7r+SkBDGRpBzCgHWT9TcqQ9y
PGGoqykm9FfIJP12JUsBiY264kJ2SVZ8PxgirVkf48iv9WbD/jytYh48njxPsNbvCWhxphPj6IPB
m8f+nK0z4zFLG4kTTJiLxxSiWP1shCKRw6Uyq675EvhlUq2OqCDtQKrvjZG6KcmtszpYMypFaOXk
QhONNSozn5JsEmdDue0bpt5s18PLzYzJmTebuh3jZsXLSVv6GfSvxnh9gORGp0t6d+O9X4baW0tr
78VqNX0Op1NHnqeE63r2q3AeE6X150a6adODpR4zW1H78v8A7XP1L6j070zdq/IN6z5JCSdR1saZ
2Y1GSuNBgpEMrPaUH71PKr64uSY2jIZNwdGk4bXrD27KzW2vbI8CPNyolrC2/I3GTDwsfCj9d3VF
yMlpji4XYfUmy+5o1Zzj46Y6ehNMsSxwz5LT5KwiDGbu9TKy0xk7GNT97vjwCFa6UWfNlVVVdL6Z
OVHjJjYr99ZM8kskMKQp6uVMxaGJYY5yJ5AAo4ZpBFHjREAf8vIoaXFT5JkfFgEKehlTiNcKEzSc
H+/Jx/8APNV+GVhNkYiEL6BqK0k7gyZfBmXesp/FBBH4oqJsMUGWWsg+XKyiJ8jTHBlyrcF9IryZ
dRffk1H/AJcytwz0xZz8kyEbL3wsDkyTOTc8hVwKMlqFhRyCKZ7g3WQdOlPd41FhkOUSHE87zJB4
8ZEOX5JO+QHvUGw09si9dwo/dlA8S9ZnCJJkSSa4Jbzyp3RYsU0b3sSQ6+NbGNlFDk+UCJsPmRQ6
pLGB3gUKU/flTiJ1Lq7Y85plZTDkKS8bARLMJ5WSRhwTUx54x7oWvYutSRlTAaJucSxLr9s2POc3
4zJIMJGNz+qstO+CNg6ZqnxTGz65Z7FuCOKT/Hk06967W7Rz6ZkZeGJxLHxZEXliglE0VSxrLHgO
0baoPBkeupMMum/pKcLLxTLImRAiHIU1s+MuZuGfhjMx8iJ4ZMKZZ8UgMICY3liWVMdzWMWSF93i
gr5F8h/Lx7UpKlwZGi+QbpBjQ7hkRZMW67vGI5Elj/qfvry0NqtVtToaI4T09Ll6HLhvXP0COM62
4PfTrwe3HbU8F+H24OVqPDtV/Jl/9r0bcY9LJ3DFxqhyJss7ts+FkwZcSwTVa9QY4UPBJLUuLPDU
cJJTJixkeSWVjQaxdbjUA3ky3McgLURqFY1t+0Zm4Tn4ccWaHEhxVyWArA2dsqSKOOJPTtVtZpkh
VpZ8iXFxEgXhtxvktK0StOY444l9CTJUVBjWaXIZmgxli9XJyWQiNcYsWSsbG8WszNlMiLGnBPMI
Uxsd2esqd+7Gx0x4/VnmWGPFiZRlT+GPGhMUfDe1J/yZsuViYolhj1zJiawIg/oySLFGBJnzqoUa
5UxiilUw40UaxR8OVN4IY4yrAejkyeKGJPFFiXccEH+WaT/LlaZjlYoYxHGaxT3tiDvNTyeOHFi8
UHC7hI8MEQM3auIP8WTuWFjBfkeNDHkfJNxkEmS87lrCWXyIgtoSKLWpFNF+4ip3sSPvFdsHaFZm
KuHY+abJnEcUmRLIUYo2LlTSydtwOpq9A1kn7+6oPuyxfT31XkZACuRA0TwxGVzt018bF8J60BRR
TSqBQUV21JFTRRrC/wDmRQIYhjkwxx9wMSU0NwtowVvJnt5JsXHjhjnnRTmTRTQohdlh8cbxhgHh
EQoayX7WrEYUaYKXkBEi8wQVOPJ2NEY5F223Zs7fYWa2nWsS6oyhlhUyYcD+SHR0DphuWh4spO+C
JxJHW4L+Nna4xMU/Gp8OVpmRlWjfyJbSeMyRxSCWO3oc+KaPyJDIXWszMxcdJ8+CWaZCkgYivje6
Y2DmxyJIm+4HkTZNyTFlqeMusb96zRF6w5xJjbt3jJlUsCDVqjgYjwNT4xA2r5JitDtkwgerf1W3
D04jqdOfBy1tpbS3BbT24jpfg60dD6h05Dg6cPvw3NcuAj0emluK1E1tY/yZf/a6elz9fLmMOPkZ
Mks0W6TRpmZ08tdvJIndsD43mz1i/GIEqDBxMcbpt6ZUGbFHDOBROqGnXtOlggFzSx3psVmo4k1R
4GQxOAYI/jmDl+IyJEZssAYkGTnyooRPXyMpYq8EmQ+PDHChUgek7rGs04yFix3yKAAHHcANkPK8
cMcNGaTIeKKOJat6RIAeU5FBhDR7cSsbG8es0ryPHGkS8E+QkCwY7eSsrJaOsbHEC8V9L8JIAivl
TEhRjg5E3FkyO7u0eNDiRvwTy+GOKJp5FAA4raZ2S+TNi4648XAn/IyoB5sjiL+SfDisvoyDy5OW
zCJFCLrlSGOGJBDBhgsttBebMrNl8ePKvix0QIlqyv8AJNbS3BmXZFAAz5hDiQfJcGaAszMSaDwi
JIS7tGCrYoJ7QgEgamJFCryyRr1PIKO+Vh9yBi1hbxK1SQntxYZYpc/vtBt0jp4S00MSxIPuNuDL
au6sAd2ZpbS2hFMhaNkupxE7o4goUGgLUooCrDTpR5iUEqLqXJJx2bsW9AXoinjDgw8jhyJPb7sx
2bIqKTxyLlxERuXxwLUOBgbK5IwWPfUlIeUTfcwDAIVrCmAEW5QYNQb06LDu2ZG0Eqzw6fozKA8e
Zjnsm1A8WZx4t0asvHXKx9sm8uHVqzAUoEMOLIQvHG4kSpEWRMV2ilyM2LDTc/laxT7d8n23NP8A
qn9eUCGTLyBjwbhu8mQpN6QCZCLGvje+tgZEc2Nlx7xFkYm4fGt8TOx8mZMOGHJx3NMGirciJHku
C0xqSQMqZUyUMvJInzciQIGdsbMnwEk+VZyrj/J8+KXF+aQO8E8OTF/UbcPP0SKtR1Pp9eK2vWrc
J4rcFuDpV/QtpavfiPoHTnp1q1tLac79KtR4dr/XlD/laW4DVvos1A+PKoSRI/Ixx5e5cCeV9l2S
LDiHLWaPyx7n8emhpgQeAASKbqV5AAmo0vUOOTUcCiljBqSVIVlzLy7fvndgNOxOHsjylUVF9PkK
bKxUqfedsgEny3AFL8kxZosnftuhJ+ZRXx/lWCDDK2ShJDa3q/BLKkKTzF6gxHkb0HdY1PnyyTDj
RhZcsqqqPTdlRQZcos7yFjHhJjY3jNtJ5mLQwrCnBPOsKQQOzVk5AhXFxjF6VuHJY5DqqouQTPKo
CjhyJhDHiwGNSPy8jUkATTO8mNAII/Q3fPXDg2nA8CcGXKUjceCCNFjThypvFF4+AcTsFXEU9h/y
ZfA3+bLzGPjRQi1PKIosOIxw1LeTJj/zZemN/ln0vwD/ACZor5ZkePBxx2xae7zNjPCv3ZeTLHNJ
kTPUU/ezG7d/bUcCWdGVpD9mOpVZ1UVGgVbaGnj8gs4qWCScx4yQhuqi2oo8qzZAAZVraxefijg7
xzURcx41NCNRQWrUKBomr0z9tJkxkMgejG9+1mpEIqTIjikBr2vVqIBpuRzcbvZMPJkOVhTYoALF
R44h0HCxcVhyffUn6C3ZGp7nU3FdiGtxxZpEw8PMMcePI5woDj4umUAorKBCznx5GuWp7FIZTw3q
YrFkaJ24246OodcJmCW4Laxf4pdM6IKd13mOWMm5BIPxrKfcNlhk709K3Ayhl+QztDtbG5pWKmbx
SJpiZuThzbxup3R4Znglxvk0+fBjbugy9pyTmRZGZgQZG5qr5Eq3pkFzVrkN2s0EOTLhbdtMr/md
mblp48iu41tm8Zm3PtW/YW5L6FqtxH0T6w47ac+G3EaOtvQOtuG3CdLV01trzq2nX0TqaPS2vP0b
egfQsOM17X0966VtX+zK/wCzparaEegfUyI/LFnYz4k0EamsPb5chsLbsfEXhljWVN+2xsWfgUkG
QIwAuYoS1QYZNLFGlYuNjlJ5kmkkQrE6ESbDg5+VWHtuNielJJHEmT8t26Gsv5buExbeNycplZOQ
+XaNnksER5DNOIaJJN6hkaN9m3qNsQbqY2S/ZxT5McCsS82NieI+hNkxQhcZ8h5pkhWCKaUgAcdu
GfJhx18YmJY54lkhw48bF7G0nnEdQReNeDIyEx0xsd2esjIWCPFgcH1smYxJjQ+GPIl8SY8XiThJ
AEYORPlytaGJYY9c2QvWFDc+hNMkEWBA+5ZfDGBPkQjzTcXd5JsVT2+jk/cCyomKp8VtXcImGhWN
f8uVpkXmnAomw8nZFixeKGsqTxw40Yjh4WNlwwSlfKsnzbjaw0Vu1px31mZbquDhxOjpjgTokGTD
G3cIg0lrUhRXMivOxF1XuYchROvdRIFTNZVjJWMsTqem4tptK3IFhqauaLtebIZSkfaBparcOSSI
oiZDFJaJWWVY4+2iQKy4GmkTmKNA0aZbhgy122E0OTM2PjJCUu7XvxZzdj4zfYp7lIvTgioyFMWY
pBkS3kVVjnudlzI2yhGgOsqd8eNJ5IXUOiq02Fjy+aHQgMMU2TiyYzJBBIJYa3KMtjwSieHSc+HI
0PDkoexGDpUiB0+QwS4+46fC4wm1Sf4ZvX3rHORtjCx0RipZbcPxtYZmA2uOOCTMMOC2NCMMtuU0
y1Kbg0qhE6VKTbG3CfG28kkzjy4uvxTEabcPqT6R4renyq1Xq1Xomr+rbS3BbW3Fyo6H0rcB4Lad
NCeC2vX0eXo89SOfo8tNq/XlH/lXq/EdOuluEUPQtWbt8OXHj/Hp48mCCOBOPcsJMvHzcR8WbW1q
VWY4+G7GPFCjPyGw1XJyicZJs1cTZ8aCI7Phkw/GdrjnVVQcFqHD8tnkTEknZiHetn2wbg+J8W2u
MLte3RVnRY88u5YmHtGC7F20FYEsqvtW3OUq9Xq9X0yMgxkCV5sfFjxxwjWbJKGLGAMuXGrY+IqP
6mVmiEpCVEYmyzPOmMmNjMp0yJxCuPAVPBk5MeOmPjO0lTTJBHjwvK/rO6osCGWQkKIbzvxTs0sj
skEWJG19cjIWFYcfySD0cx5Nwyoo0ij4MiQhZv8AFDGixpw5srJE0aenCTLNl/dQFtTWUe9nYRR4
qssVMwUYSltMlysTKXn0yAZJrcWbIVhjUIm7ZRxcBHebIF6OhW4IRKzwnlGVMseAsjJNiyZGRCoi
itRp0Zq8CxGKKNgEC173rlqaPWWQLI4IpRQFqPWnFkz2vJW0raLhtSKKlHfnVbkNOevtJOsQMwNF
hC8JZkw2YUhJXMkYGORpYYY2TQ0NT0MamjCtSKFCjtjocBFxualXw354xvDUn6UjjYnGkM36SxuI
0QIHaN9r+QPEVeKWPXHHZLSf48vGAjk1c+LM4La4oET0QGG3OYZdJohLFjSGSLjx/wDE9SRlhkTy
Z89jeDCYr8UxpMfanRZExy3ZparcduCwNfJMD8LdNUYCmUqdY3KPsUiyZOyfKsnHefadj3pcfbdx
23dc8TJkugNLEXeUhnNFC5nYFqxUY4uDHtf5G47VNguK+PZ7YGQrK61b+mnjNtT9BeufoW4bVy4T
peiNLelfU+pc6HQ8FuM9NPfS2h4dqP8Akyv+1per1fW1W9EVf6IivkG1DIidGVmIWhKlo42kOPiA
CJFJaD47+DlwYUMv5ILfH9rnVfVysqDEh33fZt0l0SRkODvudiPi5uRu2PjYiY6/M0RsWgrGo8Yk
CIs/xnZw8vThvU2SahjaYxRRwpoOKSeSVokhhSXNE1Q46RHivV+GbMkkligixBHC8jT5CQJjYzK2
k0yQpjwsW4MjISBMfHZnqSVIkgifJl9dmORMAAJ2aeZQFHDkTCFMeDxJ/wBufV3CKzySyQQrBFqO
HdMtoY8DDGLFww/5psc+fI4kZZJsNGK1f0J5CkcMYjijXyZOprHBkmySXawArMYlUUItSN3ZOIht
XSsc978Un+TLr5flFIsdStDWRu2PJyRCGZnbHxJJm7bBFAGlq5ipE8iBAorpQoUNb88r752F5ANP
fpU7f48w3nrbBbG4R0jpR3bjQ6actAaLVOodIrRtMIphAQheQtLjzWEn3tjtF3KKPLiNEcpbdrc6
HFui3hxWtNhG6U4uotZMkidyJgRyF6duaMwXE3DJxGwN2x80aSAJlVlDtqY9k2uWheCNxJHxTXSb
TcQ0GSrBltpyhy7VbiyAVoEESKXTfMOHCzVSJZMXDyc0wxrFFW77im2TY+RDkxet8u2s5eIRY6g3
BBBXtuca2OK24tGuPO8WRi4+073FuGLkQZX5uQgfLx5oFm7EMbCitqJ7UN77RtMu4z53xyJsLJhm
gmxtzysaPGWCWRoOytq398CoJ4MmOxqx+gv61vo7VbQ62q3pW4D619L8FqtrbjPBaudWNc9b8V/T
PoHQ6X1PKvajptf+zL/7XFer1fjtVuAae2ltLeizotb18hxMOKfIkmfrWJCrVDCq1l5rxVj4+fkV
lbpHAipnbjLsHxb8Zvb1L18wyp3mJ1HOlCxVgfIczBb+Z5fiysrIz5hBHQhQr4YyNp2OTLaGGOCP
gJtWRlF6gxnnCqqqOOSRI1uM1Mt4cPD3nK3DIxfi0WXFjegKvozBVeSfOYvDiLFjWknm8S48Dd2k
kgjSGNpn4MjJjx0xsd2emZUVEbOk9fLmdRDEsKZM3hjxofDHwswVYF/IlypSKhiWGPXLlEj4UTej
k5EWLBtONLNLw5UpAyCYseGJYY+HMkcRyoCwAHoX0b/Lkyv448ePsi1y5OyKJBHHjnyz6Qf5smib
AkuqoFWsuXxQQR+OLhvWIO5718mnafdYkKJqwBrNicHbZRG13YogUDgvVzcmhRtw9a9/1Zqn7tb1
kmyTt3TVhKwxeKPpjHuzfca89cnKEAx8tXfIKlBIVaKzNICks6AYqSyRNiLFKoNd/PT219mFxBJI
WUkEcOcvdjIe2TCP311B+05Efa2DISgJLBGVpVtJHIQQ4NK9jt+/yRmKWOZMwHwKwdXXvTtM2Hjy
iaDXGPa3FOnkigfyRVmQefH2ya8WmUheGKQSR8VgaxyQRiDs+S40mLusF3XB3LP2yTa/mcks1fKk
b8nYZJV3XiPoEBh8m2M4E3ALOCCCzucaoyUwkQmi0sZi3gTLNtLzYuFgzfk50wbIx8qJ1Ze+TIPL
Bw3zZ9u2+HAx6+TbKM7HjxpZH274llzHL+PQHEzJH8kGXk49RfI92jXat8zlz/U9vUt6PP0DwW4b
cHPhOtvRvV/S51erUTrbgNWq2ttDRrpwHQ+rajw29Hnw8+La/wBeX/2vTvV9fbS2o9bcppIMTI3D
Mmckk6YDf5Bu0MMmRvEL1Nm5M1bTs2TuU23bbjbdBpb1flmA80JBvQBJusQYklf1KoK9pWu+xja4
23Z8vNONjpjQ8DuqLkZBmGPhluAcM0wiDfeMnfGY/tMr0Wyt3fbc8R1PAYm9GSaOIHHky3mmKCOL
tE0yxLDAe/RnVFCDIbgyJ1gjxoJHeiQB92e4AA9aaVYY8WJ6JCiAGeXiyO7IlkdIYsSNzwZMhAhg
Er+i4G85lgBwXqAGWWEebI4SQBG6s2GpK+izBFxV+ye0kupojzZmQ/jhx4xFFWXKY4caLxQ1luVi
x0LT6ZF5criynMcECeOHIfxwRXmyAb0RR0mv44bSqEiVbChrz0PAeY6VfQ6Mftg55KdODLNgxu1Y
4Cw6E6kUn6cE3yPcUNb11qdFdFxQWZG7cuHthEsiKhVEfI8kcqwyLguFH5zfk9xqKWaSS9xR0tRo
9Vsm4OOdDglsUrCku3sacC8kQcQY8Vlh7iIwDMPuBtJcA9xUqwNYe45OHJh7nj5lYhPhqO8eTjjx
ZGs148njhukuir+PkixFXqH/AAz8eROMM758od58nIaeW5QzlWoNY/HvkcOenyOESbb8eQeQWI9b
LxYczH3ba59uy/FJXgkowyCiCCCrVkm2ktki+NbYmXPvO0nDnliZT8Y3b8XI3/AZYpokNY+3S5M2
YcfBiu2XLseyxYuNG/eui4OEktCvkmyZQzodh3Wao/iW6vW0fGIsNvrbfQngtqB6XtxnUa34CeD2
1P0HThHoW0vR6V71fS+luHl6F6Gl9Nr/AF5f/a4r/RCrcFuC2mYIvx9w/HOVVuShSYMuGCLxyyNh
7RnZkm3fDcWJcfGgxY/oMiJJIt0WFc1VLEsEHWiKIpZ3WvypDWwYMG4ZOB8bwMRwABwZ/wAhwsNo
82TNrGxPGfQ3f5BJhzDcJZzHsuTktNLgbRBDj5m9SwwRQR5uJ549s3hnZ0KNxz5IhEeOzF5zkmDH
jgSaQRrDE7HQkAANlOAANZp44FgxmaXR2bMkRFRfWPIKPzJqyC2RKqhRw5U5iTHgEEZP5U+sjiNC
WkeGJYY/Q3LIlviYsWJBw5TFhkN4YYoxFHw5btaZAxAsPRyT3noMb/JJrK4jjxIysUx8mTo48+Zb
TKk7snHjEcV6vasMNI/Fk/5Jq+TZwxcHHX7emp5CVu4QcyKt6Jo3oUdOVc6k5JjdI2BWgKNCtwaw
pRdlFl4VuS57Ttv6udxqTU85QpOVJzULIVdZ8iaF48hJ0mxxIbh0UtE8tmXELqsGEFLRuojxlkqN
e1eDnXvkjszGH28JUGp07JsFyEHMVIaTlXgUkCiKyEurg9rH7CGCxubOBXmJfa96Mbo6SLkjtbK/
xvrPH5IoX8kXBzq1ZAEcumYgDYzELplqVUEMOLKjEuNv8WMm4sov2khWrmpikZGxck7lsXxVozkR
yLjDI+U7PBW271gbn62/7dHmYXiiSIMhq8ZqeNCMfFctK/fJjxGWWazzbFhDEwMrFhy4dxwZcSeR
e07HvcijMWON/I6UMhu+aUBviGdmSFGBl/ovL1LcNtOVzwc6P016Pr3oVy05VfgN9PbW1HX24LcR
FW4Lae1uK/Bz05URpz0NbX/syv8AtcQ+gvV6vQPpbp+OcXIVEnPKjFF4du2HM3Ctv+J4uMsWBhwh
EVPRt6XyfdhhYtix7qIocqNGrUBWBO0GRtu4QZcPB8n3PJhWCBnfacD8LG471u+6ZeRJtnxzINZv
ynb9tO1fKptwyDsIOYqhRVq3HAEw2ncPOjoyNwz5TFocaOBe+bMaKJIknnWFIInc6EgBr5TqAo1l
lWJIYmkOk0jZLxxpEnr5TvIyIsaZU3hjx4fDHwu6xpiq0r5crhYYlhj1mnWVsSEqOAcGTkJjQ7fj
Mg4SQBjgyPF/nyOEkAQnubFW/pXqA+SXJk8cMSCOPXK/ySEhRiAvpLII48GNlhp2CriK00tWrLYp
BjxeKLih/wAmRXyXL/J3JV7U9qJpnkWNGMkkYAF9b8V6voeVXOk+Xj45gykyKyD/AI8X/rRXB1ty
3JrLWOLzgg8UZAZgC0Mcccl+Y1PSaLvp08aw+PzLmRxtlOJlilCvkZJDxMC88SySiYLUecCYcpQ/
lZpEQKNDoTXseu4iwv3RjUaWrck7crBaoG74jUi3oWXQUaf7kY2jCd1PGqohFklW6vEHjrY8pocy
RO9EtkY2G5eBIpHrMMWFBi5uLmRwXjm4siITQ40nlgqWMSRRSFV0IBGL9g4/leKkG69vN+S9S9mU
Xr4xvaYTfH9wjws3OfP3J4E2WHZsPJnx5WyETGyPmuNCz/O5r7TusG64vouqum9YkmRt7Q40JU49
d8Vsh5ZIpMZgWjTFxvj+CczNUBVrc9ti3CHcsB8V2V0ZlgeJ3Boh5DjbXnZT7HtC7Zjf1D2q/oGg
KtXvwn1jpbUVajXLgtw24Dp14L629TpR9A+hejpfS9HjtoRptY/yZX/a4x9Her8Wf8ixMKab5OGT
OysnOM8RiZrVhYUuVW1QSY2HwD6DJnTHh3PNfOzGa9DWxq2t62HcXxsqKQSR6ZeYsC5ORkZmRsmz
jFT0GB7dkVI9x+Tb+y0aBKn4bvWOEdCh0tWZiFHxc1cuLVmVRLJNmMkePhQrHPlyKqqJplhSCCSR
8iePGSfd9vijw8o5LZEUxZUVF1lmSFY4Wkk0nmeR4YUhT18icQx40BiUkKIAZ5eKT/lzSOkUeJGz
HXMlZmxYklk9AkAQd2dk8WSxY5LeKGGNYo+HNZnrIAdgLD0cqQpDCnjje8uVV6vRIAx/8j5bkRog
RKy7yyWAo1uc3jxsCIJCNJbS5fFK3ZHiRlIMiUQwRXyMoHu0Ok72hx07YVHpsyqJp0hEeR3Tid8q
bBk8ks0Sd0USwxzr31HEI41HOhVudbi11rBXuyu0DhNC4IpRaQdQNSOTsVUxZU9DHUV4SHyHtTKr
VEvemMAsrhTQAmZsRrw46IEhAYOtBgavwm9Z63x8dw2PQ4d3Sz4jWlwnvDpILEG617K9ppYmuvdF
M/8AkhdSY1xpJJI0CiIszKxQ48omgj+yfuXHyty+by+bePkOduzfE8yDb5Zz2PxwWiyK9zGFycV2
aPSceOXj+aFGzytnkNGwMZsSpUk8w5aHAyp45d02/Hd8F7Gbcs2Zbk6fEt0XCz/QtrNjY+QuV8Tw
pDJ8Y3OMY3xaaRF+K4gqf4dGZNu2nG29dfkO2/lY00bsfNKi7Dj7LlPBtmBjKFUf07n6F/QOp9A/
S30tVq9+G2ttCaI4TwmuR15V14LcJ1OvLU8J9Hnptf8Asyv+19YKvpemPLfLjccOE+OZO2OXFmkG
Lg5GVLsu0Lt0HCKvV/StVqtWTkw4sW+b7PnOSToqmiKtXIUaOsDlJNlnE2BWXmiMbrvhMnxjbPKK
tR4p548eLK3bPlrcseLb4JoyrX0jlaGT43vibpjkFTowBGQjYksORHlR1PkQ46eGTOMs0OMkeMZp
KlkWJYYWlf5Bv8W0QZvyDcs0vlTNWB8hzcVcHe/yoUIZdJZBGkUckzaZOQe7HgWCP1ywUQAzyVkE
zSgBRw5UzIsEKwRn/l5HTXInWCIRzGo0WNOC/BnTNlzxIIo+FmCLi/fUJM+TwsQog51iKWHpN/ly
ybDEBK65bkRRRiOM/wCXMokAYgMj6ZrDI3AKAKJAGCCx4ss91AWHyfJ8G141liGl6Aucs9xVR3DW
4vbg9jTuqLA8uVkQZMQbBilnDDLx4o1j2/DwldgaYc5XVaS1qFCjyXcGvptYvk8N6QqK5EmK8QNA
630IpwbTq7FzF2AgRiYisflJLbshcKRMqlnuZZ5xIx7kDzqYe7x8DVkr3QYzEY3VeHdEJgibtl29
hfSUXMLd0Y6WqVLMjBkVe541AV0Ijg8njyZUDI8pIuK+Pz+TCyD42+SoTtMl+6Ne5xGkcW2M2RtO
PIXh4sgFX0zVIjZljm96kjEkeM/dHqdfkO1DOxWRSZgBRtVyCWDKtjS3CYcoiwznZHj27wR406GO
alBYrBLC2M5kx/pCAw3zDzMDLGSGoJgNW37/ADbfW17zi7mv9IH0F+C1Eevb1hwDQ6DSx0ueAcHQ
jW3Dy4OlWq3Gded9b8Bo1blwngtobVtf+zK/7XAPUtpbQeo3TdYDNuyYssxw/j6KFw8ZY8fExsVe
MemNJpUhj+QfIJNxmvfUHkedWrkKJq9Xq9Kefxrc8X8XcNyWNdz3Rnba9nbcsnFxYsSD0Pkkxjxk
n/Hx83MmzcgSK4kQowN6Irb86bByds3bH3XF1ljEiLLJtmXLmRqkeKztPkiKosdi1SypEsUckjbj
nRbfh7jnz5+XfQG1bHPbOjIMdSypEscLSvpkzmMY+OIV+glLZEoAAyJvEmPD4k4ZHWNMWJmbLlZR
BCsMehIAeXyPiweJfQ3LNOJBtuH+PFxZBLvkP444oxFHw5LeR8j72AAHou6omIpEWYx8aqFXW3ly
3YImGhWKs1yI40EaU7BV2pGll0zn7YIkEUQ4lPky6+WZXmzQoRBqvIr9+VGPt0nnESYsRVdC6K0+
RHAsU8pfuPbnzOzRPDA0kS5kySZCzY+N2Pn4z5MaKFW/NzYP20nQUa5VIwC55vJW0L/l4h0jkjeo
3aNxoKGrGjashI5EaBo3XuAK3pCwPkABmDGSVgyTkg5ETsmUpLB0MQalNdKNe5qQApHZlI+wHW+m
YnfjVgPeWr09rYx5e9SWK40qMe1aRrTNTjtbLjMc2K4IBufj8/jzZo/JFk4hz9szcfxbg2Gvf4ww
+JLkiBP8eRxSIHTGfvhogEY6l8bHcvHobxZPH8h28YmdJECHiKkrQPazDtbysUypAFAJOS3YMv8A
ySrtwAyRChilkYbBk/k7X9K6q65vx3aswZvw/cIWxvie6SttGywbYn9dvXtpy19uK/DfU+per/SH
Trr78vQ9qtwnTrodL6+/GBoa6Vtf68r/ALWttbVb6k86k2Pz5uPiwY66Xq/0nyzdLQ3uwGt7V3V3
0WvSozmTHmjo0FZqXEmttoyYJMrPmc42O8smz7cm34tX4badBuE8OVLuW4y7hOzXoG1cpUIKmiL1
tm4ZGFPiTefG13NVyo/jcL33DdcbEysXGMa079infNuDx79jiP5J8kk3Z4IPIZo1Ro4S1fhyEfG9
qyZ9zAsJZkiSKOSdtJ51gXGg7D9BlTFFggEMZIUQKZpeIk5M7usaYyMx1y3MzQRiWT0Jpo4I9vik
zMnQcDsETFUsIx5sjhZgqwCy4alx6WUPIaH+XK1kcImGnbFlt3UAAKj/AM2Vpu0pXGxoRBBpIPNm
UOFj2rhraPIlEMEV8nLPMnV27Y8YXjAGjSgUg/IyEzIHbGzfMxzsifOil8uTiyGeWFWem7guJhNF
LLiQSuqIik6pJ5GuLuLU4fuA1AqflHlm89bUD2C9uC1SBmqcfjg2MSG4HAXUVPIVV8qdJhJ5gRCy
s/jcyxu7cmt3EN42EbSvETeLAjLJtyiUJyCAVbhfksIsj8k0GnKmAYMpVsGSwOj8lha0tNyWWRxU
blKjkDqT2z+0yXEkfkRElgyIv0wSmGZWDLJKIMzOnfLzFc9mOmRkSfGoMmDGzHSKgwYcUf2ZNGmC
w5jXim0niEscEgliPFvG2ruWHkY2ViyMSaZVqRLAnuWIWokk4tkokscbZt1zcaH4buky7/8AGIsC
CLHEVfEJ/t+s5/1m/By0PX1j6Qq2t6OvLhPDcejfT24PereoeE1aufB7a8tOtGrVtf68r/tehb0+
f0N+C/pM3aIMvH7jJHIeD5Juz4GIuRI5MRBsqi2hFEGudKLn4rsUaJu2AcvFzPju541YO1bh2qq1
JciKI32DZBip6O8yzpib1uG3zY7G2qsVLKJk5giolLPtsRiwadlRXknzmJw9vh/LzNxk2vbsptxA
suRkQ4sO+/KMzcJocmWGSXcMrIICs0PaEkx43ZYgq7Vt+RnTYeFBhwyyrEkULzSaSyLEkETyN9A7
rGmMjO1Tlp5QAo4cqU3hiWKNicibXKnMMccTAqqqPQymnz8qNFjTiyAZnyXKRxRiOPhyf8smTd2A
AHpY3+WR2CJioRFrknyPUQ8uTWQ/ZFjx+KLQ2zN30Y2GIt11vpesxj4lUKvybJWDa8RyK5Cr6MQB
kSEwooBFZgM0zsMZJnTDxAr4mJ2tj4cOA8WHi4Yhhx8OHH1vTy2aOR5XYgDzw+SXMuIkCJKBZX80
i8xqTzyT/jnN5a2oWg4gOe5n7Ih2RKKFX0JqerAiRorJJElZbxvQIp+ywN6VhZjesaVo5gvkaHJM
cMGWhKyKSDfQ8Eg7gGAq4NG3dwGsxezKwzyjIMdTGwUkOa9po+1R9xxpPEA/bKhurCilmfFkGRHy
oVss/mwPlErQ4WXkxy50+Y6P8f8AkabW2HnYmdF8hx/Ntew7hkwZfFkfbXXTLRmgIXKx4H8kekYM
WQdLVbg+T7W2UiwzgGONqkxilMpVpGIWpz2IASfjhB2Wtxw1zMOVGQ/FpTHun0XL07Vb+s9eI69O
G3HfjtVtL8F/Qvparae2ltDwmva30Z1tR4OlXo8Io202u3kyv+1V65Vfgv8A0MegasOF3CLvmaNx
yJezFijkNPHQOpFdtbZi+bI2jJxI4omiyHzDFiR5OTl7xkZ/x+fHiIPfsOxmPS3o5Ks8Odt2Ziue
BHKtNH3qgLN8b2BpHHITTRwxzlZKk3zJyCdnbth8uc2JiRYscsscMfyf5C+6ZGgNI5BgYsUhAAx3
mb49gvh4MsixJFE876SyJEkcX5D/AEJ/5U1ZEvjjhh8KcMsqxR4sTVkylRFEsUejuqKJGY48Phj9
DcMkouBiDGh4pHCJiqe2M+afhdwiQDsTFUv6eU5WKKMRx5X38OL/AJJJ5PHFjx+OKpO2bItpNIIo
trhKY+mUxEaKFTif/JlV8tzPJm4oIiOsv3Ukas/eI0w8jIlEiSK2LhZMmS23mTIkx4ZWsKLqoBBH
ct6eaKMgqayBP5ZMows6TSqmBkOYsQpMekwasQDxoOWlqtWYbKxu1bcLYug4FH3bibkCyCvb2FSt
2okcmRIcdLTYLODt7qqxPIZU8bRqzO2LIBYqSQ1RCPukjKr5Kx4YWpVCUr3odNPc0/IIvPyoSOvS
vbQgkbonbkYh/wAmG3dj1KKXohuKaxBhh7E7keSRKhbuiq1SD7cfqK+OT2m+UbtDHlyuGlkvJADW
BuOVt+RtO64++YGXiz7dlwyrPDwuvcsDFo9MP/HRPiydMtT41YOtuJx3IfiG6IH2TdsaJo43qeOD
FDA3jhZE2na591zN62sbbuXwzJEm11evk+3iKbbpxjbje4+tP0HL1j9Lb0La34TbU8PLT34+fpdP
XtwHU8B6cPtbQ8PKjpbW/CdefDtf68v/ALVD1LfQW4OXr30vxfJt18UflWCgDIWIJVitFQ45g6Rn
taNiJcLMcOvyOPFqXIz99nwsGHDjIBoYuOH9TKxo8mHeNsfAyNRe8cvafi+xQ7hltjPAN03eNK2m
TMATZ5MhsjLwtrggxMzd5Yoo4UuAPmG6T/jailF6wtpDYOB8cny2gxcfHWWVYljieZ9GYKqBsl9b
+tkysWijWJHlRKk+SwDMg3WOTHhyYp+GwypXcImMjM2s7jIlxVMz+hlZIx4tvxmkl45v802TIUji
jEUfDKTLPlXkIAA9L/blVGfJNrluyxRxrHHkXkmp2CLiqezTcXLsAAND/kyuPE+9pHWOPMnOXlAd
oOsik1iXs0Ykiji7eBJ0YzPYY6eQnLx0jlyjGjZGTeLHdpXjYrHhrGwghWgFDdNck2jxhbEFc65a
5rG2mIO3H4k/Vn/dNYBRQtbnoR3UqKtP088jN5iFkZBThi0EbipHmZj3CmjYKbmoZZDR252jxopU
kcujIWZQTbQ17NzGXM0MMSCSFVsLUdetbun2wN2zYDcvdxdQTeJrqL06E1KV73dXZEFsWVRG79kg
puax/bItjUE8mPNv2Rg50bnHRYGFFSKvXxncmwd0+VweSTZr/tVX4U+zJ0m/x5G5PHFi7Y4nwZ8H
eIaw8hcvGxyytRHoZW24OVW4PjbruWb8NSNZtl3bIk2LaE2zF+Z5MMu67Ju8215mLlQZcNbthnN2
8iUS7LuuPlYHqH+n247fUWrnrbXl6tqHEOG9X4Trz1OvMVz4ete+ltbaHU8R4zV6uKvV+A6W5170
a2v/AGZf/a9G/r2+tvV63je8bbIpsiTKyGwcuSp+5ToCRShXpkKEGrdq4ik1G/YViRKwc543iv4+
K3o7ttsefj5eJJizaKCT8b+Nz7vPPtz7A8+TvOdjRfF1aaXIxMKLK+QvKcHZnkcAKCQA2/R52Z8i
3T9wzeBK+KYr5+V2lKllSJIonyG0JAH/AGiAAPoJ5lhjxomWt/3Zdp2/+R7jZ8mVzte7zQ5e0xoY
NcmRiY0EaN/yp9cmUqBEsrbvv+37PHN883V5tv8AnGQ0mHlw5kHA7hFIfOyVUKvFLIsUeLGVSP8A
zZHDJII0x0EceKpY+lK4jjxEKwzP44oI/HFrby5dQHySVlEyMAAKNYf/ACdw0Zgq4akx8WS/ZDAn
ji+T5f422QL3Ti+oBNRAPWPdX7gqySyMYe4FpY1WbJXtxZPJWS8YMcTsBtzkDGjVlRVBJrrxGs5i
IVHbDw7g3KhzMSWj4k65XPNblQHIML34J5PGhySzTNKIu4mkXteUhVkcPWbCFWN1Akh8QLdzQ5h7
IpElBiU0qBAKPWrcutMKy4r42Ae7H7eE9NyXvxQbHBcCTToUYiSr1uGPzty7mQoFVp+YHMVMO1g5
K8rZu3be0RBNDkZxZh07iD+8ZOfB8bn821cWULAW0yY/LCDj5GNtcuWkgz5gBnbXNLu6R4UWXm4s
IXM3LFl5HjIBGJtG34UltflYI32tt3fO22TZPlOPuJr5FEMbd1kdDsHyxII4t22yahYj0LVb6O30
HP0rfR29TnRocNvVtoOC3Bzq2l9RbU109G3Hy9c6HQ17aHQ8tQKFW0tr016Vtf68v/tetahQ9C3D
b6iR1jTdflphGTkz5UvxjaXzJ/GnbvGww5aZeHNiSW1Rgw2P41l7xk7z8c3HbMyVjEuDjdiyOSfj
+3tkT+meG1fIdmXMhkRo2AJrYNhn3WafccraYIth3HeXycjbNmxTNuWWJVTIm23aIcJanmix4t0+
Q7jlY5mlUMLHUUg5fCpWjy8iaGTGiibIfRt321MhN2ws/IVVUejer8R5BAcmZiFX5Vvn7rm6Qg+T
408smBpPMsMeLE6rkyMqwxLDHpJIsSEyLW77nDsW3ZWTPlz1FG8rfFN7kwszGniyo+R05Vly+U4s
Jij45QJ58qQpHDGIo+Fz5p527mAAHFbgyf8AJJU58k2sriNMZOyLIk8cUKeOI8hjAySabjkeDF23
H8GHpmXMaKFTS3BP98tfLc0TZuGto7UdIn8bL/ikQD8hwWDysCVyJpIcWfy/hDughWGPtB09xXsT
R4e4UBWdchx92lq9qz2uKjF3UWHFH1lPdnkgm9qXtVQ7uwvoOVZJHbBjhV3DlBh4vlfIw3knzInS
TCiLzSxRGs/FMUkCtLIn+KSVfGcfLxwsc0b0OdWr31apF7o9sY9p60KOnWp174awX5EaS/qVCyRN
3xzdwRZBIhwWXHZ42oBlCy9zRXANTrcRt3JW7oVc9e2wI70XmCKweb7VuuHt2NNvR/N4XQOuK14t
JBJLkIioooZcTb9kYuO2PsW0vinPi8uHtshlwdbehuPx3bNxmyfg2IwX4JPfA+H4WLJat67v3Ohe
seGeeTaIciDb+G39Ut9X7cNq50dPb0LaW1tVqPpWq2p4zrbg9qNW4jw2o6G9W0NDS1Wo6nXpQ4ba
ddPfax9+V/2vSv8A0rMz8XCTfN8yc8hCK2/DbNysLFiw8erVuO1Y+dHuezT4MhFtALH49Fvqx529
ZEsk0iQOJHZtj2WMxRxxxrVqtwDiNHUUsXkr5PixR7ps+zzbrmR7VjYGPDnZqJnTfIcmOCZsZHxt
43M4eDBhx6fJT/wd73eB9nNOtxoEcrtu0S5LbhHHFlfHcqPF3DAeNZpA65FfI9yOFhoXr4tt6JB9
DMTKxKRpv26ZGZCb0AWOHt7TNj4Pjn2V8c4Gif8AKnd1RYF7m1Z/yJcdXc//AOQL/n0Or5axnb9u
3DOmg3T5SkGBHPFh1kyisWEM/HK4jjx4yiR2mn4ZpPGkEYiixQXPpGsYF3rHHe+s58k1S/5Misp+
2ONBHHpn9uXnAWGnKXK44f8AJk52UuLiZErzOq9qnVmYHzeSOEUuI9/BCAABqK5Vzonhvq1jQJSQ
Gpj3TsbyX5jQ8gx5ZzXNYwvkanguaYRK0ZBF+bXoCgNLUVuRyqaJJVhhSNbXoxqR40QSd3f41ZUw
olmycVZl/D7oDgOqNHJiyQS+WKjpyFC1HpasI9mS/WunAQCZk8c2Ex7FbuXnTpeg5jkgHYrC4IJl
ilEqZWN45gzGONiWx2JFSC6wmxBrcYvLiKvNq7ijuAjmsElZvwcbK2jJ2/8AB2SbdMDHhg+ZbXPl
deAXTJtWZkjGhgxTi6Z07RJl4X48bN+55lqNrbFkJLGOK3HbXfPimflZ2L8HzXOP8N2mKsXbsLEH
01vobfQW+sPBbTrr00PGfTNc6tR4DxHWw0PD78FqtodOWnTW9W0v6I6a9aYHQDiPFtn68r/temPW
v9TejevkO157ShnvjbZNnPsuw4+1pwZOLFkR7/ta4M6YrePbNhy5F3HNz8XHGQVpUtWxbM2RItlA
NXq+lqtVqtx2o0bAbv8AIwkuzxZ0WLkYWPPW2bbPt4WFAdLCuXB8vm8eHkJ3ro66bNkxuL88wmXM
wX8WTNk/kZEe4wwY+8/LMnMqCXMzcjavjMhZEVF+gkYqq4ckUWZ+Zlnc1RMJsOa+BgmNlVRWz7P+
ecfHixoqynd2RFjRv881tct2cpHFLp8wzXyt5pIJHWPbB4fj75mJmDSaXxRopldQFXjYeafLdkih
iEUb5EMZQpICCDobzZGSS5AAHpZLERooRMlykUaCOPQkAYoLUSAMUdwpbyZGksgjj2tDJJo7BVxV
ITilcJHir2w/LsspjwANkUTR6VO/bHCjCJFsNeuh1kVlYHiNX5ZH3TJCO2WBQ8b91Cr0KvczEBMw
3esEXyVN+E9epyJ1hV2ZziRhYpHCsedCraX166FgA0hAM1z9jh5FFLkR3BVharU8ausUQjX3vzIN
O/aGyGEoYMPdT2bi3S+vvpuKduXhNZ8Vu6CjUoqA3jjHdXj7km74UGSZ0+7uS5EShAKK3r9MlGxE
6+KQ86aj98Smx2+DuOzFMObO3uLchlyOZi1q+G7rJm4WuSp7VZXSMfm5U0fkihmX8bER5pN3yZY0
2pYxgxQPKd0xcyU4uIm27rwW1NH0rf0Y+nb0rVb6E0eA/SHQ6W9C3D1q1vTOlhqdOWtuAcvROnPh
PTgOpGlqHpWrbP15X/a9C30fL6l0VxuXxiHKbadrj26DiyMDFyTJ8ehmy5Hixod2z33HNiitWzbC
01Kiout6vQNX1tVuIippxO+LtOLjz1arVbjvXzSRQkUh7pUMb0wuD1wJfDk5WWIYXk7a26QR5S/J
osBM3cMnOmxsd8iT47scG3Y/0ObmR4cMubkTzxbvnxNlfJNxhTcNxy9xlEamsfHMpwtkaeWKJIo6
nlEMeLCUWeQqIoxGlZm87ZhU3y/Bd/5iEbD+Y48OBtnyTAz8TcymZuEeKzukaCNEZq2nC/CwqZgq
mVnOPD4k4QL00oWpdwxI1jnhghzvmOCubuHzDdMpY97zozjfJ8gJsHyqfKzqnl8UcKCKLFBc+mv+
XJpryZOuUW7FUKuUx8aqEWaQRx40fZHpuUn24kQjg0yiCqjtXiy+agWHyTKE+5YSf47C2gHOX/JM
guwq+t+ByQrMA1xQPByq1Eiom7ZsvINsWdi9uyUHQUTY5JtHOC8/7Zk1hYUsE/LS2poWVJHMkluU
bzqGMxkjIKDX3o0L0aJ5yZFkYsaRjGkrN3KyVHZAGq+nPgYU0akwkqGYCsjuGaTy4Bpu6ffjtaXb
2JU6SLesWxjB7WUhZs6Go4wpVpIskRhl83dDHIHANZSgOpuK3aMLNe1GkbseZQrwyyitxypDJC5h
wc8/5yb18O3BcPdvfTI3LG29u7b9xx41jx9GYS7hLJHjw4EDSNjwTbfLk4TLDFhT4CNuXm+SSshy
eG1Wq1Wq1W9Q/wBWP9CPDy4r8F9LcR0txniNW0IocFtfbXlRq9HgPpjpodbVbitxbZ/syv8AtcVv
WH9Jvp8q3csYICa2XYb0AAOO9Xruq/HLG0ixwxwpxW4vmcpbM97CaMVb7T1QEszsornQFYkEcsm3
zY67gu74zv8AQO6Rpn5rZmQBerVJGsscmNLGVRr7JhQwYesYOTOzBRECx3f5ft+Adw+Ubhl1LPNk
STvDFj1c0rsoiyO0pMhY7XuUY+N7c7yaZD/kS40Ylfi7rJ8n+QTwZOdn5zRww7rmAbXLi4WPMqNS
hnPw/bFk3OlPnycj7yoAHpSv448aMpETYY4uusdpcmuUmVUt3m0PKgDkTgW1U+TK47eTLzZfBiTs
0hRexDQ6c66LHdnjFhIxRY27gNL6u4ULIrmwIlZonVrgGvc6XqRu0SNkCRnZyvJnN3XS9EVldIh3
Zy3Nf/PhNZLduNWJiM4yVw46hfDNRzgTg3FCr6k2oksZyVUyFwLs2fIVcESQkXpXQFWBoVfgJ5da
cWqQlR3xJBM0V0YkaDg3Ve6BT2tt72lNCnrGNnosSJJxKAoSTv8A8qQlZGiVJUHa1Si4WUob8tyh
EmM8cgJLCia7u9MRf8h7pHzftfA+NPuu3OjI0bFH2fN/P23SbGhy49w2Ofa45nyi22fJ8lYc9MrE
w8bNTfJsfH76KqV3LOzMTPz9whwMOKOXIeCE4zW4jR4Dx2q1W+jt9Tb1bcB9O+p+hOntVuIaWq1W
058B+hHAdfe3Hz1vrzq3qHg9raGudWq1Hi2z9eV/2voOf9O+QfI4sIJ3TSbHsYjHL1L1er1er8Fq
tVuK1Wq1Wq1fNMWz2qOTsadO0ymygXLdkSQxF0KkVHE8j7V8Njmhxtg2vHWWHbNvTboJszL9eSRI
03PcHyiDehRNDlV70GUHYDJMhBBqdzI6qEX5J8nxtvG4/K933GO50gfwUxLHS+gJB+K/J4DApUrW
VMyBIhQAA4sgsI9z20gZ+4rA8u8P48nKnyWxsZp2/b2YQ4Xgr4Zinw5EhRIkEMUALN6cxEktZBJR
QFW2mRJ4ocePxROwRMVSIjyGP950ynZYsVB36SOETFUiLiJsMYXX5VP49shXvyRfgyG7IoVIjRe5
pW8hQWF9D0vRLtXnW+QxWleRqkUvSsFoaDRul1eWTJhCZuMgVepB7hV9ctq20d2UTal/VpfQ0ayh
3YsS98mTkeDGN2NrUpKmHI7grBgDwk9rSkdgBIT/AGZvPJwW7owaZAawwBUxAkvw8rMeeY/bEmVH
aRvJJjTXINxw5kYfGrBkHedG6RntmsABYmbCtPPGSY0jOTIpGPkfbDGtwOji4kX7ozdXUOhiQE46
GsjEAAJUu5jxcMAzbBth3bcYIIseL5ptAxcrofgWddddwxpszEzNrlxWy8eUJg7pk4u2/HcPBOGA
AM7IfunOHg4gGbn5WDgxYcMy3RW7lNDW/Dfht6d+C1W9S3Fb+g2q3Fb0efrW4SeH29W3pX9K1Cjw
dRx2o6247amhxW9HbB9+V/2rcA+jv9fy1trlZUGJDuvyrNy2dST8X2szzWA+ivV6vV6vQPo2q1Ze
Fj5ce/bZ+35duaKqr+ziWpYMSBxmwXy87HihWYBPM4Ow7zuEWXl7lj4kGLh5O5TBQo9fLWNoJp/y
CtA6gV2msXImxnxsmLKx55fFHjQmNPk26/te1vI8j6IoJZu48Skg/EN5GQksqwxhpUSKMRrxt3Vv
CQ50u6thnJpVLHAiCRoOUWKcmbZtpG1QR/5sidu4gBR6RIAxkLnxyUAZMmVo8dIczbsmnjZRJeXJ
rJPdQ5VkuyoihFAvUaRvUuG7vAnZHpllmAAAtVuHKcrEi9ifLsryZuCPtvperXrNJZg69/kPYBrf
lUjELPkSOoISNJGkjjJAwwrE/fIK9rUbVK4s0pLkljG5fDgTuyGX77ac6N6zn7TtMCiIqO3HYyoA
dTRo1YMmOpWfNJKjpWDtpmU40KSSOIpkyBQdTVxVxTOLTZCJSRmUJIqMwVqXHiOUmOIchIbiRo0M
MgNBu4iudW0tRomvGmTLkYjwUUIXmKgfyR86HAw7lWBi8A7ADcUwJDGzSS2pTdZGIXJyHSWSTHyK
nHdUMreKBvsHQrU97xD7alhJlMEoEkMzJ+2CNciczSBGixfg+XiyYVfI8E520MOeybi227ikiyJq
6JIuf8eVxk4bq2ybhl7HkneMSTHjEW3w7jn5O55W34EeHFomYi52lv7EtxWo/S8uE+oa58NhVuO3
pW1tXvRHo29S1ctSKIq2ltLcHLgGlqtVtTp0o89LVbTbf15X/a1t/WM/JfFxczdMvcXyFSIRLJPL
sG3vh4f016vV67qDVer1fi+R7YM3DKFXy2KIWY6AkUqO5i23JkoYOJGYJYYZfj6YuVlWsPoM4BsS
LGnhYdRoOdAUBXIVtDFcmEGeZmCr8zxMnK2zQQzFWaw9D/8Ax/CXzmYZEuMryP6G6b5HDW+7gGwS
bmsBATio7zRfG8oRbNtMuNPkOQqKkMOBlY2Y/pz3K46qisyqMjecTGh3rfcrc5sTPzMU7b8xyvJh
gmOorSTUn+XKHOvkW/vgvifINxx87+bZaw/FtxnzYtEHfkccn35EjqibjktlZKAKut7UXiUTgGSK
7FatqBepXdZbdzZcnbHF9uOAQschSojzFA0WtTSEnIjnIU8+dRfbDjp2gdRqb1uB57enbiSG0eCL
Y3BbToclCrNaaNlZaT7nmy2hxzNOSSWIkcBJ3u8skdHJazTuaMMylJygfIZmiKsoY3LGyrapfxu+
GNQYJxSOrC9cqvRamN6nmVFSSRT5ZScMw5cWVjmCTBksWFm4ZoykqWvjktDR5VIO5pIWCRAdmRH3
o4/yTErFhZDeOe3kw8gmo5Q4vWQPuiI7RS7fkZskW27nIZMLOjrOg8iwRY0S5DSCPZ9wk2zLf5nt
J2zbM+Lc8H5Ft37fuo6/Ds78rZ9BrPjY+Qsu0YMiZ2HuGFK+fum65OBt6Y413TFLSg3HBej9df62
3DbgtVqNH+hWrlw9aNCrUfQtp7nht6PLU+keK2vTXlVqI0GttDoaHoWvVqK1arVyq9XrbD9+UP8A
lf1vIiWaPddtfBzGxhIuybDDhR+qPob1er13V3VemAYfJsKPF3DIjgnKbLlyU22xQ0Jduhp9zltJ
kTSVjY8s8k+UkSbTuU+HPiNLJjWq1W9bMyIzJuuccuRavV6TqotXOlQ1tyvlZCqFDf55J4I54sn/
APx+zT4XwfaMcfMdygOR6PwxFG2+OOZhy9D5LvskcjSo8e7wNIvMVHGXbGxfG3xbGxjm0agPml33
Omzc/wCKIF2700bySghK3/fGY7hlSSmOMFpFBGBjGSZBZZXEccEYjilfsjx4+yPMyVxcXM3SXJfH
KKmGoLfFpWXcbVK4SPGTti48f75PkeX+NtkS9+QBwSzCMYzAlpw88HUammfsWQ3PjAXHx4ZaWJaL
oImYRzCJlIaUUXanmRBDk9ssGXFIZFCyopcgXAFDqL8Ex7powBHlNbHg5Q6+5q1WvXSmjeBklRqy
WjV5P80Vmrq02MiRYzKJciN5WkjeMwAGbJk7sS7GugV2QpIJFTtWpp7jsZgssqqt1qJyxGSVoZS2
OQtHJAqGRsmWXFl7nRo2qGVoZdwRJ8eA2lfrrbRgrBk7JcFrxVyKr+rElZXgawIDBcceTJxldNuU
IJ0HckR7ljYNU4qIi46bdP8Aj5tqtTRRMG+LbO02/fH3wIfKjDD27Nzn2Xbzt23fPoUMVfF93O2Z
6OkicVhQUDhzYfPjY4ZYeA/0e2h+tNH6zn6V/XP0B0tz1Ol+I0dLVbXnXOrUdOdDS3ERVqtQGgFc
9eVWq1cqvyvV+DbP9mV/2fp7etf6fc9vTMg2vYTE9rD+g3q9XrJy8fEh3LcF3TcTt2dPLDsXyCUR
/CN2kB+B7lfJ+FbnBGkCB58wGOoXaOT47ltl7b6+658W34XxzbnyYs7Kjys0cqHUUgoC9dKysjwp
sSwQR5BkRI4/EunyLeF2rBlkeWT0MeEzy7Vh/gbXFEI045HCIXbIyIu5gy3o7dB5Exo1rsAEE0sE
m0ZpzsTJa4kVYsXGxpMvJghTHh9KZ/HHjRdqb1v2Ht6wZ+yvib5mbc+XiymdYsNTWyo77hUn3zU7
B5yee7QGfbuxTLLAe0ReFPh00Kbleso97W45m7I4E8cXy/LD5WEp7BR0mcIl2kKYMKwNjxwyxNZg
dWYKCXemEKxyL4poGdUJEaiciRyXbGhnlRpsgyGRyxNcqgsGjjWUqgFC1DTlXPRyVDyqsgyYQubk
xNAhKi+vPgIFdpFSQRyU2NKhhmaIplQqzf58jLltEmLORGsopscSFsQAr5CrwsmnOsPGjEeQQJTW
I0WRW4QeGa1RSFGzfFMsaTSAyOa50jvG0e4TK+4RwT4tGo2Jiw0753N24LUBYZS2bBa0lC1x+oFo
ZIJlZ7HtKE11pcYo8t8gxkxyxkNVqkAZVYBhauZrb5/yMLX5Jitl7Lj/AB3dp5NtwUwcKvkG0jdc
B8N45ZSqjZvlWZtkfxj5BlZ25fWW+oHCdLfS24zx24vb0LfRnS3Lh6624rVyq1W9G2h0tVvR51ar
cXPS1W9Kx0tVqI4BbS9E1fj2z9eV/wBn+0rVb1973eLasTcd1zNyl2LByNwzYMeLHiq+jAMPkvxf
zUysrVi4csz7Hhfg7ff15YY8/ePlW6ZMz4eS0E0TpKlqWlFXACvZcOCXLyItv2yF8ff8/JzITMYq
+R7ou27bu265W5zWPo7HgnIz8VCx9B1Vkz9hihhH6eWgq1ye1Rsue8G4QASNvcrrifGsBo29OZg0
28/NwmT5jIiMzVnuDWLOYXw8HOnxsDBhwYKxz3ksFGMyEZ244GCufv0u7Y2Phosytc18Y26FYKh/
yZPHN9zOyom55L5WTFGEU6e+U/cYsYKkuWzmR2clBKI5iteZTRkNTy9qwSsYu9i325EfcqCEDJky
ogkoHLEldAqqGjVZcnJZRGkSeN5RUUDUbLS89PbS9TG0eFs+PuK5nxbMxIkS8w9A9caOM1IFLlCK
YGwjU0oYUUYFmLVckhL0VtXbXcRUkCsLFWRqPNrVjEibOYut6ZGAVD48YBIWI7lwx4uhqPljxQNL
GmP2nJn7UwofFH1I6cEsnjjmx/KkXdDk9QORYEOyB5zjgLjydyhipkABlftpQBJlxsFge40nQkow
I7wtfG5+6PW1WHB87hEWYFJOB8X3XNX458bO1ycNvTP9ft6J9G30Z9A6c9ban0ump4LVbjHFb0jo
dbVarVavbjNW0tVranUnQ9dL8B02wfflf9njt9NbQDS39gTzxY0W/wC7NueaASfiOySRx8BNh8k+
UFq/Fd6gwFU/Htl7avV6vV/V3WfLg3DNlmllIrCz5MZseeLIVb3XlWblCKOTcXmaHcJa211kO2yD
P3OjXzDcpMzc40JrxkU0YNMpHFEoA+PZmZjZcLmSL0Mt2I3/AHCLInvfRQDVubNanck7ahfKZo4Y
t13CfNzcOAY2L6RIA+Y7jLjbYWJL5kklTZ4SNmLUhAfZ5sXMx6yWKxooRMolhuGYuBBuu4vmZUu7
F6gkKDGwXz8hvimURh4wxcaZykWOhSLjQ983yLL/ABtsiAkyhRo1ISqQjyZOdOZJACaixBEmRMqy
jMS0mQFjbJd6Xxpt8ZPb0oY6xY/cS2OxWTM+54lBPjciYT9kLtDLLjxzkYcYEcccdREGSZFYxggC
r17UOdZB/wAex4kbYWfLGcHFAacV76nUGxZ2KhRV7Ve1XvoTarCo7Ad1GiCFDSLXcpp0DhF+1oAx
OPUUfjqYd6kEFMRrBYlEis6SQyRiCYPG6mGVJELzTgrFK8deeWSocUI171yFDhlXvixHD42Weybk
RUgpgO9JHQ40vMG9P+lzcicKVc9naAim6tyMnMRntE3XYsjxZ3oZm34ecsXx3Z4ZFAUfS2oDhA1P
p29XuF/qL+tf07eifpDbgFW4rcFtT6J4rcZ9G3BaiKI4b0dSavryq9E17aW4ds/2ZX/Z+iH9n7hg
RbhBuPw+eCtu22SXPhjEUXB8n+QMTHFSRgDYNlbIcAAaXq9Xq9Xq9X47V8iy1xJGftqSMpVr1DkS
wNtm5flBpEVdylkknxo+xVaOLasZcvKl23b49vxqysiHFg3CWHc8nwvCxNyRRW9PHwKpYuwJ+HbV
HmlFCLx5mXDhY+4/JpcqeJuauKBvS9DyVDGAT3UjmN963wSw/H8dsvc/TyrsnzvPx58zgjUu/wAV
ysVNuyMmDGiw98/P3Smyo4hvfyCbOeo173MzpLseaEzxz0yLPJxu3akC9sfyzLEuZgp9hq1Gsk/4
cME0pLNhWEmRlPPJHC0gUjySRo9COS6kvEVaMqQTPOfxgRUKFRkRuViiVK8go5ABMUcqmFxQDggy
GrFFA1GlwKFZRsuyu34c+L56xkKZep4DVqYEV3kUZhXkWu80CWqwqwodA1qADDyG/uVBrsoxsKDS
Cu6Q0SblTaOPmwBN1FfY1MpFdihirAeGG/ggoRRApTo6gdNDqKJtWEbJnqPBjt3QGmHI9XWz47Ne
KXuDfciECSYL5O8oIGs45VIbASFqt/kdSY8dyhx5Vng/oo4bfQ24bD6y1W/pVvVFW4Twn6a2tqtp
ave2ltba3FE8F6vV9PfXrwnpoKtz6abZ+vJ/7XrW/sc8Fqt6BANLBCj8HyLdRt+EiszIlq2TZXy2
RFReO9Xq/ofL9vapbl0kK08QOmxi02fPjqFCyMhsI+/IOz7f+JDp8wbuj3XJSbKaVO2bEKi5FcjR
WpI+VAElvsVB93wvD7Mb0PluRkZaeQRKmWBUc4Yo167rAi4cXE+HNjC9ISX+NbccTC9Inlvm+537
vIXL8G14rZOVn7WJ49x3DLzpvjDYkDTS/wCD5JE6fHm5D2issYDGtvbxZO05n5mHUP3y8eQbiWRY
Ic/IbImiURx6zKXiw5CtfjAViMO3wSJHE3jx4sSSRkREC9tpDCKkhWRWilifxsQmKqkRljJ2gd60
rjtyELJAT2SOyuh5BwKkYE0BVtDVquL5DFji/ZjxuwBXtzwx0vpajVuAgEFEogCuyrCrVysVvTIR
Sq1gK97VbS1WFEXrtF27u4i1GJWH46A9iqidpLqY2EiGgAQbXDDyq5cjjIBqBuzNyR3wbe/dj0bV
KtjL021lBbx+SN1Bl5O/6utJcEUwvToQyuSBzAuknxzI8uDw2+utVvVt9Pb6K3EforcR+gtVqtVt
DVuGwq2ltTpb0T61tLae2tqtVqtVqOp1NvROluO1WoCrcG2f7Mr/ALP1g4h/SOXoD6XInjx4dzz5
NzzUS1bL8faalRUX0r1er1er6X0eNJF374zFlJNBJDIrMhMIlOBHHHDPjzZuS00EMeNh5OW+17LB
gLr8yadcdD5ZZG7mjneIkQ5QkhkiPdyk+2OiFBCXr4/sEm7ZMMMcEXoZqrDkZ/4xyCKBZTj5YsDe
IGtvZI8kNE+QwQttOKJdyAsPS32SWPbfluFFiYjoGDKVOu05UWFueX8pycvF27Azs8Y+2NjQ2WTI
+bZwx9qqxJnAUBVtioWmwcSLExp37I4U7I+MffkfKcswbfHAkeMBbQijSmxysKSN5cnIdYJCldjO
sai96Wppu1EJak7LEx2JUAuBQksFRysiFZbU9mVBYyRHyAa2rnR6exdQJMhEBmLFoMqQls81+8y4
sjZTPlHMY0MxbDKjoZUdeZC3miNCWOu4Vc91W5Uet2I4bVbgtR0OnfZ2l8lf/Ii47Dftpy9zGLlG
FXegSSCbDgBo6WrcQVcZU9ttJAjVGaURhpFuLG/hVURmjZJUamdFqEiVnjdWkvfEJKVMORP3R9wS
RSa+N5Piz/oAQR9bb0rf0G1W9C2t/Vtxe/D19G3CaNdaNe2tuA6H6m2lqtVqtVqtVqtqTqSKOp9M
1fW2p6Vfg2z/AGZX/Zt9Fb+vW1HF0o+oSAPk29fmyxJy2HYbfRXq9Xq+m+/HodwTJxJsWXEiSLGX
dYvGs2VkLh4kmRNgYMODBrLIkUe/71+7Og7YjoCRUeTcSYvfUxe8cbO3heZihib4luDZDej8txzN
ToGDKyki9WN8CXvitdJJxHHmZ0+fFBGLfEsZp83054Uni3dsn8lhzZQadCpqIfdi40k8vnxoG2/5
BtMOFsjfkxYq2T5rmPPu5N6x0BaUSLJ3JWNYzbRLNNhTffNxk2GMPs+STflbrkt9wo0dALmSYq11
NWWjY1cVK5UhyTEVmEUXjq2tqUXLRyRKFub1arUde4UXWu5iVhyHo7dMaTa0Ji2lBUO2khNuFlwM
IV8oxkgzEUk+JgrACh1Cg01xQZqAc13yLXma4nehkuKGUaOSaGXQylr8paaeM154jQmioMtXN7Gi
NAwPBYUyXCoRpbS1NHXbodBwWq1DQVuS3grAb7xpbm6ENlCVV8UjhY1SpXdjAjRJ5O4ydpqENG9q
f9LCx8pNCxEchx5Y3WSP6nnx2+ntVqtVvoj6dqtqRqRVtLVarehara29K2nv9Cdbehf6A8FqFWq1
W0tparVbQ1y09zR0NcjwE11q9DT31NqtoRqePnpzrbP15I/5NqI/sC39E3Hc8XbYN0+S52RSIAPj
uyiSh9Ler1evkmzybhGVXGwWyzIHnESbH8ji2+fbd0/cG0JCj5Z8m/NcPYtlK9XuO2rU3KopWjdN
3220TSZs7yxYkTSPK/w/apNv270fmRkXElvZXBDR2rrXey1DkTmpAcdIpJo2lYxRbDhDB2v1PmG2
lJ5AslMpBdQRbnhJEtZWcmOuPitPUeWBB8e3x9ozc181RlTyZE96sY4HkfsXykY0ck8+DGz7fjjv
fjy2KxMyww47nJzGbvfU0JBG3cZZJFVgoexvQFHHSdXxliSNe2umpoiulGQkAUb1cVcVZmK4uU5X
bpSI9q7gu0lQu3hKXCQMuNjwtkZODDSESNk5sMFR7gko3LLONDvCknbIEcsARuEQjmw8cTyph4wT
c8ZYJYwzNDty9mbiNjgGsbAnmXKx5ICTUMDyVLA8dEikF6ZLUWalZjRDACUillNCWQUcmQUMpxQz
Hv8AlCwyloZEVxkRV54a8sRpWB0tR6DnQ0tyB1I4suzQVhyFaiszJYtIrCpiSY/uhZXx6ZpPBJeS
Ey3gjktRADJKsyRv3iQfa55tK6tE1dR8fn8u3f0S/wBDaraWq39CvwHXn6FuC31dtTVq61bS2lqN
GjpauWp0PHautWq1Wq2ltLVarVbiOhq9Xq9+M6W0FHhAo1euetuA8NjW2fryf+xrarVbj9/6Db+r
bzvmJtMO5bplbnkRyhaOTIW+N5a5O3/TyyxxR7t8vky5T2gNkQKuTkmZoopZD8Q2zMwsW+nzD5JL
JNphYs2Zk/xjb/23dfjuXt5PKgOQxsqSIbblAruCYUckss7/AAnYvLJ6WTjx5MO5fDZUjnheF1Zl
rxiWkwcs0IPFIe4Mrsr7BtrZ+5gAerk48eTDvPxJ8WJyCJVZKRHdjkLEkGMoUxZ+4vs0mKN53RML
F3F/lEcnxpibQjved275nuUjZhsK33bOzYdq3XFhK4/ETYZGft2Sd83HN/b8aVUxB0q1GjTQCeKL
AkSlhRKci4BvagSCzM1AVbQtYkiriiQatIaTGynCbe5CbbGRDtDFI9vtJlbdj47xY0CVJumHEFn/
ADDnZ8cCRbnMK3fJaOFGUDbclgfyGkmaQBd/kYQ7jkeSLAlVYPzhWfKHbbHUFsyMHdJA6bagbIBF
s8qcfHUNNEQqZ4V4ACXw0WKOdVkR0s+2Y0SJmYsMiSAq2BieUTYClJO5WxYZJnO2SdsxKulyVxpC
sgK0rXIjJVz20GNBCR3FaExNeRzRnYUuQwr8qWhlNX5RBGQLjIU1546EsdNItB1IuKGkoBDCzYpu
IHukTKB3fbIKBNXBDHtYytI346tUyeOoyxx8dGvDLcs4Ib9UXjkSFitXr43P25H9VvV9ba29G1W9
O3rHQ/VW+rOh9O1W9C3FarVaraE13UWNX05cPvejwDjvV9baAa9a6USatVtNs/Xk/wDY4rVarVar
VarfR29W3Bb68fSGvmEU37lYgm4oEmvim5ti5YNx9N81GQdtAcNsfxDIzq+V4sUOZhbPnZs21bRi
7bi6ZQlbHzseeDIoc6+GbCcSKnjRx8p2COGM4eSXztxbCimysjIIUmvjHxVtwMcaRp6Z518l2vAl
wniZDtcKvkjOznVY2WnYlkjZj8e2kbZhesQCPlm2eLOjEcOPlZzZFbNsGVnSYXw3EEk/hwsITyCf
OiW+XCUkZiDjAIjkWIc0ikDaIsmLM3Pcf3Cbbt0hMu05DyLw71kywYX+TaN4+S7gmSdvUmPS1G1G
gSteVrWLULlraWNEGgeRYUFdqXDyGKbe1Jta2g2wmm27tkXDs0IxoqbPxYVxM5cl9xl/BoZU4qfc
gNuRr1tszpk5UzHNaW67pJIMCPItW1wM0f40skkCFZfkWfE6ys0i45YNDt3cM/b/ABiGRkbHw5J6
z9smQY8zQyNLM5yGyDDE5jmkzJby5jmJWKOmb2xjPBV3BlhyVji/Mjep+c2E6pCZgRl2821gLF5B
2boo8+EgacuBW5KCm3RCSb7O3dIEQ4sHnkXAxvHnwNA+PG8rrtrFMmN4XjPcVwpysyGNlIJWFir3
FCRqUFqZmSjM1LIxruNGY28khDqxbGNmxG+1TSFewi9JGCzRstdhcyX75ZUtKgnSLuRMaTskaIY+
ZMoR3YEsJInmQWjYsmDOcfL5f0q2lvUvV9bVb6K30J+jv9TbQ6W9G3Mj0yNLVYVarVarVara20NE
0TRNE0SaNW4TwGjV6FW9C2ltTqOp68G2f7Mn/scd+C2tqt9BbS31lvrbeluW0Y24RZ/xDNgqWJ0b
pUM7wyfG/keXPk/TZGPFkxYPxrasGSjDExVFXgtXyT4/FuMLpY/EtlG45w5Vc0DW+fK8DbTl77uG
YzEsyivjHxZ88xxpGnq/J98jyc7Z9rj3GVPj24YL4eyyJhN5O1VufiW0GWT6AqDW+7LLJJs/x/Ky
mxsWDEir5nvqYOFW3OZ9syyRHKLPKjiODa82eht2BinHyMIo205u54Mu17nhzywtLL8WMYxuHPxV
y8XcjkPnZkhSOCPxQ6kanlSsAgAo2pUkavxshiuA949paQR7K4CYFhJjYEFPFH2zbgsFYe5RWz8l
4YhNLfEyW/FjmLtjzMuRueR/zjKjCeNl2ZMiUVt6SO02GkkwhSKTIEGUXxQAM7JxBH2hMgr2bgfD
Kjd8MXb5IpFVsxldVij8uLaNcpklWTHTv27FCIYr1nYPZPh4kk0o2vGKZ+D4JYI2dotug7Ny28QL
E7Wh22WSPM2+XHKSm0YdmnjmDYeU8a/lTXypHdsSQRytkKXyp+6HAkCN+YtZ86vFtTAP+QoG5spj
2oASBx27r2ldsjDTA8t1QNHt8KyZCdnj3WFRUalpMbEgWPccRBF74e3rJHm4TQxlzXcQMHYZExMr
ZEDv5YHSa4ikB0FrykUjFgw5g3WK1pYyakusk0rkuzSKD2yyAZMOLKpgxTce21z/AJGD6duG309v
p7UatwH0bepf0LVbgt6B9a/Hb1APTt6VqtparVbQjS1W1JruomieM0dL630Iq1dKvR4zodT6O2fr
yT/yb1fS+l6Gl/Rtx20tw2/oVqt61vpWUEb58WTLOVs+fjNHg5Dt8V+OzY8n1zyJGvy35REsCk32
DbYtu2zT5j8hnwgzsxJJoC9fGPjzbnOipGl6vV6v6Xyv5GmAs0STJ8LJlwrU63XIilSfZdom3LJh
hjgi+ito1+3fotwj3KsHaP8A/jmh/Kj/AANqiH7lHCuRuGbPTC47nUQZuTAuw5O6R1nzY2VmfHMD
8GHi+UYePFkG0uYKtyJtqBzGPO4j2+d6G2Go9ti7k24LUeALNDhQQDNxGkzstMPG/cc26ZS/t6OZ
DtuXNFkSZBly5Xre57lMhGGMrDbFGUF2yIzNlwDKyUxo4ZJ8qPOqSBAPzsrFAcETH7cCZRTTE1n5
IV8LcEyEyskKk3lypjJFHE8gqDdYiuRnBwSwGFuMTxZWfHHEryu215AeAym2dPefbZu+hP2LuEg7
cORTkJKFGcyvDipCZoCoO5LHJGuMpbDxo1TPxoyhxXRttwl8U23pLDkYzRTAC4wcmQKjxPjxtkSH
ZFlV4ZcOYP3F8HLdMeR4JGyJDU7yzLhzCGU5jgZuSXi26QJOM0qM3IV4cdwuQuUiLkzxvjpbywSK
FcpJGeuEgky8XJgykKQyPNtyyVPtZjZnkiYSuhVgxYXWEEEpZVivSoUAcMJccMJ4iqxrLIiWMsUj
QyobTYp+8V8an+36G311qtVtLVbjvV6vrarehbS1W4LcN/StVtLaW4LVbiPqHit9AbUatVqtpz1N
CrVaiKtXbVqtVhXL0iRRYVeiavV9Oup1PP0ba2q3CTryqx4bVzrlodNs/Xk/9nS9Cr1ehx39K2lt
bf0i39DaKN6WGJD9czqi718gE+TLjBk+PbU+XugAGvzyAjMtQFfHPjMu6Nj48OLDrer1er1er1er
1er1evkXxRNybO2nc9tX4Ti5GPtNb/vWPtuNBD5RtOAmBh2+lIr5+uCMHFgbIyMXFXGw8rZ5cGXJ
jaNwyrVxeKOWdhs0pIfasQRbtmZMm4Zr5GRgb3m4R2r5li5JVlZeD5dOGzNtxpMmVMDJQLgSmht0
lDbh2x7dH3DAVYMXDWfIdVWRs5FOJKJzm7gcmXFzZYpN0yO7KLXGbkSNCJh2qX/Z4su1bF2NkZkL
vm7fHJJlbgn5MsuKgAyocmGSMVkhkljZVWexXCyFjLTBq3GdEO3biXiyssGOOfJXIfdXK5MomqOY
xgHIkEkM/djQlsnKjRlWBu78GMDJgVIirk/jSgLHKaTIeJxn5cyXNRZDQSjdUKyzNO5Z4ZYt0iYZ
OW09GR4ngz8eVc3MVkMhSsXISSLImss0zW22YPimYhcyYGXbgcmSOIdu440YTaPFSSqg3dVlO248
flSFDHumOpOFiGSeLGHbumCsMkMUssq7fCFzsAY5EhJXa3mjysCTGZZR2pizZMU+DNj0sxdfK6RF
WFbJGLw5UmPP+fmKxyYpozBDNHJtSypEzYyZ0mLDOk4IRkOgJUre9qLXp1AWBlRJY0kLIKQowxCG
e1bRP4Nw/olqt6NuG/rkVb0rcN/o7egfprepb0rVahp71arVarcFtLV0q9XosKLVejR4DwH1j654
udbZ+vK/7Nqtx30Jq9A1f6C1W/olvTt/Tba29LeMSXMwc6FlAnk7/ieBLDi6/NN7w8qTtvXxv4s+
4GKOOGP0b1er1er1fSSKKUCyjct0xtsxc3KXd54NyeBdtnbIwatVtLVarVb1/mm15/7jjyNFNte5
4+4Y+ZirlY+8YrPnQ7EpVhsmKW3TJkWQu5I5BfxMSwNdq0CQfhW7yTcPyHbZTNgy5In+OZC7hGuJ
2Rrjjy7lkRwSY+45GPPm7ijxqxYCcx4yS8sBnaeLKjWsRvNlbnJKc/Hled9zgTIAwnjlnhjVpYEA
gyhFCVQ1knsEUo8Ujr2wZkUUhn+3cMoIcHckniyMtezumM53J+3IdJSmQ6gmZwYXukINGBQBEAQt
lmAUYCfa63PaO4O1p2DIYhSyALI118INQqFRlAqSBe7HiAHgWsmJxJjQux/HsMnujMYJdceQjIja
FYchomZsmQOAFxsmXEd9zndSQRt8zY2Qs7AbnkEx4Od+HMu5YzjLyWyH2zJWLIWS43GdWbbMi2Qs
zLW4TWXb575fcLZcg/HxmjfLgEd91jiMTRQd+MqBMpI5EmgCVtuKiRZOCkscTnEOPkM88kgaoR3D
GymxHxNxxvKsCSjJ2jyGfapYHEzRsspIR1caz3NQj7x4vH9jCbD7qgg8KUCQcWcZGN6l6NH66309
qtVqtxX+ivV+M1biP9CtVqtVqtVqtVqtVqtXbXbXbVqtxXrnqaJo8duM8B9I9dOuvLWxo9NLV14L
VbTbP9mT/wBn0jrer1fS/oD07fVW/p1vobek6B0y/jmRjHF2SLMzVAVammigi3X5nk5L5GO8J+L/
ABSDIxVVUX6C+u87TBuuIfhO8CXE+Jbs+bEixRhqDVerjitVtbei6K6738IebJ+MbPNtODXyzb5g
e3IasTFkev2Pdsh4vh26yiP4Xnpk5vw3OyJcnAyMaYihY18ekGLuN7jRjYb899q2ixzopjibkvc7
NOxl3RmXKZ1IzZ1Tb4stGEhUYAlyY2x7YuG22i2CEx8YIoCyfj5CkWygpXCnFnmFp85YZ4cuOdc/
LUJg7lkFpcsFZAzzDOIWaQPSSygR488qy4rI+Hiq8mRjJGqxCgnIx81Qili76bHCjvFbhIBAsk2O
q7iADuMJAzIjSTQNTDGavIoNxJX4t6CdlMy9pikqFSK6VJZngADE1Ovc0ERMiY7g7kjLHDEnm/EA
O4YyjEEAvHijx5WLGYQklY0Mohn2+QKZGvj4uTJG2JlqJJI3MEkpQm1OyB4tymCySmVj3xSQ7spX
JymyK75IZsTcceVN2zhIkpdaw86GWObKWNcpqwp0eIzlhucxbNx0XvhiQo8axhy7Ce3bBuuQmPgZ
SZatAmQMjai1T7XLCxyHiKTMaV1ar1It1j7leZmMgS9SqFWB7hTcCvjs/fifSW+vtVvWvw2q1Wo/
S8tL8FqtrbU624betbhtxDpVqtVqtVqtXbXZXbXZXbVqtpcUTRNE1fgJomr0TRNE8J4DrbU8Htp1
9O1WPAfT23/Zk/8AZq/p20H09tLVb+4LUFANO6Rp8h3Vt6EWPLHkLCmVLhQ+DE+pFXq9d1Bqvw2q
2lqtVtLehJFHKq7RtqMmPBHVuD5HtYkXL29pFxcQvRVUrZswZmBo3JfkRA2bZR/zZSkh+P7nFi4J
yEmbczJmZMCTmeSLzmbFRWiRHGTArLi5EktO6srzxR5RlBGZkmM4e6w5Ay8tO1XyRO2e5XKEcjRT
oisZnW8t1idxJjc4IQrHtKeNQYZAInieRo0kjfxyS02NYtB2Az4iCTccRaO4ykHKzpUwMGJlSHEK
zwx5Jn2zGMWTsUsRkxp4iYJFAhlrxZQrvykoZWQtHMkpc1lK7m4ptxDAZikrlYxrz44oZS3Ax3pU
hjZc2CshTlTfislCRCNxJdTiSoMZvJjzwWiEdqwCGgyWCwGKw2/Ig/EbKx7tCpbaMcNjHDFsnHQ5
GBgrJDJhLU+Myy4OKrldr25lzoTDkYsLyuMadpc6CbFbEjkmnXZst63LDyMKtvlnE2TNnQtmEyTQ
yqseLkJJG8qsWEdniLlR4qjnkhlg3PKxJpMvB79xh82PI8ZL4DKVyWQrLYKyvSBVJN2lLBsZy5kw
yKFhpsM/iz/6rb1b1fjP9Atofq+nCNLeheu4V3ii1FqLUSaJ4zRNX4DwW4bVajw21PpW47VbQjg5
6W15Vtv68n/s+nbX29W9Xq/1luG39mTxJNFvGyZOz5AijycbYcaM5AAA+uvXdXdXdV+O3BarVarc
NqtVqtW7yLFtsO5RNHFbuZbj4rl+KfTID+H5MbbNs6hZsTHR6eFbQzuhKoVyLK4fsGRIoSGdSmRM
QkW6GCZs6Ernu8z4+U0EUsqyCOQx0plcNG5aGHnLADSRqCLhWQXhS4aPuK48ppMWZguBemxsSEPP
gxiXcsdR+45ZMuRmPTHHIEmIKSSdiEdKzJY8aXFKq/lWr8p5lUYUpl3LewfynOM5/Gw3Jw4L/hvX
4uWK8OeAVzBX+aj213Y1f8Su2A141pY3FCPKNGPMFCXLSvzMgD82QmPcnShu8lhuriv3aRhHPi0M
qFKbJWUKmO1B44w06sFgkatlkjgqfLxRCUcn486FnijrJdJcjZximZ0W+QoaXFQd4QA53czYn+LI
hzsbu3/KhysXGLwT5csk+Vn48alpWKYYkjaDNhSQSJUDIqMAzyY6dsyuWTnBhZzxVm2aRZJI2Ekc
6PgOlDK7DHKbKwclAw5ChK1veopWhlRw6evb0TpbitVtLcdqtVuG3Fer/RW+kHFfg5a2oiraWq3C
dbfSA1fh5Ver1er8Z0tQFWq4om+l6vXX1b8HLhtR4LegeM1yq3EbVtv68r/s6X+ovqPpLf2lbgPD
NBFPHuOwTYh2qH8jO/oN6vV67q7q7qvV/RtVvQ+Q5mOuIYriJPGCLjGmbGyYZFliqX/VumOmRjPi
KFxQigy8snNGPNDmxTpuOSSuJueQqz5pkEE3gc5cklMZO5FldngJpIVB8QsI7VHGCGS5jXsJTvob
fOQuJNY4sUau+2xB9zw0Em7TtTbhn2bJJozY5KvIxEWe9Lt+Q5Xao6XCxkpY4RQCigpNfIjZ0yyr
LlkE5VkkkLVs9zn5/PM8aGvDGaOPFX40VfiJX4teCUV48kV/yRRM4q9Xgpo8E1+PgGvw8M0MNKGJ
OKEG4Cmj3GiMsUGkFFoRRfBuP2814sI1+NEa/DkNDAzLHC3FR2bglGTLrz5CkZ0i1+4yXG4yCl3K
1HcEJG5G65ePc5WNf84UZIpmCwAnKQGSRZjHDFUrdomaSR4UJEkdjMjSUrmOHC3gSiHMV3aQkIoE
c6qRHG8L5BVjPhBFDFCkjqTNDOGwHjIzPGyTKQjq4Gg56bJP5tv9C+l6v9db+j2+stVqtodbaW0t
61qtVqtVqtVq7atXKr1fgPoHQA3tRNFjV9LcF+C2h1NuG2lq99SKI9A8VuO3CdNt/Xlf9q+t6HqX
9K2nP6+39pEA1HBDEf6Peu6u6u6u6u6u6u6rir1f0c/Z8LcKX4jgApsu2xw7ptcu3yMvL4xm+bGr
I/05p+yaYrUoAc5eU2dmP5RHLMKYSECE38KgCBaKgUEHaq2IjJIx3uuPOQu3sa/AWMF9uiptz26N
RvQtJuucRNlswM0DUskhoJnvQ2/Lel2mOl2/GSlgx1o2BBerGgooAV0oik/VvthkvGjmcgUJeReF
xsovn5QP5Pp2FdorxpRhjNHHiNHFiNfiqK/HYV48haP5YrvyqMk1Eqa/4xox4BoYu2MRt22Gv25K
G3Zqn8PeFJj39A827E9+RXmx6M20kf8A0zE4+0NRwtvu2BFcbXMabac0Ftvz1LwZcZIyFrvnFQNm
Su22bisbGXu8aSCSJlDxMSjeIyGXzrmNEsGWkkaSK0rSC343lMjS9mMPtnxu+RoJIqwpU8cu3eaK
XbJsd0zDG6TXCuGoGhXxubtl4r6Wq3pHgtwW+lv/AEK3FbW3pnTlpf6A8NuC3Fer6Xq/o3q9Xq1A
Gu2rV00Jo+lbg5angPB76HQ8Rq9X9PlqdDoaNbZ+vK/7XBf0r6D1Lepb076k/wD4GvV6vV67q7qv
V6vpfiycaLKhztsyMPI25WwJZPkOAk+fOkMWavPKV7zAyPLEYmRQ9eMChHXgehC5ZcJ2B29yVwlR
Cm3rTZu3RD93WjuOaKfPlamyYWPkc0Ez3pduy5KXZkum24qUsEC12qKu1EGu0Vypj2qhuK96vQFE
CoxeTfGU5vllUeISUyABi1vjq/8ALyb/AJQ9f2PBY1bTnVqspBjQ14YzX48Vfix1+P20FnUXywVy
M9aGdngjOyrHOW5ydvapG2hy0O0Mfw9qNft+IaG3yCmxcxTOmQhgg88+14sEOMV5vjROMvaY3p8h
IpTjqRJBYmHsp3btw41iMG5vjzDKSShKAsYDvLFGAIWBm7WWTEER27Oh8mTEksWXsvcsm2SwPHnM
jIxZe7ntuR+PncJ0v9HbS3DarVarfR24LUfr7Vb1rVbhtVuAnhtVqtVtLVarcVqtpbS1W9K1WrpV
6Jomr1eieA+sedHgtwW1Oo0OltDVuG3HajVtDoa2z9eV/wBr6W/oX4BVtLa3q/1d/wCn2/tG9Xq9
Xq9Xq/B8otFiLuOMyS5DZO4Q5eTkHOcB55oDIJcZBlWnqHHUUYYVHnwI6bcMMAbmwptyzalzZXY5
MbV5HNKuc1Lt+Y9JtCmk23FWlgx1rtFAsK7nNWFBaNqI0NDTIssacl7hXeKB5CrGxHKL/bvQ789e
VREkr3XdytfG+cmQb5PtrarUeC9M6qwOpr7mLeZKVu4ChR5cVq5VysatfQm1d3NTyteiKKiuxDRh
jNeCKmxozRxkowWrGHbnYKj8dhau0V21vkMLGFZoqh3GJqfFEgkxyC8TgqCHyJj24+45Jx8LOgmU
TIz3jI/GWWpYpEpVAGLl5ESx7m0kcUEWady2nwxfhSwmPcoyzQkpgT/kYfCatQ/sG3q247VarcF+
Iac6tVuK/p21v6d6vper6Wq1WrtqwHBer1ejwHiPq24RxH0LcFq5amh6HPTbf1ZX/a9IcY0v69/R
vQOtv6Rf6+39n3q9Xq9Xq9Xq9fJpDLjmIu2HhZDS7fBmCbcVUMrQyDfNw3HHlXKzJGaTIoSo1ATG
lx80gYGQwG1w0m34wpYIVrxra1c6sCbAVcUNfar0aMlBrm9RKl5mDuGFA3qUKKQmvZTyJ5Qc5t5Z
huKTRuQopkXtlCdvx9QFk5zDUVeibVaytJJcSGlkBo8wlhIa52BvRN6hkVWjL+XxCMCgaJ4bcyTX
fQIIq9E8ib0xBpeQDG1+YNEasOVqYfbFyzMG/g7asLgVuQ/zvi48lTYuQtRs8bRbnamgjkqbHZaa
Fbf5FEMcYk/OfFnXdICkWQexXDyzdhpccqzN98GQ+HOVGZGwkxmGajJt2JmzPs2JPh4HBb1rf0K3
BbW30VvqhVtb1fQmiavx2+gvV65620tQFAacqvV6vRNXq+hNGvfX29W1Wq1W4Dw+3o2q1Wq3AdDV
uI620239eV/2tB/Tb1f1b/8A4PvW47RBnmH4zhLUMEMKZ1hE0P5GSqKi/KuzwwwkRLGe5IxZUNdp
t2irAVyq3K9EaAVOksSq/dQq9A100JsEe0uRDEUU0o7jLKCQi0VjuvJpyAIzeh0B5GsWxn3c33AG
xRuxGCHFYi2xC0B/2cBruAIdXoKCewGjGK7HFBhVqPctcwVJqwNKsjvIxd71erirae967iKZuZaN
wjPE6m45GnarlqJtXlrzCvOopWDAHlparUx5Rn/kbcbwWq1WrPH/ACOxSGhIqaCKQvhToMWcY2Vk
bjh2AxchWx3Sni5tCVMziSPDkyo4sHdFdxOrsswFCNZGnhIGDuMbQ7qYTuG0bBJmzYeDBiJx29S3
9Ev9TarVb071er8BFEUBVqtoeE8VtLcVqt6lqtVqtVqtXbXKr1er0TV6Jomr110PDf6i3pW0twng
9qtqeAjg239eV/2uG2l69+C9X+lvV6vV6v61qt9Hb/8AA+XzGLzyrV8tktCq2hiWwAocqNDnRrpX
sJFZiauKTkZmMiRqQ4oi1d1X5tYU97BCVW5VBcu4QKKuaJNlYBpXLvGLAGko1i8592YDOU/dHArg
oYZXIjOxX/C6twsl6EZB7wgbNjFfnR0uZGSHSSnk8JSRGoxg0IyK7bU8oURjtQkmpJo46/MSllV1
STuBaiRTBjUf+SMQpUeP5Ht20zAUQW0IsAzBgI2pogCqdgvwt0XlNtnODXcB/mo0QDRgF5YkcSYD
imTxvDuM0QR8HLEuG6hoQSVkUxoqyvmSwzruKBYpfsjYM8gS74kYrE3rBVhYj07+mf6ifTtVqtw2
q3o2oircF+K1W9S9X0vw2q1Wq1Wq1Wq1W0vRNE1fS9Xq/CdbVb6G3pe/AeLnwnUira30NDhOm2/r
yv8As6DgPpD6G/0t9R9YeC3975CsTg83NfK27iRaNByAoCraM1q7qe7oIvE3YzKjXKtzkBAXuARg
aKtRDigSau9Hot7Oe1F+wNfufIijBzmuubISJ4ZKIZWVr6A0TWELz7kb7hzBxZFvnR3DrjiXCSNc
FeHrR5CXKFzHKR4xQgFeFHMCN5J4O+EB1qBpWHlAp5iA7sxglYqFJEjr3LKaVBeUtG0chIsDXYO5
QbW5rIUUsSSRYBmqyrXctMqkKnab91PIgjBvQ4Go/q2wARUBpuP+4UTqRcGFTUkd1kwFNSwtG0Of
mQKmXgZYfDapoebQlamkZ0xZ54YsDdUlMcqu7nuXsBXaHZ9v4b+lf+r39EcNqIq2vP0LUBraitWq
1Wq1Wq1Wq3Mijpb6C1Wq1Wrtq2pIq9Xq5rnqTRNXoaHXnXvxH1jw29C1H1iNTxe3Aa239eV/2foL
0D9Vf6Qf/hE8hg84zXyNu/KYVGOVtCaNi0kMaM2OhDxyI1iT2h1GOwZY+05El6DBVw4seOPJmx5U
kd4mjZJgyutC5IIUFu6v8cdDyTDDCPPKsEqIn+ebZW8UbPjyD7Sp0PXBH+bcD/zXYgBnEOO3mxXS
MJigDbEHIcHUzzNM5hGPHHLIzRKJcjK7Y6WWcCA9s0sygKO5+0RpArTpJH2EgVjAeTIKphWrpW1W
Mu4xBGUlCsisO8rQmQ0ZFom9HlQdLxsZDLkRxE5EtLlkBMpGogNQRrKK6UNW5U/TbLdg13Jbz3ot
aldTTEARypItXFGNSHjYCTBiYz40iGDKyseotyxpy2IrCTHK00F6jj8LxHJXLgyncwsZK2SRvB/R
bVbhv6NtLVbjPoW4LerarVarelaraX4r8FqtVuG2lqtVqtVqtVqtwXomr1fS1W1JFE0TwD0uenvr
7epb0LcFuI17Uat9Htv+zKP/ACvSFEaW0twX9C/0d6v6Q0vw3+mt9db+3Jj2w4A/4jGw3hu/cv8A
9ictRUkRYoSQWrvIrvR6++lL0WsHlN7+SSXLZijuTOq2jkZHSRZRJKyFcqJ6iUzDJSRJYWWLBjUV
jWWfIUpPFus8dZKCZYH74kNDq/TA5zZT3zClxBivNHjRnHhOXjCldW2peg0OmXIY48KGyZcnc/jA
rbu1Zdx55PdamsQe4hG7JMjLDRbe6LDkKfLURCyALIrKVYk1iSLCmVN30LVzWlkepWKouUtRy96z
Fmp0NsTE7cWB1TImbHenjtQjLkPNAYpw44W6S8ottPLXcR/mvWTL2LCGZpe7x4vOQEEZE/Y0UokW
mjRwYmWpsOCQz4k6iFp4TFvIpVxcmpsV1GLD3NGrKdsxspht2I2PH9ParfWW+gP054hVqtqfUvxW
q1W0PByrlV6vV6vparVbUmia60eG1Wq1EanW3o24bcdvRtVqtqdDVqtwX199DxHi239eV/2uAcFq
tQFuC2ltR6I+uvV/pb1f6C3935jBcXEHbizNaLPIbdgLyqNb1IO89yIHzEUfnSK06qyQySIY8hOz
Lx5fHGOcoijxFPPGUPJkyjuHMrIyll78cKA8eTJGubyJdgqWWsVS82Q4ZhUfPHg5NGeZom9bf/tz
Y/8AKsrdm3glN1k7YY4+SDs2Zf0jS9Ac8hvJPiovYf1dRFJ43PZKDhixXmInNfjuaOK1QCWMzSoy
g3L9RMRTv3mOBnoRoFdSpTmxUCkPZLLFHJB29xhyGgWFWyBLyMO4WxwASyAUqCbGgmEdShRVyhik
71oUSdG6TcoNstQ13Ef5x0yeuKQXbuiKL2PjzsHnt5A6KNWVWBhIE2NDLUuFMteFQ8e6ZcFQ52LM
Ynj7trzxhzK6utX4L1f1b6jgtVuE/wBdtVqtQoa3q/Bfivrzq1W1tpbUkVeiavV9bVarVa1cqJq9
E1eifUNWo+ufUPCfQNW4Dw30PBbhNGr1tv8Asf8AV/8AlCT9A/S36X/3DqOFtHo/pXqOslCj+kUv
VqGg/R71J10TodB+kdRrH1ahUfSX9Qo/6Rw+69NDQodB1o6tQ4B0HVuo6mvf2FGov0nqNDUf6G6j
ROnAab9MXBJ+qn6p1boOg6t1/wDjwt0Wn/W36V/V7e0X6P8A8tRdf//aAAgBAgIGPwBWR7Ue1HtR
7Ue1HtR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1Ht
R7Ue1HtR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1H
tR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1HtR7Ue1
HtR7Ue1GERXCPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qP
aj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9q
Paj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9
qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o9qPaj2o
9qPaj2o9qPaj2o9qPaj2owjCMIwjCMIwjCMIwjCMIwjCMIwjCMIwjBhGDBgwYMGBuOQu3zVNY/8A
Eb7C7fNXb/xM+wu3/wBCD7C7fNc9f/Er7C7fNc/+D7F/CvsLt/55heHfYXb5sfgHBPz3Gp+CfYXb
5Wtch2+CT1JI/wDCb7C7fKcUhWXwZOi+eo0RSCPBPsLt8pQvHxxJ+dHSJ0Tql58G+wu3yjJOuGWf
go51jiOmfnKxGtSWrbwr7C7fKNjBda7UuZ4t2Rt1W4Tpb5szXPwTd2F2+VcGDJYvpyXMeLhUXzRe
ufBS/DPsLt8s3PxL8DOmWeXBtSHqbGxfPcumPDvsLt8t3JV+DgyS/Bwvn2PE7uwu3y70fh7EMh6G
6N/PFkXsW8Vu7C7fL8ov4aCHSOtV8zRx7lyy8bu7C7fMMrw3mQ6pfOnT4Du7C7fMUtErTPgJFSen
zNHDsi5j4Fu7C7fMmCV4OH80TwrF7IhfBN3YXb5mt4Kar5ljgpMsvg27sLt8ztMt4GH8x2RZFyZG
jOuUL4Pu7C7fM8qlyVS3Gj47LMl8/AYWSd9/Ks8Ofg+7sLt8zyiUQ81lY40lhfG0qfT4C29L4UP4
Pu7C7fNHkSiN1I+S3T6fAJJXxbd2F2+aYdPImsfI/erfX5X3dhdvmqHSVivqJ4b+NXehLx2IVZVP
Muz8V9WXfCXwfd2F2+aprO0g69iH8clkpl2ZOpgwYLWL1XiblnW9lqhkrAvUelcNd/g+7sLt81we
RYxSehPBlj+Istw2/D+ZmkkTqjJZEkNSSlFXwZE/g27sLt8196StHk/jccCyLuCVel8vw70LRLI2
2Xgl8G3dhdvm6UXz8g4Zhn5MsvFwrjaL2IomJ0lkYWmeMvg27sLt83yiGQidUuq+Ix0+A3rkzeqJ
Z5ao4y+Dbuwu3zdchfHXo7+OlkIzogl+E8vg+7sLt829C/xq5a5gtpkafiOmnt4qeRC+D7uwu3zX
CL1sXLoxwF8NtYuXZbXK8NbU/FS8fCd3YXb5qhaL4LUep1XxiH4OxfgSvDWIyyd32+Fbuwu3zT5V
sJvhP45G7wFiXw5VJpa5D14L64PTsu+p6t2fhe7sLt8zZLVvY66l8SfgYeC3EsX4drlrIvcstF0W
sXZgwjCLLgwiXn4Zu7C7fMsKsv5N8iVwYRfh2RdmPAREkpGCyJd2RBK+G7uwu3zJkhc/jz8G1wL8
KxfwFrli7+K7uwu3zJZF0y/hHwbeAlkpuC7uP1XjjNeI6cG16X49kXfxnd2F2/8AAWbUlcdyOFJE
jS9vPjSskOt/F9OPZGYL3+Obuwu3zlLPVtHPLixryZ02uWMl2xXlEJq44Gn7v3YmrPmZ4tiYMceR
UtwbluJZfIW7sLt8Fj5Nb6XN2125rz7CXJ65pYvwsaY0SqXo1uz+5ePVzFJfFJ1wqNFx0vxkiC+q
xeli/BsqZ+RN3YXb4NesT8k7l5F7QLc3Ikv6i5mkFyK249+A1u5ZI3e5dRTSSGSq3sWJZJCpKI3I
sWOvEk9T02OlbF9djoXfyRu7C7fAo6am6dUW+RZI2np3pX5jXKW5PJG3y8XnQiPr9xzna/06Fi50
M0nhXIWK3p5aI0xrsX13fyZu7C7fAm9UKsovZ/IcIh0km7ke1d7kc1SCzpBHhoRNbOfVfsNNQ5mw
1h0ty021fk6dtdi+0xFJpCI1305+UN3YXb4FaxnhWL2+QITtWaLdmLQJ4e60MaeX002L+E7j0pp3
3ZRP9yj/AOJfmeRK0wuBccaclzByrCL8DHyru7C7fBcUsY12Ze50+JwtGTqWRL4Er+lyJr+lyJut
/Gy/o1yE24fUmZZD8G1SGWZmaQS9OPlnd2F2+E3R+JfXFJXxLNb0yNVmlyDcuqGja22+aE+p1F41
wpfQcKYThc5FHJSnSHW/CtpsRuFCmT8Xd8i9JdLF/lzd2F2+F3Rax14FmS6ZM/C50W4U03L6G7bO
GdrEFy3Evw7kIuOKOBp5z9xpf07n+txroX48aLkMlXVLiixe/wAvbuwu3w66LMh8PNbMujPxJqtv
6l+o6R4mXoTFu+5/xi/cTXMUi4ttXUl2RYa3L1QxPqvl/d2F2+IQyzL6s678tF/G3LeBdNr8ja0t
CJLPweDFYWhq3S5G5KMZIX3E1cj7cOxfTYu/sWQu4vTnmW2uU4uL5f3dhdviURw8Vzqv4mX4Seon
0JX9LkW5d6O8RSGW0XLabEl9eC6pYzSSCXaeRaPK5GVyciTdyxfjWpNUKFDSNu1O/q+nzBu7C7fE
ceGz4fJfwNiWYpuQ4vYiIhtVRK0TW9LVtSxcyWLXM0tBZEO5J/EhXReLHq3dDBawm8vkep0zx300
Ju1jbLhEfL0scYFYiPkaPDX8MlA0R9RxzeHyIpfoYLLRa5H7EyQ6Mgdx+XMsP1fqOPoTiCynzRfL
L3LOxBY5/c8ixcVk2XX2Lq/6Eie26XIb+hzguZ8AjbP5Sbe5Jf5YyZ0RRdtEowZdFPwufCKcEriy
yzLPxUInmSxMfM9XVYY3zEeryJn+Qp5jSui+S7JxA7N35DnmTuuWsQy1+5L/AEIRaEXhmTJkwYpk
yerc4X7nRUnoXG19y9Lk5TpmxKUkOx+Vl+pCI4M1+omsyLcuQoLkSQ38qZHyRMmWe4zYyXrgvYyZ
pEVgsQ/g8DXhe3ElmfxG2/cP1PJJ0JrPGtqlli4vuT1Q2KCMELBz7ciCygmxkyZpgbSpkhMmZpA5
WC2mEXvu/REvTFfyISG8l1CIbpGBva5cChSdERksQ69K3q0lNskL9SOmiPk25kyWuWUF3WKwy3Mm
dOCzaLbjCZ+W0y0WGTBilnTBjTksvGPwscS59BTbyo03yt4e5Y6lsHpn7iT5cyFgiHu8xNOOhLyy
UZM8BJkMmZRatqXRaxDpcsi1i6Lfky9/Kk9CS7nsJJQhsv8AbCIV/JEYXlS1OlJgcXU2IvboQ8+d
M6JYoyIiYGowxrrf5KzXBgwWRLTMMvYuSz8bKkE7coh5rMcPFLF1NIV9MuD8WWwRkvtMeMfwC455
HqzFoG9yaSXMlc638LMyRYcze5O7nyHFjqbo/tW5DXRnuavLI5ctNkYrLI4kt16lsEbfud6YyRM9
iyRksOftTFIrNLmRPD8izJeHlko/JkIgVxkPmerarCeHOBPqppNLfG8GDBguZMmaYMGK3aMlnojL
Jen1bcnmSyE/CQiWT9kXIWmzpjwt2jPhp4sTCJiSEsLw68yF9S50LXLunp6SvoyHzsR1rAltlku7
rKRL8DLIVlSXyJIL20Z04pGjJcggcWZiCb9xTA9rdGmyzmGO/wCPxLBgxTJky/CelZZNb3LDG250
WMwXck7WR4eVbiZM0uuDZGCyMmX8Gy0bVOXhkNZJRfhX03pJaj8yHDIskfiXeiOpu3P27PyZK5jn
kQz1IhrFJfgoXMvfcRRCvMkbty7H4r6svxMGNNmXOhyaMscPI08r7j3NSN7Mt48hvdZdNEojl4/B
iuTJmuPHvsXpdQWJRHivrwZ4+aYpkhc3xrGC6MtEpyWMeElkovR2uLc3L28mep87l/yPxtxYJLKR
NYHNkJtzBdliW5rCLv7GWTZofqxEsnZhMbJ61tS3gbfclUsXPSrRzMz508yNWdV+BYvksfllnp2P
1eSIumh7ZutU+AxoyZ+BYMGGYMGDBiuS7MmTJ6k7kNUx41a4Y2R4HPGTVLstBmnQzSTBdFtMLiel
EVsh+qLjl22jVLMvS2nNbslMjqRCZEz1LshWPU+elaPQ2/T0IeNwrkc1otlnfPBtwLEZL/oWXpXk
Shufu6SXJpHAsXrNMaPOlrD6kr+nI55lrQi+dNjBjiY4mTJkyZMmTJkyZM8DBjxLayfkTJBGvLLN
mTPhb0l/QjAl1J8BPXh4MVuYMUszJglok9K+p5kKlkS3xoWSWRo9X+rn5fioE6RwclsE8oHueMdy
UkdS1qT0IMHtX2LWMqCKRTtT1NxYcfWT1PnWZgtE+B6nSsrAnypDLUlluPAkv0JeSXSWdESvquot
23H7E8xcrlq3rKQ93Mb/AMjcE7cfDoMUyZM8eILwKRvqTxVxnog76I8BDI05MHMxwM0xpsi1M08u
GkZLEstpf8zbueM2JSilidF3Sw7FvoXZayLXZmC6lIx/AiNGdMRPUa2jmnpbiCNrk9MY6VmfCo9L
Py+gp40aZZJLpLdyYklpv6GH9j2sw4eUTtFchUvWxDdPVzR6eT+C5MmTJnRhlkQzCME5ImC26kNw
ZMmeD0fUtuJVy6I0YMF0fkzJakpSXX2FCtPgbFyGWLlvhst1zS61t8J5s8f7CcQoj7UxkvzJT0O8
QZwXck22snb+XYcv0rpRFrcOVZk7Xch5ZcS/usJyYmfvxJPKt9aRKPNF3eb8eHV795KUl9r+5Ppf
3Jhk/uRJ7ty7H/8ATeQ/8m//AOJ//Xd9j/8Aq/sxz/k9SfUW5OxZje5lh0g5HIa5iT/FSZM8bDMG
GYMGDBhGEcjkcqZMnuPcZZky9GDBgxoUE7kfi/UiHZ06PqR7tpcTXgP7WfjcwRBFIZO13Veh6Xc/
FCfQlNoyTEl00Zpjh2RK5l/Cx8AxW64n0N3mepuE3ZVc083gh8xWLOGQWFLLMjftXctbXbRFYTjs
S2Qdj1TyOsdSeo9dq2OkiTLPgXJQ2rNl+FbTNII/p2/uZp5G17X6uZFuyJeWWoobRkutr+h7fsyH
6vS+WRNOU8CS5Hd0tSZMkMul4LJnTgwYMGDBz8BNej6n920tmskPwrOd8nqWRuILs/Hafkbo/tF2
MWIgujkWcEJyQ1TOtLxErNH8CxXoZ1yO7F1615szCM4dmTu5dK34FiHqtpvST0zgayRERqjlSKSx
QXpbgxEnpXPXasMsXRg51uNpHnSORH9XId56x+xtjk86Jku2Wcl1Lp6d3tf6GZTE1dLJEWE1h0sK
4kvyTG3bjZM8PBgwYMGDBgwYMGDBjVclLJ7T2ntPaSl6S7nRHItw8mTJkyZMoyZL3MnpiUy5am7/
AKsXauC1J5k68E7rvxUr4JCpjWxm7buVomGPYvbtv9xmZIj+Qlm1yK2vS6dGLRPAtS+iHzGsSLc2
RHu50jSugoVlSWxk8JIiLkQoVFx5ZBCIRdSizu+S5EQ72kcKLcxT7U5rBmluZghno3+3r0pK5sW3
djzLEdRG0ff4akuwlw4RnxO7sxdtUli6LIuYpeufjnpVMzwcIXRbpGnujnfn9RQYpkvT8aTSSxGi
aw6zVbk9Xq8xvlApv6WSuZKJ1TSWYdME6+woUdSfSlx+miSYJ3LChJcyyz1LMvTzMEsjQ6ejfjk+
hDPVtsLa3YnodmbXlfxNy+GMnpphXY/U9MHqf08Vu7C7cDBb5CkbpPCtP0H6mo6s/FJryP4MsXpD
IpYsYrCLssOmCSNL2vnSNPpT8hpluVqytXnS11VTdaHWXgbS7GKRxpLolpQe1Mhqx7YXfoJ/kZZk
yZrgxWeZGXRbN/8A+L/gPbuRKwend9Kbd39rPp8Nn+7Q2S+emREeK3dhdvktrhzRSsR+hlLvjt5E
qVPSGWrMHQt+X+9J5ly7/Qm2lsmC6J22Z+RO3OiSHk9W1x1q6NJ5FufMaUy6+RK1JCJpfX+R1qn4
CEKBJLzL89M68Ic/49rL/wCOOzMbkLbL6Jse18yzsQPb1FO1/wAPhi2oS6aFt+r1vd4vd2F2+S3w
4LGYMod06XRgvSGdV5l0Zgs015E48qwyUQO86IjPMtokvlErQmS7pE7tEddWLjlUvYd9E1zdCV28
lqY0SW4FqQNyT+5e/DvRsbrCslliT/KObIjwWPGz00t6L1Xn4vd2Yu3yTchXo+F5UtjSrkF0WIZK
LmYMktcGBSXRO25e1ZJQ/wBqraiGKKzqvIkW5HUvz1ITTyWb131yyKSXcSO9k8jlzWy4EulyDqSr
EF8vgduLnRgwYrkyZMmdGDBgxqZdje1o6l9pdDjNZZaqQvF7uwu3yXng5Inz+xAly5lqZRcwTchf
qQxPLf1L3IZKYpG4g6UyIjlRrpouRu+5ZkMszBDpPJ1aRO7n1ITl5rbRE0uXJGtf0ImxZ/ppiKQc
qYMGDBHpk9qPaj22QovJH7XPb9TH6mP1Pae1/c9v6kLb+tMGDBBemKQ0xKH/AA+BZM0wY4dydrI3
qC1y1mXLaJpYS8Xu7C7fIsN5JJRC4d2Wk/iSnJCwqwRgwhz9BPzdEtz+o/S7ciNzX1L4JRHPSoYo
eT1ddMUhnkTtcF6QyVuSX/ItG7sxN7Wif7SW4b5cCXRTfrSXe0a39hqWdS6MVtW+q+1Ht/Ui402N
LcpgUR90YrLO5kXU6E1mn4qfMvTBhFuAvgOTNMaronYyN6LPsWholr7F90I/JveRt2L9/HbuzF2+
RG+hHtfLoLa3JG0XWRPgwQJc4uQNYXkQ7lrl1BKL0xJcvReq6G0vOw9rTlnkStNxIWuUXJRe5FH5
Utuf3FFyd3pRK1tK/DnuxwRGD+pGWSuJJ6mNqy6eZj70SrdfoKUYX0Z/UiJf1LPRgSjmR8VszNIe
0/Gx+WCNt2z8smSy8du7C7fIkovtXfoTRr6iU8CMkRnBHMsSYtSDBKIYmX+nL9S7b8miF+p5HQe1
3W5ZPWsHpaX0L4ZO0jcWrL4MaWuo1GOZ+e71PoiNu2I5GVMiW5y6348rEFkltY6wsLiRyR6WvqN7
cbepL/QtouTcsr+fAbcRxr/DJMEpfAd3YXb5ElPA4eWRGFcjXYvSUep+agilqNl+fMfcwYdLfYjH
kzGiJt0Ymndkbn5XI5Eql7FvAOMlhb5TTWKLz4+a+TFOOR5r9qd+J5smJ6jcXbGlZdKRXyJSghjT
WOdbY4NuHHIUc6X+S93Zi7fIkEJDeP5Chy4otNyFglCb5nnSHXoNK8ienNy9cUjcfjgjc4PIkwyd
1uLgs67l5iSd935Ml87kWjq/AoSeBtp0lqluJBb6xpyJjaIayRq/It4C5GdWfkfd2F2+RXaXyPy5
DTTl4tRPTekt2pJNcl5cHM56PMwRE6OqJL3G0s3sX8I9zwrsb3Xb5F1PI7i5+A+glzWC8/WkcDBg
iGi277ohNFyxN9rL0yWZMIcrtBMEvR1Ij00UvI5wW8BPDzTHxaWWMGC6Z0LPRu7C7fI09Rbk4e0W
5uZuMS0JTbmLpzin5Wg9OOlJXIicmbIn/UVz+mqxirTp6evhW39R+mY8zqTH0H4BtFs4L8K9Lbl9
ScjcGJhWE/U7l3R2JacGWjJe5O1SyXmkK5axe68yFK7CdnInhvki7lMT49y3DzTEl1TPxP0rC12Z
e5exuh8hdvkbsNTY8xSO0kxRrIuh0JdMSOe5KcdD238qR9K44FkSiSUhW5+FieZAmJJfUVY4sob6
8aELzIcEpRplqBzg9NJbLXZfbtZPpggaTuRkmz7lyTMM6riqfBWZyZ7TFc8LNMeP9Ky+Iu3yLLI8
qPbzYxbomUQ6OIfMiebdLuOxlivYTRD7FqSQ/pWC/AT4N+OyDEyoGhcO2hOC3N3jhXrIyX2ROm9I
5ExSNty6aJdIGtk9JpzHo6cP1PxGKWZyZfaXldzJnh5pjw7bHuZL4a7fIsGSeol6bshQTPMhukL7
nkiaSjM1xHmTLv5s9zX2/ihXxJenQXPqQsHVViiIdfy3JH47Z7kblBN4/QjDI8E5U8x7ks9ODOvE
yhJF6xSa5LaE05k/L6Eae+mHtW4m68ncmz/QUWEl+hel7/DcF0WcFtxyZdUyWdMcHJmmC7LIujJZ
6pPxUmC9qRNlxV2+R46EeQuRuXkQl+QiJgv91/FUlXpj+YyS/wCpn7CL6Irkye37su0uxb9S/I60
lSzFHsf3G88iVjwSFCwO+VxkQ1NhJF9FuBdxJLeSEKeBG0gl0adkQpgu3TFb65yQ5+wmsP4PdaMs
yYkuqZ4Nz8X9zBLRMMyzLMsvch2Gm8jVLjS0vkWuXWtdvkX1LKr1pgbayo0yndchJuqnDMn+opJ1
phFkqXZeWWUFyDr3IhjjkWMGS5ZEMV7DawjzI3YJ2vgQudYWS9Yi75iXTVimTNMHIyKBpq8Ciwpz
TNMapood8QRMwRwGmWJdE0PkkTu4sOxOH1R6ZmBPyEhQp+A54NqXRdFmWZDRLRZFnTBkzouidv2q
oE73pZWLszohEVwQmy2hdvkZxgmSNv1RO3kXJaMHMvSUTC3dxRtkT3L0ikz9CEi5gnLLtIs6ZLko
mksmCYG4jqWranqb+g5cNLBZ373E5T7UsyN1qdC9zpSxmaSS9H5KYHCzgUmDByMnMsjCpkzo/wCR
YXUy6XVMoxSzpHQl4G07LhzzJJpg8uJdwSnt3fuJbtsFi6JY9rUdPgtnqzS9LMRE0llrFt0D9RZl
2ZpdFrVbIVuRPqZGTME6HTNLZepduPK7fFZHHSkD3t/lg/FqeaXTmemF/EzW9MGEcxP1I2pClPdy
ZMenyOuieWjsRiqhnXuODJfJmk5G3+pKFC/KbsvJO3BKIWiZLVtcl6L3ovTgcGeIkRFzCharUzS4
yNzi0iS7l6uli5KL0udV+5KsumiM8Gbpn4/l2Lr6bieRakiaYvVKJXwhQ6YMGDBaxLvWxZmWXuZg
uRlEuxCc6omtrHUij7DTcQy5O3dBLJ0bn5C7araoI6Fx9+Nj4O2x+RCEsic4J+/mJP0tf8T07dqP
xX6H5Qi7kyZLXI5kyTH3uZgsdODNGRRzWcEIuXyNEkpJ2LotzJLnRChmZLowS63sY+51LWpb3CaV
uemyrdnXSl6brkS716GeAof5TA89JWqLkLBHWkk4j9SXNsCrGNNqWLvPLmWRe5ZU6M5ENUjkWW1p
P4Pfh/WsVad2iXx52nqJbLWdIdUbv+ou1I03Mkp0ktyFuH2GyeujBiufheaNIh0UcjuShuFPV/7n
Pd+xCx5E7219T+JbPm5IMR2uSn9iGl3M3LXLlnS6MmTK1N2gvfsQtFnWMDsRMSSnYSpfVgvc6FkX
0REtjlw56xovwbG5t4syVz6VhcD1NSNpXeEiMeRDLavOjTxzg6LlogjOqcH4jbab5F1HYXOR2MST
tsTudlSP1Iana+Y7zPwNs9TbuZM8C+hqSw2kSrEMvovxXyLuSDzdbWOojd/1F2pKL1sZp1JIYyG8
E9fiuUcxmSZElPnFGN798XxzLbfV/wBiF+LX9pesdf0E0nDIX+/0JyTEcyG/xZG6fIlci6LU5kSQ
1Ys9EvHCtkW7edD8WiUyHovanUtbRasIndz6cOaXcQhZbnCJ1RWaPblJ3PXZeSJIIZbR50tlC2zZ
C4MR9SVelxzELmQr830JmOUDbbv1JwW/QU5O56X6lPMSn1T1+CQNcCxbazBlIyjkXuyzOp+SPcWx
psRHg0bv+oiCI+pOrBeSCVRbVjb8QyYORkxPfSkOz8xNcxvyeiInzVq3U+e3KM+ruSW5GXS6JTsx
pp+RKPyZZSWW3b3Py3bt/ksEbVBd5JRfhzllyFyGt3SwosJkMudCyLutiILvQhbvVL6ENe7hpERf
qfiko68xLRev1rn8pIn1bZ6WksrVUkpkMsQs0TaHtXMTxrhXZGC5CQyWo8y2UhN3vdE+2kIkmPbT
I/VD2LBbgdD+Xi7acUue37lkuG34lG7/AK0kTU1ng2sXuvg8aMU5mOBNXynkJeRHWmGTuLLR11Sk
X23JbIiZLWgvctWeVfLVh1uQqSJpOeZ0T60tbRYluEW1u0tk+eNUaYFthWuS3M8GEJwOVfc/scrR
SxclFiHZl8Do55YJbyW1QiWWuyRfqP0w5L8hN8tE0XXLO5fa3JKvPXRiSOZfTg6+IhF7j0RSHz4s
MlK60ZrimdDOnCRu7OmCEXuW4lhEcC/jLcWG7sUOCJJLZSE1dt/Wm1f6tSG+HDaOp0G7n5L6i3J9
/wDcaazhja5FyxYnBYvWz050REvoXvzgcK0kcy2VoUq3Bjbk/K7O2hUTrJdwbm93kvMlLyxHA8z8
nEk+pQnaWXvW9EkQyxd0lMh4WptE5J3XfQXTkQyFd/oS2yM6JZYuTMpXdLuEuYn6lu2Kly1Y5klv
D5L0uSQ7ItqlkiaE+vG/EwYZj7l2jJe40pRl0tw5LIlm7t4GadtfqpYz47NMam+hO2ZRdk0f2Idm
fU2ryIRL4c1tkwWI3ZJTHtavyJiC9JyWsi2emqxdkbbvFiSNql/sS/uQj0cy6l/wHuSit+ZHCT2u
X5jT56MYpGhztnzEkvOWX5k6LUlkxIk1Ec0T6XD+tbIh0khYL0TaGk8i8tMIvgjaSxK2SceR0pbR
enUmPdckae31SJxnRcsX8O1ohjayJzM5MmZRKZL1Qel+FupPxIaehFkTBBZN0sj8rHWu7s9Ua50Q
iMELXB5abJmDBgxS/jXzsOfSpWmXyo3unsj8dmObJ03fDuqQsi3b1chkonlouXsW0fjBZEL608qX
t+4/U7/uZf8AryLOf04rSS7iv5PgpCUqNxmYt9EJRBamKyyeSF6d3+w9s2XT9xS5PyyQ9qJiIG6J
HnRrDPRtc+ZMZpDIIRCwTuyeQm3kiLCaceRes6bC6K7pA2m4V78X+Wux14PqRfRBBgtwJYu/h7qT
EHMxJ7SyphFkTC1buz4sV6Fr8JkUuY4D4E6r6b8Ls5L2ZdkIhG6eQzc3zbE4sWtq91YPLXdSWSVI
2/qQ36evYST9S6TJ58zypYmKWLt0llsV8yaZf1FtcKMMcenp1E5U8HI6Q20hpYzcT4HYcJxH6l7R
SM1tSFghJOUW24+5MHrf0JZLPSi50RCJYt21wh7lbyFaIGn/AE/x0S7H4/cUj2q7Zdx6YwKdCkuW
vpnnuJF6dylvApSTefHztRMRxJ6kshC3RC23+Dbuz4qTUmEY42PBRwp4WBtJSdhbufOnUsbmN7ty
R+M7u5012P4F7l9EapRJbC6mYMyuRYhlq2YoXcnd9iC5CL5JSX1uXiVT+76yPbH5dRfj/wDY9URW
/BW527DX1WmaSSmlF7mffd/QmZrMFyD0oa3J+Rbc/PsWec0iRSWLKT1biEQxp36Iv0MfQhEN23fi
RTq/0o5LciXKfXVOm4l9yCFyPz2x6enAv8FhkIl4+D7u2uUi+mX8Aw65Mma38B6lyyPZe5CQ75Ek
NLMCmWJ8mW4cF9C1Q0hwz07lME7eRDal0uWpLLFzoXUdZLWLTJmmJJx+pG5v1Euc/wCrEJOxD4Ts
22JvrSwkqQy3MghppvDHbmLmWpblSFkd4aFMS/0Grf8A30wWuyWJ7eWSEr82JurLdRbuW5EKypJ6
ds8S518yXlkkQ78yJmb/AAFRxXtfIhfB93Z6vIjppkspLrRfgS6Z8BgwYMGDlXJD044bTM84JmES
uRtZ3G01e4pe1peZlR2pZaLaYrd8NtiaV0NNZJVIZCLkIe7+3HcsWFb7aVuTuejduxhjabX8hJTP
mQ6eWnFImBrImRGnoe6fSrd9MHmeqzjJfZlW5F1olcz1bmQheod7PlzMQvI6EUsOKPa+V6SNbZib
wS9FqRosWyS1HnyOnUgvhC3TO3b14N7lvEXJ28Cawrse5+5/CN3bRMiT5kLRcsRTHEXFl+MtX1JX
aNreUK1Ni62Nrdupfb+pj0qsoh3ZE6J2v6Ukni3R6UiHnzIlkpzN7aLjaw0X14G0ejfjqfi4jmuZ
Dbe6SHrxRbnC7DXS9Z60mD9RQYL2r5ku48+SQnL9X+pqkqdaWsyI9W7qQ1dk6JZayLkKnpWS+mJ0
XdMF0RRjS6C6viW8RJkyZpMWHhbep+TbPxTPyLL4Tu7MwZRMkEnUujBgsi+S7MTxYZfVmDJkzSxD
JXHvx1RJ8iPMUdBrDaLL1PruPyuJbUXXq3dEdOxJLmSU9NiHxvUs0jdz5k+cjaEy9qZH1J1Q6NbR
7N10xLa7dUNbvVuZel9cvsXf2LbfuWSMl3XLMl0mOFDPOko86WL/AGLWQm7yTMTyMQn0JxoyWR+X
2LWLlyNtyetZIRllyyIp+xeLfqcmfi1WHY9Sd/4CcQyX8IjqNLd+Jifhm7sRBkuzHh7mOI0i68ao
EvO4mlbnXsOxL/3LKDBgwYLGZ70x9yWRt/FCl4L/AJF1DL2JXBhj7USpM36T/AhqPMna57UuvsTt
uQ8F6Z1ucPJ/+vHPy6j9U7iepasQfk/sWX3L5pOC7Lull+hgwzDMfpSU6fzOn7Fy5YuxrdbnJGx2
fMc3bF5Uk/G9J3OWQrFxzyPTt/Jo/LmTimSEXZZU/cyWRaxcXWnqf2I5C2pfU9Ppf0E96iLql/G9
Tp8Y3dmL4TPjFPMtyMEtllSxfJyPxVyWromI5QiYgnbgsyFcvdl0Y0q9j1JyXROqehE0e5k4JTI3
XghWkvO6D+2Sx+Sksyxe4lSNMo/BQ+ZZepK13glWOh/c/wBDovIwXZYhIyXc6II6U7VyfkidrIZe
t3foXJZCuQkfk7MspIRDGxQp9KJbmSFZc/oLYkRktYl3ISLjuQlE9fIcs7mKQiWerd9FT07SL7Xt
EqXP41v4yzL2LF/im7sxfCpWODfwNrFlLRfG7At0q9W3vt0PxPVLPU3NLEp/cbbnyEj+BH6mZ8yW
pYm1nkThMhOSC6pZ09MEbl6vMzCfUmTJLZ+K+5LEiEQuRYlkM/H9TzJmV5ENR5ss0+16XUE7bkpi
auy+royC5LsdWQrF3NHVnejZPUknqNUVLWL2dOhEk06Iu5PxVG+grobVp6l7xSUbt77DdLFz8V9a
X1+rd9EeR6dontbXpsyx0rOjqfx8ZZly3Hv8E3dmL4ZlmbFnTkJNZ497FtFxXhIV5E4ZnBCzyGpv
+hhMjFLGYI02LfqP1KexDfpgna8CbUzkfIsYLosWGndMsRuzWeg2+Rexa9b1l/lAocfo2XXqHLgl
F1J0OpcxaqeCFZFzqy9kWGXM0nWlRM7CoqQxrodEfk5PxVJNrxzY2l97DmyMZJ6Yr5C7fvSWNbV6
mXumuQpcxwPVu+iPIhYR69rcc5LVufxpbR/Hx1i/gb/Ad3Zi48/D+tYghkmK3Ukbt3p29GZtSUrb
j8nH7n4r6sl67F6dTpSE42nkrJfxLnr2uzIazhjh3XIkvqgU0xXrS7MSW+xD2/Uja/yQpul/aoP7
O5O12Re5dQWZf7UllrKr3LHnw2J9aqk0il8IzRSNq89BqfSswNO7vf8AYztZGC16wkKRbUSxy7dB
OPTbGi2iEerd9EeRCwepP8SyjV+JDL6P4ePt8R3dhcKPiN9MU7Fzy0et8uQ16Y9JA0NMzw+lLGaN
MsyJwJpDTcl1EFnJeik8hzRJk0haoLMu/V3Ja+w7x5SLa/0p0ElchXZctRcz1Tm0c9FzFLaHBLz0
pcZCFoYklXpBDgsS3cwWRdnUtT1bsCylyFJZa7Hq3fRHkQsI9Sc7X1IXAn4NYvwbF+Lnw+7/AKi+
O3P51uWsJ8mWr/EayOfx2s9W1yiT1L6l7vVek4Ll0WrakHIvTodSV+PpI3c+ZKpHlSUXOjpcwZLa
PxyflWVMo/JFiFkt99FjN+ZfTD51uiVRp50eeiRl0zDMHQu19TJiSyVOxchs/FSxOfVe65f6Q29N
yxBDJf0R5EY2omZ2MhKPAfwOnwG/B6HUval2ZMlnNZNqIL3RZ+F3dhfFrltPSty1YIZctVvoemFt
ggaY0RouRnVGiIPIvSIGqKIUDUt+X+4mrdSznRilmTMkyZFbuQ9seZZn5GSJJmSzgg83XJili2ef
eklzyZ5IlZVWmOjT65LmawxEVwYo30F5j8hfqcr8iybjBZGX5n5MmttF6zuz0I5EYXM9UzsfJkLw
Nz+J5Fv9/F2L0uyz03ImCzISwXLFyNr0SjpTqLd5mUO6JmGZgzJdFn4Hd2F8avotqmdS3y/JC3JQ
iT1LlRK2mNNq20di9blj078XH6X+PIn1Q9v+rkpJp9SIzhkf1FnPUs5IIpgwYMGDBgiB7n/STSKK
qhQMjlTzFFP2onWJkemKQOlzDnkP8cZOSkzgu6Xdkepq3JHmYgu9Wa+rdnoeSLe1ZPWva+RCtHhL
fbx0Kkoh6ILljct3MsSiHqsZpe5KQ7IhYpDcEr8kXTR17l7FmjJnibv+rF8Wtqtp/nr6ukNSPbu3
QoweTpC+hdy/IyWOpclVsWvqvrsWHt6m7b1Il+kh4OkFiFZEXUFm1HPqSscy6XpZdQs+Yk9vkvMv
tXZDfpsuVbuDN6yT00K0DPPJJPJYI5biFlluQmN8uRFGlddRkPlSCKQNJ4rZl64+5CLnq3e39yCa
XZlGUZIMosy1PI/4o9af47uTLfYlYJ8Fel/vot4metIZYhma5wJdSNvNEtmdENR20XrZmaTRUuhv
ZkjmZgluXxN3Zi+H9dV+Fa2vrp9aVyW1K6EdBxmk77eXMhL0rpX07uZ6uWm7LIwiyguWIaJRlV/G
5LEzbv62LPRd6YuJt4Jm6HFLv0jmd/mLrSBkdCOgyegpUDo0uYjsS6Pauehte0Zf+qk08xplnJek
sujBcsWRLwRzJ3u/Q/BKFYbY5/KcE2RlGSzIFLhVgbbv00eRPgP41tS3i4dYJXIlIs2qSxoTfI8q
QW1YJjVhl00XuXsZJIXF3dmL4ZfwHXgX1tIspJpO1RJPMkllkKOZ6f8AI03H3JmIwZMl26YMGKcz
I0nkyZYk1dc6Yinntv8AYgzwLkFouTJhkJWGmbVERRiRPJonrSZuherNXu+1FtI5Oj3fbRG32/6s
Mnodxo703eaLKw/KuP0ML7GF9jFLDS+rLL6s6kafLqWz1Lj6dROPUtxL520w/C38bDLOBp1sslyE
yXq3er6EK9Y4CTtByZ7UdC24uxNu3F3dhfCuvDvovYsi+u5bgysDl4IZBclmDEFvxI3bsETfoSrx
k9UWMSmRC/7GIj9T1en6F1Kf6Hpj68iyxkyQ92cGaY0btr5jQ/VzpkyRP2LXLIxcv9ibbTJzZbbT
lH6iTtCGJ+Y2frWV7uRcbElzIGuWT1PmdhL70a5Zoh+m3kP6UjltF5i8qW6Djqe37F0xO9vIXlmw
vLyEnb6H+xEP7HpS9K5yXv0M6YVyd13TqxLcnDGt19rwQrRjxP8ADX/Cn+pOq8VYu9WZH5G5vqNv
6De3BdEqzRdFiWO19Ni9zBY6iXF3dmL4Rbh309ODbhwT0J5okl1U5ISsKV2Jm/VCiw3NIeKWLYFy
gkiYP4k8+bGk7dSemfMulD5kqiZMGDBgwYRhFl+lL8y7gvP7kRfqRER+pgjryIeRrrVsaEJr3Fy5
+1I6CJL/ANVF1LCHCjqMkT60nrpuoLELn5H5P7F2ZMl76YV2fk4ISwXEvbI1vX1EunwK2m/hfUuE
xtk9WWpKM06ETTJBD5FmXRDUlrVsJ8Td2F8V68Lrxr4ZZZIy3TMErHOMk8xPmqW4MaL1fPnYSd07
WJwROcEc+hP3J5ERcf4xH6kpKehK+pKx0MzJe6H/AKgjMfcl/QYknMjli+whr+4gT6CF6VD60gTX
IkbfMe3rg9PTJbkSPc+dEOFHUZHJUS6kdBo9LsRSxeypCLabli9l1LL6lyEbk5TTyKbtc9MeOvW9
f4eGlWMT2MPXctZMW1ctE41z1rdItYhkE7scXd/1F8BhVh44V9Fyxfg3I48rI0/sXUyLzJThkNE7
bMhqKOaZpattOdM1uydrl/29RrconryI5fqX0Yr5mIL2XUhbnbq6T0ck9RP+0kvzpG0dfTypPQnr
T08m6osoH2Fu86P/AInckwOfoK31Jd9xZF9Vixe7pCux3/JG17lfb0pH212L+J/jW1beKuj1bds+
RMPbV0hIS6LS9zLJom40TRbUJdNMMnPG3dmL4BC0pvgW0dOHbwEIuepoxge73E7vxbwiNyJ2uV0I
3InbdfrSHqhcLOiGycbuTPTvX103IVzyJ3OSyE91kO+DuMXkNvmLbW1oyOq3c6RyQ9v2JPVzzVGI
i3c+lPPAvMmmCeQpHG37kxc/JfxP9SWL0jJO/wCxCR1Y4yuRtb/Fo70gj5DjDJTL7v0MyYkhKNTT
5je3cr+ROV5ExJZMwerd4Xd/1YvgE6LltNtFvGXu+hcsddEs9W6Xyg9SL2fU/LHUlWZjsQ0SuFcs
i1jB0+pEnuPcZIV+5G68jjCZiT2nLafk3uLbaXd+hGzbHcnc7f2m68dBCQ9vWk9aMtjmMR5IgapK
5CXLnRp/QbEWWD6Ug7CS70iCUpQv9RSKdRc/Ld/MlFyyLjW3kbdynbHUmM51X+G3+N7uzF8CsiXn
gXLcTrrvwrErgXPV05D3bl6egpUpk7HHkKaelK5+TuQTolsyWRO5yWRZEtkepEtwe6TL8jMFtxKL
pPdgsqXcF3L6D9O3GZFLsyH1yS70fUbJJpbkKjj20vWelGNPnRdSwiyiLH0Gz1vM0Y6LcSRSxLt3
LuSEqKMsnG6eZ6udY0/v80buzF4ix04McDofz8ZcvWduazquPc3KXI9XtnCI3I/uRPMn6Sd89iIt
yIIy6dSygvu/mfjtnzHfbthchJbm0Kdt1kwleRqcicqxDcjHaOhZz3Je25+O37jwuxzaMwN+6ehd
RTI5dxf3Uh8iOpPSjVH/AG0R5OkD2vkRyQty5Ejb+g0Isoi0n0PSv6iCOeCOol1FBtZCk8j8mWWT
of3M9KXpge5rAmsTrvf4nf4xu7C4+C2q7HAuFJOi1OhbhdfBW1SskPlwZ5oe7e25wK/0OhDwTtt5
ENQdCyke54RG3by5ivu3KCISgh9ZgxW3A60zToRZlp29hw56D27l+QozzER1JGvqR1Jo+lFSXyp6
kT1p6PMgYiyvzEPdT9Seh2FGBNuIPMsoLufJHpShq48pm2comllrnT+3zNu7CpamfBvjdDqXfD6+
Ll51wq3PVtyJOfUyGTtcroYpJCwkS7l6WpctWNXnS5ZEuyJ9X2Pcx39R5zcXp6XJF9hIT+g39Kvp
yr3I60jkiHyJPX50Yu5ZX5io0/6T1+Y+x3p+TiD07cvqblzQk7NExp7ftxO/7n7+ClfKe7sfTh2M
cGCZ43Th3LcHodOHfjQqzt+1fPqRt+rZYXWkl0i0otDLpqkablq5LEs6IsvUzK2+SEn3ZdYwZJX3
QucFlYfek6Yb7VmDBgsiSCIIY0hdy0zzFSF/UQfoQNIjc4G9r9XAkjiefzLu7PiRS6LM6l14KeJb
g24vnxZ5apWUdKv8VHJYE49XkxvG0W54ZdGa3SPxcD9NxvdJBGh0/iQrv9i87vJYRexjRC5lxxga
60h8jsOKNvjyqN/20gQ2iN7hIa2OevCla/30djyfAjh+fj5Ib+I7uwlnxWCdEEvwvXiW49+Hcxki
CHhC3Tjkbtzv5E3XkJZkhlnXCklOC0MumjJDxRrCXTmeR0o2XdexLxSS2l6PIhWrglErTCrBLpYa
3Mfp3ep8G3B7aY+3g/Pxqp3HJfBbht9EOfgW7s+FOFosZrfRjgwiXwL8W/hJfGhcCK3PU0PdtzuE
lkaUqEN5hwXM1vtVLE+pXPcj3Iyj0pqxlfc/3ORcfq1zJNLmOBaxKvWcMjRBCybk2NbXL8F+3xK9
VGdLLtnVGSZLXL20+hPuNeXwLd2evzaLu1bLhWUF+DL8Hf4JbiQqTt+x50iBP+03N36G1P3Me3MX
Ii4y7g9x7j3HuMltx7jKL8uVLFzJKMlyXRVtW3KsVbikySQiGiXarTxuLbvU+niO3wyXyLK1ZawO
xBNfNktSS8E6YUsmC6dIm/wDd2euHS2qxdlmXXAmsEvwduPfjW1202syHWK99Hnqdsk7FYaifMbd
7Ec+ptjlknl0N36F1+RtjHMdrRY3Pd9C+tEcqIisV8nSWd6OifKD6mNE09U6+/En70mv7Hk/hcLD
JZKJZYaZakolMvWaXp+RaCZLE83yJYu3j93bjX04I1xox4G/wO+qKQzqiay9XUtphqRtLgWFSKxV
EJG1Ukmkk9KxXzrctpgmkPV+untwI1RwI+BXGotRpkrB03EPg2JSPyUafJfAN3Z1mSEZ4H8dc6IL
ssWUCb8Hb4Hfjdtcq3Cxoa9NjuTSBroTReQiBUgjpSCOte+h0SRDExInVfT21Rr7af3I+FwyxKI3
olYJf3LbkyXrbasiEQ0Ttx0IJdkRj4Du7UyRkwWQ1osXvwpRgzBdmPjEvVfwE9NM6m+tXalh0nrW
/Oic5EMkmk17VjroXVFz6DpGlJWQm9HfTJPTjT4GPHemS1yHRaUtEx8D3dhTy0XrYvf4Dbi38XPE
ni2IfAljar3p2pAn0IJ3CHX01aXOs8mep0dJLCI664a1d9Heka40/sT8TUSQyC4m0WPLn8G3dmKm
S1qWL3+HeXxGemjvS/AaJkdF5U7jTJ60bL0dfW1ara/pE6JI7UdNvSipJOhCkW5aexNYpPAlaf3I
ZHwqVrnrS6LOsQWsS+fwTd2ZnRO77fAevw+dNuA9Ea48tdi46pUVX3F6frAx0W1cyCHyIWWQRyZJ
6nzPLdR0Sou1PTWaqWelae41o7kEcCNP+scKODPiXuVjJZ1S04PKk7vt8F3f9XW3wO5Cfj7aJJ4z
VZ4M8dekncodIo6N/ajXQjzIQ6NMe586Lcvqep/Sk80JdKeZ5jmkuipPLFO+iULT+pPH7aZRPwpp
qUz8LImT2yTBdY1Qy3wbd/1fwO2iVYl/A54K0TxY4/UbJHRM70fRjb5koa5j707CinpXM9L5Vh/1
V8pHRRyLiaO5FFSKL0zHM/L6aewztpjjd+HPylu7Px9qWZG5GI76IQlqv4u/go8TG5kKk9aNH/Wi
J8qep2GSNvmPb9iR7mLcueaeSJ6E0nmOiJXMRHSjXW42R0qtaf01duDHAjwsnMlP5D3dmLxfXhQv
j0a54Nnk2pci5PQnqSNUbfMaL1VFuXIW1cyENEc8Ve1/Q8ttHRLMlxMW7rSRHcbpc/F4p30dxPV2
4M6e3h3BI18h7uzLr4JbXf5B70/XTKEqMQtomJLnRljtRUcjn6VnlNUyetH2omspFxIikVRBDY2n
q7He+ntXtw5p5Ok+EhkInQ3teC/x7d2F4q/Cn41HhG+hD5lxkDXQbGS+VqMjqOiotq+oty5EnmyO
ZHSkckPa/pR0R51fmPd1H5iXWrRfX28N28M3W5CshtEbiy+pa/xzd2YvmHt4KUrjZ9CT/tVp1vkd
JJPU+ZA9r5E8lSeW482dxbulHRRll6oRJPQZI4EmoXQ76p6na3ge2jsSR4SNE6ofxrd2F4iPiHlx
p0zqtwo1MijFReVI6C3URJIqLa8KqYq3/ppA9vNDoi46eSI6DpIx07caeLNPJ1jhRzLK2udHc+mi
Cx5FnSWSviu7sLxMvhQR4+PhE60th+fIhi8iR+dJ6jF/xFSELvRbqt8kPa+VPNkPnWfMdfMfcafI
nqdyOpAxDM8CNUjXCleCmigmka2+mCWPd5ipclXL0syJbEviu7sLw0liXw55/Eo8PGtkjI+lHt60
bGmIjnRCII6Ec2QLdSeSJ6E07j8qSS0MVO1yegmR1O1Lpu9hMeqeBPXj9uJPNUuzyIZ6kY0WoiFl
2Eiw/U9E7jyLfFt3YXho4Fqwjz+Bwtc+PnT2pDaGkRR+VPVyxRbfuJqktC8xVnkz1clijRHPFWup
HJUfnSOpDGNujHReVLIxRfbU9ckUjxUrgxygvZdCR7uW2yPU3foQsv47u7MXhIVYfxmfCOv68N7k
8iQpp3pBJ6utbuBQIuZ0zyms9CeunzN1GupHUXnRkiZyo0Txp60nxUPVPPpSVZl7lrCW4TPU/ju7
sLwcKuDoyaxt+9ILln8MjxneqXAgU8xNECfQS+tHs66MRT6DpksTJFL08iDNZfImGh0noeSOxIvO
iM0k7cWSKRxprLwTFiVrueWielJVmRWEfl8e3dheCirVY5VuWJfw2eLPhO2uOYlVk0kyMnQ6Kti9
MGCKfiy5HIyT1LqjnnT0+dO4qKn6cWKTSeE2NqyIbnQ0QRwLmDJkit0QdS1vj+7sLwHSl6wy2CCF
ovd9D1PHyo11PU3ikcaeJakMmqZLol0pclcKfBuvpehxglFyV8n7uwuNCrNL1hkuszB6Vd9T17/o
vhsfAO3767ssOXxX28FesVZPIV4aHLmeFHg4IkRKyhOjrBHyfu7MXgMcHselT2Qt2+76cDJYh2+O
9qxwoXSl+XG3dtF+euJEnwXogUF/Bu8JEPgxWGWwXWjB0+T93YXG/LRGmWWsP1NEpQ+uqxMwZLuk
rJf4PHiJ4TfEuWq9EvjRoen0tQ/BNouPgzxJFPyZu7C8H51uS8cCK3dZfCj4LFe/h34Bn18FeqE9
Ml/A3pCG0J8b8SKQiC3Mn9CGR8lbuwvBWdILIvfgzW1Jd6xwLZIeV8RmvbTOlLpeq4SqxeC+mhPX
ck78ZTViXBiNEUlUjnoksvkrd2F4/toklUtlkvOmFypFJWVqlmSVxZ1Rw+/g40t+BVV4NdtMabKW
z8ufgIJLEPXaGZJbpNJkt8n7uwvAtIlluHC1NU9T0pLLJp51knRciBp+Bnivz4UVjhN1njLwcaI4
KGvKk9b+OtYt8obuwvAz4GG7mKS8avU9cKkIuy5CPP4XPCS+vgUPQuOqtD1TRo3ed0XwW5E+AxJK
E/L5c3dhfAZMlycELVat6dCxJFYR5/D++t8V6X3qheMmkM3dJ1zpwWLXrBHT5c3dheMhFtEU78KS
dzgsvrW+iKXLcS3j44DdZ4y0oXgk/MT4jkT66rOCG8a4LNHmMjPy3u7C8XFb0uWJeeFO63kNs68D
zRcUWRHClkIl8BNr9eBE3r38JHS/gn20T4SdN9eNc8OY+XN3YXi50+lEvwy4svWy6aI0xtJ3bkhu
ZMQR08M31fgnoWtrnwY8LDv5liXRR8y7uwvHwiXl+BvpvxofAjV3JaIRAlwYb051us6YVE+Kq30e
QyxfhOR8K9ESKmCUXLEfMu7sLx1iX4Pt4SxnhSS6S8EpUT1xtwuZe46wi0rsLUl9ddiWSRx34KOC
iXJaSZEkNEddFy3zNu7C8fbwcIuYLUfil30OBLks64QkWUkUlluVFqf2pPgImGLSm+eq/FTJ4LVb
Usfl0+at3YXxGxesLJBC4EaURxJrCHOR7n/VrvWCSXjkWHKvOpvTC4z76lWxbTn/AMCbuwvi0LhT
Wx50SLF6xpb0QJELlqlVh8yWRWdUdL+DkXbXC8ROhfN+7t8UueRC4ludPPVHDk7a/KsvT31PzdZ4
E1xS5CIkW0iMHqILkoi0klmRS3gHpjQ1837u3xNsuRxeiOtY6UuTwbk0gjgSsUu/sQWrBesDv2I6
VgnRYxS5BAoJQ1IjItxO7mRtYrkzYaTkT8xJDknzFGWNMhcj1RyEmskQeoghkkJl6RV/PO7t8UsY
FOfALqRojXHUh8xNHqfChE0imdSXJX0YpkuyIFCmSVZj6ly3UzciJiwo53GQj0xcSE0pgc8z0xzP
3LYGpajoT0Fd3eBJWk/L+kxbB6omT0tQJuw90j2bWJtwSPasidL8pE75EkNPkRTrwU/m7d2+SHNI
XAuqRy4jik9Sx0L307opLHHIwQyRszciJiwlt73LWnI1ksRBZ5IZBZa7IlkEouQjNiFlE7jyGvsK
VCRe/Q8xRzyRgbd4FOG4gUZZDyhqLc2Peu6HNy9iDqi3zpu7fIsPRHDmONelkYRBau7vS7HznoSi
YhoazYsJRcs8ivciJLFteKZM8BkTSGi2iUi5BJfkWuWdmQukF1Tqj+dP7ixHzju7fI8Mjl45U7DG
okv9BNqUSjNy9ySeFnS0YOdMmUZ0YIkyZ1Y0IlEMnb+RdXouaHFiT+75x3dhfM1yBvyGfTg5M0sq
ZqtD15MmTNMGEY0ZMo5UmmTqqXQ/S58i9qJZVf7l837uwvmh2JalTgxFbsyYLKmdMmSVRaH31Jok
UkrlwJejJkdGWrdSP0s/JRS2Kvp827uwvmjd2F53pYzognRFLF0RRUgQ+9YM1ikFtMlyDBbU6X0R
A2hepEovYt827uwvmhojoOs0klEUwWJ0TRUTEOkca1XOp6L6IG1Zlza/L5t3dhfNLHS4oJ4jQix6
dE1nwj4SWBJcvm3d2/8AoS//2gAIAQMCBj8Adz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9
zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M
9x7me5mWe5mWZZ7me5nuZ7me5nuZ7me5mWe5nuZ7me5nuPcz3MyZZ7mZZ7mZZlnuZ7mZZlmWe4yz
JlmWZZlmWZMsyzLMsyZMsyzLMmWZE65MsyzLPczJkyZZkye5nuZlmWZMs9zPczLMsyz3MyzLMsyz
LMsyz3HuZlnuZ7mZZ7me5nuZ7me5nuZ7me5nuZ7j3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3
M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz
3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPcz3M9zPc
z3M9zPcz3M9zMs9x7mZZlmWZZkyZZkyZMmTJkyZMmTJkyZMmTJkyZFcff5tn/wARoff5qjrouv8A
xEh9/mqdD/8AESH3+a46aH/4hXcff5rjRP8A4LuKPCruPv8AN9+Lf/woh9/mxeAUoj58lk6F4Jdx
9/la/wCJMz4mVw4Eif8AwD28Eu4+/wApyl9zH6ku7r38I+O1R/PU6pJ8Eu4+/wApep45avNeJn/w
AqTGiKX0QseDXcff5RhfUjprlF1Hgp0wTy4KRBdfOOC7JjW7F63I8Iu4+/yjfmZRZzruWsWuXXFs
S9V+EiSE/mzFLmDHGfhV3H3+U7GTNMF0W04LGS64NuFGv1UfzRYvTHgoXhl3H3+WbH5FtdrGOPek
rUkJD+e4VM+HXcfd/Ldi9nwLWMouuPKzpn5pl+DknxK7j7/LvUt4aaStCVEvmZeCuy1y/il3H3+X
4fiJVJ6fNMvj5LIyZ8Yu4+7+YLEPw8r5pRHF8zovgC7j7/MUSQ9Urjwx0jr8zTw7ssZ+BLuPv8yZ
IemVxpXzRBHBllnLJb+CLuPv8zXJ0SuLBFH8yzwW1ku/gy7j7/M6aL+Bml/mG5kwQSY1wx/B13H3
+Z4ZfBYh1txp+ORS2H8Bl4I2W8zNI1rQtr5P4Ou4+/zPBDJV1WONDO3xxvrRd/gK2rnpXlqWhNLv
8HXcff5o8yGTtLklyV8kpU+vwG/LHxbb3H3+aZWaXyQqz8jz0ql0+ALwH1+D7e4+/wA1WpDrHyNY
irfjsyyFYiSHyGYldSyPy3fRFkRpeh/B9vcff5qzesMyTb7krhr4jCIaLIwzoZRkyXuWq/O3ibF1
FbXeqVyIw+g/T9j1bv1p34L7fB9vcff5rk8yGZpHUjgwsi+JXXDS8/D+RghIhonV0XmX3fY6pnq2
7i+api4DQ1M/Btvcff5th6PNcFp/Em+Bdo6kYpbl4dLRuXlohE7ruvbjvv8ABtvcff5uhnlwYVEu
a+JfXRkyWuZ0RPh03Yh/clOSVRrrWEdXpe3pxn3+Dbe4+/zfDJRLI0wrIjbc5Ev4kvOsEP6Pr422
h2s67l5kLXK4z7/Btvcff5uhEumHouyERPxVLy0T/b46xLuY0TMELXYT4nm/g+3uPv8ANt7llrj4
nBexkl6YFGH4i99N8+KhXZL+D7e4+/zXOEQtFi3xXH3L3LKDBdRrh+MXfwFjo+nBjbn4Tt7j7/NU
7vto8y9FqVX8KhHnxZ5Pwd+DfPh5wiNv3+Fbe4+/zRCPOtyxfiP4UuPO3HTwFyEuHDpBgvbgXa4E
nq3WR6duPhe3uPv8zWRfRaxfS/idtccGdueNbhy7F7lrabMvcsjJkyXdId0TteqWQsfDNvcff5ln
d9qwR8aeuFw5VmQ8rgyW4d2WXgLImYIlTS7IVkSmQ/hu3uPv8yNtSycJUhY+HY4y8HPXgdeFctfw
HQuWWjL+I7e4+/zJd/QtYheEXBv4CEQyysL0qJ4s0t4GOnHvaluPdllJa3xjb3H3/wDAV6Q+OoFL
G+TJfu5caH4J+Mu4OpYz8b29x9/nKEQxNc+HgxS+vGq9MDtBdCE0rDTX49ONciTJnVbgQOl+DCL/
ACdt7j7/AAOEc/k6Oon1twbF9FtMcdNL/YtgcEqt9V6IUYFS2u2vcyeHbg3cUx8ibe4+/wAChHet
qpw4fyTtfmWuNJj8tNtF9d+BasadsfQcY8hxSKXM066L0jcfjFb6ra/Tt+urrotwOpZfJG3uPv8A
AvU87nOrbt/uf6U6P5HlkpiaXKDzY0+PnwKZt819/MdOpjTctoili/F8+BctquWX3+TNvcff4El0
Wr1PkoVbllK+Q76I6Cb/AEJ5PRJJPir8rC3Jzt8xPKpD031WVb6L0uZ0yS9dtOJ+UNvcff4bctf4
7at1pe36yZlZsJrC66b+FQvPS10P+r/cUctcviweRal6yzy1W+V9vcff4Lmudd1Jax17fGOhd8Fr
+5QNPmNeAjwEchqLDWOFfhLd9HSUXUmIpJC+Xtvcff4TKZcla+hzIfxLGnAq2zoTo1C6Ma8epwKX
l/SCHzs1S1baI4dydsF36T+5eZakKl7fLu3uPv8AC7F7nTgXRCMmNMLL+DTx4om7i3R4uxL0Inlg
T6r9rE9ddtF9M6LaLDm5a3y9t/7D7/DrMuW1ToxWxhlmY+3xXsbX4uNMHmQ+Q9F9E6pdqTrt+I1m
H8v7e6H3+ISrF19i3FTWHml6WHPjb8deWhrmNaYL+LTLZHjsQ7E8O5ZRrvS+CG7D+X9vcff4jglv
h5pBdab/AA6DuQ+dj0sisrh3pbXktS5ZGKSW5EOxLJSrNIwX12Vb6WZG2rR8wbe4+/xHLMsffwdy
F8KiqYuVyZ0X0RpuWWi5CuQkTuTUktpT1FtSuyZlYIc/Q9LdhbttrwJJJ9yN17WT6m1wlu8uh/Ah
daXUjSwuY9ic0uqXrdltPXQtFjdFyfl6ES8mSZXxJ934Rufhk0T6WFLxzPKuTrSSEhepPbJMpdxb
Fli3T6lhlhbZiT1bW7OHIoSfc2+rD6ktJObC9LjsbdzWMnp2v1NubCkW5X9ItqXpWS6PVtcMXreK
eZe9OiLP7lnYa27okczO7mbVu9vqliT9L3T+MCvd4IW3du7ERD8A7QPsQW+WMGNCdHVrpchmSy2m
CSeXwqH4RrmQ+LCIfjILURAh3hJD/wAb5M9PpUfqOYbSsbWoW55LOwtzxJ6drW5tzYQty5Hp2r0q
ZdFu2uGiN0JdFW7b4ULRYU/YisUmLj27n6efce7aufMW3Yr/AKC3bmnysTBYxcfCvgjqOS1JS+VJ
hnqfut9ZIg9qL7ETBHpuWrks5G0sqDFPUn9CaW5F7C+Dp9CfgMIxLOwoWqF4S1MDjlkXkyByidvM
e55d6SnBLbffXHG8tU1sSdNC3K4k16dqY5dmXb9UTPIykQzHBu6JrD0T8mwkYMF7F7llSORNZQt3
NZFGq6MES0fjuOQtq5iW2+2HD7CltxS5dUzTBfRd+MXjM6WQO1E1ou/B2JOgluFv2pbb8uY2udJb
g9LU9SFjweTrouzrToWpHUg6Ujg+j1OBSbm3tW6f6uhue32+QltWVNxepZ6aIVZiRCfRx8mPyMmT
JDf3Ek9plGZISI23Pzu3Rnp3WJWKwnfiJrKJaLWOp0XmQtEIXqWSC2TPjV8BUcyOpEqSOniFtjLP
Um7Kbia5WLcuYvVue7u6L7CMdtWTNYRe+idEFmRS2m52pmlyyL8WWvUmogUL0+lWRt27nndnyPVt
lNfqTolciGxpqU0Pb0dIpf43kyZMljBgxXJmtkYLqNHRdS1HSx6d5mx6do214WyvFi99zV2W5EvR
gujJleFsmYF8CmB8iZ8N6kl9WbotuQm3dYPRZTml6ydrk6LnQwWIrcWiFTBLJrZF7FiFXuRSxdlt
FlxE04aF6m90Hcj8Ye2y5yQpPXt2uKSlmin3aO3w7JkzTEmCyRjwb3PG0ily34lxJCSUaLl9slts
H5InwO7e8vctm39xN89EtxA002lzFGS5PBxTJng3d/Isy7+yMT3ue1fYt8ERB/EmS/goSIdoMQLb
uT/HoPdiWSX1JLLsNPlWKYJfOmatdKTouRSNPloxglJ9y5bRYVb8dbXtc7USuf2EkKcEK70z8AyZ
pgwY0Z8c0ubYmqWpDJy+DJLtt5eA2ee7du+yQqogSX90s7kkcfB0MljDJ5bV+vDejJZl0n2MRwpZ
EcOCCVRQR1I+lL8S1Ye6H2Ny3Z6i/wDW5tctbXZGCHYUF+aEtFyToXIZYnrRVtRUb58iW58h0vpn
JiPLSn0dJ5IRMxqvw7Hq3W8zrJt/y+mFuzonxGPFY15MmUZMmTJmuCyZgwT6SHZE7eNt2vG5wz09
MeA/x7P+H/8A29W7d/aKOYl4BwJ57mFWeG1zpZSYZhnNHUiK5OpdUzS7+x5Liep6ILEJZvW5bXiu
Bbd2D1pelp8iS9LkLTel1FJgnoYJ66L0Ze8FlXsQiS9YrC0WL3LihfYjSkTFO2mdvMlkPRnVel+Y
vI9DVub6wWxnTcyZVedOdMaMmdGaZM68GGYZiuD2o9qMGNOTJkzTPh10LIagkgmu/wBVo9unBjwC
FtX9PpX/AMdtUPaupDF/j2/1P8n5Eq7WC/JSyPK/gPT049jJmmKwix69/L2osS7VW1XEuLLwQqXr
Ba5HTh4FubabUroPbz27hJZQlu3NxqyWZelqRC0elLmXI0sbLUzrtngwx6ZfIsZkzSxc60Q2S6TJ
CI0QqWpDIZ5Efi52w1zk/JaoYknZi9GSN3w7JmmDDMcSay2WbHAkTwodXxdu3ruQ2uu5/d6PV/qS
Jd2X6EEc2N9fASidEuk8LFM1zS7kc8i1ZzuZPClmC5GpQNIisaLIsXFLTPyIUWxJLu3pzXMFmNOk
JDml6yuZ+R6lz1RyZbidK2rKLDjFFqjQqR1MVlW1RTJkyXJSJaxS2iUqRyPVl7f2+DYrgxXJcTXI
9xkiSWpL7aSlJgwYMa8ke5dC+2KK5K56L2Mo/G6LGK+l/RmR34qf9qe77I3vpG37aIJgc3bLWJfg
n30NeDtTNLKt1TOta4pbkLzWdHbRLTjrArTJLUprBOzb6Ef3F7Fr6L0uZLdL0aZc3NclqnoOxHTQ
08R4G9UjueUW4snIhkdSCERJkyZIp7T2oxtPbtPaYghoUoWz/Hsi5fW2leDBgwYMGDBjg5MoyjKM
mTJkyZZmnMwe09pbaYMIxpyZ0X+hZ2L/AI7upKuutId10JX4ssNPp4CH+RexlEyhsySTFt19N2QX
2m7fhbSzyZRdUyWc6W+qhaI5l2QPwkr6k0fjs1s+I+4vIhLHPS27JCa5dTdfK9vnpuizLqkofUuW
Zf1FqXRlkIe145VxX0wX59CKJo6F+DYvqgzSGJPlWOKmTTzdbZGmoMP60tpyeY55Esz+S20vokcN
p0zoxqxp56MmWZMmUZ1YMcSD01jK6Fvx3MuvrWGSrrwthcntsWEpzS7LXNs/3UX+PbZttssyzOZd
SPc16S09qXSZgtSHR9h737t/6Lp4iOTp2+AWriuNbQrDrlI3Slu3J8+g0uauiFrkseZZn5CapJK5
EqkbfuXSfYumnypDRCtWepLsTOaQy1IJ56vOmNUovSZJei/D5GCxD0Ty5mDd6udLOxM0wXVZWS6u
h3Fu9Sm8cx7XnbbRewl1ejBgx4LJkyZMmTNMmTJ7jJyOWuUXcQe49x7i24hv1FlGiVYvxMGDBgwz
DMGDoiyPWnfpS9Nv/ZDrk608hzaFOi9LvlJOT0penb+/ioZC+By+GmobxPQnmzapiWPdtt6fMe5c
ycE8Gx0Jz3G+tH6jmy6JTg9xAy9JMHqaglE9CILPFLltccKFWZJ66L8WFolOkz508xeSokuZ7tMr
NPSvoPdtX5TytI1uUNcqsXSPhrb7l9MctUtUuZ8Pt/7IeqC5axd3JG5E8kvGK4+OS+JMXaSLKe1I
e62u9LFy5KZNI5N0yTWKxotc9TUIgSMxJHSkNjVb6I0RrgcsiXR8SSKXeCFypBCZM4paj6swyXkS
M6fVtzR7dymR70o3JciKM2vp8MUi2rn/AA0y7IstXpXitv8A2H34GSX8hxxIcT5kJX8j8nDdyxfV
cmaci6pCQluUeZtScw6Z1zRVhUbfckvzr5qklqTW5Z06a7Cks/A2ZkySmJeq5aCGkYJ9LMbvsY3L
6GTkZHp9S+tGt1p5j3bPr50a6j8n8MnoP/jbQlrfi9vcff5LT4EVW1c7CWbSNz7hq/0yRnVe1Ipa
nVEqqQrlmQyxD02IaklCFRPoQieS0X020xrtpnieRakjkuTzIRJNE8wYikxTGTFzLPcz3GUSWG9+
2bR/pHq2qF9iR/kr/DHu+pPXQ92uPF7e6H8FxwcVzXHwGxPMnc5Z1MM5l9dnS9qR1IeGN7bEPVnG
uHRKjRCyW0xrgtcnXjI3MF9d9dyFWKO0QQqxokjodxJ8jp3HHPRLG1aTOmaY0RTnXJkz4xELnbSl
SdL8vF7f+yH8Qxw8maYMcKx04bZHCklEaLMvennqkcFmXsWrGltkoc8GCSTGeBEcC3CgsK1xKCJp
Mj02G+pLySdWS6daduBHW/ExoyZRnRgwYMaMlmZ0W0IshLcqW3GRTrnxm3uh/F8mTNcUxwMmeK3E
wSrdhvmN1wXrcmB9EWJRel2WuStK0yi5KLoyStKbIXIlq2uCETOSBIh8DBjh5pCZkyZo5sS5/Ysy
7MmSdGTIr4IlfYvuX2IlGTKMod+D2XiLmKZM8RNfUjcidjkvYvcla2nnxm3uP5FlIijnhrETzE42
LtzpDFNZJzT962QpV+ZKWhblcl1codiODG5FqSY+xexkhcy3CdIRGpGCONkySSjH3HJYvSWyOQvT
yI0WYm1MGILs6nQyZL8CevwHFMmdVmRvROxyXRksyyLRtJ3bqdvGbe6H3+REi10TEFx8L1JNpdBv
lyLk6pp0LVtSU1SHSFklqlmSPWkWI1YLkKWRxJ4PIwX4kEIhmftR6HW1blnRviyjPwi6MGYJTPyu
fiTu5FlYwXfjtvcfyJBZ/QjhzMR1Em49OfM3NKE3jTgmnY7ljBNU+g9pKpDJREHl4CaWUF3mkpR4
KH1ObdZJ68SSZIbuyFSNCvJ6pPdd64XG6mILOuPg8GSG/gO3uP5EjqR0RM34UdSN2H+5bT3LcuRD
zBf7jiLl0dayqSiYHKwWX2pFIySrcWa3pEcGNcaPMZakcWBKTzXPTCL0sXpLzSfAyThjb5UtXC+S
NvdD+RJJZ1LqFwYX9Np8yOmiFTn9BcoIr/AwSqTSJLEoXXmSkedYXGuqpkvGCxz0QuKyV4O5asNU
gV76ZdiC3gn5ltGC5n5G29x/IqvYtzJm2qKS0t25uL8jdt23jnpZg5LqcqRuJGy1Mlqw6W8NBCMw
Qh2jRfgqdE9T9qS+XDwTS51LVuiKQtOK2pL8A10P5GWjky6eq6MUz8WsKeZkyWaMSXTWjb3H8jf9
SGiyjU93pafIy06WJop5j8iWs8ztZ8G5Z6ZXhUkXpAtOeGhdDzo+DJivcajBitiYwYgsXrOiLoa5
eZdELnx5SknOnlTP30YpktTBz+Jf+zeuyJ1XSLNotDNsp5H8jdxWPLStzjMwzdtS/JqDzIVZXUh3
6mY7liVzuTjg2L6H4XtSCWx6JWqGsGNMCVXpkutM0sXelw6WIMFzLM0k6Fm11G6Ysf28VpcLJjXg
5oyjNccLHwGf6duT0r6JcN/IsE+Z3JpBJCT+wpna4jud6Ymllce3/UkovSJH5Z0W1R4m1c4uIeu1
L6notRVh4141WuWrLL0tSRNkcyWRx/Svr4nEl1/A5otuLQ+xjiYpnw62rLF/j2L8t7j/AHOu55fD
fyLNI6EzWw458j1bvotOCfMh3Jn6DUJ/RHt2/r/M7rkW0Xr0pkuSq2RdliFE+BmqInVnhO/Oltdy
y0wy1hvS4rLLEpwcqttlrkstx5L+OyXSZfbHYs2izpdGKZ4ODFMmLF3J+L+5guta9f4zdHuLXRA9
zV2Pe/8ATfEfx/NZr9aTRn7mdvp9OBzpk5ljufyoyI12pnTYivqQkrara4MEvQy7z4CU+Yy3EtyM
VvqkvoTWSWW4FtTT0dsfA7Ga4MGYLNUxwbH5WMlmRKMIwi+1Fl6exOUbXGHItzx6Ul2VLM2bnhXf
cc5bIVUomS9iz1v4riueLHWn1oklds/KInKvfzFfGm55EoR2pf8A3rGqxekk0VbI6F2WHa5eliHw
PU6+WjOB+fCyZ0Snajgzw7kwTSZ0xpueZC4WKyqN9RocchzaCfhN6WZZmC6E08kJl+ZdVwY0WZDs
67pHtsoQvIu3PQshbecT9NDb5D3OuR2jauY2tD+GYrnw6nJEFxN3PSlCyy2vJmR0uzqdCC1y9E63
pFL0vRVgS5SNNW/QiK2rax1pcwMiZ02cC8hma4rnX5UcVzTFM6IQk635FqTS5NYrfg3OpanJnNEp
0syF9T1Jz1+B5pjXktS6PqeqL09KL3L7ZE9uKSkYZZwQmi93VJXWWbXM+pSNLapWJFuaieRbbLRu
e7/+m+z7aEuWj08l7n/AW3aoSPStD4kVe15Xh8mfGwJOqUWV7k7l9ydFq5pEDMwdRXjR58Ca2pNP
Krl8jkQ9MkQqw9XQsXyLjTNjOeJK7F+xC76OhDM6LU89N9Vq9DP20QR/kVv9dB/+uGo+g09drfAY
aOZkyZMovchWpP2L48zCLJFrCnbK8idpKsy1z8lD0OP6s0sore9LXombNyX5blLpfZP6ELO64mv6
tG3auo64MUutSaP+1xL/AJG59Uv04+fgeK7YzKkmjHtSbEn/APYceqefqE28ly06LkkQZre6LQj8
a31wTNJRFbFyael9SzxxLF39i1i96S8DTfAsi+nOS1o0Y4F8CXMvp5EusEZkjqOOBcsN7tF63M0k
ltpk/Bp4cCVJrte32uwl14SfTSkzbs5P9B7UsOCXdEqwtuYdWzb/ANh0brKL4J2mCGi/IfYh8x7e
e1yjY+cj8tWa4II+E4OSF0aklU3rcsxce7bguJSdKRtUnmY+xNZPI6VsXuYgs620TS+vrSehcxfg
9C1y705iCynTL4FxJLJeti9JWmJEm+7E/s+FPMnLQuXCtclkFh8q3LVbmHFkJREfA0mPb6VZxw73
qnAjJBY8zsS6W4qtIltXplX8hyroexL8U/rJanU6DNvcemGbWuzMOsRf9yeWBMlc0Qsoa58/h2Dk
Zrn7GGckLtBgiD1xtU+2eZ2Efii7jsde9LaJpEyYuWaIZbTbjQXIVOdyNNi7+xaxe+u5CPpxLKZH
yI6cNNZPSRwb8yWhpO3BkvalhdSXYhr6iUY02ate41ENVxX+X8jr2LeMTFu4N2j3HNmGcyysYp+L
Tnke2DJ5aLk8Pe92U3/sWtJt3ddv7Db/AKnRVZt/7DG/Oxtcy3yMFyayZE9sEdT0OyQkl5Ie9+7f
jx99LdYIpgyZejH3Mpfqf1P9DBmOx170a85o2bdqe3FpzA2xLX074rJJKeBmME4Idy1MtlkkXuW4
kkYVLijA+ZFeuv2vTEQi39PEzYvzG+F5GIYk8862L6ckvkNdSeBYvS1zB0Esxo70wWnbuZev8y9u
9MEnX9yz+5fxUN2FozRJL6nuLt8NIXlxJO+ntr2/9hjX1N2y1Y1zRSpLP0shrw+NHU/tOrMfcdGT
y0NpUwcvqZ+yOvc5Iy9OS1I8s1lXe3avy60notGJOlbNfsQ/3JRBYl/qKH9iCyJwXuW1RwLljJNI
GnwISkgsX05siOq4kp5IVorbVI1JCdkP/WaQqKeepRzOxctouQqwORSKOZ0mkarPAlujb6bL0060
ifueRKmP0Lr7V6kJ/cuoOvh73ZyQn5aluX9P7cWVYhubaMVzqS/tsjrwu7NvdUW5PA90XdMzxJfu
2/sTzE+uqejv2JymRgvwcacF2QoI3aFGCEYL6scHBMWHKJopw2end6klhPFNz1cyBpSYLssWMOl8
+ZDpEdiZIZbVfRYvXOicEk8tTXPgXLOx31RospgViJ8+DZSYvFyVnSl003RMFnS+m1ZdiLEYkvS5
OmymR7Xsj/K8P/eli6g/ncknkQXpa/cvbR1LGPtxOlMFjzLEDe27HOdUKjTHtfK3FTXIvYyZRmex
hnt/UtbsS4mRprsKMl+HPUuyFhG3uvAx1GvMjW9n12/yrilzHCnRHThumDki7nRZM6EEONG1tSld
iW2bbss+g31emzPyl/UsicaYLli+iCU6W0W0Sx2LosXrf6Ilk8oLOERxoasJrlqnRZwSR0I0XpBA
2nks8cJkxgfnSWWLnqdL0tSETpvWOlEm/SkPammtrtotb9i9izsdS320Wt4FPQ9y+ojdt6OE+tOj
IeBpa1uXO3hZTaE9yxzPVtedDLkTS7JLs/G5mK7e/gUxtcCVlYEnZ6cmTJkzSxGRap4b4S7iidEL
nZ9iE5oki7pbJc6VvwbVkhYJRBD04MXpCEXrCuy9UoMF9U65bH58FsmLox5jb1+Y/UhOM9RpWLGT
M1mqeSWJVuWpakQeZDO/ET3R+XSlmX0WtX+Z/r9zr3In6M/mXLP7l9Frd+D6WStEokyXfAsPsT4a
U4OpimS7plmSJerb/wBlxW6p+6SMLhJ9HJKJIX3LszTGlPgRojXYy+Hij3dBCQl5UuShcpL5LFy6
LcG5YuyxcmByo+nEjb9XqeXIpkai2i1L6pSE2PgJCk78CW4LsiSFSCa3IQ07iRkT66blhPECdnJY
vS+mxFI6Ub9MrapIe5vbtxNL/po6abcDqdOHG5wZzptrggka6/Btvfitp5MmeHyp0XRkTcgxwEl9
axwZ1RwLKl2RNqRRvdCc8zc9uJEiykvYuNYJmYL2LNkOzLcW5ancdrmLly2r+JCsq2rJiPoXdh3y
RniNL9RP6VsSRWGvIxixFYrLE0y6XkXV07UksXLkKkotYyZsSjtfXFhWsInTNM0mksa2v3ddN62s
XWjrwbW0ZrOND03y8a5JIXwfb317Zd4LaVtWNuvqdDJmdM8K+q1qYpgxxc6/S/oeqiaWMeY5FOnJ
e5ZR5snc5LY1WI4Fqyi/Mtpll8dK21QeR5Evnw/Iaq2QSi5JM2FfkOFBel6SK1hxyFu/1bXA5M2I
VX5Ua6aJdIIkvqjFLUjpRPlteCY9MW1dC/30ddHXhfzrYvoafM5/Ys1VLotE6JXwjb31S7InC0Qr
sgu4LOl6NqsaYVLUxowX4EMszJk9xlnMxS6JWixnhpwWUWkwLZLuh0U1wXZili5bwFixBBKIdJJd
Y6i89cEpWYlEjdoJXBmJFyGidWIl30zSD3Gc6YRcsK1y9yx082dWOazSBSQuFcvpex7fz3PPBtYv
99FvCSmRuz9jL/cz+hZz9NN8c6pkLHwjb3MFzIn1Fv2pLLf0J3OdGYp1LGeJv7LTcxrjPEjwP8qR
0Guo9id4UKjY19izL3pchk0wXpG5FqRw26STSSHx5X2OskrGq5KdXtz3E/pWNDkSTnTYWO5DX4f6
gxSXovdE4RnBHLgyWz4BPcpSH6VG3lw7WL6ep08FGS6MGKRNyOfSkN2LF38J296RBiCHyPQ8bjdF
lMqmTJdnkWR04q37crl1RK1REntMV5krJfD4b0zxmJLmepOfU8aOnYsXZmFpSLMmDBckuW4U86RS
1oIIZHFuShv7oTUKttcKmeDk8qQzy0vlBEEvkWJpNbUtS9i/DtS9ZRDQ03IksEcO1i+mxdHUsdNO
a34VhNr8upn4Zt7mTBZEtkl/CzzM0zSx/PWm+pE87cFDokPhX1Mtnl3PTvblcnpsZMmdOSJLMl3M
HQsYks+BBK02RKZdVvw5WUfln9y1iHpsXeiyLKmdV1quWRKJ3CPLTYuWETg7D5iSrbVctSSFSWLe
9yvykfpvJL5kq3GsX1319K/z+Lbe4/GdO5E/ameF6Xw/V0rYzxWkerddHq22pml6uXZDSGpMyXME
s6ItqsQdSUXJpctZDvqtzHN4JwmdS5YvwcFyN30Zezr0pksqXfFsRuRK0TH1JRCL0tWwjuPyJzA9
zpelyxgv+hal9EKks2721uX+TkNn8CUR+n8q2L/fiWsX++vBbVdHWltOPh+3uPxOZ8D6d1txZmTM
6b8CXweh1OkmcERiuLl62rEDGNxI5RCLM6l1S2iaNUhEbS9e1cUhi5QQl2LMuqw+D1RJY6nkXLLw
Fy11phc6WLuiXUZDudKratFiXS1ZM1hUli9UP1qVBc6qliGf6mn8To+vI6eXL6cW1i6+vA6arli/
xPb3Q/DY8PdXOZPqLMe5OUiONZSXLKtiGpbNyVoXPA9uq00tWyG0dy6OlOpdFnAufAlaO9JgvpcW
lfY3PdfyE/bIoIZbX1LljojrxY4Eomli7pGB8yGWuzsYyTpsXsdO4458CFS+WenertSoLtuOtZP4
fypf71sW+x/qfDWOmq+nrxbfAdvdD48Uuyy8f34vQmkyR0Ip5Fizicnr27XujmkYp2LFy2u5ZafO
q2slPhytE4Ml2OC4nOOQ59t2OP1OpDWqNpLqlniNRwbc9CWCYlnRFpuTk6aLD3MhCtkhP1Xzovph
US5vA9m5fkuhO5t99Nz8iZsW+2i/3pbj9fBfzP5/DNvcfCnl8RtcvW5P6s/mT1serpkmM6H/AIvT
mb9yW/cTpvSxPXg3RNbkSjoWZdaHWFiqbL3PaWWnp2ITwbYUrqNotRyTy0NdT054nlxYyTwYR5li
70wyKQqJc3hHp3KN66E7nL8+B2p0ZenWkEPwHXjXLaorctxIMeH2/wDYfCj4f0L3rYz9FW33L3PS
/wCkh1veOSEzb6Ye9R38yHmkPQzOm1YRcvgtSxenUjBGZLEPj/kL0kQSXLMuS8FtF6W0yuWiHS2q
aqtqW4F7DUemFkSWm5bFYVEldvCPS16f8m09W5y+L1X60/1J/E6liVx4dOnBnTamJ19ToWpMGDBD
rEm//I0m4/EnJ+Nn0Ia8Lt7j+LWX8i7+2nr+xelrl6rd0z2JRC+5eu3ZMLc4kTTe71KSSUTokUaZ
VM0uZJJL1lEMe5fYmBmOJgsdSY0eS1XPLlSCEeaPNl8OmRNE0nywWMVvpzofkKnal2S7H4qmeDCo
ksvA9r/H/JtfIe7c5b58W/3pKOhY6eR0Zf8A28XctemC+mUTElxNu75Fv1LlidyvnVg6Re5/k2xn
aYYnDyREohr1b25bZ0LOS68Dt76LfB78Lode+my+9bXLv6aYI1PYoTSu3+g9m5y1SNH5YLosyevB
sY0wiJMEoxL5itkgz9DyLoxozwElzI0WL0vRuL1vT96NVmkaI0xB3MnWC5ZFlSEeXDhUS57sD2P3
7eaJbbbv4GC9L3XU61t4LrwJeXSGSrrR5MsKTbuXalyy12L4H6bOBptidx7t+f8AJdL+Q23LJiS7
9LLNM6di1y6Zgw+Jt7r4lYu9FzqdK2uX0w/01QdFSdu57esH/tSm+WTSdF8EdD0vEnWkl8luHKL0
TEzFzzOpcg60ueZ5juWf1EpMsyy1YIq6O8j7EciCCehPJHcik08xja5jUk6FScFy1qZM1hZLkVwY
ZgwY0pc2PY/ft5oluX1IeSH4D+JclfblS32pc/ii/iV/x0SlYwy5CMDb5cjbu3LmRt22IWhtOU+T
MkK+m6FYyRTcohTYse4S3/cl4MEbVC4m3uh/D+mnoWL67lv1Ouq500/+qfx/Uj0tJ3votSFSUROC
9yxklszTOmxdUiTyoxrpS9bLVCR5PkYpiRRYdJpJJJ3Gbu1E3yrCpL5UVFPuGW/ppCE6IlrNL34E
kbUfk83IQuUEGKYJgtosiXTqeZHHk/gWvS/3OhcsdD+B0fh5JpJHU9LZdSSLarLbmnp85IVPVy6l
9DcTNqNcoMltOUZTLWLXMEPA2+XF290P4ZYu69OBd/QtXofx1217d6/+wn0lQiKXeCKQQSyduPBx
1pKdepityxKL3Ia0OrZ2IEJLDHBuottPUT0ottFSX7jd2Gup2EztREaMmTNbkv6Iu5fRFnFJO+vc
t270elTSFot4Lz/c86W+1Oq/WkZ/c6/udS1/DSi6E1zVfyZYlqqXUv8AYU/aiSMRVvVIhtXOaPcy
9y6g/FMe1K+OLt7j8f8Az4XWk6bF9Fi7ra5ktrsS+Cp5fqhvarPpjXe5ZVyTJn6GTP1JkyXdJgxq
T1X0W0XdPMYhoiu0/wCJY3Db5EifMhcjuN0T5kUubu1J5sY/OlxdjJyIO5/uWRg5Eu/Yjbbq9EFi
WWq4yjZ6E1vXuZL7PgW8D/EvWHSC/wB1/E6HT9i9n4fb2qpLLRa+522oSmXzLKKbRJZeWRvyhw7R
JD/JOw7iITgUu1ElzLUuiVYyZZiBt83xdvdD+D2L8OyL30deDfhyso3bI92OqZBGlzkgTHTBO1RS
+mD9if0P2LMvSCNeaWLKmTqJE9BsmqQmJEP20dZ6jILcqM+tMm7tRrpSOmqWS1gsp+pG3b9zB7Rp
KNViaPcl6vTk27leOTN27Hq5Lh+fhIZ1pf71x9DqW8LD4DfPFVS9PS2NJUT68iRziLdzB6nyLZ3F
0WaJkh460llxro+Jt7j8Ra9OnBnPbgW1dK9aWL8DpwL6p255ifNZJrfJBal6ZFw4rbkQToyXZmiM
QdBeR0LMVx2iKOk9KR1F2HLtT6jT5kCS5Ek9S/MgSRNMj7DfMvzsNv8ApPV1JfYTrc6ukumNFxxo
lmzfKa3ckbltbW3dlfAPKn+v0JRBYlEU/iv4l/uX+/hLmYMrgXJejzIo1oa/tUVtuYvUixJG378X
b/2H8Bviq37crK4Vi/6VsXf0LcKeO08MTyNql1YlEMtV3gyXLVvovwLchOJI/URbJ0MmeBLVI8oI
H5kCpen1p6udI6kU9VPrTIx7aL/lk7Hp6XExR9RkLFLa7lqSzb6va8xk3L/H7XiaT99dzy1z4K32
rcvSH9C9LH8C328LKZG5wZmqf0+1JG9KS5kODb0kb67nSB7nyG+rnTYjC423uvC/z4dj1Pk9EIfp
p/LVevQ68OW+PCuyXf8AgWPSmZ7m3atvpjn1JS9W3bZslMh2JRDs+BL4krBDJRDwSjNL6bFywrUR
3ElyPULvS/0ou9HtpPNk/cg9PKn1p3GfU8sj/wCJA31EeY/SXZBZ0votVbmvxdpN6SW/1befI/hS
SeXAc8D9vBX+9LH8C2OlbU6F/wDcv9/DJu66DlR0RafuQlByL6k+gp2xCikSiG1T07cc/C7f+yH4
HrwGua199ELJfRYuWpa5dluHfWuDay6kIvpexO249Oxw8yPZuyrHVFiHdUlPRcsY4mKTjT1LF3Sy
J3McL6iPoSJ9KdhFy/0okebJE6R1G+dE19RqmSBkkdRt9j9RUbJJr/Iil6KeY9ra3SpsLY3+O3C0
4IXgL+C8v1VfM6Utplf7HT41t7ofGvbg4LjfN8G7I224Fi7OnD6FtULhNdBrdEySsddc7W0/IS3O
Jd2Jf493rldcFskNVkhEiRbRdasGDFMF1SztW1zApY4VLWLnkJEEU70YpzTbWOulj76O56OUfrRG
37UaERosTVf+y+2P6RpY5VnT+3gf9W8Fb7ab/el/vS2jqvjG3uh8WdXQzTFbXLa3tfOljpqtc/kd
OF01xk/1HC/iPzIZ6d19r5/zHuSnrwLGzbs2xv5sxKWWSjoyCKTqsi7gVnuuOUkOHnBluxPQdsll
BcyfyITLsUl2l0LSz+ehHkM7070Xai60XY7UkTXMnqNPmQJIdM0b/tJJ5ZJ6Mb6DGi1tPQl3NqW5
P1KbDnOvpot4BeD8z+H8qSdHXy6Frr9tPT4vt7670wY03VE+XQS5aLlkbPUvxe9J/U/ybOf+Nz9G
W4PVrRBc6HUvwunB8uDf7auu39j1bOC/8cKN33H/AOuPxUsvSUX++hJZZd8x2W0mZLfcguW12rcx
TqTdF70T5FzcT0omT0HRdabe1Lc6QztT10dP2Lj2kE/Qa6iX9wx2yJci7LIk2y01uU2NzSW71KLk
Uu9cEaJ+/wDP4PesnT+NOtevxbb3HS5jwDXkbm87d8T5cq/qP/Iv6tqT4fcsdS7gsi9+F04XTgWL
XfXWvNkbcPg/xPxbU9Bbf8j9O3qN7XO1cyxDtonmdNFi5CrOm2i7IVyIMCtFL0ZI0JG7sM86LtSe
lJ6k9SD0eVH3py8i4+4mv6j0eQu52H2LKRWsjZucRv8AuN+n1pq0kae/78Sen7H7eAml/Ffx+M7e
4l1YuHEqenA37MNw19Cb9iIkuNcOS9zpwnF44NjqdeDe3l4LBG6+19eXcbV1RNr1LoTHpmySPTuz
5DrZl7nQs9UUsZLUxS1y7gkf2LGD+DO5esV3DrJFckEkyShp5o5U9C5u7kv+kk8lcTPoSlJDUcCO
L5eB8vDzw8fEtvcT2/UtwVKlELDuXufjua/Uutr/AELpr9SzTH38DCpbg2/Uvwb8Xy4sLOqP6X+h
O3FU3ub83chC2q+5j2tQ1ktoyXRctSdd7I6fuWLumKTRt9CdF6W48jfUSf8AUQT9B+VxLyJ2qWJ7
lpnRyL6/20+a4E8Py8d5EHqS+I7e4+Sn6kLgy8I7aU0hPg5IemEefBtcvwpduJfj24DSxWwosSnD
Jy2PZ6fyn3G3Yn6ZeWPbK3RzR6oaXUtoyXR0LPRPPRBLeiwyC/CuW4MkFyT0xypcTQvUo0YklVxJ
dGOLP38H5eDtW320fwq4pHSf1JQvTmLl+Gp5tISShPb+vwLb3XCvchWXSt76LEr66LnUahXI6aZZ
6dvAtc6cS3DtwVHPjSydELVZ3H/ixLnBtW9fiOHKNv8AklP1WjmLTmsdDDOZhk6Mii5GuNV621Rl
E6JRLE0hSreC/fxPl4xziL0RFFyI2KEuRZQyyZ6VtbZO5rb+p+O5bo0/+3ft9Xp9u19ept3P+7+H
wLb31pfUsrnQuy7kxrtS7kso0bd/LeoferVIWXqtxbHV+M8td9EwWI1TF6RuuupO26p6leVFy/M2
/wDrlW/Kepv3KI2fek0wYMGDBgwYo+BajrdanBNI1QqyuQpULqPl4L/WNPkyODHwKFzLu40QLauZ
Ek9YdZfPAlyWWNbZjbdwJbV6t36i2x6VzivMwTZESKGvdR7krL4Bt/7LXf3IgvrsvuX2v6Fnfo7P
gbtvNfkvoJ09R+KJfgr/AAXyqv4Fr7ejyJ7bXutU6JTs+R00pp3WCdz7wS7C6TfsTsn0+Zv9T3T/
AEwRzNvpbb/qkjdu9O3qO/Y2tbvzm6gSnlwHV8PFWpuWpGn0+DikV/c81wvPhSvDy8q5CIZFE0Kc
1xJCxuan6G5zD3bvq6NNXL5rG2Cd0siGQ2jclZbXHqFs2/c3eW5r9fH7e/grona32dxbsTy1Joa2
4d0dWRiORjwN7FvG31W1XpKsyN347upDrC0IlMjcvTuJemU47EN8C44pCrgxR9yWxjFx33HVtEcD
9NPfgTok7E8Cfvwf38TKJm6oty7HmTnaSqOiIokubu6XIbPxc0yXpCzusvgG3uqQu40tuBNmOP8A
8dzS7V6C3O89DEdyW/sLcm5Wew48Hel/gMLVPTQk44drroxNc1wYdn1J5Vmb0gl6n30Lit0ffTKr
bwXcjR+xPgZR5ftWV4aUZIZOxkEc3yMM+tO2lbU7v9qSnAluySNbbl7nqf0+Abe9HlNqEPa8wZIY
m8pva/pouW09NF7kNerbyOhgj4zbh34cdeH6aokuRpYiKIXFfNOm7vpgijm40uDHB89P7kEeAn76
LeGkdqrdHMbUVaXRvS9z5u3ZaIlx8D29z0x9aS8ukIf/ACc0uW03/T4BfgT4yNL8HB+X5r9Rva54
EIhiWtiIVF3FSOCtG7vwJnV20dqTrnT+5B24nlp8tErwsPGljnmQN2x/pCaeeVWpiS7gsvywvN/B
tvdaLlqdPkF8afARomkqzLqNy59dciUQLWxdq7RUngqjpv762WGnp7kaY4Eaf2JRK8DHJ+Jh51vp
t/FfxpZkNfWvqe7t5F5Y1twkp7/BNvfTb4D0+Az4O9Z4M62+r4K4CG3TbSCNUnei0b+/AfMl6p0d
ieDKpNf9Z4U8GPtWaQ/CQ79jGhsvpklkbP8A5fy+C7f+y0QvgUJF0+H14s8aXniTWODHXjv1H46n
RLzqhKk0knXt0buC9P6Ecfvo/chkEeB8/EJpw0T/AJN33Of2OhkSTWb6rF3bpy+Dbf8AsNYatDpb
4Eo+5Yh3TGuDf7fEHonj9KOqqzb30bRVgivfQqX5ii4+1I0yOc8hRp7i1Txu2OHH2pFI0+fM8qf6
v8jbe68dYuzH6l9tz1/4mtsL8kWa3dmJNVl8h7uvwK3BXwGUXJ17CGQnNFSKTXtp2utx9iPCd+DO
uxbiw9XlXBDXyHt7ofi76nDibF6y7ELGq/jLeHnwrbLUnTtWqSKxWK7ayfQnXYvz1dh8eHp7+Emn
kKRoT5Y+Q9vdFsR4peTT+3Ce58jotVvkHtT9NMMnRAq/TwG2jW7mQtEaVApUau5+nA767aIp5qkc
GeHKPU7ciKeVJ3L3v9Bxj49t7rxUie3HC27Ofql/JXbTPTg/QuWwqfekE1jRFNuhaY0dtffws9Ce
vF/bg7Vuy3I2qWE90Nv7bTbsiysiVnoS3HlzJf4Lzz8c2914txz4KQl02r4xOmCF4GG7C1RTdodJ
60g7U71mm2sm3W0KSUztqg76p4fek07kUt4OdHNvsNpX227ExO583W4mi/xnb38TPJFscPbHLYhf
A/PxLb+ng1qTpudIoyKrR2pBBtpe6ZG39Tbrmmfpx44XesU81WeF6lglv8iNUaFccqPVu0J086wl
JD5fFdvfxFskEcPtbx86PP4W/UfiTXvTvTsbu5iSRm5k6Ir3rJt0bOAuDNO+lPhQ9Xk+J2pdEUlt
a9v/ACdyBLnA3yPU+eCxGKSXUjsto93V27fFdvcfhklzIIRLu9cFyyPTz5+Gjxnbw88B0ikVfYfe
rHSOhFJ17aRTZ3FwLNYGhcaHy4/fiQ8OihT2LLHUXpUvoJblE/wHubWbJGGdCzmmPxWWN8lZEu62
3Hu+x/x/1YXp6rb9YvWS2Cyck7nPlyXxbb3H4WxOWQtf5Frmbks/H7+Ajwcvw1vDyky46Ok0gehj
rNYrBHSm2k5LTc299D0XouNBNJ4EeCvyLF2QnS6nyMR2r6nmWelPL/QhWSPQvdvu2ejZbbzZ6nbZ
sX3EojasL45t7+Fl4LUlcGWRtx8Dl/DoauuBIxEw56DY+Lt0be9I0OjrPFnwXfiTqfTk6Q8Eovcb
VrQXy+Qv8XNX3eXl8d29x+Dnd9tFyKQrsncXUpmRwKV8EngTxZ8W4FpikCVWLibaRRd6TWasxSOP
NJ40EUhZFe7uQ9di9jNVtWd37c6dUSRSXYb2r8uT6efx7b3H4JPLrKYq9CMeZlblyZd+It8XXnrm
i4i4l6Se25HTTFXR0/XjxSOFCyJbknuefIlKJ0JsuJ9eDYwn9S6+w9ztySrCcE5fVmUuyJd+/wAf
29/AwdiFXOiNEf1P5M/XgLyISz4SNEcVUsX8DNO3CTLsg9W18tC6xgh2Etr+o08r5P29+Ot0erbz
2k0iTJC1pRklnp2kvwt/Dz42a9/212RcwuDGha50Totw3aUyy8amiYGlzPS8Mim1MmKSsif3+T9v
dcaP+NqXOg+b5dyP6l7pzol6PIjb99dkTBdpfUnK8vhHfgR9+L3rPCc61wV4lFxRw+3AVk3uFCjg
251mlmJpYrkh3pdfJu3vxlv235NdUJrnoX+TbnbldV0FuWHS9ILihEN6V5tL7mCydyIUDUYo9rwy
Fy+Dz8FuSvDRSOJivqTleCSLG3vwY0zFyH+SzYc5LUkghkL5M29/Cbo9rv8AXyLFxqH+xHA9XJO3
0pknbHYf/JzRpZ4TbcNfCO3h1olvgrgPTJNJ4k6LeBUUlshjS5cCdN5+hMk8iUyXyPU7dF0JbIj/
APLkdBv1Y8vkrb3H4Jrl6VHlSS7RjgvZ/Tu/JeT51uWuWUd6+qLaYIyfwGty9W1+7r3Qmn6tu727
l8RivfSlpnwr4EddS121duM4X1Joh25ktcCUXpLPU/6r/SkUW5u3Sl6Wa+qPyba6fJW3v4KVge7q
oGWfCknnu/RaJPUsOnbJ6VpW7dP5qYF6lK5lr7d19rp6Xf8Ax7s+T6ojPR6YQ7YIfg58dPj5pDLl
tK4eK3a2pZbPxfqay+XEfaqZ6laTB/d3JSiNShKx7f4ntcfY9K2tTl+VIpC2/VsT3csJfJ+3v4La
/I/Evw5eNTXS5A9qy9O58trgj7Kn/r3+3dz/ALWPa6f+vd/+D8+hHOsnSSZNu5KJ+UImkk+EXcuN
dLeOupLkxHT5P29/BQscGetWuSd/5aZSmOXMtuX8SxJL0XPSvq1z0Q/6Xaqe6ziie9SuhOzb6Sxc
jkvhccJvw06ZL1uW4zIqv9YPxz5n5vLuRp/XVKZfRMxPRHmjcvP5c29/G2O99EQSSRbcvMl6Wuot
u6843VfpIRb8i7IJ5Kt+RGFxYwOeXwHt41a7sgh8axOhJYVOxtnMPXE6cjbiFz/2Py/HuPdhclSS
V0+XNvfxnYlnqj1bfLKJTlOkpSS80SXAhnp3e5fr5kH/AOu8Wz95G5hckiypFnbQtycr03pCUkNR
w8jl3VI0XIe1Ndv4k7fa/wBPhq4K0QiM1/ctwnofA2xkjppj7l1In6W5+2m6xTEl1u+0/sYhUl2J
+Wtvfxa9Vk+fJEpyJKnq/wAdnz28mJ4eI8z8XfoSyFwYF6I3PbzNvplOL9yzS+hd/andr9xuMvRt
fK6Z/wAepmWyXwYwQiWPatX3f3pG3dtjmntngeqHHgI1z4aCdM6bceVqs+xe5MRWaRyR+wu/B9zR
Lv3v8t7e/jPSm46aJiOZ6n9CFwpedW1dd1UWFte1Pufk4Jf6cGS8P6UaWi9/JFtqIe2PNCjkOGm1
yJ66Z3xBZbn2Etu1bf3MyJxDi8fA54LZCUmIjjLhPWvOvUlrdt8jJ6VfqNtxA927G5x5vqWx8ybe
/wAAuLareBTWdrkkR5kPkena3tZfK68X1L607anuVtyz56lfHIhEsklcH1La2lpw9a4yaFwGSSy2
mdcPJbwLb7LyRHSuS5+KHv3W2bf/APT8j1bn/wBV0XzKu/j/AE7sbl+PekvwVsb/AN9Ei3J+T4Mr
nbQ6OU2nhq4l6fvpb8tMDSpboQ2dENLGud130JTgTaU+VqyQ43d1JPXVNZ4TTRHCsKcFhxSy0XL1
uSeT1zrs4+h7l9hLcoJ3pCeJ0/jYXrcxhKyo/mTb38fDLufBqeWC26Poe5/ZC9X5bXz6UafNqOAk
ssX20YE5mdcddKtZqNCaxuNz+2uXfyJShl3FYVy/OluVuDCL6bUxScacPXDLVuWO1JrcuMWmK2rF
ZrO54xS56dt5eY+9/mrb3+JfknHJ8idrrLwSS+AmuRJDq1youi1LbGP46Zejb5ZFtXLUquOQ30II
WeZcUNpeXBdJbEWpHCXbjYIgl0uXpGPB3XzXt7/E7WpH3pL4NnDV0endbcqJF67n/wAv4FyUJUl4
Q3oSX+o0Nqjb0t9CN1or6k4aPTnrA47KsRqnRkjjRPiprHztt7/FpfD6NYayhrdnbz5M6EqGq2pY
lR9aRpl1TZHIb57rLW5f5bf1IdIVyF+LeO5HNO6q+k+EzSyJe17ZPWtv4j/y4SPU3E4P/XzmD1ZS
yP0rB6dyuev02ISkhqCdePASifnLb3+J7U/6mWHt5u64inql9zzMx/sNYQ2JpzN5p6trh/ox7Xbc
sozS+lp8ydv5LpzEmK6ZcthWWuVbd1IcepF2WULruFui6PU0pi9du76NEqskpd54lj1+lrb1PXMT
ieY3v5OB7d99qUo/x74SXquh/oX2QltH/jmNvQW3ZCW3mz171f8AQ3fhbcvrB6PS3u9Rs3btr2xE
m7bD9pve+z5Gx7Xzv2Ftnb7Td6sbcG3dsjMWFu3Xe79D/wBc/wBUD3bXPpVz1Nx0P/VEs9TuuZK2
2PQ1cl7YRZEE8Tv84be/xOGKZ37f1QmtyE+fPhwyxKSq+jvHnSOpik65yTtcNDT6HoWXng9GsMl3
fUh8xvJK5oyXRK3Ps+Q0/uqtdRt5mNc7dra6wPcoS8zd636fQ4+o9u9+ral6l5mx+3buyuxtaj3L
A09rVoUoU7VZY5j3btz2ve59Ju3b43PbZG3c8qYXIn/JH4tD3+pPbEm58pN2zc1tbco2LbdbMs/9
nqULbg3Pnd7Uz8omVBtaS3PdmULYrbdzRuXpW30qzHu3/lySF6LL/JyI9T9cTHI/9fqe1bM+Qv8A
Lt3eu+Tdt2rdunMXF/i2q6eDb/m3pNLobtm3/HKfQWx7X6k3Iv8AJuUbZ5m7bfFrE7mk22J7X+O5
7ZHtT2+2xu3bv6bQJ7MbuR64fpVpp01x84be/wAVmF4CI5E02tf0v/akvPA/Ddb+14LnmOeGvV1z
5k/cW2ZjHYvY6loQ2nkbS9O5fZ1SdqLbtUybU17nCgT9U4TQ2s7VMj2r0uNv1k2pp7bc0f8A8/xl
vz+w9/q9PrfqW2P3N3+T/Jn2r04hHqf5QvxT5G3c0tr5dyd77H/sW5dY5j9W1prA9+1xPIW5t26W
I3OaImGZpKcNH5t7oGtrsz1S/VmT07n+g0kt218mepvGEe2N8R5De6+3fmD0f47puWz8Y9Sdza3G
I3PozcluW571yF6Gk0/yFiG/pJue6Le09TS3NuHPkbduyNv/ALMoaSjdtU+rsbnv9u0T2Nrbv5Ce
7fG5qyYv8O9XmN3cTS9HZD9Ke/bmUXRazL/Om3v8ircnDRer4UHvn6cS5EuOhZw0f0r9S+77KCPV
uMy1kljXSiH6U29vQlbZ9OTbC9Hoc/kendZf3bRbXve7/G7tPy5C3Nen0v7jf+SPSev1JqB+ra5X
6m6dqaeEetP0+SPycl2ZJLvTBMJl1HYh7ZJWD8W5HO6CzklbtuCIx1F59CfS2Q+pO1tdhy2yCU47
U9O/c3A/S7PK5HrbfqV5HsaW1tQ9yG1+W3dlMVvSlhC9a/NI9e6ye9biU5Tv9DoP0pbdzumY9SfP
aQy1mX+ctvf5H9Wx/kvs/Jk+189vjvJ0W6nq3bvSuQtu2Le7ublt3bdrTlnobxzQtqXq2deYns/F
bSHjysJNu1b0wcqXZYxw8HNEptF9xC3fQlpNnpe1QR6ZJajyIgcySm/sW3Dnckfi0zKIJlENiey8
cmR/k/8A1v8AQT2uV5GDc0o3O8ilepPoQ/sy1n847e/yRPjp/iTt+qO/7ilZZsT6G6HuU7nzJpyL
v7GC0GaWRimdV6Y42X9zmZ/Yyvqj+kwv/lBj/wD1J/Wvsc/sZLOZ6nJ/oRz6Fidu55mORH+Tatk2
ldSdjW6SIG1bd1QreqehD+xb8WX+btvf5ofR4I9Sdzbt223enLJb9Ralq54sD8QqIvX8dzXkJf5d
sf8AJfuN/wCPd6hJodlL5rIryngh3Qkvr827e/zRzNpPRJfbiTrQ3FnYa1Y8EqKltE7W0Jf5VKxK
yN/490np3ZymT/iT/wAm3m1n5u29/mmSeouLdaZN2yMXHbL0XpItEUvw1RaLaJ2uOwl/kXqWG+ZO
x45H+RY/KV2d/m3b3+akLWzIqWFS4/KsD/yvoQ0lob6DRDq3JJOpRrXDub9+xrdujDHv3Zfzbt7/
AP0Jf//aAAgBAQEGPwDmeWt2uXMLF25biZRm5EJGIdrg2L7nlv1Z/wARfc8t+rP+IvueW/Vn/EX3
PLfqz/iL7nlv1Z/xF9zy36s/4i+55b9Wf8Rfc8t+rP8AiL7nlv1Z/wARfc8t+rP+IvueW/Vn/EX3
PLfqz/iL7nlv1Z/xF9zy36s/4i+55b9Wf8Rfc8t+rP8AiL7nl/1Z/wARfc8v+rP+IvueX/Vn/EX3
PL/qz/iL7nl/1Z/xF9zy/wCrP+IvueX/AFZ/xF9zy/6s/wCIvueXf9Gf8Rfc8v8Aqz/iL7nl/wBW
f8Rfc8v+rP8AiL7nl/1Z/wARfdcv+rP+IvueX/Vn/EX3PL/qz/iL7nl/1Z/xF9zy/wCrP+IvueX/
AFZ/xF9zy/6s/wCIvueX/Vn/ABF9zy/6s/4i+65f9Wf8Rfc8v+rP+IvueX/Vn/EX3PL/AKs/4ipa
5f8AVn/EX3XL/qz/AIi+65f9Wf8AEX3PL/qz/iL7rl/1Z/xFW1y/6s/4i+65f9Wf8Rfdcv8Aqz/i
L7rl/wBWf8Rfdcv+rP8AiL7rl/1Z/wARfdcv+rP+IvuuX/Vn/EX3PL/qz/iL7rl/1Z/xF9zy/wCr
P+IvuuX/AFZ/xF91y/6s/wCIvuuX/Vn/ABF91y/6s/4i+65f9Wf8Rfdcv+rP+IvuuX/Vn/EX3XL/
AKs/4i+6sfqz/iL7rl/1Z/xF91Y/Vn/EX3XL/qz/AIi+6sfqz/iL7rl/1Z/xF91Y/Vn/ABF91y/6
s/4i+6sfqz/iL7rl/wBWf8RfdWP1Z/xF91Y/Vn/EX3Vj9Wf8RfdWP1Z/xF91Y/Vn/EX3Vj9Wf8Rf
dWP1Z/xF91Y/Vn/EX3Vj9Wf8RfdWP1Z/xF91Y/Vn/EX3Vj9Wf8RfdWP1Z/xF91Y/Vn/EX3Vj9Wf8
RfdWP1Z/xF91Y/Vn/EX3Vj9Wf8RfdWP1Z/xF91Y/Vn/EX3Vj9Wf8RfdWP1Z/xF91Y/Vn/EX3Vj9W
f8RfdWP1Z/xF91Y/Vn/EX3Vj9Wf8RfdWP1Z/xF91Y/Vn/EX3Vj9Wf8RfdWP1Z/xF91Y/Vn/nX3Vj
9Wf+dfdWP1Z/5191Y/Vn/EX3Vj9Wf8RfdWP1Z/xF91Y/Vn/EX3Vj9Wf8RfdWP1Z/5191Y/Vn/EX3
Vj9Wf8RfdWP1Z/xF91Y/Vn/nX3Vj9Wf+dfdWP1Z/xF91Y/Vn/nX3Vj9Wf+dfdWP1Z/5191Y/Vn/n
X3Vj9Wf+dN5Vj9Wf+dfdWP1Z/wCdfdWP1Z/5191Y/Vn/AJ191Y/Vn/nX3Vj9Wf8AEX3dj9Wf+dfd
WP1Z/wCdfdWP1Z/5191Y/Vn/AJ191Y/Vn/nX3Vj9Wf8AnX3Vj9Wf+dfdWP1Z/wCdfdWP1Z/5191Y
/Vn/AJ191Y/Vn/nX3Vj9Wf8AnX3Vn9Wf+dfd2P1Z/wCdfd2P1Z/5191Y/Vn/AJ191Y/Vn/nX3Vj9
Wf8AnX3dj9Wf+dfdWP1Z/wCdfdWP1Z/5191Y/Vn/AJ191Y/Vn/nX3Vn9Wf8AnX3Vn9Wf+dfdWP1Z
/wCdfdWf1Z/5191Y/Vn/AJ191Y/Vn/nVbVn9Wf8AnX3Vn9Wf+dfdWf1Z/wCdfdWf1Z/5193Y/Vn/
AJ193Z/Vn/nX3dn9Wf8AnX3Vn9Wf+dfdWf1Z/wCdfd2f1Z/5191Z/Vn/AJ193Y/Vn/nX3dn9Wf8A
nX3dn9Wf+dfd2f1Z/wCdfdWf1Z/5191Z/Vn/AJ193Z/Vn/nX3dn9Wf8AnX3dn9Wf+dfdWf1Z/wCd
fdWf1Z/5193Z/Vn/AJ191Z/Vn/nX3Vn9Wf8AnX3dn9Wf+dfd2P1Z/wCdfdWf1Z/5191Z/Vn/AJ19
1Z/Vn/nX3dn9Wf8AnX3Vn9Wf+dfdWf1Z/wCdfd2f1Z/5193Z/Vn/AJ191Z/Vn/nX3dn9Wf8AnX3d
n9Wf+dfd2f1Z/wCdfd2f1Z/5191Z/Vn/AJ193Z/Vn/nX3Vn9Wf8AnX3dn9Wf+dfd2f1Z/wCdfdWf
1Z/5193Z/Vn/AJ193Z/Vn/nTi1Y74z/zphZ5bvhc/ir7jlf1Ln8Vfccr+pc/ir7jlf1Ln8VVscr+
pc/ir7jlv1Ln8VB7ViuyM/4i+7s/qz/zr7qz+rP/ADr7qz+rL/Ovu7P6sv8AOvu7P6s/86+7s/qz
/wA6+6s/qz/zr7uz+rP/ADr7ux+rP/Ovu7P6s/8AOvurP6s/86+7s/qz/wA6+7s/qz/zr7uz+rP/
ADr7uz+rP/OvurP6s/8AOvu7P6s/86+7s/qz/wA6+7s/qz/zr7uz+rP/ADr7qz+rP/Ovu7P6s/8A
OvurP6s/86+7s/qz/wA6+6s/qz/zr7uz+rP/ADr7uz+rP/Ovu7P6s/8AOvu7P6s/86+7s/qz/wA6
+6s/qy/zr7uz+rP/ADr7uz+rP/Ovu7P6s/8AOvurP6s/86+6sfqz/wA6+6sfqz/zr7uz+rP/ADr7
uz+rP/OvurP6s/8AOvu7P6s/86+6s/qz/wA6+7s/qz/zr7uz+rP/ADr7qz+rP/Ovu7P6s/8AOvu7
H6s/86+6s/qz/wA6+6s/qz/zr7ux+rP/ADr7qz+rP/OvurP6s/8AOvu7P6s/86+7s/qz/wA6+6s/
qz/zr7qz+rP/ADr7ux+rP/OvurP6s/8AOvu7P6s/86+7s/qz/wA6+6sfqz/zr7qz+rP/ADr7qz+r
P/OvurP6s/8AOvurP6s/86+6sfqz/wA6+6sfqz/zr7qz+rP/ADr7uz+rP/OvurH6s/8AOvu7H6s/
86+6s/qz/wA6+6s/qz/zqlqz+rP/ADr7qx+rP/Ovu7H6s/8AOqWrH6s/86+6sfqz/wA6+6sfqz/z
r7qz+rP/ADr7qx+rP/OvurH6s/8AOvurH6s/86+6sfqz/wA6+6sfqz/zr7qx+rP/ADr7qx+rP/Ov
urH6s/8AOvurH6s/86+6sfqz/wA6+6sfqz/zr7qx+rP/ADr7qx+rP/OvurH6s/8AOvurH6s/86+6
sfqz/wA6+6sfqz/zr7qx+rP/ADr7qx+rP/OvurH6s/8AOvurH6s/86+6sfqz/wA6+6sfqz/zr7qx
+rP/ADr7qx+rP+IvurH6s/4i+6sfqz/iL7qx+rP+IvurH6s/86+6sfqz/wA6+6sfqz/iL7qx+rP+
IvurH6s/4i+6sfqz/iL7qx+rP/OvurH6s/8AOvurH6s/86+6sfqz/wA6+6sfqz/iL7qx+rP+Ivur
H6s/4i+6sfqz/iL7qx+rP+IvurH6s/4i+6sfqz/iL7qx+rP+IvurH6s/4i+6sfqz/iL7qx+rP+Iv
urH6s/4i+6sfqz/iL7qx+rP+IvurH6s/4i+6sfqz/iL7qx+rP+IvurH6s/4i+6sfqz/iL7qx+rP+
IvurH6s/4i+6sfqz/iL7qx+rP+IvurH6s/4i+6sfqz/iL7qx+rP+IvurH6s/4i+6sfqz/iL7qx+r
P+IvurH6s/4i+6sfqz/iL7qx+rP+IvurH6s/4i+6sfqz/iL7rl/1Z/xF91Y/Vn/EX3Vj9Wf8RfdW
P1Z/xF91Y/Vn/EX3Vj9Wf8RfdWP1Z/xF91Y/Vn/EX3Vj9Wf8RfdWP1Z/xF91Y/Vn/EX3XL/qz/iL
7qx+rP8AiL7qx+rP+IvurH6s/wCIvuuX/Vn/ABF91y/6s/4i+65f9Wf8Rfdcv+rP+IvuuX/Vn/EX
3XL/AKs/4i+6sfqz/iL7rl/1Z/xF91y/6s/4i+65f9Wf8Rfc8v8Aqz/iL7rl/wBWf8Rfc8v+rP8A
iL7nl/1Z/wARfdcv+rP+IvuuX/Vn/EX3XL/qz/iL7nl/1Z/xF91y/wCrP+IvuuX/AFZ/xF9zy/6s
/wCIvueX/Vn/ABF9zy/6s/4i+55f9Wf8Rfdcv+rP+IvuuX/Vn/EX3PL/AKs/4i+55f8AVn/EX3XL
/qz/AIi+55f9Wf8AEVLPL/qz/iL7nl/1Z/xF9zy/6s/4i+55f9Wf8Rfc8v8Aqz/iL7nl/wBWf8Rf
c8v+rP8AiL7nl/1Z/wARfc8v+rP+IvueX/Vn/EX3PL/qz/iL7nl/1Z/xF9zy/wCrP+IvueX/AFZ/
xF9zy/6s/wCIvueX/Vn/ABF9zy/6s/4i+55f9Wf8Rfc8v+rP+IvueX/Vn/EX3PL/AKs/4i+55f8A
Vn/EX3PL/qz/AIi+55f9Wf8AEX3PL/qz/iL7nl/1Z/xF9zy/6s/4i+55f9Wf8Rfc8v8Aqz/iL7nl
/wBWf8Rfc8v+rP8AiL7nl/1Z/wARfc8v+rP+IvueX/Vn/EX3PL/qz/iL7nl/1Z/xF9zy/wCrP+Iv
ueW/Vn/EX3PL/qz/AIi+55f9Wf8AEX3PL/qz/iL7nlv1Z/xF9zy36s/4i+55b9Wf8Rfc8t+rP+Iv
ueX/AFZ/xF9zy36s/wCIvueW/Vn/ABF9zy36s/4i+55f9Wf8Rfc8t+rP+IvueW/Vn/EX3PLfqz/i
L7nlv1Z/xF9zy36s/wCIvueW/Vn/ABF9zy36s/4i+55b9Wf8Rfc8t+rP+IvueW/Vn/EX3PLfq3P4
i+55b9Wf8Rfc8t+rP+IvueW/Vn/EX3PLfqz/AIi+55b9Wf8AEX3PLfq3P4i+55b9Wf8AEX3PLfqz
/iL7nlv1Z/xF9zy36tz+IvueW/VufxF9zy36tz+IvueW/VufxF9zy36lz+IvueW/VufxF9zy36tz
+IvueW/VufxF9zy36tz+IvueW/VufxF9zy36tz+IvueW/VufxF9zy36tz+IvueW/VufxF9zy36tz
+KvueW/VufxF9zy36tz+IvueW/VufxV9zy36tz+KvueW/VufxV9zy36tz+KvueW/VufxV9zy36tz
+KvueW/VufxV9zy36tz+KvueW/VufxV9zy36tz+KvueW/VufxF9zy36tz+KvueW/VufxV9zy36tz
+KvueW/UufxV9zy36tz+KvueW/UufxV9zy36tz+KvueW/UufxV9zy36lz+KvueW/UufxV9zy36lz
+KvueW/UufxV9zy36lz+KvueW/UufxV9zy36lz+KvueW/UufxV9zy36lz+KvueW/UufxV9zy36lz
+KvueW/UufxV9zy36tz+KvueW/UufxV9zy36lz+KvueW/UufxV9zy36lz+KvueW/UufxV9zy36lz
+KvuOW/UufxV9xy36lz+KvueW/UufxV9xy36lz+KuYjdtcuBZ5a9fjpjMcVqBnEF7houf/mb37cv
kGPV3r61j6QehZbuow9C/Vf0FV2dNVu/sA3Ub0w+V7urj6OvTTqYdLdcRdA9FPlr/J6fM7dO/wBN
TrU6g9JX5NX5Fs9LT0tOq56Kp8+lurv/ALYc7/I81/tlc9/M3v25fKG6jHop1G6KqnQ4w9M/RX0t
PmxgPRV+QUVcP7Euj1QXQ9PTo29XD0LfMDelf0rdNVuW1N8nbrP6J/mLd1a+gp0V+TYdXf1HVes/
9qed/kea/wBsrn/5m9+3L5A/TTrV6X6jDP0G9V6MO/pfqb+u/RvTelb5PT5Lu6tev9XS3Ru9I/p9
3UdU6N6p81k7UR1gDkgerX5Jv6j/ADFTof5Ns+Q069fR16G9Du+Z2+SP6LHqP0bv7a87/I81/tlc
/wDzN79uXyCnUb0LKvyKvQ/Q3W3dD9V+jd1aejfpbrv81um6G6KfIWPoe35M/U3daibrU9LXBUr8
77/QU9DX0D/L6/J6fIKdenoH+TU9BXooqrd6Gif+2nO/yPNf7ZXP/wAze/bl8ion6Keip0U6aZdD
9avQKfIa+gr1MPRV6GW/5M/yRumvQ3zjX5RTqmnTQLBligQKjNUZuxDUan5uw9Ht9K/Ur619fo96
r6V+pTqV/sC/pXVerh1ezofoZN0sE3Qy3f2z53+R5r/bK5/+Zvfty9PXofpdbOg9bb1d3UZN0t1q
9NOvT0tflLv6Cg+c3+XN8sKZOQ/aqBgt/Ur83N8toq9D/Lq+np8516a/JNnoX6d/RRV/thzv8jzX
+2Vz/wDM3v25fItvpQG6jdO1YdV+o/pW+SP8ib5TTr19Fu6N/oX2ekb5a/yU9dvmyno2y9Bt+UY+
g39FPTt6Tf0N1G9Dt62PVp8sf01PkTKvUY9Neq/9ped/kea/2yue/mb37cvTP1KdNcFTpfpdbk63
9Hb6Nx8+t8gr07vk3b8or16en3LH5BTqMiyYp+nhCqon5q3dfHorh1G+TOOkfI6fK3T/ADrVVTrb
0v0Y9D+gb0G3q16lf7W87/I81/tlc/8AzN79uXyPDpp0P1WT9UD0lE/oH6z9WnyGvyCnTvTfMG/5
FT0m/wCWv8jKKYJlVMOhygCn9PX5qr8hwW7ob+wT/Jn9Oyp6GvQ3oX67dFes6p8/U+a+d/kea/2y
ue/mb37cvRUw6KqnTTpbobHqV6dy2+gf0FMeq/oW9O3zBv8ATt8wv016mxP8uf5BTrFOn6D00QQf
5U/pdnyLs9DX0Denb0W7rUw+Tv6KnoK/LsOvh1aekbqt0v6Gvz4w+bed/kea/wBsrnv5m9+3L5Jv
TqnVr8mx9Ls9DTrv8kb0m7qU6p6K/M49O63/ADPROUwwTqnQEEP7Ft1W9I3RXop1Kf2Oqm6HT9d+
q639DKvVp1Mej7epTrv88MOl/mvnf5Hmv9srnv5m9+3L0LdSqbqb+pu6rLdtQVOnd8jfqP6anRXo
f5Xu+Z6JvTP8p3ehb5C/oXTBOtI6oBNMk/yavpGA+Qt83P00+R1+W1+Qt8hf5dT+xDfMvO/yPNf7
ZXPfzN79uXyF0yO9On67J+pT0lMemnVZbfQN6BvT0+ZK/MG/02/5xIxRZOeowqUHoFT5PqZ2RhIa
WTio+fOz0vZ/ZCvWx6Kdeqf0Rr0VTpvR79nWp84V6j/NnO/yPNf7ZXPfzF79uXoW6K9TCvU29Wno
n6H6d3yBupT5DXpb0T/NT5/2UZE9NVXopVUDIasR8nIWqNDmQgNTjegTj8rb5kr6F/kVVh8rr16f
JKdR/lm/ooOhupv6a/Pz9Y7Pmfnf5Hmv9srnv5m9+3L0rdWqw+ad/oKegb5bh16+ir6LDqYfMTdT
f81UT5LTGp6HPTSiqXQ+SiOkMTitQxZALFM+aHzi2fpG67+ip8mp6KvzW561PSV+QU/sHT5o53+R
5r/bK5/+Yvfty9A/Vf0b59FfkNPR4fKa+h2/Lqp+mvyPf8ir6Z5lhtJCaN0Ep4l/mkk4LTFOyboq
uFUdPI0QBNU+XyR9iYnBU6AQUA7/ADPTrfV6GvoKenp1W6a9celp8jr8gx6rdL+jotvWp1KU67f2
CfqYfN/O/wAjzX+2Vz38xe/bl169FOvXor1q+h3denoG69fmOvyhvmB/kr9TVcIARjy40j3jinuT
MjvTgoQke9OPlFPkBCMjVUoOs2SBOBQ+SSCIK1dIiE4+bceoehvRN8jdU+ZKfM2PVfpZVVPSMqqn
U3+hPzi6PS56zdUfMvO/yPNf7ZXPfzF79uXoX6Hfp3fOFPlFepu+U1+TP8kp6SiechqyCMpGmQ6j
hCJNUCMOl/kNPkZKICfDqMMVxKg6At3yM9iPb1KBD5lb07/LafNT4dd/mDZ6Ruu/UPQevt+bnTlU
w6HPWp1WHzNzv8jzX+2Vz38ze/bl16daiboZV61Oo3VcdL9D9VvkdPkLqvpqfLadfd1W9JXob5BV
OaAIwsB5e8U5m/0Ko1d6aDWwdgcoynIykcz1hkEGq3S3zIWRJWnqUVFULZ0P8jPYj1HbH5j39ZvT
Uony6jdR+vX0+7+zDegfp3devRvXYq9DfMeKxVD1K9DdLpvRv8zc7/I81/tlc9/MXv25db7einou
zpw6lOrT09PQV9M3Q2XRXH5XXob5Pj8l2+kb0BlIsAjbtnTHcn9E4QiTgnGHWb5gIRl19qqGQIw+
SELV1Afm2vze/WZV6lfldPkr9V+tv6aegbq1Tej39FOgqnU2/MOKxWPRT5653+R5r/bK57+Yvfty
67eiqqrd0v1KdSmfUHWr6OvyDD5i3enp6QekYfKPKhjtT+kBCD7PT06mHyQo9FIk9yqCOii2riCp
RMSn+RktXpA3rVuRGlwN7LhiAPmDd6OvzTX5qbqbPTP1261PTU9A3oGTdFOo/W7fljo9feh1H9Gy
c/M/O/yXNf7ZXPfzF79uXoK9d1XqP1KdVvQgdSir6BvQV+UP8wV9A/zQXNSjJ8fTCByQPTXq0T/L
dIx3oy5ieuR9gUTWLUYRyLOV4g3YE12IK1WcdgTEd6qqxTshKETRATgGTih+RHy9iIkK59Akztkv
Ks2yAfdQleIhmRiVpj837vkjfLH+ZNvyVvQt6bZ0Ux9K/RuKf0NOtX5DU9FE2Cf5G/Uc4/NHO/yX
Nf7ZXPfzF79uXVp1ft6adRumvTu64VOrTp3fJa+ib0O/5O/oK/IaKnoaekr6UyOS0vQJj1m6HKwV
B1AYlkNRcBAGiBicfmMgd5Rt2S8s5IykXJ6jRqNi4zXYFTBaYhytV86p5QCYRAGxM9UwLoH5CUSd
vQDeFN5ogLRiBuZYpn+Yu35moqph6Kqp6B/Qt6Svy6vyl/R4+gb0WCfor8kxVSqYqlFU/IKpop+m
vV3/ADTzv8lzX+2Vz38xe/bl1K+i2dFOhlXoYddugddvkNPRbutQdNPQU67fIq/OO/rmIoiMZFPL
pqVTpZOceiirTqiqpL1riqNyANFwn5c5oEasBiUYWy0cztTnqVwTRDdqeJ70IC2bkj3J7Vny3zxK
JuFzmmCeSBCHpsOk1aiPS6r84N0v8kZbup9vTs6r9Gz0FPmB/kFEyr8x0VU3pq9R+pXqU9FisVSq
osWVT8gbqN1H+b+d/kua/wBsrnv5i9+3Lq4dO5bk6r1q5rf0OK9NfkNFXq0+f6/It3UfJH0DfJmO
CeOK4Yv3riAHeiZyAG0oiM9bZgMOrRVVE4VcFgqdTFBOJEMqlwmmFjVUIPyjVIsFpYCIz2oyJYbE
wpHZ1Xy6WGCE40IQhcLS3rXAu+S0s3SxQPom6xCPRwRJ7ENUCO5VDKnz4/Vr1W9E/wA6v8zMq/Ia
eg29FfS4rFMB0Y/JaejZOfmvnP5Lmv8AbK57+Yvfty6lFj1NnXG/0TdDfKMPk9eq/wAnZV69fRP6
GuPyXD01U0iW3J/KJO8lfdfSmjaA+kpzFijonp2Kt8Adi+GXbavCaZtRaRF2xTssOio62PRQ0VJE
KsnZcYZUK1SLBcEnb5EZNgjAmgy6Meq56HKp0vEsQhbu+ta7eKY06WJTj0xKNadDofIX+Zn+Sv13
6tPmOnyl/mp/RN0VW7p3dXFbVRYqp+ZH+bOd/kua/wBsrnj/APuL37cuvhiqdDqvR9XpH+Wt1KKv
pG6jdbD01PltPkGzpr8u2hcUQRsTRiANwVU4FUWqEXDFP6NwWOSETKmDBPEppJgVQv6djUo8L3D1
nPRvTnquEIXS42rVEhMegFMfTEDpA2pz/YmvUr1adavoqdDfMz/JKfJG9DTDor0N0OOhuo3VJ6lS
qVK3Kp+VMab1QhYhYssX9DVMPmznf5Lmv9srnv5i9+3LpomWOCNOjBP8j3dG/wCR19Hv+Qv8vb5b
T5pqsMVTDpbrP0fZ1KFiqF0NWKcZ9fd1SAaowtl5nNapFyepToZULn0OmR4VqialEHDodBvSEoh6
dMQh81N8qPylvRv8tp81V6tejCno6dbsW9YdXFMKKp+Tt1qdGKx6uHUwVPm3nf5Lmv8AbK57+Yvf
ty6aLf0OVvVUdnS3Wb5xp8of0b/K6/Knz+U1CJARo/pmCcyYpsWVaKh9CRFt5Rt2z2kJzieq2fQw
9GK8OxUZ1tCclgmlIoaZY7fRkBEnHpqKID5wr12+Rv1d3W7PQ19G3Wp8w4fMWHo367puhgtnRXqV
VfmWooqD5y5z+S5r/bK57+YvftyX19L4FbVTqbuh8+mvRv8AQ7vSM9OvTq16Nnom+esPl7devyhq
ImPQ3Rh39ZumiaRTqqD+FB5B070VCD2dRyi5aIzRt2jw5lOepuTBOUwoPSgxNEBLxbFqFBvTNqK1
QBbYgJU9CSiH6mr5vYfJm67+g39Lelf+yuHS3TT0VOnej0VT+ir/AGO5z+S5r/bK57+YvftyVepR
V6a9DfI6+n3dXet/Vb0Ff7NU+XVRbpqEes3S+a29qwZYrEt2pxIrTKqqWTguiZFgEYQpAfT1XOCY
JyqYemYISxJzVaJoF9wTHFNkUGy9Aaonb0sECzf2Nb07ZdFfRP8AI3Vfkj9WnodvyU+hfo39OPVp
12B6a/Mj/OnOfyXM/wC2Vz38xd/bl1fr6lMFv6a9Wq3ei2egw9G/Wp6Cnydz851+YG+Q1VB0N0YJ
/X0OqdZhinJfpdYsiIksFoemfVc9DlV9O4TSLALRbpHMry7gGs+0V5lvHcmNCM0IyTx6zlGI6gG0
+gp/YVurXH0D/MVfkj/Oteph1Syr1K/J6KvoGZOaJh86c5/Jc1/tlc7/ADN39uXQ/RToYdO5N0UW
/wBDX5op1GW30NFv6KdRvm5vRt8mbqv8kqnAVQ46rjBV6HTJ+pVbSjRgn2o7Oo8ujenPp2z6jihC
Fu7WOC1Qx3LjNEAS4VMeqexEnqDcgPR4fM20dSnzRX5C3Ru+V0+YKfIOz0GPoWKcdNfk9OrXpwWC
BkqD5253+S5n/bK57+Yu/ty6jhb+jf1N46H9O3RvTdGzpp8pHyenRX5lb5FT0bdd+l/k46aKoTxD
KoojTt6HCwWCwVceoyrgtMQnZAZhVz6GTkuejf6dzQJgt6c9URkeFUxVJOEGLjNAE16SdiMAceq7
IDrP89v8nf0NafJsVT0VetT5rfp39f7eo3Tu6adUum+X0CeRZVDqgHzzzn8lzP8Atlc8P/3F39uS
bquRXqP1KdG30L9R+hx6WiwY/Jaf2Efqt6feq/MuC7FgnKcBymw3Jz0VVMemgRDMFpl4ZZrVEcWK
0NVVx+Q6pYJgmGKc9ehTHHYnyQMT2hCMjVUqiiX6oJH9gq+kfrN1a+k3denpaf2DwVUw6u9N0b1g
u3ofoZVT9FflNAnlRVWCp8+c5/Jcz/tlc9/MXf25KvoG6ezrb+mvyeib5v2Lb16/I6+h3+m2/Mtf
S1WC2ovgmC1HBABOFxBMOhujSE4Xl3A5yRnGIBOxMQqemYLVL1KmCaKr6FwtJ8S1B9KDFiEIyKoc
keoI7Sh6ag+Q9ir8x7/SU9K/zlUfJ6fJ6IbfQP1KdFUSqn0T9NFVU9Ew6MKLadv9gOc/kuZ/2yue
/mLv7clj2dFERs9BRb/kVes3yPD5GPRMm9Dv9Fu9NXrt8xun+YSRitLUQdYLemWPTsTmp3Lhg2/o
Z2VPTME5x6GHpAUxHE1V5lscKcUIRgcD1RsCjH5fT5TTrN6VuvX5A3Xp0t6N/kFfkj/NNOq63FUW
PpqejYLYE5qVT+wPOfyXM/7ZXPfzF39uSbq7kG67/MT/ADY3zjT5JT0Gzof5QxXDQriLqgDdiYhO
FVME/RVVr0a7tIpoxDjNGURwfV6RgE5qVsW70wiSwKY1dHTg/Q3UE5DH+1TfJ39BX55frt0VRr0N
1MVj1HPpaBcVFhVU/sHzn8lzP+2Vztf/ACLv7cullTBVp0Oq+n2+jboPp29DVN8ib5a/ytvmyMWA
0vXt+RkLUnyTMqivV1zwGSYUin1vPYHTSNSKhPGsD6Ld0OSt3pg+CEo4haZGoVDmmR6RHah8xU/s
NT5np1G9Pv8AS09PXrFP0t00zW5Vp1MepT0tIriTAdpVP7C85/Jcz/tlc7/MXf25eif0TKvy5+u/
/A2odYLBUomKw6HOATZdDoFaZBEgcB9A5wTRC3pz8g8uXiGHYtUaRl9a1PXNFi7o9I3IDd8j3+nb
+wVeq/yh/wCwdOrvRKIGXoaD0jAJyOL+xPOfyXM/7ZXO/wAxd/bl6Dt9Ef7GN8pp8zP82snbpLYJ
5Yp+hwgfoWkjJHOO3rPLBMFTFOfkInHEIEZh+wowJrgiH6mr/gHX5K3yynydk/oeGic0PoWCoQy3
9Vv7C85/Jcz/ALZXO/zF39uXynf1B0dvXp81un6r/PzfNVfSVRMahO1UZ3KIseELd0vknBWmVSjT
hyPUeWKYKic/I/Kl4ZYPtQuxyxVcUdnSD/wAf5e/yzd6Pf1WTo9FASnMT6uo+3paVNhTdVgqlyuE
Kibop/YjnP5Lmf8AbK53+Yu/ty6K/KB8tf5O/wDYWvz0ybUxTipPV3ZpwUYyFVhw5FMFxeL5O4xC
aVZCkgjE4HAquCeOCZDs/tzX5u3/ACph0P1KkIvIKjkrgiU5CeRZPIuuCGoqltu5MCyrXooKbUBq
B3ehA29NP7F85/Jcz/tlc7/MXf25dVutT5K/Vb5up/ZynyXf6GmKc1QICY0VOg5dDZIEFaJ4lExr
8pEvZNJBaoY4xKMZZZLScEJDvQtu0t6cYfOTf2Er8mp80V9I5LLF+xcECVSDKp0p5XHTzJZY9ypF
z2KkUwiywWx07pjMkbFXpeJ7kSzSz6rSDxOK1wrA9I6lf7Fc5/Jcz/tlc7/MXf25egfqN6Nv+Brf
NWrYgCcOhwmZVVSq1WkCi1DFHXRaY4D5T5Ez+ivNiOE+JPiQmwCEoVmE0gw/tdT09PnOvptypjtW
q4dX1Kkar4dv6E4iQFxSYdqecnUhHEBargfcnFsIsGVcVXrshOOJFetticQvMtYZjZ0atnTv/sXz
n8lzP+2Vzv8AMXf25dO5U+QN8+N6avzM/wDYogJokkDFBzXoqE4R1DpaWCrhuR8uHFtRjIUyPygS
jQioTSxIaQRtnu7E4FE5QGSBev8AbcylgEQLYIGbr7r6UwGk7FoajOT8gf5ur0uUapo9yBukgbE0
YPvKoAB0kIxOeKlA4ZIlE7/QjtUezruMMwtUcCguxH+xnOfyXM/7ZXPfzF39uSL+hG3q7/kD9bd/
wpOxVbVtWq2hGZrsTxLjoqKqmCbBFaSaJwjSqIIpkflD5GhQnDEVCbE7FWi7EIv8/N8wP6bt9E5R
twx6RIFAvxZ+gp880WkLXMcXVdbk6jdGeK1dDegCHYj129SA6W6r/wBhuc/kuZ/2yud/mLv7cuh/
kLfKt/8AwWf0ZROXRqg7oQnQp4noYreiwVRgmkexPEreiPlHlSNclrj4ZLUC6JOdE4y/sO/zeVI7
+pqPy+nyyvWkRjkvMkH2dR+o4xCkMxUdyMDj6IKPYj1nCCH9juc/kuZ/2yudP/7i7+3L5FXqN8xd
npqeir/b5kSBVeVcpWicVTxoUIXD3oEHqE4FOHMd60lEjvT/ACgTiWIKc5iu4o2jlh2IAIFv7Pv8
hr1pInqaTh8zV+Vyo5TD0JiiMpV9EEOxHrMUDsQi1fQN/YbnP5Lmf9srnf5i7+3JP1m/sBT07fNr
fLW+byCFrjiK9yFuZqFqjgjSqEZPpQY16dUj2BOaRTQyRAd1uK3pj8nOcTiECA0hmgTgCgAPS0+c
N/znu6wgD2/I2+U1+T06CjEllokccPQ/UhPZ6PsCPoMK9NepTop0uqf2B5z+S5n/AGyud/mLv7ck
3TX+3jfIaegp87naqZFCMscwnAxRLUVC8UATVaiexOcMgtEcStUg0Nq0CA3lEwjVaTgnGPydzWRW
uVIhC3bjQYlAZ/21MjRGWWXVc5/L6/L6otinEiAnlUoSIZAn0AkjDNqIg5eiZH0rehf+wHOfyXM/
7ZXO/wAxd/bl8x9nyGnzTX+2bHNGcBVYNIYoRkVSqJAWqNAgJl9gVKyK8y5SOKEIBgOhijICqNue
ITjA/JdslruUiFogKJ9LA5/2Gf02/qbvl3ljHqgKPTu6H61PkFetT5a5yRhAOdqAIrtTlRlmEG9A
QtJ7k+RWPoSjISc7PQU9BRUx/sLzn8lzP+2Vzv8AMXf25dDhD5GPSP8AOD/2/YhGQCIgSCvjTeO9
M5MkdDVTxQ8402IC3g3UYihRnAcQwRBxTH5GwqV5l6kcgmFIDAdAY0+am/sRXpMtiOwdUbAgBl8z
v8mJJwRNthHtREsVwh2UoTi1wZqUNuCMJetUqOuw8S1HEVQmMR6KQCkMwehutwpz/Y3nP5Lmf9sr
nP5i7+3L5xfqV/4OkZoyLB05lRaeYDntXmcvQbExj3pwGI2IQuGm1Ag9QxIrkjOIaJx7U/yLTGpX
mXayyinOGQ6AogACmX9tTF+Ionb1dZHz4TI4ZLVaA09tU8gwOZKMdWmJTzeR2pxUlarZAOYKjqxz
WrKVVQuNioa7E/S8iyIjxFSJGKY5qUDVwpR2H0JCBGfoGPpWTf2A5z+S5n/bK53+Yu/ty/4pGSLS
aMStM/CtUcdqECHyWo0dOFgmPhCDGvTTEIk54qUJDDDsTenfJaIBOWlcWqRr0xcOHwQEYiLD53p8
vp6CnyXVckIhGFip2rVORJ6oAzQ2kfPWqRD7FrtkMcBmtV2TRKxMkww6upbwqFV9aEZFlxSAWmzj
tTOZErVem25aIRZs04T7UJjCXoSCgejd1wB6AEdOxb0/9gOc/kuZ/wBsrnf9e7+3L0T/ACanz7v/
ALGt8kb5IQFLXSq0WgNO0IRuFihIdy0Tw2pxmjRFw29PbLjYmuwL7qrgtSJ30VIQgNpLleXdu6tW
zB15kfFH6R6fVKgQjHBaLfFcOJ2JzUnqR1vpzZRFsMAP7AN814rxD1rxD1rxD1rxj1qswiIvOW5f
DiIjbiV8SZO7rx3ID5rb0tZB9i1QmHOAC13545Kgc71TrlSgVOHhuDBGMwxCotJqd6iJVByC4QwF
UYxLRCfFOFvTjGPoSTgh6AfIq/PnOfyXM/7ZXO/zF39uXRT5vc9Zvk79d/7fHSdMiq3foXmTnqIq
E8cEzVWmR4dqBia5oghGVosmE2K4C42uye7chEb5IG9zLyGUA60Q1SbOQZ1hwnBP6TVPuCpSIxK8
qyK+1JOeoyiZyMQgI1G/5c3zzT0ZIDyQhdiADmFrGBXFMBYk9gVASqW/WVwxEUxuFtgVZk96xPRj
0aQfRaiqdZvTV+U06XwVZV2BZgbU8C+7oqWRk4fILVG5xe6y135Ek5Lhj6P61GYoJJpiuUs1UPA4
STjFDXiMCtFvEipVcU3RuRjtClEhq+gfMoDZ1KdQLd16f2G5z+S5n/bK53/Xu/tyW7quOs//ABJJ
lgjZZgDROMUXCFeBAg1TEOE8OE7lWcvWqh+1UACcKRA3jtRicRRN6FgnlWWQWufDAYleVYpDM7eq
wX4jmBw5BCNs+XCJ9aABf+x9fke904WjWQMFUv8AJXz+bnKaMQE0oPvCYUHSCCxWonjGSJMpa8gM
FrvgyOQK4IgKvonJZcUwO9A25ajmyEhjGqETICQpWiMS0onJPCTxllmOqydA+tC9Hv64BwzQjHwx
69egdqH9jec/kuZ/2yud/wBe7+3Lqt8wt/wvIZ1rngSgQjtTZIe6hUP1jE9y8wChxTj0DBUrM/Qv
NvFoCtc1ot8NsdVgHJQv3x2RKMfDEBarcyIg1dACv9ogD0U+ShDs6aH0dfl5kU70yCqnT9QZoSjE
RkMW9E5LLTaGqW3JUkw3LimT3qpdAoxyIRG9cMiO9PMmR39fSc8Fo6zBaY45n0LJwarVKTjZ0dvU
c9Rjh/YPnP5Lmf8AbK53+Yu/ty9C/o39LT5E/wAyV/t/riHLoB69JotUfDmEHNerRGlDQqUJ4hN1
mC0wrLMrzr9BlFNhAYDqsFE3QNzoEYKQJbenFzgBwZAf2Lp0t8jJ2IjZ8ip1YxAeqEc/mx15ce/p
3LHq6XofQmVwtsRjDhh1gELgz9CJDJCYwPVYLTHHM+kOz0Lp+u3RT565z+S5n/bK53/Xu/ty6j/O
NPSU+b2+Vt6Gof8AsEXRgTmn6DtRotUMMwhGRqqdQxOEl5gFY49icdXYBiV5dsd+1eZerPKKeWGQ
6oEQ5WudZnJayH2MvLnQjFFiA4xUpXLg0g0CA+aK/NNfS16SjLf1qegZl8OB718SQgPWviEzPqXB
bHfVF7YrsRnZGqOxMQxWmIcnYvMujiOA+a6dBJUiKdNOtFA7uu58WQRMjTIddkWqYh/RG1c8JRGW
XTuzK0w7z6XUcD0nor1Am6H6H6+7545z+S5n/bK5z/Xu/ty+Sv1N/wDbZvS1+eiCgxYHFAu/SxRI
C1woy0TNdhTjqEZjFGPsnBbul5FohC3aHCnPFdP0LVIueqIxGK1TrJUwTDFRmMDinBYgZozuXKPQ
f2iMW/8ARRPpGhElA3TpCpHUdpTRDdVk87YkU9u2I93yKnywuWRLU29DJhXcqgh1XqRKid3WNXns
CMpHs9ACmxBUo5PT0LhG3KlyOBRicQnJYLTGg9KEPQU6D16dZvnbnP5Lmf8AbK5z/Xu/ty6Kekp8
+1/sZT+wAuRDsgZBqdSqNFrhQhNfOlPak79OqWGaJhiKxKIOITFeZOkR9K0QDRWi3xXTjLYnJc9V
gKLBztTA4JgEZFcYLBGMCIsEZXLmok0A/tEZI9YAVJwWoWiQmNqXqTC1L1LijoG9A3jrOxAQiIjd
8sp8qclgntAEDMrxCAyCAuz1tkjwsdyaLadqc1ktTVHVBQPVNuB4z9CJJcn0QiSo3Rnj6ISjio3M
JHEfL9/SPnvnP5Lmf9srnf8AXu/ty62H/BxvnUgB0bRdwc+qxRLI0YBPGscwhIhiUSA5RhOgyXly
LkYI3Ihoyx7V5tyWo5RTDhgPUAvKsVl7Uk5qT1QWaGZTQDnMoW4UGbINAyO1Ugw3pqBPKTlOJaSt
U56pH+ylMVv9Kezrg7ENJw+SV61fmAwtsZDFGEpaRmQq3DKOxMBTqkFEZdXTmOncjbtF5nE7EZSL
k+jGwoxeow9CwTyrLIJz8gb5O/ztzn8lzP8Atlc7/r3f25dDdavy9v7WV6jFN8/kLYJFCW3qkCqP
DQ7Fqaq06XATRAgNyeRJO1eYcY1Rt6QAc1qkeHMryrNI5nb1gZYIABgF5fLx1E7F5vMVllFMMOpS
ekoEzMpH+zr9URyPoNMqwQnbLj0Ryb5RXrV9OIvxSwWnVQ7F5msucQmiGVepUgd68YXifsWqHVMe
hzgjasmuZTmp9I6YlP12CaNZ7U5+Quc+k+mboZN8885/Jcz/ALZXO/6939uXRXpf5TT5LT+0z/P4
uaRQoYU2dUyuyER9KMLUae8VQ0TCLlao2kY+WQexaZO++iE+YmJSyiE1vhgMgnbqMEJSFFqkGiE+
o6BkE1uIG/rDjIO5RETqLVJ/tFp2ehbWNRy+UN8tM5YBCUJ44RCErpJn2pohuqXKOksqphisVp6r
yLBfeAdq8uwaHEpzj6PFMKp/oTzD7kL9mLD2gOtsGZWiHeU5x+QgAIRFOmvRj1z6Bx8885/Jcz/t
lc7/AK939uXp6/Ja+hb5VXpb5NX5sf0lP7AHUHKNqIAbp1XJCIG1GPLjUfeKMrkiT0OSGOATgPuT
eUCEwsxiTmiQNL7E5qmzCYhOKLhb1p5zAG6qGM5IEx0hESOltiNu2MHc9cCUiK5KIgKDP5BX+yJk
VIjKnWeNd3SCHQfH5O/p6ekcyDBShqMYjBs18bijkCuCAB62kVKMiabOhz0Vw6j9DhOa+jBkXJ9k
J4AADImqqw71UglVWnbgpW5DLq6pUitMKR+RMFqPiPTX0z9Feo/Uf5z5z+S5n/bK53/Xu/ty+aK/
Mlf7GMqfP5Eg4Rk2J7ljqn7oRFsC2N2Ke5MyO/qB1piwKDl+jDoLKiwcKkSChAhUjXb0S8wOAFO1
bgAAcevAFyXUQAwb+zrykB2lN5sfWmjcBO5VIARtWpPLNOcT1nBZPnmgEJyxxXlkmJ2nBPAgjaPm
E+m45AIRsgyj7UgCtekiG/NOIAHb12BqUSSmTlMnUfpT+hbqsE0jqlsCcyxyC4aDox6RIYhC9Gri
vU1Tw2JsIjL5E0QtUqy69ev2ek3/ADrzn8lzP+2Vzn+vd/bl0P6OnRT/AIcN8hJumuUUZRjp2LHr
uKyO1apF+o8gwTunimIWo4nplqGoNgrkLcNMH69sRDkFR7Pm2nf8hb5ZqmREbSjx+ZLZFEWgID6V
quTJ71injIg7kRKRkDtRJxPoQJgE5IWox0xThMPFn82vItuRjYiZTyotNy0xesyhEAb/AEHYpbB1
HzTFacvS7BtK0RrLbgExk52D0JsXfBPDtVKxODdGqVTkFX5EwFNqFK9Q9Nend1G9K+fzrzn8lzP+
2Vzn+vd/bl0t1H6H+QV/4Bt89kO88gjKZfYPQuXJBoAiIACO8riuh9wTA6ghEDtQEYg7VgHVBRVj
VAkN0yo9MFcjHgi+HWJUdDBjVAbv7LucEROeqXuxqiOXiIDInFfEuSO5+logk7kZxtkgJpBjv9G4
xXlX+6SfGJwKFaFCQwPzK/VeRYDEoaYEg4FC6G0nIlPIvI4keikRsUu1F0/p9UyIR34p7Z4R7Rot
Ilq+pUp6PR5gExiCviReJwkKj5GKcOZWmI7T1K9WvXfpb0uPRT5v5z+S5n/bK5z/AF7v7Z+Rv1t3
9on+ZK/O2qVAjbsGuckZSk5PoALUDJ88kJczJvzQjG1ACmOakDt6cOph1DmpazpGW9DqkpydLFBv
mRvnF7k4xbaUY2h5ktuSI1aInKKclz1HIdabdiL+8cUZR4QchgjOZcn0ui5xQKE7ZeP1Jiaj5bT0
ht2mMhiSUYSloAzbFDzJmenAZJurTrSAxZGJ29BTEejYLjl3BCYIjHeaqjy3lM9NnpRO2WkM15F0
tMhiDgUSBRUDLaq+kr0CRDRQiBh1O3027q7/AJ75z+S5n/bK5z/Xu/ty+Xb/AE1f+FBncLAIwtnT
BGvXEYByUJ8zU+6tNuIiBkOghMSQDtQbD0cJXTQ4Mg2zqybYpmRwPzbX5n1TLAI2+WplqRlMykSq
xPVcLVI0yTyq2SYUHp2xicQhKGBxCf0b+lr1n6X6ruDI4BAxuE6sQMl5hcnOq0xDAdV/QMtcRwyq
60zx29TFYrFFsOmgXGWTCJkdpTmWr80LhDD5ELfMcUcpZhVAlA4SCMRTYVX0TAOnlWezZ0Cc6BNE
MPQt1t3oa9O/545z+S5n/bK5z/Xu/ty627qU+Q1+V0/4OlyDPIImUqZD0AERTMoCIeecuoVA1AfF
Bi/o7VyR1VwUZMzjqyrkrkjUv/ZN7hrkAjGDi26cjUd6yCrU7FwxY7VXoYBygblBsTDDof5AJROC
EZGuacFx8x8UnIyCIlCWj2QAnvQBJyKa3ERG7q09GYTDhGdjiGxaZYjb0YqhVSgCKbUKYrU4HamE
TIhEMIxTxi88yUSc/kreKBxiV5lktPOK0TFFwlx6BoimZWiHFczlsVakoTudwTDDqN6Df1D6Sqp0
v86c5/Jcz/tlc5/r3f2z6Cv/ABJclgM0bdgvLOSMpFyevgoxmdIKAtDLHrRIBZ8UKufR2piOqYKh
KQYkYdWZOxXCBR8f7JiINBimTOqJ1RNILioFwiu3qMnGHyB4lkITLJx8wMTqlsC8vlrZERiTioyv
CMQMhUlMA3oqei860OIYpj0MA6YhnTScla30wyzKMo6pHILRK2Ytmyr8o1RLELRcAE8jtWDhcNDs
62qXDAYleTy47ZLaStVwVyCp126levTBb1X0rdTs+cOc/kuZ/wBsrnP9e7+3LqU/4mPOTbkY2+GK
bqYIUomAJO6qcx0ROcl8T4ktpwUbguGEQagBR8suAM+sTEEoAni9HCVuDkHxKBul5N1ZlnopTwi5
p1af2NlM9ylcnicFpjhmsXTlcCedTuVAqdO7pbFbCqn07ihQjIu21PEh/lpncLAIeSIgT8OZXm3L
zSl4mC0xrvOKr8l0zkN4xRnYYRO2iAB1diBtwxWq9E60+gd4TaWTxNNiaYEgciFqscMvdyWm5Ejf
l6WgdPG2W3pyz7ExFR6HRd4o5HMLzLReJwZaZUkM1XpBu4bF5VmkdyYVJQlMPLL0XZ1x6Df0U6GC
r009Bv8Am7nP5Lmf9srnP9e7+2flD9Feh/nivo3/ALXmZyRtwgAAcUZTLkohP01Xw7dPeNEDfm/5
oTWoCO/PpMjIxIrReVGRlIYk9YsHUoy8QOHo3hIRiMXURGWs5nqz07ES7ku/9khZBzTDYmCC05LT
Eeiqn+Qgg02IOeL5Keq05iJ3leVbm0M5hGfMSlK3k9HTwi5yJLpvkjEh15dtpS2k0COotk4TWDql
LHah5hNs55rTGTjaVt6+m5ESGwqkNJ3FHRMvsIWDjaEwiV4W7U85t3LjkZfQvCT3rwkd6eE5R+lV
uE9yrEzO9cMAG3dPEAJbVhTaq+gpWJxicEZ26SzjmCtJBI2LAhV6BGIdPIPI9SnyinWp1t/Ub5s5
z+S5n/bK5z/Xu/tn0z/Mz/2Ef+yjE65+6ERagIb8SiLlwkbESM0z0XDRVqgAHOwISufDjvxT6dc/
eKYUGzqmM8FKDkuaApxgerIBTjOhJoh6KRnJmQjGLAZ7erKrBAmOkbdv9kSdgUq0jTpATDrN01T/
ACRwaIAn5PqmWClC1CRuHwhabtsiTuZy2IREI0xLJsuvu9HTo0h5SGICJtx070ZQjqlm5QE7Ytj3
nqhGZ1gZFNCIj2D01Yh0zekIkHBRlbCII9AJQLSCcjTdH0o6wRLcqZYphggYjiOJ6j9FPT7Ou/RX
0lfm/nP5Lmf9srnP9e7+2flD9SvzA/8AwOM7haIRt2DogMxiU5Lkp03SIQiZHYELnMnTHKIxQFqA
B25+gY4KF0xzqoyjg3VIQlIUJohL0U3jqpgi9IjCPVwcqOuTuP7G7upIblc7VpKfP0dcFu+SiUSh
E47E4+Rb1uUoWACYYkoXDKMBE0C8y4ddzan9HT0DTmAdiEbQqfaKIldEtzspBpRfAjBfFvSI91Pb
iAdvzASKSRAq2zrARDlCd3HYnEQ/YmkK7UTEONoTGPegB8udbvnrnP5Lmf8AbK5z/Xu/ty9CfQN/
w4e7LiyiMUYx4beQTtTqNahq3oG8RbhnmUI2ogyzkcfRmcoCRFXXlxiAI0p1oXcWOCiW9FMQxbNX
Yyk83NBh1RF2BIdQES9M/wCyJG1S31QkhIZ9Vurv2Ld8nEolmQhI8XyFyWG0oiEtdzICqiITm8vE
MAEJEGUs3K0xDAZfIuKQHaVptjzJHuC8uXATgIoidqU3zxXmE6In2TUp9AMsXKYUHy6qIqSNgTgk
y2AKUpXDBsIrTK9KMMtqLz1RO0VT2+LamMSsCmjAoG8dI2LgFdvyB+pTop6GnVboZV6j9O/pw+bu
c/kuZ/2yuc/17v7cvkLf8NybQ4mxRM9Upk4IXebBhbxEcyvJswEcqJziqIACmZQhEVzPpZPESopW
IwMQTkG63mPhVlFxRvRSBpRXrcBqD+Lq24u8nDBQcNT+wzekFwDo8uXcn6tSAsVwhMKKpTDFNIMf
k8SNqid3oadYgPOQxAQNuPwwfDgEbt4RMzhEVAVAB2fIXJbtQjH4lw4RCMBEWzhtKHCTMmsn+xRn
rDD3lrvgTnuoE0aDZ0P8lr1+OWGxOJvsDKUoXPLGQTSu6Y7dqMJEEHMpwBq2p9Ifa3UqAe0Lwj1B
U+Y29LX5x5z+S5n/AGyuc/17v7Z+VN1n+Wv8or/bNphwtULURLa1UZOBSjqUpnVWiZkI5DEoQtgA
5n08ZQgQCWcUCiZF5N1SZoxfhHojR6YK6KW4k+Hb1bUSNIfxKABcNj86OQUwTyiQNpHyx+gFeIJw
mKxR0jvVerRV+TiZxQ9I6MpSBllEVQjZNJUMRiO1a70yScYg0WiAaIy9HXrcZcnCIqURatHVtKM5
GcpjEA0QFsSF3MsfrQ84ky2hPCJfaS6r6JvkTyIC8Wo7qoi0RFtqMQWG0KQuaZavaKfQCc08YAH5
jp8p3dRvmvnP5Lmf9srnP9e7+2f+H1fRarkhGIzKI5eYnNEXZcOwLwud6YRFVGZDE1+QarTFt7IC
5MGeYBfqycPRTgDpiDh6I5UXHIzJwZA9S2NWuvhUKNTD0NPQarhYIxsRw9pR4yZEhAHFq/LzKVdI
oN60yi74ImQeR8L5IxkHBCJtycjEfKyBVgiDQhUQbFNKoVGO4qgAVXKcCir1KYoXLkgJHCOaZMfk
oCD+j+JKuwVK8vlYEwGLYleZzEdI9x3TW4iI3D5BquHSEQBIyGAIZEi6YSfhgKBASJEhmKqWrSYy
zlin0vPM5Kgbqt8mckBeJzsC0Ri0vzloYxhtiFqm0on3lqJqcQME4th9qYYdFPlrdenW3dWir1q+
lc/NPOfyXM/7ZXOf6939s/2Qf+3BFTsAR+HIDsTGB9SA8oxGciGCFy5LWR7OSEYhgPkEog6aYqdq
5deT0HVIZ6I+Zwg4INs9Fb8u2Ikmsyg+zqQFYManaoAYNn8glM5BMZcU8dwXl28MZFR1mgKEgXBw
+XaRQCpKnfgXMQKFE3GjIZI3ADIgOAETNozBYxRkSwAqiciSflRicwykcpFwnnF9ifTw7EKgHYnG
CfBMmFVqZuprI1SyC1TLomzM+bHGJWiYYj5KAh6HVckwRjywbL85ebenKBkKjErRaDbT6cklgM02
okjYFLy7nlAeENihcvXjMjYHUTZeczi4ZCV0aZDKJTW4tvNeq3yWpbtXFMJ4xcHAlDygDE46UJCX
DnrQnOTSHupzESl7xqmFPQP01+Zm6H6T0V6jen7OtX5h5z+S5n/bK5z/AF7v7Z6m/wCYaf8ACSoB
VIh+z5GYnAqN0uATXtUZxwI6tu5PB1EjZ6KzOU+EnwjFRIoG6kYkxIBoo1enp9IjrmzstMrflxGW
1AyFc08QxREYimLrgJpjEoG6PhmhAQMQZDM7EJRkK7/ld2Uy3C47ldswGk3KA5Mhyd6ZIiDdJGYD
UTNRWrlqXlm8dIApxIRuXJTkRV963fK/OgxEcUxy6eGRbYtM6SVE6INRsRlDDZ1ROBYhOGjfj9KN
u6GIW7I/InQQ6+q4WCNvlIkz2n7FG5fe2M3LrgiNXvZ+nac67qoW+XlpfGRCe7d028nz7kBOPnXB
SIZgpauWDSwOAC4zqJ9nJNEADd1afI6ovMURjaGsjFEWoVG51ITEhM4E4JrtxzuCETxgYak0QANw
+dq9L9Q9Z/QbvmvnP5Lmf9srnP8AXu/tn0NfQ1/4VP8AMQnG3qMS5KDx0iNOrG6DxAqJli3orVwD
iEhxZKBdy2PSexVBJBoEGDU9NrmWARuRpHAPsTRrKSeZI7FwnVt2oSIocU4oBihpK04BcEnZGJmQ
yrN9xQjLhn6DimH3VWbbUCKg4dXZ1cR6+oLQLazXsRiBgKIX7lCOGRzYoTtzEnDsKlcvO5qt2bEu
Fx4pEg/Ytc5ANtXDhl8rMZB4nEKVyEjDNgpRYmIOPQwTkcS8JIyomlEpowKYRPYmmGK1zpHeqYdI
lAsQmk0b4wO1G1dDLaMj8gYIBDrGUiwGJKNuwxl7xwQu35m3EYf3BGQ47hxmfTfEkxOWaezLVI7s
Fpt3QYyFZNQI/ibkTbdzSq1ctYjKIoJFC7KflgYA4LXdl5k+yi3fJ2nKq1iWo5BRlEAiWAFVGUrZ
0e7gtc5G2D7IqtQJdNEAdLemZU+VP6Rumnf0dvyTHor1K/MfOfyXM/7ZXOf6939s9VvR1+Sj5ub5
yf5tb5snGHiIU+XvSAjE4E9UnNAGTnoaAfaVqIcbuvGfsghwrZZqdJq1MVMxmAHrtPpjcmWAWonh
9mKOrABUFdqxQJFQq4GtE0cHZBsQFqiGMfEEBCLjNCmlAZbVsbNMLhC4bj96rcL7MUCWkN644+pO
MdiLy0xyAWnIZ7U4wUX6jEhTJkdILRbYjETOlC4JEiRrEpowrvWkH1JwXJ3oaxTYqUlmOiRyHCO5
VU7EqAvXeoB/MuXHlKZ+gK5ZmMYkxlmCBQoX/wCpSMpSpC2MBEbe1E2RplEUHysgHVLYjCERF83R
hNjE7l4SVSC8IWQ7F8SQfZRcEog9zoxtQBJxkUbhGuZ9S1HDZ1RKJYjArRcaN6IoUbV0UTioOB9N
ToAQPUeRERtK025Cdw4bETOTW9rsPUqjzJj2pelMpnTEZotIkjANintS0l2EAELty6DIioIwRjIA
vUkshZsEDbLIIidzVZGeCzk200TD5LqkWG1AF2ObIQt8MT7RClGM+H3iFKM2kJHxErTKIkcyVwRA
PoG9Pv8AlrJumnpN3Wbq16KdSvRT5n5z+S5n/bK5z/Xu/tn0L/J3/wCHxGDqNy5daEjVRlEuGx6k
huVwXCTOMmbJG5ayxCBGJNUYnAqQGR6xLOyhxai3TLsVwmAkx8WQ6WJD7Oq8iwRAOqQyCaERFOZ6
Qvvn3LxCR2FfElwjIIgVbNGORTmQByC0yjhmgi5yYKERmXIVCjoLaqFap0WsGjrhDrVOTDYmEDLt
WkRY7kBMd6eEnbJPcg28J4SW1OQoxlQZoCJDAU6HnIBSNstCIxzKN25ImU0yc+FAAMI4MmDE7VxF
liXWkSdRnE1iapzNttETbLxeh6JTjU4gKPL3yfMjSLVPqU/KhIcJeRDUVm5bIPCARsIxUrl2QAbh
GZKdyHyXiZcMgfk5bErVelU5LAknAMtVqzwnMla7mmI2YrVbuB9iNvUQBmvvD61xTJ714igJ+tPE
utoWqOHVEoliF5V2l4YSRtXg8citUaxPphFaAhH6ehyiIg3JjEDAIHyvhbDQJ5wErmNagJhQbPSa
ZEkjFg7KMbBlGBxmyIu3TGIPC4xQnckJyiGFKIzLR3sj5ctVzIBMRPWTXIAICcBOWZKaIYbvQV9G
xkAdjoRcSmcAEYeUTLJlIXYyifZAFE1y6Qcgh5s9YjgEHgC2FEwDDd/YGvWb0u7rt8zc5/Jcz/tl
c5/r3f2z0t8tPS+XzIZ3D3L4cRGOSAu+tAxkHOXzJX+wbfJd/wApF4wJkC7hR0xYRDdSQwor1iEX
jiZZoxzIZG3MNIFPkoXreEsWVD01gJb04DDYrgkWDIBmAw39M6PTBXiSRXw9E5DEAsjKRJltdBp6
hsK0XOC5syTkgDaiLfFLacFp1GT+pNq4syuEd64ivEPWqn1LXA6oZ7UZQcS2KWuhiM1KZuVxY7VR
OcUxNAjc2UCMkSQaJhgoxLmea8XCclqkV2YEoSBYjFkdBVPUiSAY7FJ+F0wmfsTzjqjtTih2Joli
mE6p7s6bEdIJGxOQwTmScAo5PsXFgao17FTo27+gvVjgN/RF91FLnIW4+dcLCWwDYpWrkRKMgxBV
66LshZhIiNsGhKNu7F/dOYU+XuV8uRi/YiQK7EZGVQWIGSc1lGh39TyyCWxIWqBcfISUBENVnUb9
9zLEBCMQwCIiHJQnqYnJeVzQELuUlqjx28pDqPEsUIzx2rBwVqgO5UCqG6RKBYjNeRfpdGEkYXA8
DhsIWoENs9E56ZzIoApdqG5aTLVP3QjG0TCL4NRu1a7kjOWYwCYUGQ9GZ3Cw3IiEJGfshkCLk4k4
uGARM7xmJeIMhCMQ0cKJzQBcIMwMSMELtq09kYRJxRu3YxEzhEVZU+Q6pFgM0xkW2tRC3aoPeKnC
LH85abkGL1mSoguJD2gVwCu01Py1vk9fT0+SN006GT9WnzJzn8lzP+2Vzn+vd/bPydvm8ykWARc8
Iw6KlPbkyFu6apwadR/+B8xGIlIijqdm60YxOBp1CDgoiPw4TpLYVGVvCNSNqjdgQZbM1XBQ5MF7
VziY5HrXMMDipQuE6gS+xaBJpHB+i4SWpiFdlqBD9/QYnAhlO3OmwqlGTyx2rSCSN5TkovQDPMre
VQsAiHJjiCE8p6QgBxHaVLMGjKUoAHavMIYtVNEBa5kNsCYUGSb1rSFmmIBG9Np0SyITXIucAVwg
scAmFZBPOTnYFwwVIgLBYkLiLoW5RDZkomA1QNG7VUNmELsKvlsTmWknJfeL7wLiuPuXBHVvKMZB
gfoWqJ1AetC2Axw3riLbkJR7x1Cex+iMhiylyvMlpai22qJsxlqIoSGCnC/IRmZk1RuGYlL2YipJ
U7x8UyZHvXFkjMDxYqgcGhCLkxYOSRRarUtQ6CffYhao1B8QyWqJ7R6Ii2wiM0YRdhmvLmxkzuOm
VyTHSMEYgARQeNAcULF08QwJTzuRHeiLZcbUZSPYqIWr4128F5/JyrnBGMwQRkel0GmRMZKhcjIp
zFpHYmkHVQxVCqFCUSxGYXkc0OPKSwOk4S9C56WRuToSFIjAleXYEjSpihPmjrl7q0wAiBkPRMSA
d5Rjbibhj4iMAoaLc4WjmM0967IbAnHFLDUcekDGcqRBRsPEA4yGQWmd7VDHSAhCNAMPTUXEQO1E
ymDsAqgRa1Rl4a1XHCdu2RQRC+LelGOUSgLlzVGOAZDVAFsE0QABgAqp+tu6tPTv8yb0/wAkZV61
VT5s5z+S5n/bK5v/AF7v7Z9JX55echEb15dmTvi3QyxHQ4oQhC9JjvWqBcf8EjHB0JXLvDI4BRlE
uCKdSzclWAkKZ1UTgGVy5brB2IRJfWB4WUeYkWMZM2wKMtoVVpoT2rw+oprcK71KVwkvhEYKQ9kn
vTg1CEbxIlGj7VK1CTzIzCuaiAScTiU4w6DOVJxwKxVSSsEAO9A4bkQWAyOabUShGMX7U7JpvE5k
LzLZ8yG7H1IAOAcVEDA4p3qmdwnlgtLuqEBaU2BQk9AmIdOzBmoiIy4inxKp04KnQ8Tgtci82aIU
jdOvLSdqcEPs7V8OTDcmEyY7VVtQzOaA1BxVlpfjOxagXKjcA4s1qMmJKpXapSiXr6umcTmKdyZW
/UvxsrcTenQSIwARjOIIIV6c38u3LwA0kTtXlytR04Ciu2InwSYHcpRkaj7Ez0GRTwRjmcVc1BoF
h6kJQLg5rzI+KFe5FqvinB4TiFwxJKpb9ZVbY9aacTHfiuCQO7plInibhG9NbuEk4grTKh3ICMtM
faOamJnSZMBqW5StwOq7uyKJ5iB0GtcGWm1FgVKEgK4Ji4jlIJubuTJyYstXLXiT7pLowuRI2HoY
LVGXctF1oXsiiW1W8pDpcYrVGkhimlVcVCsVwnowTihX4bmg70EitY47RwK12++PVdOm6ROdYguU
LNuOmAohIhghGNB6FyWG0qU9YOnIFabFkS1eFi570/MRMS7mWrJagCTm5TAMBgOkiVyLjJ1OMZmA
HgERihPmjKU8nKa3Ft5qfS1TSm52CqItRMpb6IeSHJ9kCoXmXSJOPDM4J7x1k5CgQMYVGDl2W709
fldfmF+n6uvT09UOo/zJzn8lzP8Atlc5/r3f2z1N3Vr89m4ckZSLxGATCgVVsCpJYv0ADFC1POnU
JOSMLLCIzQjdHetUDTZ840Vflz/PkL4tmRBclQnp0hsOpC7EtOJBCgZ3QJSjjFXLV/ijIvCe0IX7
kRO5cqSat2Kd61AW79sahKNHZRfAR+pFpVmfUEbkgZkmjpxAx7ExBG9MTRTGITxpuREqbFKTPE7E
ZOyGm4exUIG9lxSNwoPQ7EwyRJrsCDmmYWmNAn27VUrNULKtUJC4Ac4lEx7aJjUHLoeZYb1otYZl
PmiSeIdGNEA7FYuqFOfQ6jU5KuIqnIxo6YSZPqTl5MmhA75FbypayWkwDoAYjFQ0U2jNbTkEREaX
x6aKqgVL+n8w4uxJlb3xRuAGZAoAuYjzMhC5K4aGiN8kyjGrRq6u3pYXZEjcMlIwDSAoQhIuz1dN
HAJiaijrbvCaEqIxMQCQzp4liqF1RcUiAhEjUd6JBJ3KgLjMIRuvIbTimsVkfaOSe4TInajpFAgI
DVI7VxxYblxZZlGzCbiVHxX4qV0ykayigBgOgqdu5ATgCe0IziXgPZOKE7RMWTTiNcfaQi7IH1la
YoTJYoW+YaUN6N7k5CQNdKMZBiMj0UWoYps0/RQOejBOF+G5oaoSoCV51jisnZktUaHMdWnRuTLT
FAAYoBqt6BrkmOwVKjHly+rGZFAmMoiIPZqRlfhGUj7IwXwoCPU0y4pnCIxRs8rakJZlRletiAjj
VyUNMAG3ejclhvRJuRpvXBAyBwkaBGFqGk5Si8k1wESJ8RNG7EJXLh17YpxHVL3pVVAB2en3/KX+
eK/OPOfyXM/7ZXOf6939s9D9bf8AKX+U0WPofLBTdcS2KFwmg2IGHSWxKJOfQNEyBsQhe9aEoF3+
fG/sdOESxbFTs3Jg6CwCc0CYyc7lSXrRMQDvUBEHhoXVqJI1k0UbV2QhONK0dXAJCVy4DGERXFSj
dmIs4rvQDViTXaEwj605XGzLgAZGZDnYEzMmwRIl6kSSsu1bU7MEDmFIEMRkmiO/orU7erRYUTlV
Cog6JI1AHBO2kFNFNIN0DSS+aaZ07CqFwn9FpwfFMgIB5dEjLxtw9rLTMaq1ooSlEAyqyoGBwRID
A5okhzvTG2AdoTjoqgTkUQMCAR3qHajzOkeaQI6s2CYqd8ikWmYjAybNadI04M1FK3aDW7gE22Pi
mWqAGrYmn3ujO2ak1CBxBXFFcUUCzOmZwjNxSrIkT0iOC0zlqMcd602ww2KsXGFFsKpxWzhuRcdh
TCjpn7FrkSyHlEAbEDMAyFCtsTRlOLMYM7Vx6CNq8yb+VLEp7UNQzKaxifZWm48SnQtxTnFERqU5
LrVblTMZLTMeXfah3riDxykOl40KNaptiYYKqoVisV+H5ituVHOS86zW1LZknHf0Dq0QAq6BI4j1
i5AbeiIxlKILahtREROHuACjIXLl4mchxOELYiCBmQmGHUMpzFMnqibEhGrCOaHMX7xjclkMURBz
I4yOJ9ETKcQBjVNxNtZSjy5EWwBFSom5EGHuuzoyvSMH9iNVgZ/pVTRAiNw9DX5obr1+U1X1dDdW
nynd8x85/Jcz/tlc5/r3f2z6OnWf5A/zGQMB6FkNJ4d6EpF5GvR5Yw6oEiTFAxIf/gSYnAqUjExh
LAb1ptBo5D8qGqY7AjF3IzVyzdk0WojyfKR82JNBuUOYvxibdurRk5UbdxxGNZbaIW5WYmIyIdNY
D27o1RGxB+9YLSE70RjMLgDPgvi8JQjDHcjqoE8RTamnPTnRBzqWmIpsVaISwlGhKcVBVOpXr1Kc
tVAWmI3J8CMlUriwVPWgZVZA4BVT+idGRFSmiHmVFxUGiE9IohO4SD7K1XqgYBGMQwOG4I6g8sBs
TwoQm6li6PajpPaEDsKjypgblrmA0GyIzVKFXI3uM3gDAhGQi5AoFdnfjpmOEROURgh5YJ2oykQA
MnqtWG1DTJnqECa7VSIZVgE4RbFkJykzpouXzTmNVSickjcgZDDMJiaHJfDL6WbsXmAa7jOxwXFE
C5sCa7Hh3oiAYIGWZoEYxwiKnejri8ZVltC1W5Puz6DC40pHCOK+AZQfI4ISFyMTtdCPMNIt4giQ
NVvIhPlmmhQdRwWIwK8jnI+ZbNNRxXn8kdcDXSmIYjEFPq0y2J4laiGl00KxWPR+Fv1jKgdEDwSw
TIdLlUTBC5MbwqdOqRYDMqUhMSMchmvLsW4vIUL4IfiCw9og1KeIJzAJcdbQxnPHSF5fK2pBvEVG
5fhogMYk1JT27cQdregrIBsarS5kdwojHl4VG3FRe3LQcR4UTduGD4RFUNZN1sNWAWqNuIO0AfN7
9Z/Q16G+TU+TN8285/Jcz/tlc5/r3f2z8n3/ACZvk0pbApHf6IOaIWonD6kZZBSOQ6wMTw5hCvF/
wGMADOQxbBcAAiMQgRGNVG9MuUCSw3IuHkMCuGjovV1ciIEykMdg6JSHhNJd61xuRI7VGNsHRZGl
yMSqdDw4myCfSxGRQi2meZwQ+gpySRmhcgaLQQQXx2qMdPeuEJ7hZcNTtRmQapiKekoi8u5CLpx4
hgnlU9DSNEAKBNmg3qQBDH0rzPEcAhEBaiXlHCKEpFpHLcqYAJmqcAtIDbSUI6TdmMdiDwNs/moG
JeJwK3dAtn2S4Ula1kCZcQfF06BLG5EFjsdMFExYTMHn66Ly4cMRsxKc0G0rSJOVGQrbah2FMZNt
BTthiQmLoRhigckAO1V6XFVSh2KniGSlGUhEx2oW7RcHEhC4TU5Iau5VRaoFI9q8r2pV70Jmshwk
IThLSdgXk2QJ3PededzJDEu2KAlAE7wniNJyZStzlqoWU7N4a7b5o3uUNTUwRjINIZHrCVuXDnE4
FPEi1zGzaU12NMpZdIBotVsiQTGJCqCqhYISFCFF6kIOnPQ5TdAnIUCERkq4LVMu+AFSURy0Jasy
2ClG7dMbeIcYrVda5JmwovhwEezqkyLAZlECYMhltKe1OUZuwgBkheuXpebIcS0WxTM7eszh1pMi
4oSBQLRy7HfJDTaaGZjiVquzNsH2cSh5hN1sAU9u3GJ2t8mp6bd8yv8AKKfJ69any/nP5Lmf9srn
P9e7+2fQU9LT07fLy2JRJz9G9uTHctErhZHriUCyEbsabVwyY7CnFR81b/klf7COaAVJKuytF4Eq
RD1xCA2DEqUpl9jI26uFinNSmQvWjnWO1RumAtSmHANVOd86/MJOoIXDF53KklXDoEbsAZRkAxom
zz6NTmPYnBohVRnEPuyQ1AORgmslonEJpGmzFAmTDIIzJJR1Eusa7/TYsck8gSM08cE6crtWrowW
LFDBB1j0YrHrucl5hK1nJamqU5emCiJg8FSBm6E3ZsFoj4pZomarRCL0AonBTdEhuVq5ybmduQYD
aaKEb8tVzSNZ3qPPWXnEFrsMaEqM5DQZh9OYfapc4Zm7CR4nxjsTOHKIxBxVIuI0Ec0bc6xOIUZk
sMpbQjAlyQ0W2prgLrUC+wqvWcUWucmITW+EZlAA6jmU1yNQmjRY0OO0rXLEeGK1SrM57ETMhs15
dkkbStYJkdpQjcFdoQ0kHpF+IocVqgW3LRca3eykuIPHKQw6wlEsRgQvw/PRE4nCRXn8qfMtGrDE
JjQ7OihcbF8QMV4u5ERGreuGLdiaYYmrIE4sgd67eoKUQE5CKjGw0zLM4BRlGIjbBqAWdPzBJnkA
cFpthhmc+txSA7Shas6ZTOZNAjAaYiJqdqjd5jS8cAE4iAdoHUqQFpnLiOQqgLAMpnaGAU4WoAke
0KIG5qtj2i9Si9wygS+lkCbcSY4FkwoOs/ypvQufnt+pRb/RV6H679Tf8wc5/Jcz/tlc5/r3f2z0
N8pf0O/p3+mp6bS64BqAR1QI6aBV+QvEsUI3OIINICWxUr/bfBMqlGdyTRCI5caID2sypwunVpqD
0xMY6pSOaMZS0xPsjok1HVy1KI80mktyaR9aLkREitu/ptTkHiJBx3qEgGBAYKWvbTtUbV2QjKNA
6noOu5cBjFq4pzj0bUNYBjmESaRyQ0uwUclwjUDgQhEy4s0HLR2ko2xxPQriB7U8CmxGR9LquDVI
YBGAiIR90KqYJig+AQiMRiqjBACIG8qtSgY0HTh0U6lelhmmTYRCEIjtKtTiMG1HsTYqEdOgGvag
xqFqbDJTkXocAjcLm3HatQp0tuRuXS0YEE9xUbluQMZBwQmbUHD9gKBBoy5gAaiYsANpVKnElAxk
+3tVADPIozqJihBRs3A42bCgBwk0daLnFE01JsQemnUMXeeQWqZ4EBAMRsRBWFVRYUXEEJyBYbEB
ct6bf1oG0GIyCGkkvkmJIkhVpZFNep+cFqgXB2KUSKgUUrchUFOKFeTzI12jRzivO5M6oGpjsREg
xGXWAhGVyGcWdebCBsXjiCGBRhdixGeRQg4i+ZwQn50dJ2VXFeL7gtN+7OUswEfwsJxgPu5EO+9a
ZS1TbidRCCbpcigUY2YThbIpKIxKhcv3jrGIIwCENAk2ZCaIYbOsZ3CBEJ7doyEqRL4oyu2zx1BJ
wQFyAlMYyWm3ERAyHQ+SM5TAA3rCRPssMV8KI1EsINgibldWb0CEpXZa/aOK8sQEhmTiVphERGwe
lb5C/wDY5lTuVepu9Dv6z9Xt+Xc5/Jcz/tlc5/r3f2z8xt8xcUQU8rYKpDSvglmRjIPv6GFBmqLB
Yem1QJBQhdD70OPSd6eMwR2/M7f2LwWAWCwYKiM7h4RimBa2MAjWiuXNpb1dBldkBsGZWs0hHwhU
TDiJzRLOFqnhsUpRFBRC5iY4tggRQlAHoEs15Ntrli3QznluUJi5EwtnUbYGKa8HjCpG9eXO3Eww
YhSt2/u5jVEbH6ar6gnY9yIlN9gWknhORUDbGmcsUI3BwhanERtGa0Sh3lPCm70x0+JGRNT0Oyco
gDpqHWCPmEvkFpi7ZE+k3dDqVm7W3L6ChIHVbekgrMxiAyBlRsVwlgjEhiM9q0RGdUCMw/RToly5
LCZotF6WuYme4ZKuCvmR4BcMYDctMg4OLrmLNotETIBWkSDvTtRiatmFEnElijcfQSThmERI9hWr
EDJP39FOlzgMV5djikc15vMEEnAJgGVOr4hrOAQncm4yiEJHMsyIFJBO1URO1U5haoHh2HJOKtiF
qtnhzgVSHFmCV5wiBtbp1WzTMZFZWuY+srTMUyOXTCE/DI1QkLQkca1TQiI9gVUYXYCQ25hG5Y+J
b3YhM50jGElwnyQBRquUL/Mc1KdyQBnED6F5diEiwYZIzljIoB3QCc4pggBmgboclaYhgMB1xE8U
zhELy+UtSceIhar5nC1nGRqSgYxMiMNRdujjkI9pTCWuWyK/7e1w+9ifUqnRAZHhdE80aZRiVqDy
Iw1VZOIgHa3zLXob5fXrV6a+kf0deinoH6ez5BT0FfQV+S85/Jcz/tlc3/r3f2z1W/sI04gqnCdy
eF0gJgdW9VDonSiZBgi2HpqJ4TI71xHUN6Am0JJ4SBG7+05tWGNzOWxT1zMg2fXckAb1nP6lxEQj
sVZgn1poRMjtwTeGA9kJzitg2oWjbJI2JrcRDfiVruzMiq8MUwoMytEaE+tNPEqoDjNGIoHU4wDA
VJRE5ORgE5oAmRQESNTnUBt6LhFYEsWQkMDghcuQ02m0wlkenFk9GOZUiZhtgxXmSGGAVcclCINX
QBOIQadBXSnuMTkhKFOpVbuih652NRapEk/QpCOCYYBPTuVFVU6GXFDVHNcIbcVT02LJtTxOMUSS
8YbVWh2JssETIY4ImIge3FCMhouDEjAqi+tEKNzsPqUuXgKkl4HKIzRgSz0cI2p8VufENq1WxoMh
wmS13TrjcJaY24rVIsBipau5SyY07kfUrgl4o1BQDUOKbqaplgjbsBreBKDgGeZ6tSy4akYowjEy
nmcgtZDz2lMMAgB7OC1SDlfDDSOJVZklcE33FEFA+EnatUSxGxGBNN/UcFjtXk82NcPezC83lj5l
s5DEJiozGRUZ7ukG9LS+AzURygJt5yANTsXmXrBheI8UMURG3IjKi+7Penlpj2lASvBzlGqEIv2l
diYJkLlwdiAGXW0sZAUJGCIhG5GB+7YKN3mLs/Nz3JocIzkc0TK4CRkKp7VvUHYPj6lwxlB/CwYM
td+9WWINWQ1R8wjMrTbiIjcPminy7f8AMr+ir0N129EfmTnP5Lmf9srm/wDXuftn5TX52IBYbEQI
kjaE5gR3LUYkDa3p3BZcFwhAXuOKeBY7FX+0fkWj8SWJ2Ik4nFXIykBck2kHZ0CMpCJOAJVZxHem
NxzuqvhW9Q2yoviPbP0KkjM7AtNkCA24lcVyUtrpokE71XDcmMmVarcE4WkRqnbSNq96S1EOMgEe
Yu54DYmyGaGk4osNROamTEnUpQlFn29DqQQ5ewAYXJNIH60xooyBJhOVXQ04ZK5KeMGMTvQI6NWJ
yRlIsMzsRlE6o7QjbnAShnE4oXLVIS+gqEpVANFGWkAbFHmLJJBpIHatM+HejEl2wPS4yWooPQIx
LtkgxcFah1qdBDO9SUfLwRcHo0gOTmmbo8emWwoSEyojU5KoejDo29VuiqZ6rS1NqqXda4yeeQVS
a1ITjEbU8Q0c5HAIC34RggLsWOUhRbRl0HYV2qMgocwQIzvQ0E7070XKkT02rJAm3tVdBsBgtJ8c
pjQM6YpjgcVqEqDapyBqcEI5gurmrKKcYPh1CQXmMkJ3aWsQFpthh1jUA7SjCBiIjGSMRj7yqaIk
YFcNEJu4zC4JdoWGKwW0bE0sBknGJQAiYvjIoVcHPq8JeGcTgtdlrd/OO1abkWIQ5e9w7JLzZzAh
kVGPJuX8UgMFG/zN0xmKANkhbgKDPb0GcyIxGJKOkeZdygzJ2EQMggdgJR3BSy6IkjhBQiMB1DOZ
aIxUo2oSM/ZpigNc7b+J6BkBe5gnPSftQhy8RdYbWQhZtStA+0A6H4m60cw7oG5KVwDCJwTwtxB2
t8gf5dT52p1afIX9A6r8hr8s5z+S5n/bK5v/AF7v7Z+Y6/IafLMAVWIPctMohuxGVktuR1QcDMJj
6cSieHMISkTOewL4doAb016DPmFqtlx/Z8yOQdXL0yxGHaiRLUdqDlpBRlG5J44VQnORlLJ1wxeR
zKPmGpQq5QJPciStyo7LWT60wonOCcgAFG3EuTRaZZBARArisWG9OznanAYJpTrmhC34c1G1sFSE
JSDgZLTAsMoxwQlLxDEoHoOxRuyrEGvYULluQlGVQQoW97uhaEdQHtbFpsmJgKygMSpQkGMSxG/o
BOGxB8D7OClAxAAyCkDTdmpROGSiBjkoieavWjgKjop01TYMnAdkxiAtQyTHq4dDmgRhQoABmRnI
OBVkSA3RRVQP1Ly4sCM8VqlOqBlJPEgkY9FUwVSqlHcnGOxOFQOM0eGidlQF1UEoMNJRiQ5ahWDn
ZkviXAB7qqQVpiMM03SDvUt1VZNkao6hBthJxULZOogByUeYtxa5brTMKBul5EAtsR5yEyTbFYHB
tyjKAbaq4FGds6oH2c0SAyIlKh6XOAQt2eInNeZfYvVkwoB1KdA1VfAIzkCIZBGMBwvkqp3WK2lM
+KpnsXFQp1QiR2BAeXojmVVyRtTim5PEuAesJQLSGBCFjnYh8BNeba+JaykFC3duHy4n1JuXkJE1
k+L9BAnqkMIjMqVrlbIjM4SfALzv6lelGHs24ly4zK5i8I67un7yVTTJE4OVI5qUijsdAAdqEYir
dINw1OERiV5fLxMJ+0SjG9cuG0cW2r/tnm1IiYqye48AcDE0itVy9Kb+IHNAxtRcZphh/YCnXf5B
2fKqdVurXpfZ0BblXpfq06/Z8xc5/Jcz/tlc3/r3P2z8gH9hGmAQjcshijGYZvkWm9IjYn80DcU0
LoJTgv8A2cNoSec6FsgtcSSDkokl5T9ScpgnzydAPxHEp8ytdzLAJzQJmLZAIS34LSI964iAE7VV
ZFhkg0qIi0DIu5OaB9aEmdkZGgPsoP8AQterRDahUzlI7VK3AMAWWo54LDhzK4Y1ejIXdJiWcpnq
EybJbAVZEa6hqJ3lSjIZUKLYuXVVeFvAly23oeQEtjpg4RJPrRjQHFNEqLk6kNVWzUrVv2qSKdYd
IfBGWSxTMCFwyZMano4i25aRTYiScE8S7KpWketO1U5ohCMdRPqRlJojoZMqLVIMQsEBFaoy0k47
1WbrF1WRVXKwWCwCchUg+xHgCB0gDYgYRDHNVYkbEFj0MUAKbTsCJEjIdQum2hTuTwgxHaCo3bcn
BAcbFKNsO+I3IEepXtETORiQAK4pjhmE8cMwsWWFdoRjq9dFVPMspWrPgzKGoAz29L0ZCWoHcq5p
3ZNaIM9iFy+XlkNiMRTYtMfE9SgAe1VxRpVHcqLUMsVQ6p5IAhoFAs8hmmW9EZqUT4tiY9aMBmUL
IuapNUSKM+XIjdZzDaqPCQyKjDmuZNqM5NKRLBio3Ld6N0isdRBqc0J8mIREw8yGAUuUtX4kWiBp
BeLlCxMsSBQZnokTmicHQADkqJI4yKp5EDtWh/MnmIoR5eBjbwIBqU1zXGT0MqgBNdmJwx3rVCHc
TROIgHs+Xt16davoa/Ia+gb5y3/JHyTpvmHnP5Lmf9srm/8AXu/tn09fmZvkFfkDLjjXaibU23FO
BrG5NOJHb8heJIO5B5mUdhQMpgSzTGeo7AmqFSbdqpIH+y2mB0ymW1L4kjInNcAYN2piaDJSJzKc
J5UmMFqlVARwCBjSAyRMywGACZqZJ8Aq4LhCxWKrVHQfFQ9iLJpYiicyYJ44KNp2C8wzcjBa9RoX
kgY0AoEIu5zXmTjxnwA/WjqOp/UpWpECJTAuFRRkO8qHJXYmT/dyH2qQtB5EZoW8CTxA9EebgTpu
nifb0AjFAyFCgMAjsOxEF9ycOybSUZEAfWnyyTdLhMhInDFaXd1qJWK4QSVqIYJ14mC8TKsinZOy
w6DqYEItUdILapJpRIjuQYYoynV8AmjRlqlKmxeILxVTuqyXb0VwVFUdvQG7AtZLyJqNyIOBwTj1
KoqvCCNq0FgCCxGKlpLsWZNINIYjp3obkbE6RmW9aNky1z1kknZkmdX5Dw6gIjuqqiiui2GBaTDa
VqdltTRLHeuKIL5hGIYyApFa7h0wicE0AE6r0a1UOdiDAgDFG1CNcCVUPLb0mRHCuEMdqcpgiMSi
XrsTUETiokR1EVJWxdnTqGKpEdpRuQLn2gOtqhHVcOLqM9OiO6iBv3JayGjJ6HcmuRqMJjFeWCZH
IoShMhkJSvTMji0ihdLkanJGKM7mHsjYOgBCLbFrlDXIY5IeVEW4HwjErz3MoyOZZCXMPOeYeiHl
24xbAt/YRvkVeh/lj/L36u7o7fRU+T85/Jcz/tlc3/r3P2z849vzPWoRE4CqMrMmOxHVAkDMKoY/
IDPB8E8k8aBVDqjjvXjaO9VIK4hROaLxiqpIFMC/UHKclHzLxo+xC5fk5xMQgTj/AGJFmNdFZdux
PdqBhFUpE4hEgcJw7EAPDIJk2brTHE4rVPEoyB7k57gnliqYbes46DLahbIqKgrhHYg9EAYu+1Sj
bAGrYtInpITXqiNXUpRwH1IE5CqjdjGjqMgKkVZOChLFkZSIMhBo9ELgxbiWqJcFWrQidALmWToF
VXly7lVR2HLJPGh2HBUai1GQbtTGTdyGljuKHAA2LKlAvEvEnMkQ77lQJiMNywPqTjoxWNeitEz1
VA6JJAIyRApsXFVbFWpWCcAKkU2lPhuVFxTA7FtO1Y4rctUQqBlxUCqViqYJwcFt3IvQ7ECcEdcg
AVCFty5Lk7Fdt+6SyYezmyBiGuDxEbUZSoOoQoXB/wCmUuVucMLgOkbWHRbiB43fu6Ic3CnmHTPt
yQjlsTP3FcS+CavVkL12Rc5IiI7EYzoclRV6DGWBUo3CdOESuGREE0Hxq/U0hNco+BWkF1xIsWCq
HJxWLBB+4odWhbaExCd9NzYME8WA3lkYnEbEAInvURIDSTUjJGUxruHB8lhQ5LRg2CjAR1FmkSiL
0IknMKUuVaUR7JNUxBhIZFG3fDEjEICJrtW0oP3IRLh82ooyhGEYijbRtUY3YCcxjJCEA0RgB/YZ
/lFfldPQMn6u5MqdG/0r/MvOfyXM/wC2Vzf+vc/bPS/omP8AY5pBwvDpO0ImzLVuR1wIbNV9IBvQ
jHBkdg6tFQoCPFuWm5FisJDsXACe0ppxouFgd6l5UxInDcp8xesm9dlgQjCHL+WDnUlab8CTtT6x
A7JJo3onvTxLjaPk7/Nxn7RpAb1IkvOVXTHHJCJqChtFFAorhitU8VqIwREcM0brcOATzPotWYQJ
oAmjgnd5AUTkoUdzVGIwUge5AEcL5I3IHfpQjgY4gqqkVG7ZLTgclbuXgIzkHICN8EnaDsUBHBlf
jcGETIHYQuzocYrTdGGElExOoPRUCO1HgLdirwxG1ExLrGqqSsSsejwrBYLBMVRNGB7StRIjtTAm
RWq5JhsVASqFtjLiLug4qmAosEwoCqkp4gsFUNsVSCqM/Rh3qqoU5LjYgBIhYundtyqSmdaXTk0T
RNUDmVpk2zoMso4K5flTU5ClchKh8SAkXdyR+VNagXnhPJMS7dDob1qGMSrXMFo8xAtCWat3ZM84
g0Wo5ISgaHNSsz7YnYVKMvZJBPZRHzJUyKayNUEakEYog4ZLVLB2WqIG4quHQ6Itl5IXLtGqAtJO
CJd1uTjoJOSEY10pyNLKuCdbQmWFHW0dV5SrsGKaA0x27UZxDtiUPMmQRhHcuGNR7WKodexGbYYA
IEljmiJSZtqblTrkMdiPmEN2rVOQbBgXQ/DxIkcytHO2Afzxij5AlKO8KlqXqQiIFzliULvMy8vN
mqhy0bAMI4zapbNMzNl/Yd+iny1/ktOvXqU6W9E3QfmrnP5Lmf8AbK5v/Xuftn5Fu9DTqV+R1+bG
nEEItHTLciQCYbfRhMPQPEsU5qepQrhlRcQcLTOLBa4NqOzopIhNGZMdhQHMQ7wntzA3FUkP7BCM
a27dARtzXBQISuRcnNcIqjFBvZxRKqVwliENQxxUmYIB2jsHUb0EYAnRmAnNSn2q3cxjMOUCMclG
RNJB0ZJxIyPurSCxIqpC6S+AVCT2qQ3KcNhdW52yDwgEDIhSMjUhgF5RDiPtKdrlyBKQqDmFO3Ma
ZRLEdOxRTOpEFiqyKxWl0ZSqyMJRbYUxPZ0UBXDFhvRACqHTEHuTyBJTCP0LhiXX2LwhcUcNirEp
qhMsVUuvE25ULrFMCvEVSRCrIrNUJZYl1jVYEp6hZok/Ssao+WCSExCFCylckOJ2XEK+yU4FNyZi
2ZQtwGOJUbETQeIha4ScHEBEe3c2VIUeXakc0wDbekSGSIyK8i5LSX4TvVvkeYe4bfDGY+1GERpE
hjnVRtXJNUsT2p8AV51q8JGR4rZPE5QMiDEnBAW4tHNkJxxJqnJHYjCQeJ2rQDhh0bEbMQ+9GUuK
RTYIhs04DdFSwWmPEfoWqZaEslGQHiCjV39koxBd6piqsjHB8Ex7k4zTdDzkBuRhy8C3vZrVcLPi
+K97tWkACGwLVIMER4gxYDajKIaQxGSlO5MQEgwi6M7MQY5Eoz5mOqYyCMuXtiIOaJuXCCcQiJDU
RmVxsAnidRWqzwiOO9S0zJMclrja8y7IMQcu9Ac3bjbgcdJdUwUxDD54ZV/szTqN0V6adOHzDzn8
lzP+2Vzf+vc/bPTT5kf5fX5UYzAIRucuHzZGNyJi230Lig2lawdROQVYkdyqPTiQwzC8wVLIjpeE
iOxMSTvQ1EgLRbOknGSHk3uI70/OHUNoUY8qTGINTtUZTkNWx/nl7kgDszWixb/xSTSuUOMRRBsD
UlVLyOacjVIjDenNNyfYVcmT8SUuEbk5z6WIrtT5lMUwFOl+uzozxCYCq0lCy482HhBzXxYmOxWp
k8cKMnjWLVTxyzREPFkhO6NOo4jBCW3onHavMxldL9yMJh9h2K5b9oS6LphgcW29TeEzAnFyrmkt
pNUSZLUaROG1CQDNtoSq9i1RHChqwCZluQYKgVRRYUVAqmq4ariiqht6pIKlVgsFkqKoBWCd1ise
jYvEW2LxFeJ1kUzLBM2K03GA3lCTA78lotsQQ6cxj3kLS0ZepeEQGbFYxA7Vx3B9ao8t2AWocMdg
WgwZs8SVqJYioipX5R4dTOnx1HFUDN1I7whdhTfvUOZiddmUwZvkHqhISDMuXsW7ZnbjJ7lwYByo
Wf6SBzN44iIcRCP9S5+7KfN3WlGwRQSOXcnYQkKgBCMJYZbV5csQjEihwK0k1ZSY5lapDVFRMHiM
0wxzPRVas1XBEWhrngjO48IBMOOW0oDTUYKNvOKMpkPmSjEEMPp6KVQuRqM1Va5SACaA1nbgjC3R
9i1cwTKWx1phEAdTRKVNieM3INAmEBpOYxQ4S5xJKjERJipCMAJEMVpESCcSiTIdikLUS5wRhMkD
GqIlxE4lSYMAMlCzGIeZqShOI45SOpGO0Ixjc4lLV4nr/aN+inzE6f0AW7p7PRU6KdNPlnOfyXM/
7ZXN/wCvc/bPp2/smRcgHOaMrEm3IyMHiMwmND0MF5l25GEd+KIhLUNqogCaDJMDRVAK4rcSuK0F
WDdnRSTLhmVw3W7Vw3IlUiD3qts9y4okd3WNiXctYFJddwWQ8w6o70LtvDcmlAxkMXqVwybtR4wG
3p4FxtQHzm6NqxUjxT2diJnxTkcSnTvVAzqCtMS4b1JgeFUKaGJzTmqYetN1t6fqYqgWCqeneq4K
hbeENUiYjF6q7MkEgNEb1pOBDquJwTBByXFQENooeiMhgShyVwF8YEI6BxNRaS8JOTJMo8xAkwum
oORQ6SfazQjpyxCuManNNGoGLIjS+mgfJHUx7U0S+1HooOlwsEzJgE5iqBuhnXCapnKD+tbU6oWT
AlVmFQv0UTEsqF04CY0VT0tiUIgcUsSjcmDLdmiBRsk0zXJfEcnY6M5A6djrUX07FwvGuaIAZsyU
QwiGyxR01BzKMzNmyGKMRclE7CocvAGNsYyGZUhMvHJOA3U7FKPqUoWw5dyNwxUee5t7NqUdWjCg
zRj/AEW3LyrYPmymWB7FKPJ05hmvasItT6wjclzGu6BSJHD3I25jTcjSSMaxIwO5GQk/aUbZpMLR
MljiVok+pETJ0nwhaQaJwarFcRbegbfEhN2gclq08TYpslqHTpkHicVpFswAwO3oATOSMwjKA8uJ
zRgBK4cihK6f8KaEWG7oaRA7SixMiMAF8G220qJ8zjHshCE4uNqEwNIGxGDAvgo8ADN2posIKTVk
tI4XzT3ZahsQ0BgemW9lCZlokC4IQkbhAkKRQNyTwlQbitZmDuBUpbS/9q6/Me/qt8ixVflPOfyX
M/7ZXN/69z9s9d/kL/2MIv3IgbCV/wBq5OcskwTZqpJ9HQlPqK8RXiW1NO2DtcLjsglViYFfDu+t
cEhJeF+xRmImmKA9oYIj0BtHwnBGQNU8SQAg5LZjaoxlMQtgeEIaS/zmbFk8IpOf2InanK0wFNqD
V2oRaiJAaIoUTHwPQdGHRTHqU6lUwqmduxVJVB1Kp1h2LS7NiyoGIWokgHALSA7qvqTt2LbJScvT
JTsklwaDohPfkjKXi0cL9Fu5mzS7EJRwNUZQPHa4gNqAOPRVSL1CEsI5FSkfaWkF4kVKlppGdQUY
y7iiScU5Om0/eVUmEiKFGOIyKYOiJu4TQiSFS2e3JcQY9ONeholADFNENtWDrEsqErjDRGJR0hcI
LoTkCNrp4gEbE7MmYFOAyGk6ZhY1XEUwVaJxU7U8uI4dicUozImFXxCBNAMVxEsuCekbMUxuF0dJ
oM3RhPx5kmgQnK5qkSwjEYoSnD1qQEAHwZajURq+aBEgAEAC75oS6kge3o1kPnpUbt+ZsclaaELU
fa0oiEY27cRVg2C5u7ZA1XrhlI9/Rd8qTAkAyG0Bipxi7wzOJQEiwz7Vr8wPkVohW4Cz7VGd8EyG
YUI2gSRkE8oEJ3ZMSiDUHBMQJA7UInhTg9FUwxFCuxMSn1ucBEdDu60QtCVw5la7x0wx0hCMQIje
jHW53Ixs2ySMynfQNiErtwk+0ibMRKQUoziwy2I3BQnEJ0zaQM0Gk53LgB0ijrVckQT7JRh5WuZx
JXEAAcB0AJuiPahC7IiJA0DJR1yaTCi224n1lHRHSYijf2rr8up0Mm6jrcvq6aLf6Sp+Xc5/Jcz/
ALZXN/69z9s/2hNyMdchgEdAjAdiY3ZMcgnk8icymESe5Gc4nvR+Q16lJELx+tVjGSY2hvTzgqkx
XDcXBcHrXDIFUYrwv2KsCg0TRGMsRQoxKcrhxyQndiwlgSiJsCMS7Kk4vuK0xLk4kKn0/NrmiNiw
f0pBBs8eicjLwZIC32kqkgEPMDDIojF0WDOh0P1N6p1NIw9BuTFOaE5omJJG0rQACck1wtIYIGJx
XFitgQGoRjntKEbYYDNRkHaWKBCfYrGiWmWoBwg9ULo8UcexRA8QDEK5pjqJiWHcpxmNMgS4OS2o
FXYkatYopQNdL02IxJZ8FLUDqyQhzAeGUhiEZwa9bOzEIiEtO6SNu6RpxCAHEUxx+lUiO01K8yUX
kdtE0pAfmxRNyOqDVCM7dkM+Ek34ePaF8ImE8onBeXcBByVXCAT4KpUSRwI6QWKdA7aoDB80IyAI
xEkY4BmWmIJmjGXcm1V2BUqiDRsFi+5VoqSQGKEYs29MGiiXTSie5MQtLLUDUYriLp4knYEZnGRQ
nKIMh4RkFK5KkcO9aWcHBVGiJ2qtwDsQJnQIgHUMggIhs5E7F9SIHchqBD0qGXl2OKeCjPmC8rmS
NrlpA2iaA4BCN+5qLVAoHUoRiZRMiQRvKlZYWg1THxLVIamLledKcok+zgEZkYoC05tZoyiHmdqH
lnS+MTgh5vBvGBRt2RqOAO9HzBpGRQE3f3lqHFDb08JPYmp2ppBSniJZJ7YYnF0859ybQ52qgYhM
TTYgbcTRBiLccytV24Zy3IHQ5GeKJYAZsj5FSFIX4/kTxzyT4I6pimS0WYPI0BRF4mIQ80aw9XVy
EQIwfhAUdVZHBC3YHxD45KM5El8U6B3hSjjm/b0W4nB1biwnciwfY6jC3EUGLYoaIu+ICkNJG0px
UGo+dW+Q0/sM/wAh7fmLnP5Lmf8AbK5v/Xuftn+0OmYcJzZBKaNmIVLUfUqRA7AiMPlWPRgsG7FS
UgV94VS4/aq6SqxD5lHapKqBGILoQuQ8wjbQJ7QYy9nFRa1Ix2sg76sGFKqMrjxfAFD5pAiHuT8I
KMtbAVYBASEZj1Ly4DQ+O1ajUmgCECeLNERw2ogEnV4lWibB80Y6hPVgBkqZJyXVOkdcgdNMF29D
ZphVb+gsGbMpzVY9EpHIdDRCOosclomAJDCWZRi/Fm9E4xjmotlmpBR0njjMMomWLVUmOFVOY8T9
EZWwxnF5AIFMiCaMr0XaEHMicUbsKscFxBpjEHNG3GGKeEzAnJ2Rhc03SFxWK7QUdNgttdHTajFs
SSnEqfmpq6t6Y07UZCr0JTgDsTXIsNqcYZbUGjqMcCVputqOTJ5wB2LT5fYy13ABsD1QFsAAL4gE
olGZJ0nAIRtBxGnchowOIKFvLFPqY5oGGO5EypsUpGT6kWw3ppB5ZMsGKYSTOUIu8vaKZ0NFdqrA
jsWBWCBkD3r604o+ScxIG0hlghVgMU44hi29fEFRRaeXPAKykcAtdu9qn7pDIwmGMS0olawP/TrB
3DNhitIDCLqHNc1p1zDvLIZKXJ/0+xEG3J/ONKDYpwucV2JaWdQrQyiHXn3eCzhHaWRNqR1AYFSt
8taeMC07kqRCnzJ03ItWMcQjOIL7EDMjUagDFEX30HBeXDw71wy0HaNqlZt8ZyOC+PIxIwCiNIMo
ZrUwDZBNXF1okNQIqEfLOiWw4LTMVyVaLTEVOBVaMnJqj9i4VsCqVQuqgAjYniWROLZI+XQo+aib
eaJnIBsgmswMjtWqUvLjsQNwmcs9i4QAmKpgozzjSQ3KJiOHSCAjcxEsURIdy4aHYU6jLPSAe0dE
B2qUDA6JMRPYoG7HiZFTtybVIcK03Sw9kpxUHP8AsFT53f5S/Vf0FVTqlD5O7cAxKvQHt8tfi3bA
hc3/AK9z9s/JX/si3zFRVUuh1VR5iURK6czktJZjsQmLhgBVgi2qQFI4kqMb9uTmtdi1xOCph8z2
rmRBHeqSY4EbUIwHenOajR9OARlOhOEUwoOowFU5z9G0fX6LsTkMBgEyEYh5HAICdwRkclIGRMpI
mWGS0jHYiJBtrJ40bA5p5UIo6MAXkcUYbDgjvClHMFx3KMQWugNKOalGFDINVG1KlK9FvmHJFwMQ
dyDdBkrwLhw7KU5nhcsAgw4tmaHCwGEjRaZlyFm+5OZFt6AEywyCIjEydMImA7ECbry2BM+oLThH
Z0PIdycCiEAanFUVRVag5iqYpzVHhJ2pgW3Kh7UdOCeReRRiHMhiAmLg7FjRYUXCWKJOORQ8xhuT
laYCq1EYo6y2xCzy4eeczgAtQ5g+Y2DUUrN0NKOe0JsSMVxYlMHYh/WjzfMREy7QicmzRtztxMSM
GVyxEvEVj2FSJx/vTDciYUoE3t6jqTK6YMwYHtTE0GPrQMSKZIymdUsAApcrzN/y7NgDQKVVDESj
FgA2qTKUpmtwmR71KtAG9St2o+wGLLXItEVkUTbABlORkO0omZAAxJwV+1yp1Q8w6GwKF3mg8jWI
VszIBdoiONAjy/KQMpEcL0opc1fIjOFTDE6VC5nH/wBVTwZzRiFE3IY5jBcA/SitEpMX9SnnKXtb
kdMTKfugKILQgfCTipQuHURSm5DRHvR9lDVLUdi2ROaoXRbJYU6NQqBsR0gg7UfMLlOCIvkiLMTL
etROiMvoC1zkZ7Qdq+EBF9ipgqhV6GKYgx2b1CRDGIZ0wDkqRuSDYR2qMpHURUZJ8ERt6LcjUB0Y
aREQIpmVGWoRDZoTjMxhHZR1+KjOU7eE4yLtlRTgKxdx3ph4o0P9gH+SN8+0627qv6F+pX5GIgcL
8RWhwGV+vDHlr8vVB1z1OH8Tebs1yQEgyrQqh+T7v7GN8x9ykekKAgREDFGV6TaaL4cnOxOYg7yi
Yy0E5rTbuTuRFZHBGV+EjEUCaFJZj5mNueBwOwry5s4wltCedULcQ8j9CL+IYEKtSU/VbrHqMOpR
VVBVOVXp4SyNy7I9yMbYJXmXAwAo6kTgDRVwQlItHZmgQKakWzWnSxAyRjKscQAvMlaYj3lqiwEj
0EHNXZFtYAbs6LdzOoPYtRmAN5UBaLwt57ShpxQdOp74oW7fjJoTgF5lyRuXs9iFqHCMynmBI5Eo
CMRqOa1mMDIB9OZUjK0IgmhWi3OMTuFVqF3XHNcJY5okzYM7oAlxtTyLbE5kmgtQxVQgTRbiqhjt
CBEnC0sBFmbauLB6AIRHCM6JgMKKNmwHntyAWvzgLhqQ1FK1dDTGB2hCDuQKogd9FTCVVpuEi1Di
lv3LR5UWbYhKzS1cwGwoE4hm7XQzdAw2B1fjL7wkN2JlAR8QjxfYqGsmDIAGtKJ5HAAL8JdmIl3g
TmjO7djGPbirl8hhI8PYMFG3PhE8JHBwhahMTkdmSfatQGvl5jijvRt8pa0lq3JZdilKUnjMmRJ2
lG9aGuEg0oqUyADgIjJG9zB1aq6UbnKTMCcWT3pGdw5yKgR4js2IRtUM8ZIicjKEsCoWY8Vy54Yh
Gd2IlbnUwJRsQiLVs+LSalSPMRNBwkbUbXLDhOBKE+bmzHMq3OxCI4QNQAfBXZ3C0BEu6A3j1FVi
Cc6IwZ7cg8ScqIiGIDqdkWwbgJJmUA5JGSE42xZjIVOaa7dJY0Za7YBOepRJbWaCMUaNE5lCZIY7
UYnJN0EROojJSjKLI6pvuTWwSndonJAzeW4poRACD0TxLoAYlNivs6OEuq4ogsXwOa0E0Wn2skDI
LgI0BqL7EDt6IdrKHMayMDpBxUbkwJTkHrkEzUUbNuPw5F5z7MkxLSGYWo+E4jIrFpf22f07dFOl
1Xq7lTqb+s3zAQBU5o1YLnpE1HIc2R//AIiubcY3rn7ZVAnjRUwCwKwXEhVlSSp6DH5M/wA7V9C3
zGexHqaRIsmiGsxxkRip3YTEC1BihC7OUyTXchbqMiSGQNDE4OjbYR2Mp3IXi8sgpW+YEpgYLRHh
lsPzIblwsBlmUbhHZsAVMsF5ki965gPdCfMpz1W9AyxTCgW1P6IkmgRgA0clwipxKd2O5RcrGoTk
uyL4AhBsgvAItQlGUS8mZhuQhOkTgxWt3kColQntUL9mpNDHaCozlDTKQditV4cJ8JGC828NWuoj
kAp3uWeFyIfSMCyriKdILYhkS9XoE9SU7MVR0DI0BqgY1kDUbkYjBOSjpONOgRxKBVXfYhYsw1TO
3JawbZnjpUrV22IziaghESgKZoaYjtZEDDJNLAYpmouEnSiCUBmgYnKvarz+Ng3YmVrS2ti/YtRI
rRY1zTbE10iMbtATtWoSGnbko2bZEhb8RGDqE50gZAE7FEC7GT0iBUnot8zy9TE8cdsVG3YsfFOM
jgFK5dlqnLElRuWDx2y/aoxuRFuMakjNY1QMJGM41jIIT5icrgjgCVULTKL7EZRNSmMlxydcFFqk
7LyrAoc0JSAM8y6qQE2oPuTguyk1Rm+QClf5iIIhwwGW9G3KAbsU7dwfFsHhJ2GoUL/M3bdqxbJM
oPxEIR5S1KFqdIzlm29PclXcgNLb0DbJoXZC3HwAV1VCnbv3Gtx8MBQKRtHVpLE9ichjmndgABXc
vwXIR02S34nmyKnbGKM/6f8AAv2YFyP3n6S0MdYOARkRKQA8Mtm5fHjoesY596lG3wxOaa7xXMiv
LngME2oUyWq0RI5owkGkj5haByKJsQJJ9SJPABgnmXIxXCMcenctyohMHBRIq+xDLIuiXBATg4qv
RjVCRFUTnsWqAaYqy1EMdifYn2pxkVGVzVKEwwbJCMxSAZynV0EjXINCObuiZYppetMHohbuVBwP
RqmWCeJcf2Cp1mTfMJ9K/S/yTd6Hf1K+hp16rFY9OKxWPUO5YUQkcCHXPfyHN/7Ulzf+tc/bPRuW
HQ+xFaQ5WYTOWXEMEaVR0FguIuvCmkMUZuHAWp2g9Ah/ZRvmOR3I9vUCFkxAEaUCcJ9Ac5shK3c8
sO5YISjelKMcAFb863LWQ9V5kDTN0WjHi9oAKV+xe0DHSMULdwznHPEoWzExegJTg41+V6rh7I5l
UPlxyiFKPNTYCsZH6k1qJnLfQJoiIOwVTEgHcKoG5IyrUlGEQxk3EckAZVPtNROZunl00RP0egxV
ESfS7SnJYbEAA29MMdqpU5larUX3oi5Bic0wWq5ialGQHY60gMGyVwSi5JYFPEKQnUkKVo5IT2Ll
pT8OoOqYImWLhlERI1RDEK5duyAGkgA5lSO0k+tOtRHCMSoStycihWLRC1+G2PaOfYjctCN2Maml
U2kdjKoYnZgjmRWid2Q5rmRrEvBE4MjEWxCWUo0IU7BxgWfaEX2ZpmTwwIr2q7Mj4jt3JlCQA1kc
SLUJTP2hPmhrLCVHQm9Gd1SowQkzAlidi1ag2RDVTjtQ5ixWUTUbQhG3YJvGlcAUbsy8pZDIIeWW
uRqH3IWTaEThKa4jVGILHIoWjfmbIyfJAMXRhKPCUJg1GA2JnRE5OE8aE7E7SVIEqkEzALFYqpKq
Sqh1SITN9CJmeI4RTyHlwGO9aYxwzTYI7OjRHxSoEAR8SVZFHk+YnpM6xOQO9StclLVzGUmoEbt+
Wq5dIeR30UeY5q6OYeIOg0iCrXLcsIi1y4Phwc5JhTsW1NOPrTw4dqMgA5zzUjy5aMy5BwQN2QJA
eRUIAaLJLGW1W7Vgxt6AAY4d6natTE710aYgVZ81+J5oglxpftUrfLWxASAAuGhCjO/cM7sqmr0Q
uGcbVo1i+J7kJXGnbJpcjl2p5kBCMOIFGhDZRQm+kHM4sjK4fNJo0kRCIMJYhXb4fUY0jvWmcSJZ
oCMnBr0AkUTYLFYptqDYppLBwtMpMQaAoRNIZstMPDg5RDVWxVCBiMM+gqKluVu0Yapk4nJSsTpI
S1RJzBTOi7+YBwyJdSEuGb91CqF2Ts4zbYvhkgfajavPIjwlM7kFwCvMN0wgDgMFrtlxn6Vv7Fv1
261Oo/yGqp8jr0V9HXpf0ZvW7Z8oe01FoIqFuXGV4mKpcVLipMFcMgmomZVi6bBaSOJC3LGOa5//
APp/N/7UlzsJDw8xdHqnJYqhVD0HsRKJmHZGOkMKKsQq03p4Xe5FzqTFMMFRaZSYZhQgMAxJWi3x
EUQeTSKeJcdDOH2dR/krfMzfIHtyaRWq7LV8xzOzrPGOrVihObQJyTgv0BwD2ppHSdoWnlpyOZri
pTv2pTt+zqKNq4BbkMXNEdJjInMM686N0wEaiICAjrmHriaKNq5AxwBJOaBEhXJ/TkktEYp4EuMi
mArsTmBbrPJjPKIxUr1DpNI7E8sERtWmUXA9pMGiM2X/AKdCIoMkbV0OBmFxAEJxjsQTp05VOtU1
VB1MPRMTVMCw6a1KJdtwTDNQiNle1EAPsQndOFWyCEIeAZ7VUAAYKWl5zOzBBw0pl0JJ8igciVL1
qhYxwKFrm464xoJjHvUIWNQ5eFZEhnKexcNuYwIKj+Lva4xwClDLLoYgGJFQtdmYIkX0vUFRsxk0
ZmqtaMNIR1YNVXNDNqLIAnSNp7EWLmoRJliocuZDzLdG3IzuSEYipJV2+A4kadgUBPhhI6SdiBN2
EhhFs0+SF+2NUfbjtCHk25TvHCOTo3rp4pZDIbE9uTTjUIWzbEDhKQQBxRgH3MhalORtA4IDSaLT
poUJSlQVAOC8SaUnWrMqjlYErhgvCAsFj9CxKr9ar9aoQCuGcR3Liuk9gZNU9qwVB6DyrI13MGXm
80dT1ESmFBkOj6k3rTk0zRuy8MPCpEycjIISsW6nEjFebzEnI4iEbxiBbHhHYtAnIR2OephVOBVR
jE6oEOxTXRpIR0jy7P7S8sZYI+dMh8EdDiEfD+VGHOzImDQnBNbIEezJWY3phjICux1plei8Yvpj
VXuXs2B5c+GMjiBtUTdm0MSM8UHdxmpHVpJwG5HSJSajgEhYVGO5abeJxWk4D6UBEReTaiMWUbkf
GKMFEyiWetEBp4SMhUIjYW6cFVCvQ+aDmmxAp2c7057wgBWMkIEdrrhwRRjsKI2hSGrTKGEkbRl5
l23HxHPoPltrbhdG3f8AFWT5F9iAToxZhk21P7cUADXNYaty0ADTmM1qiXB/spT0r/N79DDrbvS4
rHotTlOMIRtim9lcvQ8EpEgblqHf1MViU8ZHsWuEiwyTE8Wacl07Y4J2Kque/wD6fzf+1Jf1EA4c
3f8A9yS4ap9JZcTjtVS6YBUxzUzcIFFKQOJoqdFCuIunZGOmqeUeFPgUdMsqI66qgZcE2GxHUQRg
gbkixNUNEn/sbRPpeIyCaQb5jl1ndabkQ+1NCg6phcDxKl5dtrsvadG/CURCDtEnFCzzAFSxIGC0
RnU0wZedKUtoAwQFiEpB8dyjavWxEDhd6ugBcjqIwf0nmSLDIJhgalam1TOEQpTIAuQd4rRcuRBN
GKkIeE1HSYAiVweoItc0jZELVImROJxRJGOxact4VKLFNFVwWmHFPAlB6vinGBT+iYYquPSw6teq
59SYUC2KuCoGTvVPIoiFdgQ4PUhFiJDIp7s3PuhOBpjsCDszYIRyzZDhEo+pYOdipjkEJHEjBCeY
QkDQioUo5KcJ4SwWu1YIjlkiJxMZDIrzeYum1E+GERVt6HN2Zm7aHicVCdTBI4ASx2KUwaSlR9i8
2fHPLco2eYiZ8ucJDGK/DcjHhl47h2blqFQcULsYicMJQKjC3a8m3EucyVUoXeVnKFyOBCH4u5KV
uNdOAXgK0ShTetRlhgDVeIIiUqFagGO1UJKqH7VSH0KkVQLZ3JwT6lUlZv2pzRO7d3Qx+tUiFSIW
HUItxdsynZjn6A9TVMsBmUbXKjgPilmtR4rmcj1d6EXqcV5cOFjUjErUH0HxEoGBIbEbShy8DWWK
EBl1jImowG9GdwmRO1bVC5ICMCM6Ix5YFsNZwdebfJNsYb+xadOlqBeYZDUKshARLrVMdhC82/aF
2L4SXm+VGzAU0wGJULlusZD6yhKpAqQo2rlYkky/RCELcIxiMgFDmbMRAl4zAo6eNAcU4rHaAnbv
RALSlmmuy1TlVCJOOC1E6ScQFQnVktJihGctL4nYtFuJvyGMiWCcRMNsXcKJdiRktWkyG0ISkSOx
GXithAg4rTQ7USK6DRGcPEC5AwQJ8WHROPf0Tt2SQScs0JwiRct0JOBQN1hLMDotWYkGcIy1kZas
EOxujcVtBWqwOEiq4wwTs0siuLwnEISBcHD+wDfL6/LCn9H29Gzpbpb0Hb1G61SqJhRY9FVHl5yJ
hDBPkUYv1/rCeHqUwKSgHZQ8wAXbRaS1WwHbJSfaucOR5Hmg3/6ZX9S/m7/+5JPEsmnETCEeYsad
7LgmISO9k/K8z2AsV8IxuDci9mQ3iqacZA7wVWSDpigbUuHMJ5y4ti1x7Uy1NRGLHYuJCT1RjZx2
prvFvKN+/dMrj/dxy7V57gWpza3B+Ni5DhPIkBUkSdiBuUJTRi4Ga0+GWzox/sOdQB7UQA3Z8xk9
ZjgojUz5IV67Iy8qOqWMmqhdhd0RB1AAIDXO4HwZ6LyrluUciTSqjcuHiIcR1MhPlIcILu7BkLN4
ARDCiEI3BrzHoTLCAzQtvQYIl8FelANARaL5lDnLV+VszdgDjlVGVy9KUzUvtULXNxcRDeYMabUJ
2pCUTmFO5mBTtKlzN+uokxiqARjktU5aQMU8ATvKYkRWpmZbEDIlzkg0i4wZapeJbs0zei4cU5x6
GCr0uny2pokgJg9Mz01XCFqkVQuqUG1b9qpVMPWqmUpbkLhJi1alAPqKD4BNIOTmnBw9SFWicSaI
CAD7cSjKR00oy0wc9i1To+1CJLsjc5iOu7ciSPzXwV63EmPMWJH6EYGhdmVkTyqR2KmAwCt3SGOp
jvCjKFYsGXMG6WiYEDtKIETI5KMNOm4cW2LXcoGePag5GCa5JxsZOCQVUn1qkXHrVLQ3UXDBlgAO
1VkAmMvUF4j2ssSqj1lYBf3KnRgsAuGLyOS0XIsD1tFoiUs2Ws3QCcnXl8yccJLVGQIVC/QPLAMj
tQleOirEq4Iz1wZ4yO1GrA4ejMpmuUc083hYeiEYBtp29QnFPMgbk1oMD7RTzOqexC7dpH3U0QwR
nLAVUuYnh7PVqXOwIxtAjsT3TjlmjcvHRBu9Rt2LWtvaWrmp6bMaiIKibhFrlo+GL1l2oQsQ1Nhk
AmlPSPditYDbyo6i8himMaLgLbkbchQ5ryp3PhDAFOVru8EBwgITF6IBGZULPL/dwL6sNRRBrMYn
61OEw8dJd+5GFktbFZTOAqibN4mbO0hRThcBE4FpBNEGRBcFaZ9jrSajIqlVxMAU0aHaiJdxRlM+
Lwjcmk8RKmoVRY6hkRipW8H2pgDVap+IqZbHBREQBJmZPJgdnRE+8COgXJB44shKzAQjbkCRtUbs
ZBpB2zWNNiuczCIhehxuM9rptnQVpNWwRGGrFkIwkZXMST0P60AcY06j/Nb+ir8kp8tp8ibqU9BV
UT7fRU62onuVAqxVaJxVcIWxVPRXorJlQU2o3BEmO1CGDrsT9cGIRnboZDiUiSxK+HcLbCtc1zoO
XIc23/8AiK/qX83f/wByXUont3ZR7CqXTIbJVTXrMZjctPMctjiWBQBAtyPcn5bmdO7UCn5fmIzG
wo/D8wbQVxWZxbcVpmSNrp3rvTXACFxNEp7MnluTE02KdwQcrjDhNYAgdoQnzEzckKB8huRrRYOd
q1Cu5AWoEbV5pqRgEYk6RmjxEyGSBvBNGLoRkNJKd6Kh/sEexH5jPWdRlvUZAvSvosF5twF9gotf
KOz0JkzBCxftRaNC1XUZ3NHnyGYwK/E2boFocUQM0Ld6UpwBrR6LyYEieQIZUIPWABqMQi2KqrXl
zELdxzKJphmrULReEQK7Tn0MtdmZi1THIqF4+0YmSMdWr3Rs6NMg4CJxXm3SwJoEYCK0sfMyQ1T0
pxifaKrJzsCYBkZnALFxl0Mq9TaV9nRTDrFCIwCAbDAJptEDBNqfsR0DvK46lcGKJkWTEOiIRTzk
52KJbTbzdDQOEDBabcBA5k4oSkSX2riIAQ0OTtQMokQxdOImRxGxBi1wJrkTTPJCd19OUQtEAIgC
vQAMQVblA+yARsKmbh02eYiTu1I37dBrL+tWrts1FaKJ5mEvMlQCIdyha8mVqA8AOajb5QguaxlU
AITuXhKAxtCgRheidBoRvCN69LTElx2LTamXcUAZDSSQw3KsfWV4QnYepbOxM/phIYyLPsUWi5lj
Ir4WJzTy4pviozJAJFU1uBuHaMEdY8uAxVzyuGRFDmtXmO2FVGN55SEQSdi0i8YxOShIXNdufetV
mTaQ5QMy0IVfaU8i4912C+EHnkMgtcg5yC4oOmux0lPE9UymWC+HCnvGgWiLGW6q829EyO9CM4sN
yeJfo4zXYnMmA2ry+ViTlqOC8zmZajsyRt2QwGxC/drM1TxNHWh6qNqOMzXsUYDZXoc0G0po8ct2
CaPDFA35GU5YRFVIxGmYppIYrzdJu3MoDABebzd0QtkVtvVti8rlYGczh2oSvkW4D2ftTRe7LIlA
zHlwT+KW0psuiqwqqGqYh94VCy1nvZRgAwGAVq8K24kuEJ2iRtJRlOemIx3o6Dxk6jtY4J1d0B4k
iOoZsKoBg2J71rttppQ7WdPbhIx2xBKMZAiQxBoUImi4DhkjGYqFSsj9S0yyOCEvD2JmFwHaFWJi
2OafiIO5lpk8IioUZgcRzU53psIigT5FRnnEp9qjLuUOaNzRbNZQ2qXMO4fTGGxs0GVyFgAk+IZ6
c2TjpO9F6FGWSZUWuQYyPzDT+xlflG/q7ujs6N6r1arHFcRZYuhCbh0YWzw7UxL9RyHCYRKBKDlk
xK0xJO9eJ5bFRC1KAOnCSucrO2DG45idhKkfUvMxZao94VOs+aYBOqpnZc8H/wDA5v8A2pL+pfzd
/wD3JeieMiOwrgvzHemFwS/SDprtm3MdhH2r/uuSrtixTz1WZHbw/lT8rzxBOA1xP0EBauV5uMo/
nBvqQjzBiXwILq3ahWU5Ad5X4d3lo4p72QPoHkDXYqQc70DENLegxjEbQtYlqkMzgpCcyCNmC4Yy
JyJdlGExqk1QFWIiDgC60zYHNk5mFwSBW5blpgHKGogblnI/QgNIfEuiIRiZbwiWA3D0zdanzCex
H5jbrhkIkvLZ6XXctxlLaQjd1ztk1IizfUtPKTNyzHwuQzb0bHN8rGMvDOQNe3BG9Y5k/iZB4xEh
QnuU+YlHzLMatKTkheXzMY2ZbXp9K1WpiY/NL9QTfimiCr3LXLdqVq1IgSiCJEZe0Ub0rMrj8OqN
YxB7Kq3cuO5Lschkoy29I5Nhoiak44uE5xXCE2D4l2RFqXa/5UBAEgDLBcYc5A4p48AOeAWqRM57
8lAgsauEweR3BE5CpTPwg4BCcqjKIXEwicNqfHpqWTR9a27U2Sp0U6KqioiSdO8rTAu6B1HUNi01
kTktUjU1K0wxT3CwK4Q+8qi1Gp2I6IsNoQu3KzNYxP1la5lycAjrciQ4WyKIAqKlatQAFBElytdw
mTZYBCQgCMgEIwx+roAgCZktGIzKEpm3F/YLkowuPG5s9kjci54jVDVQitKoajxkPpT2ZmB3ISv3
NQhhkQ6F0EESL0qVauxDyBYjEsaIwMWjbL6jkhKMiZjAlvyK4Z+N27lgrVm22sDVcb6EzdETgWw+
Q1oqzHYChG6HtSrGUcUYC5F/ZOBHatMx5ls0eNSp2pF4y8Mth3ryZXZwPZQ9iMYS1AYvQlabo0yJ
o2LIizAl8SV93Dv/APdE6QewuhC5BpDuUYaRct4AHEImIZ8VVy2QoqRA9bp3Jfb0blpuwAO0LzOW
lqiKmJTSpMZdJkUblypyjktNukczkFTikcZFVC4oA9i18tIuPZK+MdBGLprENQw1nBDziZE1EY4I
sBHcEfZt7ULcatiU2oCIGaAiXfPJGQx2o3ZeGGCrWRwimhAA95Wq7LTHf+RYEnaVWQiN9Fau2JiV
y27jEVQum5ruy8QZgEbZueVCfinmntarshjdueHuCH4eDyl7ch9SM+Zm0TkDVNCL7zUpuo5LDamh
ISI2dQkY7E8mJ2ZppxTWiG2IwMSHzCaBAiNuJU7V4iNsDGAIPrU3aNyMqDNkQQCRlqVixcBhbJY7
S2NUIW4iMY4AKPNCOm7GQBIzB2qU5kiOTY/UUZWCTIByJF3+gJrgMZCjhNGQOxPKGOJRNonUMjgo
64ERyIVcCK4LQ9B4dq4pAyGRVXERhsWogFV9SKiXeik2IwUJ2yZxtewPyKETHQblZQliGzTvgjJn
LGm1ThdBgZEy0kYOn6C2KqmVD3shEcR2leVLu9DT59p0N1qdXd86YhYgKsgEJkHTLwlsVxEg7wom
cS0w8TkXQjpqUAWeQfPNaRDUNoQjaJnEh3XC2imOK+G+hquc0DGdBUArUGAYAh81q1MdmS1E1ZqB
O5dajI+eMCcCEDGIjLNincetPpfsTEMFxOSmZwuGirVfkQJBjb2leVZAlPN8UZyK3IRGajEZJzVN
kvMtd8Vqj/iGzqOminkViqdPP/8A9P5z/akv6l/N3/8Acl8geJIO5fCvzjuEitd+crktsi6hzFiW
i5bkJRONR2qPK8vbjy7xa/cDmUiaHT7oQiYuAGC44MqSY7CniX6XuSERvXwy+9buoyrEJgAE7Mdy
BnIsMkRaLHaiREmWSHD5cRV00eKO0BG5K92xXmXbhtnfmqSJbNl8OQRlI6idpTn52l2I/MY9BGAN
Ch2en13bEJy2yAK86ze8mDvpAeq8qfmcxbBwY6dP1L8IeX/CmRbzGAL9y1f0zmBzPvQDH6AUbX9S
t+TIZsVrsTEo9At3KAYFSnE6mD0qr5ty1XRP4nejGQcEMVLlWErUpGMdtFK2ccQOgeWeIFajQtXo
bAnNB5HeRmoDTqia7USYiJOAC1Zo+adMcIyKpxyyEagqrRicANiYxMjmQpRt0cMcijbliCt4CiQK
7E5DFbVgnW9aQXOY6KJ1u29NSniFxlwPZGCoGC0QYDanjEzmc00mA2BNAapbAh5vD+aKlcEabSnn
cEQcgsH3lCAwzRA8MaLB3DOuHwrFjmQuCIMhmasnmeD1JolgnjEnemlwgIuKwjwv0WpEtMSYHcoy
xct3IXTy/mghwRQo8x5ZjEBowGxfE4BuxUrUI6iRjir1g42slIms4GgUwaSPQDcNJUMRVTuWiZ3A
KQwquY5/mXYuX3nADsRGwsUO1Dd8g0w4pfQnuS8u3/6yTkGctsvyIRlEGIwBAT24RjLdigSSdtSt
Y47RxfEIAl4TDxJyKwHCWYDFOap4WTIbVwR07f8A0U5Ik6eYZqPmCmkDMg1O5EgGMQHLoS2phih5
9zS+EY1K+8mDtIT25C7Eetb8wgMl+Is0I8QC0zpL60NUgHwTggk4AbULhHBlEfkQjo0HZtT6QI5E
lPONNoqtU5ABaeUgf0itXMzMpywbI705lqjkMAtNvjutivOvmh9lUDAK5zESAcATtOxA3CZVq5Qj
bAG1t6kwfVTs3pgRAH2jiVK8Za2GRda7suLYEfw1iUh706BcU/LjsjRfFuGR7z9a+JeEdx/I6PlF
4+8zOuGOpGXMRlHZHBCMAwHU7OnTjuyWrPdh1a9GA7VT6VxR7wmjIhebGRFXOksmLjcVb5iZJtRw
bJCRMiSPZDurdqJ8uyJcMT4pSOZWg4Go7aIykXYUjtKHNc3HzDMvCB8IG1lIQtRtXAOGcBpY9yNs
1lEmJbaETGL6qsxRtyDHMGqe1ahIeoovEQuA1bBRug6mxG5DymMTi+KYFqsQgRnkmWnMFMS+1XDC
OokPEHBRuSPx4msciOxRlhqGCYZqzcFs8AOu4AWAycoMqdDtVbwmOC44u2aGoiMfZC+35TX5M3yy
mHUr0V+aayHrCrIN2rxd6cksdyjK5AgTrE0Ir2FCAFTmaMtBAJGLOvLI0x2sSpARLAkAtj61P8RG
QDcGkjFFiRscq5G5ISuS8B2FNKWk4vFeSCXEn1kVQhMamwIoVCyRwQ8O1B46jtlVCEgCIUiDkqAU
3LiWNFj6GnQ2Sp1NMQ5UbnO+AV0rRyQYDMUWq5jt6GKE2cBGR6dy821h7UV5kBw5jYqKpTRTlYMi
BkthVarBc/8A/wBP5v8A2pL+pfzd/wD3JfJCemhZBpONhQEzplvRhZaUtuITzkZbslFqHEoRmgYn
ru6BESRtUYRHch5h1HZktEgCThELXct0FS+CFsQEYRyAR8siFvYjKEpCW4sE/mSJyjVPoMhvTXbJ
fcgJjTvQa6Ox1wzB7/kTH5gKk3zHHrk7FBh2oAbPkUpztPOWBJLBT5nl70IiIOmLEvuVy5/U+Wne
jEVkIlgfoCuXeRujlbUa6ZToAhY5i2LgjSVyLk9qjas3omZxjmgBgp87y0RC/APIgeIbCvOkRYty
eILVLULL8bO959uBcQEdJH0oXYYSqyBGBTnJCRrEuKIg0IK1SDpwwf2cFoNGotUgWGLImJ0kZKMh
MmEakBEuI7ScfUomAlKrHVipGIBOROCkCeIbVK5jKSeZ7gquX2IRh9OPRSpVRQpoh5ZnJUxzQMsc
1QKuJwCG/YsD2qppuTuwWYGRKpXbkFpNewUThgtNuLnaVqvS1HYFptgRK1HikcSU4TDPYmLhO9SG
CchnyGKdqbk2kjenlQogLTJiFEVd1pjFyKKMoPqwb/2Ub9o6ZR257l/3EDAjE4hWo8pImFs4hwFE
GHmEfWh5pAj7oQjaj35J7hpswCoFKBpC+O5yrvLnCWCPlyMZgcMhi6lYv3OKOYABKjOEzK4zCCNk
A6o0IQ5O/EQtXC+uVCEZRrGY1AhDa60nr8cwDszVIkp/Kk21M+mWw0TjpdeXawzK1Eap7ehoByqR
HrCa5Ax35JzQLS4IkhF9Vabgpysj4m5RePGcXRmSwC0swykNu9GEw+nAlHUKHFFmD0GalaBeeOxe
WY6YxxkUCcVbln0PGhWqI03o4gZhPgiDWjEJpjgylGhCHlSlcng0sApSul5SHDuKFuEZiWZGHcml
WSjCUhqegFSwWmy4Bo5qE/MS1kZZVWm2W2acE9w+GQqvJ5YO9CV5l3iu9DDGVFasDHxS78OgFW7Y
DGQBluQsiAvyHsRGpSnKX4a2f3cS5709i5rkD7cQQgfNtts0qt6EeyP5UfM5mdyXuxLD6F8K3pif
ak/2oG7xy+hPAAAliBtCG0U6lZOdgXDElObclGTNAli6llE1fcmtnvK0EmcjsyQ39OkyqniXHUwT
RLbXVRqG5NOIB9SaDNsUZwBEoF4kIRvxjHTsxKNwjVIK1CUhGYFHKlIzE7jcMIlySvMnV5A95K4K
dyFu2HuSEQIDahKV6EJkeBiW71KHMSjEM8SK6/WpQeuxY6exE5+pCUcBl0TjlipAjhGB2rVHxRQu
i1/3EqGQy3rXq128dpQvyiI6/ADiyNu5EEEMQRQhX7EPBCZEezrCJGKZeTcLQyJVK/N1fkvb6OvS
3Tv6tOjd01HpakDtLLU4I2ptY+xB5AvhpqtJJifzgyMbcTNq7PrRaE9QxAC1RiGfwvxJxZMXzLoG
ERMHFwyDgiJx0svG0d5r9COuWOB1GiIneMqZVCMZy7CBVaRKTdy0yeTYOfyKscN5QBiCIhg9WAT6
Iv2LZ2J3rtVT1H9D2dFM1rMCI7U86nYFw23G9cUCFiyeJdbuh+oIQwzOxOWlcRAFFh07E2Z6mKqQ
2YVC8JYhfCPCVTBAlcIcbkQAxAzREgqdPPfyHN/7Ul/Uv5u//uS+Rk9ahZVxT5BdiCHFRb+inQZE
PuRIjUYLzr0mgD4QhbABOEYhG9dDE4bgjJnbNGZGqRwdMSw2Ktehk5ATNRF81UEo+SNMfpXwz3ko
RlcMmyCAuRBb1oeZbk/ZRVJitUZht6pIetU+ayESzLSMUYnEfMMeudqN1tyb5JK3cDxlQhShy8NN
w11EkqUdVvTPxaQ5btZR5mErtrnHeEm0xH5Vr/qkJcxGX3cgYiXeEbfLwMJTFfMIA7MaqwIs8YAS
bBxipmdIiJcnsR5eJJBeWnFs0bcqSHQJRHhILdild06RIuyZPIumVavQAIGFJfWmnSJy2ojHcmau
04pwW2oXD4ompWp2AwT4IER8wjMp5tvZRBkYRyG0pwndjkMyiREsUxNM2/KoxBJiBltTRie00RJr
sVKsiz96qfVVOzrURqlkMkxkw92KDybcFwkdDHHorMSOQATGgCAjFt6xwyRmRqnlsAXFmnjL1Jou
T61qmRGP0oc5zMfMMvBE4NtZGHlRgfZlEAEKVvGVuVDtUgbcdc8JGTEH1KV+5IQI8MXBqo3JliRV
aYjU+ZwXm3QNRyFAtNqLDLYgbpc7D+RUHTDmI+K2RXcuX5gGl0B+/ojdHgma96ldp5kg0NrlS5mR
LvXbVeddj5hjQCW1RMRSNO5RiNqoEHodic0WmMwZbOjVMsjCwNMNuH0p70zOWYj+UpvJHaSVwarZ
2xLhEXYicPfFJBREjrsy8Mk+1b15cTTMpoiuZ6Nd08OUdq0w+HHIDFVkSe1QhiwqSjGeEgzrX5ol
B3ABqt5xR0BxmUJDLJStGksQVpuTET9iBAyxRq0RsxVeOGUh9qcSZOS/atNuozOSEIl4wo+/oEY4
mgWmNbh8Uk8iy1yOmEcTtU4WHFo4v9aGsEyzILICIYDBNI6pD2QvhjybXvHFkDK9qOb7UJGQNsVO
1GR9paplmwC0x4bYxWmGOZ6YwGSlMl6sOwUTs6iYw1mJfS7Cm1TuXyWHsxNOxaYRERuXlgsPaK4X
jAYH7UY6vMlkF5lx7NrIsq2hdmTSUifqQEY6ZYyAwCMITAkKB1GBIJFSi209DQGuX0Lilpj6gnlx
HfguGIj2BY0RhPij9K8svKGyWKmIHhmKDYoSsVljIHNGLeUR7Ukzgg+0pTndOkVI29iFrAl3lJG1
GTgYdete1UcHYMFkXywKqDEowMnByK1apacgDRHT4lKzzhOvUWJrjktNuREcQCAhG5MTnEFu3JOr
EA3mDUT2LzZREpyOdQ1CKEKRMdMo14abBkFKViMr8dkQS3ajauwNuRykCD9PQNkukag8JUO5fhrM
hO9c4jqqwUdchbMA0gaYKRsy1RjQyyqpxNmAlIE6wAJPtdStmuksqdLFcUdWx1ADPFlGcTgyqwkM
vkD9L/NdenZ8sbNcRA2OWRIlEgYkEFVnuwLINMSfARqU09UTk4RMIEiOJcD6FqFuXehptCer3XLd
qby9A26SUfhxbImh+kqhlG4+ZAiyAlcjLbEFlqF02o5gSJKJMxMGjyFUQZyJOeXqQhOZlEYBgEAI
6NOBjinI1y2yLrVKAMtqaIEQcgnjERO0LFMPR067+iN+8HJ8IRBmRHKIwWJO3oqqLgkR3oCYBCDE
A7E3QCJAk5KMJUBNVrtECmeKJnJ6qq2dOqWCpgqByVWKMnXiKo5WCHmVBVPCcD0CIjVlrjTcjI49
LOue/kOb/wBqS/qX83f/ANyXyMnb6ASA7SnTPVYpwVpOKd0dR1SyiEWGkYAZqhxWi1MsckeY5gvd
OAOQ7058OQQ0+DNkDlkmVcuivcm63EH7U5th94QEogAbAuCWkICzORObmi0wnIjcg8HfbimvWk9w
ES2ALx6TvotUZgjtVC6r8vdPOQiNpLLj5iHYC/1Klwz3RC08raJPvT/IF4ojdpXij6lq5iTg5AMF
5to0OLKuPzAFHs64twDkoRA4mqfk9Q6H4mgGFdKjb5K5C3bIcggyFF5UuXl5Nolp2bbR0r/sr8ox
AA0kBqDML8L/AFG7ON4ioDQgjzNgiduRrN9RHevNsSBiTVvpQIwPRZvDGRlGXdh0MgbYcquGYTRD
AUcqUmfQaGVAhKIFNiEyMCmhQmgR8yZl+aS6boMjKrUCEMQFquTMQMBHFA2y4FC+KMpkcIR0R0je
cVxnUe2iaAZVLkepMUxDKioQB9KDVO9AYnYmau5BsU5L700cE7d61DJVAiTjROKIiPrUjkFuTW8c
yqnUY9rLTGh7SVofDJRsOBctUMUZSLAYlXxD2ZMqlCRlKVse8Sw7ENTyIyP5FwR0jLannU/SmA7+
rO2faBZCJpOxJQmMwEZDxQq6tWAHhb4e/avJERwxxzdXLWRqEYpsxiggmjJs5NkELkLhGk1JwK0Q
45nABauYLD3VpjSOwKqoCexOKbitUpABiDE5qbh7YpE70LdviuAYHctAs+VtkcGTA6j7ck+WK8yQ
4RgNqMpUAoI7EU6jciHMQ0hmqljm6xfcFqlwW8SSnhS3tPtIgBnTHFa6Fss3QiQNBoCCqZrzrGfi
jiF8W2xzMafQnacmyOC0WwLcNgxVeiV0+wKdpQLuTVGMwJSyiQ6Mrp0xjUQCOkCEU1sieTBcdwWo
bBiyBHGRiTVM4A2pjLsQJNCmhxTyZeZffRiuEMMuguiT61fvyxEWHaaJhRqlRALyOKvXjQy4YrDG
q1SLAKUQNcz6kDcPl28mVY65e9JaIQJhABgMFEzgQBWu5EnAmq12YRnEjDMFeZOhA4YjJP5wjcBx
JTXLwlHZCpKFG3Z96c4J2ptNAmlci+55L4cxI7MCtiY13ppU0FidyEtWljQlDVLwnxry9YbElREd
NwDIlvrRnoaT+CKJtNG5GjE4+tNdAiRsz6BA4nBEgO2S0SiADh1GKoT31WDHaFwkHcaL4kO/L1rX
EASGa0gujzNgPqyKAOiJbFnIQt8wdUp43JfQAqgaDvAXlxDG4QCcaUUbNqIjGIyzO0qUp8M7bGM8
8VGNoADBwPrUZxkNUfZqiOiUT3LzrB0ziWLlQF86xbA1kUEjsTW4iMdgDKZtiVy4AQBEFgd5RnKp
kXPVfMJ5ZJhUKNyFRmNyFyOEg/Up8+1T+h39RyQB2pjcgD+kEB5gJOAjxH6ET5sQ2IND6kDISEZY
SaiEbQMiczwj6UZC3MyjsDj1qJjCM9XsxJMh2rSYG1HIkaq9ykfLDjAmh9SjpMhIeLXSKBJE4N4Y
y0B1ITvkO7R8X0qIlIRYvri+r6UJi7Ike/xIynxSljk3YAtJlIw93JAC2ABgRQ/QnEQTtNT9Kfy4
v2BNkqUWPWbof5Pu9DRC3kBqPYCiIimHqRj1wRQhNM8WSdbkJtUYJpS4RgOjHoqegsaoBOSyMScf
WmJJCdlQBUZaVpJcJvUsXKZPkqp+jnv5Dm/9qS/qX83f/wByXyMDrsFK4S0clcskVtzlA/4Sy2FO
SSU5TjBcMyIqprmVQretcHdRlJ9JLGhUQDpLVBNUbJZ8yhOGSfNV6NRwCLekqH7U84OUIRtiA2hE
WyBsdfDvEy2YBEiRJGxyhqt6d8hVH8RGgzAKoJHsC4JaTsKYzj6wqEHs+Qtdk8/cjUpiJRG0p43Q
O0snjciR2rivQHbIJxcF2WUYVRFkRtRyo5TnmJDsLL7+frKY8xNtgJCec5SO8krHraTWOwpxLQd6
bH5eBvQ7OtTBAR8S0MSdgWz5O1wFmoRtUblqlyNYoWZ2YymcZCTBvUrly0YW70y/DFg6B5efm80D
8S2Wi3Y5R5SZ4blTAtIBsgVclc4dVW3rRsw6LlpqQkJvsenRVCEABtzXEMaoA1RFKIzOWKm9AQGU
AOKTGiot2CEQO0oRAZsStMRXMrUANP52aMDIRicoInxHfinxTk9yqqU7EyfBULsnNFxmm5AQLbSu
EOcyVxAIRYVwRAAfJViBH84pu9HZtKxD7StJk/1IRiwbYtdqxMg1ctF+x15JgY3PaBxUoypPFDVj
i4QEy0jltTwJiRgY0KMLXMnSaMalGcy0pl5SOK4iZy9aoNEfWU5qduaYBvQTA8N0OoglzEspD27v
DEImQ424Shp8JxdOaRlQkIQBpvKuXGxNE6G5G4ImQkGlELXZvDyiX0lwR3LV47vvHLsXHIOdqpV1
ru8Uj4Y7SoQpbtk+GKOrB6qN2zDzoHGtQhG61qGUM+9Rucvxg1cVWrmJmMco4FRjhqDI2bm2hQIL
jYhzFsU9oBYdGqBrmnuWQTtBIXw7UYnaS6ErsiLfqQjHCNAgjEhpHEkYDap3IlgIesoGUiCPCtBx
Ge1OFhoubRgUYTxHUuwHiZx3LybA1TzIRu81e03Tg+SPlQNyQoDkn5ibQFTGOCItR02o0MsfrWmA
0wH09qa2Hb1LVcLnZkFUAdoXkWBR6yWq5LXLYMPWgAGAy6CegjavLvEDVjVihKy7yxJL0QlOZidg
CtWInhiHMihDl465M1FqvS0R90qRtReYYudyGqQicCCqLxKpJC4hSSDGiMCHBDJzKRGxGUYiIA70
JHOq80jzCcNg7VxycZAYdHmypCFRvK7cFKE6GNG2rzYsDMvXYnmRIZLDVt2LjBC+HKuwpjItmc0O
Yg0AGcZ9qhMDUJBu9abIMpyxGIC1XIG5c3UAQlfiH9m3Gp71rMdJJ1COz0NQDvVD60dUdQ3VXCdB
3fkQnGXEMCKFCFwyJGcnKszjI+VEvJkJXZEmIYkBCFt7fLvUyxmclGUi4IZ9hJook8IxMvUvxd+O
uM/u4nBhnvUvLti1cA4TEMpCWTg9yl5ciLYlxMvOEvhkcYzBCEbEZMfaLNRaQBpOSuWohhiwydfU
nbpY5oSeicIsccQtL8ByOCcUkMR87N1qdNSAnnIRB2llqE4mIxOoJhdiO2n1o8RIGJiCR60GMi+F
PyokRlw44D7UNNol9/1sFwWOx9VUNPL1zBEg3rK4oaQ3DoANd7lYwidjh04u6Bm5eu5kQb7k+05c
dyGu/KRG0OD2uUZSuSL4gAAd2K0GUpRd2LfWhKEZQkPajIuq2xInGUg5KeNsUwzZcUIybBwmFI7B
QJwADuVFv69fQVw+YH692VyJlOUdNuIxJ2KMOWseTDMzDKU+YkDWrdTDq6JmuS1HAIxhQdXFY1TE
qqoWIVSndMSxTiSxTnJO/SIwDlC5cDZsnTJ3xXOkHHkeaHrtFf1L+bv/AO5L5GB14g5lWBH24iZf
86q56IqBfm3rKp0VCcdwRDZIuqIPgvxHMxjG2KvKjoDloxtwiaEBiVG9YjPRa9qrMh58TE7ZIWgX
CMmxRDN0bHVMU3pG6vGBLtTTthk1seX+itNm6A/tHFcMzcIzJYLTETnPKIBkgbnB+ayB5oEE5sm0
S0+8tUZgDeU0bkSe1UW70JlFvMlgjcmXlLM9FCvEW7VUkqvyDig3YqSI7U8SCF4Sq9LfJgN/XEIO
ScIxDlARtiBkKzmeJkbj6jnLady1R70Dt+TaYlxAMe1RvAjSSxGa03Tp1hhI7U702q1bkeGpLe0Q
HZDzLMJ6cHiCy86IaF0E6QMGWvlySAc2DJrlRuUw7C5ExIOL9BA2LVGr4nYgZA4UcNgjBt6bKVHW
knuQhEMMypStxM78wz4stU7cojaQgMlF4tD2lpANM3RkRqltOCYMBsVE5GK2BY02rF+igbeuJyEw
AB3VQGaEdSMR60RqAAQGoykcFASJN017EDNy1KImojk6c4IT1CEck7tHbR1oiG3nNDzaxgNTb0wC
hzEWFyMmfcVqkXOaMpWPNA2UKF4WzatwpGGarwD1lOAZS9ZVeGO78q2n0cZk8Yy3KcJFm4u5aI1t
QOmLYMMSrM7QaAYHehcuzEYs4fNXiOHSeFsWTykS22qhdgGEeGXaiDkmCqgmGZRlO4QRsDqIkdQF
HUJ1MCKKN21AxES4lKgQuBi444vmtjp8VwllXPNRkPZNUJThGRGEim+pXLchREbD1PNuDgjgNpWk
UGQVaony5AAUojqJ833TR0LU7ZjaHiIxQhy8ae8ckBrhEjMJrrSj7wThQu5inS2A3piWB92ibk4g
U4pSbFauZE7s/oQEhGzb2ZlC1aGmBpvkhbGVZbytJwzU5RYaGptJWqZr9aoNFoYEpohzmniXG3pb
oEY1OwI6iQQcFG7dLExcAprEZ3CDSMASO9NztudkkUcY96GiAB259D7Nq1QOiR9ln9S0wOlqPtQ8
u8NRq0jRSs3o6bkMQjCQcow1m3IZHBMSJPUEInJTicJAhaXcZEqZOQp29NsDOpWoYrzJQ4xjsKbA
BM2CoApCRBbYtcRhiFEyFHZC1JgG7ZFafpOKYSI3RTPw7SF8IwB3UP0ojmASZe1kgQXB6j9eoB7U
8SYn1hFwCNyacWfuWiJBjsP5UIxgeCoIqF5V63EGNNW3uRlpcmlKK1CUhHQGBcMQpRhcjcukNGES
CX3spEnimjYlA3NXiADqcOV5eWkgsAwdCxfszt3bVBqDA7qqUI2IwkcJEupXLknuSLkr84Kqp0Ce
QxTZbCnGfQAKF04xOPzi2oPseqac4xO8gIGUxXBqv6kZG4KZZ+pcUjHY8TVahGRg7agB9WKIhAkA
O8zpfscKMo2ZAyzmGj61JrGsDCUTT6UDbtCW0SiYN3kojRoOTadPrKAlKIGeggH6kf8AuGBwBJ/I
gZcwbbY6SZP6yiJ3dYlXUY8frdAi7Mke80vrRBiTI4ydvowQ1xNxsNR/IyJFoB8Q5b1KlmH6oT6I
7qBUoNyx6MepX0lVT0NME67Pmic70BIwYxkfpRlGhiHU5PUgkDaESa16MFokKp4Fk8S/Q6otBlRY
rTLHaqrajRbOhujYuErFO6xTdRtqHMXYvKQo6MY4MpQuBt6MrZcbFUVC57aOR5n/AGyv6l/N3/8A
cl8iCPXg+1WYx8MbcQOwBc1zMfDevXLkeyciQtjKnTREHJUQkQ7ZKMJHTaiOGGStxMnhqAIyAVq2
I0lp1AfarUrJiJP3p7nFcwAQlMaYnALzIlcWAxVPCMEaPI4J/k51wBdPK3q2A4BPCRichkifOEQB
SMc0KcIzGKIjExtimohig5MwPFqogI2ZGWdQy03PhS34IDz4udhWq3ISG7qE7EQS4j8m8iPKW9RD
SuSqVgsWT27gO518S0JjaA647coHcjKN1txCp0C2JRgTnIsEJWr1u4dgP2p5WSQM48X1LiBidhBH
yGPb1dNsOSgZWyY5sgOXsNcOMihK/C7cOx46fUEBG3OLZUZNKYhHPOSABp8kMjgA9FpsAwjnI4ok
12qQEsaCO9W5iOp8SvNukyi7QgSTENuUb1uIhetl4EYepG2LREoeKXs+tSu2YxmYhhEZA4mqkZ0u
Pq7lpIqKFmXwix2koa4647lxHRLYUDma9yaEXBxc0CMn4t2SEzUJxxSyAwRF3xZAIznDVHAhGItg
RNCCqRogBwj6k3iJ2pqAbimNVgq0AQINFU9iaIodiZq7StpXFQdqJkexEt2I1qzoyuPpGDJw8QMH
xCjLTqjH1lawDqOIXGHzYokcI2JomqjAnUUDLiYMzB/qXDwnZ/7KN61iMQcwnuT8uWYkFDleTnqa
QJI3KOqs2qnuHTHYFpsxc7vypyWByFAgc80woPQcUgO9N5gXDMHvVFG9CHmWzSZzCkbUtJkDGXYV
5loiuIIdRN23okRqhKJcFWuYlJxbOggKRmGtyDEo+XWL0V/k7lNQ1R7UehygrkwHlGJIQL1GPQIy
iLgHhJyVSw2BUKNq8KYxK1xjOQ/MLoQMZuSxMqMmOBrE7lK3P7udDuKe3eHljCtUAS7YlTmaBqIn
M16kLYwAcoMgG4jtXmQkWUeYLG7GhQ1AGMqF1K3b4QA4GDqRmDSm0oQFsg6WJOZQGxRgcSXZP0C2
BV3JTYxyK+HIgnYmncke9OSSVEnCIJ9Sfah61dsh5XpEM2FFr5ubNhArTaNI5IaJ6JDNCAq2J6ca
IozuSAjHageX5cSuP4zgtXMz/wAAwVsWA2sapHMlXDPGA1RO8dOq4+nBSIFIl4k7FA69AFSEIg4Z
qxdtjVcMWnvCfAjEKF7bQq3LIxCMNoKMJBrY9ooOoWB4plz2DpNq6WHsnYniYyG0ELilGPaXK1EE
x96VPUEWwUhGh2oQbhBcyyRbNCZkS3s5Lg2VLYJ5F9h6GcetUXlXRqBwdaXe2fDuT+lch+1PImHY
ntXIy3EsVxwZ/aRgJUO1EgynE4AGnqUgxc4RzXHJlIXY0lLUJbRsVFG9fEhKIbhOlfiLE5XLcizS
qYnt2LUPEnzQkMekxREhhmtJxTtUJj6l+HlSQ8J2j5i+1FpRO1iE3mwftCY3BSjio9eCIEjNsTAO
AgIPcJDtH+9ahGRODEN9KLWJybONY+tcPLmQ/N1fkXBy43xII+kogWhA5Bg3rdDVpjH/AO2R/wDU
gPNidofSfWFIHmDEnCL6h+sU87xhSvlk17XXFc1Rd3bi/WdEicgJBpChf1oCMTAjCUTxetEG2JE4
ylU+tcFsPtNfrWqNuMZbQFqnESIzIC3elp1qelp1K9FOs/yLGqckDtVJA9hXFIDtXDIHsKYyDp9Y
9YWOGeSMhJwNiqSBtIQnUg4EBB3HaEIgEkswwxQjOMoyOAZCzahxyLR1cNUbd21LWMGDg7wVogIx
kcBKlVKBtSMouKClFIR0xMQS0nDtkiPKrHOqlISEZRDiMgwPevDEEbDVGevTMGkZYEImRAJoQDkt
JLuKLSfCQwUoyxcpkRmVRAjLJESDLSAwGa0gUW/pdU6H6XPQ3RtW/pbpPMAPGB+pREJAEAAx2Kdq
XbFG3eixyktVo6o7ExDHNc4MjyXMj/8A5lf1L+bv/wC5L5GevZgaiUgG7Sy5i8KeTYnIdsYlkBuQ
THoIW5kWVPUqqkXCEo2qA4stBAEgBinJ/IozlxAF1bNskW4sG3pjUogDtXZ8uYhxsK47UTtQjEeV
EZRWmF4gZuA6+BESj7xIdaDeFoH88InzpTk+JJIQjG2DMYyKEeYgAD7QRha1XJkYRDhSkQznD5M7
dLihVJkjYapr1qMwhqtmJOQQlbsHTkZUT2eXJIzNU3McvhuIXDGdqW2JIXw+ak2yfF9a+LG1eG8M
UfxHJMTiYH/2X/a2rsZ7T4frKYP3hC5C5bL4R1VVLWsbYkFcdiY7imlEg7x6KKPSwQvXoA3J1D7F
WAXh718K6Q2RTTiJDaExDHYQqEfIuEtcnSP5VchIk6g9dqrhmpCOGo6Ux70DIkEYBRth+DAfSvL5
gE2pcUZCpDoSt4SDh96nCfilLUJe86L4NVE2iPLlMjRtCMTTVl0EHAKkdMtoQ8sm5H6EYXhpah01
dAwIi4oCar4nhH0pgBGIwCDDi+tEToMgmKdjp29DBgFUjuTk02pohwnEab0xk42BFgz4uixwzR4q
xTgsFGLoiQBYZ7VIDAIXDHTA4Hao+Xc0vi5YIa7sZyOUarFUNU5oc0bl2RFmFGjQyK0wgYFqSBLq
dnxbC2IT3rOTDVLT3o3p6RACpBBCkZENGRAO5MOIrzZxAKEbUX35IG5QbMAqBUVes6ItRMyMTkjp
eMdxYL4t0dzyVbhPYAuG4Qd4Xw7wLYVKIlCM454FSsARtSkTSVAG7FPl5ye1a8dyGHc6Pk3ZThA6
hGWQV/l5EccdcH2hG3fkI6RQCmCjcuR1WwagbFbuANCUmL7JKTDhNR39AW1aTsqNoUrtjitEvTLc
UxgCVrjSMg5COu4IRC1cvfGrKMjivJ5uGiQ25r4cu5GUQI3hsX4XmhpMS0Zp21xykKrCq1XOGAq6
EYUtxw39R9ihcFRIMU4Qnr0SwKPLiJ0NWTs68uxpERVgXPetEnDY7EIzuuY4HP1r4V46tks1puhj
t2oHFC9GoFCNnQSaAI3JYyw7EwDnJAHHNOq4KZzEUCaDaUbPKhyaGS8y5xzOJ2b1wgSG1kfNaL0D
J4kEdQ6pAnYF8KLDMlCcnOrPJTtnAB3UeXsB7ksELfOcrK7aFYyj4gOxeVCPlWj4gayPb0mMw8Si
1IipqngQQMwt5wCsxllHDuU2zKH6QZeW/wAS3hvCY8MkxOolarlZezAYvvRuXKyOG4JuhgtU/hxO
Zx9S1v5ssnwCeXcFgSd1U0jpbFcOC1Sk0N2KMT3JpSYsyIlPUHoApzmAQA9VxQiY7AGK8y0aZxzB
TjEJ82cdyHpN6clhmnwgMN6pRcEy2w1Ca/bH6UKFeVyUC/tXCHYKd5/MuyGJqSULj3JTxJiHiFG1
qNu9BnBxChK9CBjL2oku21kJ25CUZBwRghyls6pzI1N7IHYhEl3qDkgfWmyPSWxRB8SBOITivQCP
FGoXly8Y+n5DVPIgDaSy1SnERObhlqN2Ddrpzej2Z+pEG4QRlpL/AFJiZPs04phbuSJwEQD9q4LE
5E+yKkdtFw8rJ9h1OPoTDlmlvBb60WtQ3Gn+ZeKAfsBH0FDVfjvALfVFcfNkn3at9BCIuc1Mn3Q5
j9JR8y7MvlHhj6qrje5kHp+yhqBnEYRlgnEKO+hzp9Sc2og7qD1BDVCMmwcLSw0+61EwoNgWPySv
V2elfpYFcUhHtIC1Ehtr0TRnEnYCCmlOMTsJC1GQ07XDJhci/amlci/atXmRI3SBTeYB20+tNq1H
PSHWrzABsz9SD6gDhIhgmg9w/mhFyYkeyRX6Fr0S0O2qn1KOgGRlQAjT9JWiduQI2cX1KWmAaPvH
SfUtflTfNxT1oEwjIS9mBeQQiIaHGNz+5GJtatkolh9KlpEdUT4GLoSNrTI4+19CGga4nHUNKAIM
YHHQNSI4SMpSofUpC9KYkx0mLaXydMJwpsdz3oGE5xu+1rIMe5lEzmJxHijE6VGXLTnapxRmROu5
E3gL0ScTIxP0KURckxfTHH6VpkQJDCQJdCOuRILvIuEJHhI9waUJxNRlIakJSgIkUaA0j6F5kMD7
JAIUp+UBqemAD7ApCBeMg0oliGUoiAAkXZVjqG9COkMMBsVYA9odDVGLjAtVDdgtRAMhgWqgZSJI
TsH2rFOCx2pzUrFO6xTrQSBIYHaEY+1Gu9ebEGmIWk8MkxwWCNFRUDJmBKPSB0VRHRpJwDoiIVAn
ZcSojToLodNy0W1EntUhEmDHEISMnIwKMpYlMTTYnK5sA/8Ah8w/Z5ZX9S/m7/8AuS+RE+gsjZIE
93F9i5kAtK5ptR/xSD/Q/SOhkwxVVRDVgULHLRAlhKZH1KIMuGlF5kYgyoTJC7bOGKAkaEsoF314
BdyLhyiYp/lpuTLCKMeXtgAe1KpX3ukfm0Ti6Sd9Vx35tsBIH0JzIk7z1HiWKc/JQ+1Ruwi4Z8EQ
R0/AtvHOZpH1oHmrz/m2x9pQNmzHUPbIeXrPS04CXci9tidicEkbHXwJ6DsJR0R1gZgritTbsdNd
tP8ApRZVtGJ2xLLhv3be5yy4edfdMA/WtMvJvDsXxeRid8CuO1ctHdh9q+FzWk7JhvyJ7N6FzsKB
uAAHCoKw6gR6Y8zfjpsRLh/aKEY0AwHVqAe1PGnYq/IDOVIxDlayGiKRB2JhhpKlaiXuzDUyBzKE
/ZiGG9AyqNifQ8meO9ECPEaBW9UgGFXwooztkGLZLFpuNG10ITkbhapkXdQ50ExgPFAeFRu22PZu
QIzHQN+akJ54EI+aBG2cZ59y18rJgPeqVI3IExOcvsUAXFw4gYOtWoSmMIjLtQWosN52rRMO3tBc
GJVXZVpvKYyJI3LTEd5QBkH2BaaDcjEVKb1JyQqF0Dbi4Bqck0QJSGLIOA2JTeyMAqCmxEnYtEZC
ERic1wtr94kklGJUuXJaYOptoKdeTEAkRbVvTumjKYjnU6UBKVNmS02YsNpQldk+5NEd/ohaiWGM
5bkIxGm1HAe9vKb6OjGvRqJ7gtVvgvjI+0EJTBtXY4ygwPevKsCmJJxJ3oxNYkMQowkOAy4CcwVd
g3DI6o9h6ARiKhWLuM9LS7ugAd6CdcVDtH2rXCzbnLazFNeiwwZmU5Sl8OfFbIr61EAtKOaAuxjK
9b8JIRvWRonDxwGCjP1qN1qSxXwZEblVnzLVXHInd1RCIclRtjCOPat61TqcgmqQcGwUpzJBGCMZ
h4mhohK3WEsCtbPvBR1eKOaY4hGMqgozgNVs7MkLQyrI7kG4bYFZHBlcuMZWI8MfziMSmNvTsIK1
QOq2foTK4ZmhigDw2goG2GfxHOqExVqSCE4O+YdgqwD7aomOHupwABsZCUzxbAmtQ0Q27k9w6pFC
MaZKNu8KGu9GNmIi+JxJRvTAM7cXi6ZlOMAACBIgbSq9HHJnTRLxOIRECIR3ledM+ZMjgG9G5MvI
obTUqNkHw1PaVqiWkMCmnGE+0VRjDTbH5oqtUi539IAxOCEpDVeI9SeRWKrSJ9pMKthvKkSzk+pG
mlslc3RQqPNFCNqaVDsXCH35LyLRce2Rnu6BD2Z0IRG9AZIx2E9FanYsRbjvxTxumR2MgLg7wnie
tUstkBhv6MF5doapnZkhPm5H9ALy7MBbjm2fahGHDrNdlULcABGIwCt8zG1blfEtBMgHIb7FbtTt
giIcvUbaLywNMWZhRXbQNQaEgksdpZao2JygPDID6sEYSoRQghiFVMcE+1MU7EApwUInBaiaDNUn
pluKM7czqjgc0LcSBF/aq6A1DWcY705IHaWVbkB/iCY3oPs1BN5o+lVmRsOksVUTAyOnH6U4s3CM
QYgS9bYJxy05A4aXProuHlCxw8QbtonHLRAOGf1yCBNu3EbIs/e5QJnbjuiWP0gpzzIB2D/2Ty5y
ROyrfWgZ81cnIZSrH1Ohqu3DIbSDHuiva146wfswTG3qka6yTq+hF7IkTjKTk+sotZhXaH+tHy7c
YviwZcMRF9gbrOCD2JhIE7AXWnXHVscOtMpxEj7JIdNOcYk7SAhKVyIBwJIWvzI6drhGQuQYYnUF
S7D9YL76H6wTedD10TebEn82v1IfGiXwEeI+oIk3QPzTSXqxVZ6KPxPFxudOZkROEyCI+tHTIyAx
lEEgJozMycogleKWxtJTcb5jSqajs4UXEw2NB+VMIzL7gPtVI3D3D8qaVuYJD0Al9RWo25aCWDEG
X6q4bUtIDvPgPcgYWZ1zlQetGPkyJHu1CD2C5yfDtoi/LSYZ1buogRy5IOxy3bwogcuXGR1V7OFD
/tiSd0qdtEX5dmwcSqgRyrb2J+hEDlxuJEh9qDWA+Y0yp3ujqtCJbhABIPbVP5MAM2qfU6pbi2RI
H5V4Rq2DSzdroNKOnPVpf6HReQjL2SDHT+VASmGz0kA/UjpvDTlqLn6k/nmJzOot6gq39Q/SI+xH
/uS+xz9aHxzBsWlIv9Sed3zBgCTJwtP4mZPul2+tCRuGBH/Tf6yVI69YlR5h5DvdCAuScYPh6kJT
OAZojT9qlxEiWIIBI71pjqBx1P8AYhK482pWn1KWkyjGVDEEN9ITC235zl0DOJuEYCRWqMfLfKJI
CI8uLH1+tDgdsASSPpKEtABGYp9SIEQAcaYqluPqTTiJAYArgiI9i1GIJOZCbFUAHYFU9GPRiser
v67+ip6KnQ6lGbE57VKYck1Ca1NgMirceZmRaMuOccQpQ5SUbtsHhkcWWowBCa7BhtitEpaJHB08
g42jBPGJbayoKZlaBlmq9G9R5Tlo6pmr5ADNAXJNPMoC4dcjkgfLCaVmMwuPlvoTeSR3H8qoTHvK
YXyO/wDKiY8zhjUJ7fMg+pcN2JVJRPej5IDHEPRGUrQTGySFx2iEximEVz0pYnkOaA7fKkv6l/N3
/wDcl8iO/wBBrI8MSX9S5aw9bl4S7oRk/wBafoGzpJBTY9G8p0GOauyiOIRqT3I2LjElaQKSPCys
m8NMRgmRU4nAJo1+WkYqQ2H5eOW5hgQGBRny+eLJpkxjuDlCRiZkbQShG2ZRiMBGLBNO4Y7HXBex
7E8LgPaqwjMDNl8WwPqXxLRHYqEx7QnhMEFaJX46hkChyn9PuC3aj4rrArzDdFwxxMwAE87cZTFC
QFSJj2Ia7xiZUjEVJKBhcNdy1cuZSGZC4rcpdzprtgk7wuO0Yy3UTylKMjnX+9fDvrgnGa4rQkNx
TRtGLYklMVosnXcOEYgkoG4YWgfeLn1BC5fJ5iYqxDQ9SEYgACgAwHoK/IDAYzLJyvMtnSah085E
kniJxKYB2yC1yBb2VOL+EBkTM0XDSIwCkI3JQjEPLSWdaZSIkBwyJJqvKuxPmwHFIYHep6IvAVnL
YAuZuxfysQFpziUDkmTijYIW5uDH1LhZu1NciJjZipSgdEzgMIprcnOZjVDzQ4FCVGNotIR4npVC
QqI5ouajYmAcnaq/T0UBbay+HblM5FmHrWoytwOwyqjM6ZDaJOidBG1yAnuyEIZsXK0W4j9Iry4Y
ZlERLQGJ2oCLucSyEdBqieXtGUBjI0j6yvjQMBlIVH0IgSBnVBwxOCFy6dMcO9CVrhkMJDFabfMH
sKPNczJ5nAlDzJajs/uTWo6QMz+Rarp1SVEd1U0LYMNpxTgMRiPQyiaSIcBXbd06WLR7E8JAjavO
vGU45Rt4DtKrZmB7wmSV5lmZuW8wfFFOQ8l4QJRqCMlG4MLlJdvTq9q2XBzCt81MxmBwcIr3o3zY
mLQxmQwRuzu+XbiWIAeRR5WySRGrnF0N3SOiUrQeQFArv4iLSHhcNVAHIVVMFCINCaqcY+GQLojY
SnzCrgsV9vRWnR9S8yf3ksBsCcqUziBQIyn4YB2XmCDZVClQeVGlVI26XDhsUuWv1fAoxkaRP0KW
kUJd08cY4pyRRSmDSIwUuYvRBuXS9RgFHlLZYy8bezFRFvhhFgAFpnipW7hxoUI2pm5PYvMveGPs
lUoBgpQiaioQeomPpWu2WjkU8x3hEQiZSTyPlw2oTkPMkaucE0OHsQCpXSMEC7QFNKF6Ro2pDmOW
tSnGOIAJcKURyxhdjjqNPUzqV24XnIuStFrvKJmXQM46mDB15liWkZxOC1SjCJzNVrJJ2E/YnyCN
yeEQpTJcyLnpc9FegzlhAP3p9q1TwOAQ+FHTvCjO0ARsyTzaMDjIB2XBNx9aaRBJRtyrGSItnTGV
YsmcHtDppTLbBQfR0CMA5OQUbl6fGKiEcu1OMFoGWKNy3EziTUZunI0zOESjOUq5lOBq3lUWm7EE
bc0G4rU8D1HZ9yc0X5vQ5NF7lrM5owttqHiOaYV6AJHVcyhGpdG5zHKSOkcIgakdhWo2hCFl9Noy
JNc8MULsY6okMRmETomZj2SG+lC5MmQM+LKNMB0WPwwa7cBE2zG9REyJHMhMVpPcmahWoVIRBWoZ
IQFIjZmpTuEEnbiCnhOUTtBIQJ1TuHBjVVjMycF3Zu5P+Gu3J466t+yiRyM5Ee3qkPsCLf0/U3tP
I/Qarh/p8SBhIv8AsyqiIchbAesm/wDpxR8vlIRD+IBv+V3R0ctAB8cCf8OpUs24jafF6tSLQtgb
Tj+0iwtxGR9r6yEQDbA2ltf+VERnADLU2v6AykBciNmptXrAZFr0QD7zSl6wEW5iMRv4j9SrzIj/
AM32JjzTDvl9lF/+WY7TWT9xTnmpDbIOSf8ACaJ5cycX1gnV2aXZGR5iVS4lXV6sFq/ESL+1J3HY
yB/ESnRuNy3YxQP4ic2DfE4h3VQfmLkmylWPqdESvXC9W9n1IvduVyiwj6kXuTqGaPCPoQeUi2Dc
J7yMVUmQd2wP62KNZaZVMcf+bFcJlEMxAq/6zoGAMCMwXf8AWdUiRLHWCX/IiZw1yOMpEv8AQgZQ
M2oNRNAh8Ogwi5b6190GOIBLfWnjbEcqEj7U0bMY7CAx9aY2oyfEkOfWV92CfzuL9p191FshiPVg
nNqNcaMg9qFMKJvKg2zSE4tQ/VC+6h+qFW3D9UJtEWGTBNpAGxlwREewMtQiBLaBVVDttqmdM6xW
KxWKxKxWKxVev9SdU6N3yuvVbpr8o39WqkTmGBGRRBNFqHesUJSDketTccLI6cEdUQS7jagb3CBt
TECNuP0o2rA0wGYWo49LKV9+MxYA5rVIaY+yAhVAOejetRW5aslg8TiF5tmRlaliHwVJl+1DRdk3
atcbhO0IEkPm6qAVl6kTIBYB1zez8HzP+2V/Uv5u/wD7kvkQ9BdunYAuU5fO3blcI/Tlp/8AoW5N
tQ6CmCBucMdqELbG5QmRQEDqO5B46SaqOmBLmi8r95fIiIjfU/UtUQQRmoXb1RE1J3IWrbabdABu
QtxrM5IviymAXrX5cQc0ZAU9Hj8jaETI7AHQ8q3K3H2pzDD6UITOuTVJ2rVrbuX3rdyaVwkZ0Ca1
CAIxkQ5PengYtuoqRJbYuKB+lNKAPcviWH7KJ9EoFNC83bRNZuRL4sULkbkhc2xNELcr5MPdw9aO
sgWjicUZm5gMGqpcwX8m2eAFCBJ0jHsQtwDAbEbl2QhCNSTgvL5KEdAxnMOT2L43L259zL4nKmO0
x/8AQVJTtnsXwOaAfbQp4XhPvTTgz5oW34pFiUI2ojXKs5tUn5VpiNdzYMB2rXcOs5RGA7ECQRAY
RC4KH7E5PFmNiMIE6jmjKWeAUbsg07lSjLVxRYiO1RINZYIzuaZRlSTYoXLA4TgTRThMNcJftCMZ
BxKhB2K5ZhFrJkYgxwCIGEh9SKB6KhMMFqY6MNWSBNAc0XrluQ1R0tgIsETbLyOAGI70bZkdMRUH
enNFRGbi3bFTOX2Lzr9SfDA/WUwi2xsEACdJoAKLUQtMTjijEim9PbLbk0onuWkgxGb4qMIAnsCM
RA7lGJB8yZAL5OrcIBgIjBXLcw4IKkdT6SWbcjGcPMjbppwUOWsWDbhEuSWfuZRiRpAGJx9SeZ1z
9aoNMfpVXMtufS6EYzByLF1KzchE2zmMQpShwwIeuClGcdVeFti+Dap2Ep/Kk36BTXbfrBCaYMD9
CeEgejVIsBmo8xy8hIw8QBqyje/DapH2iWj2qVqcYsPcwU7NwP27CrlmETIxNBEEqfmW5RtyjmGT
InconMSHTKBwkGV6zdYCJ1B9ylyxjqty4TvUpclBtQ1AVJco80YyeVdJLH1KVq7ExuDxA9VxQqpd
eWCznJNOQEMnZ0JmoFQ+1eYaFiT3o9qKZARrKVAAgb3FP3MlwwhEBNdgP0oql06dma+HHVL3pLjk
ATtQjEuhDbitVqUdCO0BQb2nJUZDFW5RxIqpPgcU0QwXnOwGKB16DuwUbMJPahWZyKLSrhEDatdy
s7geR3bE0zwZp4VYJoAAHGSEiNUziTvWgUGJZFw77UJij7M1Evp0zodyMLcPMmcMwnvcETiAD9S1
QEpyGeXqRlSX5jMiJDSRRkAjuREg4rReaZERdzAYLyB4TwqFmzAREQHIxJbNDmoxAuRIBIzBTKZh
4v70JwkIlhqgdqbQ53FNAM+TOnuFz7q09AsWzwR8RGZWmIcmgAQlemLY2YlcN4v2BapcUDhIdLQF
BjI4BGFupl4pnNOSyjE6nifZTWwIHMyBoF5QkWjTtQ1B9VA+aAppw0shcgGkzllElWtrHpAFScE2
N2XiOzcmdjtWgARgfaByUbYL+8dqM+VOu3KunMFSndBFxuEHajDxR9qEsQiInQAH7FqEhOHvBbQj
CXchtVC8Hw3KvqTph4cyneipWWUV5vMYZBMAzZBAGgCM78g23ctPLR8u0cbhxKdtdw4zNSmOa/Hc
n7NZwC8y34xS5DNHsZ96uxuAaHGk5vkyb8M9zLiYfUjzF86ZtQRwA2VdETkSSKH/ANgiD606Dmoz
WkotgUQcEPMJEM6OgbNk2o53JU/5UBrkxxLLz5XXAwiRVCVwExyIRFonh25joe1F4j2jQJr0WfA5
FC1bDyKcXXudlEYSDSiWPzzTqdnUqqKuadU9HXr06/FOI703mR9a4ZxPeFi/Z1n+WU9fQ5LDenMw
25UrvRtygQ+BQkDQ1QjGjDBSmTjgiXdCL4l1qukCVsM21kdNIZBVkHRYrELFwneio6Ni+dFweCRz
RbIoSzTRWqSYJk23oqHgfFFefYL2zUgZIbdi3LVHArFMMUUwXNv/AP6fMj12yv6l/N3/APcl8iiO
uEZH25P6gAr+yxGFodw1H6ZdL+pFEbVESDxzUJW4DUWYdyEYyEI9qE5S1iNTIiiMpAmAxOEQgLYA
ED62VuEYvbthog4bytN6UQDiyny9lwAKmOZU4vxSJLqU5cUjQbgpSJaRDR7USak1J+XmQxRB+TNC
EpdgJTx5eZ7mQ+Dpf3iAgTDXuhVCUrEoxOcqAIxvR89hhCoBRuwgYWnqTQBRECLwkHkQA6lONgix
HGUpfYok3ay9mNSFqtzDb6H6VwyjLsIPQ3TxRBTythUBiVwSHeniH7F4TRUMh61xORvCqAdzKNi1
ChxbZvQne0ggeF896PkSjFzVguKImEBO55dsYWx4e9Ex0zG4ritnur9SqGOw9EYRDk4BC7da5eOe
Uexfh7RAA8RGKjOWKA2D5SYWTquZyGEUdc2MqknFODq2FC3AjUMslUii1OnPhzltQ1jVLCOwKZJe
L0XBFgTUpxjQRKJl61b8rwCIZlEQlpuA8MhltXlynIuGMsChbJB5R9UJ5733qJjhRluRifZKfofS
SDSijGQ0wjVgD9K0gvGNI0ZCB9n60QTxBEuzDFTldHAMtqF2JZxSA/KoxFZmrYlAXhqhh2di0w4B
+diVqGmTjA1Rbh7EZSkTFqBGRHevtWkrJOWO7FPEYmv/AKZPHhfYpEjA4qNrmweEMJxqpxtTJuyB
EQxeqmZu0iSAaoyjEWhLGlSuGOu5txKx0R3Y+tUFcz1TbkRxBkJ8sSSJaSMkZTOqcqyls3BNcloi
MIPxHuXBbi5zlUqhYbgvEU0wJjYQmuW/LkfajReZys/MiMhSS+8jvjMsfpQPNX4Rt4mMTUr/ALSh
iMRge1apxlGZ8QjJo+pCMAwCMiW3ZlE24iBliWqi8z2IPGTHAgUQ1QMRiAcShHIV6XT2Sxn9qjbN
mUQDxSIo3arVo4RH1LBWLwDTmCJd3Q6bpIBYsSCjdtXovHCMsD9C1X4R2ClFrvS1XMgMApSOK7EB
nIoCPFI4BG9dYEDgjiU5xQJkwREi9aICMaIE0JxTGWib0liqHzJnGSc4lFTt5kUX4edLts0G0LUa
COKN6VMoBOyZ1OEi1Md6MTQgsiKNLEnJC/efy4+AAY71SDBqOhqIM8oqVy8TG2MhQlNEMFjgjMly
ibkmAqyjDloYPxHN1E8zcMjKul8EJRGl8E4mdRyKFuZNuccGwKhck4JYytii8y3H4VwBvUiUZHYj
oAMzmUQfEMVpDmWTKEeaEoXY0LBwWUbFkGFt3rjIqQEsBUqXMkiNkVAOJZGUogPlmgYRDnAE/Yjp
j6g1UdI1SK+JgpjcfqRncl5dt6PiVqtAyn70slqlUrwU2o2rlHwK4ZwI3lPenrPux/KtEAIQGQTR
qUx4rmzIJgSNwouLjjmDVeZS3bxJOKEI3BNvCC4KMTLyrkcyjCU9b5pyaDEoyHhFI9nSCa6QT3rt
KBAcHFatOmOT4lOVRODVM1SjZgfiS8Z90bFcGMNNe1DUcU7uCibcQYT8JK4yDqxLeFOcFoj4cyiG
ptWiFZHALzb5eZyK0x9SBnickAOO9LwwH2oX+cP6NvIJohgMukg1BxCPP8kCLb/EgMEL9nH95bzC
eLawOEow5skRNAZZKJhMTPtCOHrqjwsZUiA0vyIRlFnwaqZGM012YB+lCVu2dOc5BgmBfahIDizK
DosK7URpqNuCaZoMdKh5EiYyDSfehqLRepGxW71qJuWSwBj9qc3BG644DWQRly9yErs6ROJHcjPm
IgNhMBgVOc7g0yLsMV5kJaoZvj8i3fMNfkJ+TujC1EmIxIdl4HmcU+oROwxKbRGfYFW1IdhIVZXI
7ncL73ulFeKB9YVYP+iQqwmO5VJHaCqTj62+teIHsL9Sqb0GKcrxDdUJ5EBMJBcUgm1MUATjgy0l
4nJwgKknILTIEFCIgXOBNFpMa7lEaSBLAlCLagWqEYxBACrRkyfSWQhENpzQlKRO1aRLHegYXGIx
qtcZtPtREpF/oKZi6MpSI3KsdWwrVNxuCMS5GSIgMVIThQ4IkUMTgvIn8PmAKHavKuOIvihLEFMV
pgtRxW5Ax6Dw6rZxivOs+DFkAS08wmOa3dNAuZOf4W/+wV/Uv5u//uS+QhAbOvEDMqyD7QMh2Gq5
y+7i5euEHaNRZdqdb+mhqnMnERmgJuYg17FGzCLRiFosjjnQNvzREi9yZzxRkDgtd+6IxOJJUpWo
65HG4R9SJjalXPavNmBHNYtCHhG35gIOBRuQCYioTjr49DgO2KFu1Ezmcgtd2Y8wikBl2ojypSAz
CY2Zv2FcPLTbaxH1qtoWxtmQPyoefzFcxAfaVxRldP5x/Iv/AMeJXBy1sH9EFNCIiNwZY9LSDg5F
HRahEnEgBfEMxF30AsFr5WMYzamviCJlzAkSXMYhNyvLNpH3k2LnapXOdumE5FtEakqNvk4St6am
WBPaSjO7zRkfZthpV3oGNuE/eLGIHeURdgdUPEYMY9zoGRNkS8JuDS60wvwlLYCFTNV6ah0J27cZ
xGIarIWRy0vM9qRDRj2lODp7EY2TCL4yEar/ALiUrnaaKV6bwjGtE1q9K3+kns34zHaqw1di4oyj
/wCt6OmIJ3hFsShLzSJ4gBC1K8QBQW4UJ7V507ciDmQcO1eZclHWcIv8kc0AxKgIn4Y4QNtK9D4L
y7MnuGhkMAtEZa39oioRbxHFHWCSMJKYPFEeFkZ6CKFpYKNw5hj2hB/B9ak40wtigXmT36Qc0ROn
FlsQiINa2lOKgZLSATEYBahcMLMaEYudzoXYkzNuphtWq3J9ozCgGMYQcm5k+xeZdOrTLSJH1qJ3
I7JJugAFiE85SIOw0UmdonPchcFXTiklTiGS1XOEe7tWqMQwFHoAjckPNvHGR8IQBi2ZIoj5ZcbN
gWqBNOEZhNegwGMoqELcxXI0K8uHhGa4QTsZW7cQYmZAc5IRlbF2TVlPiJ9a/EcsNMfahl3LTiHe
qJhpgDiNQP2KPL2pCcADr01r3IRFZbBVPfAERgM1oswDjZ+VcZaJyCoOgCEdVyeAWvyZGAqWicEB
K3IS9oL/ALSBlPN8Ah+Lt0yICHMcrIiQbUhbtBzKpJy7U0rhnMYtSI7GWoR4tpJKaMdUtgXxb0LW
53KYc1B9honiY3R+aQU0hplsKHunFeGMyQ8Qc+9SjOEoTjjHUj5MNJOMiSSnkU8OCHvHNMcQtFvx
bdi1TuSfHFCzdn5luXhIPFEhGNsUw1yLk9XUMYlRu3pcRDMgLfDIYKNj8PK7el4dBoUeb5qzpsAc
MY+xHeqdD9BEHERiQgCeIYhcVAmhJa59oRcs5REVKxcDkDhKJkPiEsDuTmqbyyo3LeMBxQTTpIKB
sES0niCbQQexPOnbiiIl9PS4RmYAzAcEUKm5AIwiaoGWA2IALjkxyjmjG3QZD8qmbtbVvxnaVqtQ
08vbxNeI7kAKRAYbF5XLjzLmDjAI3eaOq4cHNAqDgmPpCpkiJycj2RVcEfLgcCQhPWZ7XQEQwCMu
4diECPDgc0XDnLuTSpMB4HNeXe8UaAoWyXEcF2rT71FGUZCMzWqlqlquSxOTK5zd8axEgQicHRty
tRB9mUQAYncjbm8jGRE5DIAsiADKAwGBkd5T3pGMPZgEwLbihdgfa0suwLRsDno2umNVWPCteTUG
9VwVA0dpVfUmlHSNqaOGZUpD1rzJNcMjxVwCERCJfAMgbQ0k4xClCfcQc0DIEbCoXcwzoA4ZqNmH
DCQeW/qB8JcJ70X7k0qjetR4YDGRoE3JxeA8U5YFfG06vzHXAGHrWAPdVGYa3cFSQvKtVHtS2lCN
6IwoQiI4Yr6k5NUYR7CegQt1kcdy8y9KOo5k4LTAiZyQoCTjuXk8uPMvmhOUV5lw670qmR6xjIOD
iF+M5NzZJ44jBC/YNP3lvOJQvwA8wZ7VCxGIjqLGUhVG+cYVk9KblCFom9M102gZn6AjdhyUhEB3
mQC36GKe/wAwIbYRaKEixJqD4imh4RgtRwC0rsTbERKBb61NybcsRsVuAmw1AajkhYEonhB8wOXd
CF25O5GNYxlImI7AsB6lWIByIoo8rCETGNBM7OxG5dJuQvVJO1G2ZaScEYSxiW+eX6ezq16cVXDq
1xXYq9FOtXrM2K08vR8WxXl3xIA+0ntXNUfdLITJjEnEEgVXskdyaRg/ankYdzH6kwkAe3T9aMom
4G2MVphzpgX8M3H2p4cxbmMnOKrbt3N4IK4uUl2xf7FxwvW/X9q+9kP0g6Okwl9BQhKzU0GxaJ2z
bGUmMgm8oNlI0CMZxMSMNIePrTS8sbCTX6ERJxN6SBGlESuxBy/9MmlPRP3wXCIPMEyy2ICUxAjO
OJTebMyGBOCGsgafdDOgxIbPH60DIuR6u9Rpp0nEISLmQ24FBwBpwYMhIR4hmUDMVGDIHTxDNDWH
bB0OEAjA5riq22qAyVUdIXGCJZLTiyqKo6GDYpgGbYtMSaJtR7E0w6ZmWK4aKpToAFjsKwbeom3E
axiRmjcveI4NRl5VzxjwTXlTwyTjApzUrBVWkAlBw3amoUZAPA+KKF+wToxWn2swjGQcZFagCexN
KJB3hNGJK5qRiQByvMED/AV/Uv5u/wD7kvkR68AK1RkaCzYMv1YunIxr61RVTptnQM1okNQkjdjw
g4AZp2xQEYvGIqclPmbs+GDiIyUjaB7UDcBlI4AoebcDDCAQmRpicMqIWYFzmnPzCYyFCjcjQZpp
Zp4hk2ad1QOsF4SgJcL7VG9embhx0xoF5cLMRDYy+DbjB8TEN04D5C8oCR2kLTdg4Oyn1Jo2mIwk
S/1qRPNSjE0EIhgyiOWtQmQa3Lhx7kbnOklvBC0CQhDluWlY8zxXZhvpyXm3ObuTuQp5UZFj3o+X
PQBUC4zae+qNrTC7KOMqhzsDKMb9icLssICp+lkYmZtyj4tQoO/BNC9CT5OFwgDeFh0kDPHocUXD
dl2O/wBaaYjMZuGWmViImcwjenqlnpgwHrKeN4WpbCvxHMTt3ZDw6pOPUvJ5aIjH3jwxWuwAX21H
av8AuZvF2IA+plEGE2OMyGCEhciHwBIdPEuN3yCNi3QzczO5Wrs6RBYnuQiAbr5xIZaI8Fs+yMe8
rU1E0YjXLM5IAYuHQ18MBhtKyHbUrgjTeiBgcYomzLhPrUrUpcEsVokGEaAjJlgq5/Qm27ETq1Eb
MVLlb8hbkS9sk47kZO4xdaoBhJz3q5y/MB4EO+wiq/C2yIyxjHMspQ90/Woy2FdvRXEpoFyKEMgJ
BiMf70BHiZcQbsTNI7ECIMfzqppS7IjBAktsCErjiAywdCNq3GMsyzpzDUdpQJ4auQM0Z2wSXYac
QO5ATOqAxicfWqQ8qTs2KtXy2mJ4mQnbkJRIcEK45BmcAo3GZxgjbI1PsRnKXcD9q0WIPvGHrXxZ
U90YLhHSZSwFShejbOnUI1x0oX+VhosR4ZwpUHNW4XIwa74rpbFXAOYtxMawjEAv2oGZhd5kkcJI
oO9aPKFs56W+xSbERWo4riw2rVyhcGsyPF3JzU5v0PEmJ2iigb3HcNRI4hMA68yYMACNAlioX4eL
wzWk7EbYtm7dyMsKoeYXuTqWwA2BCI8UqAJpY4pzVGVsEOXqownLy4kPGZDh96k8hIPSUcD1JQ2h
SFsGQifCFK/dgYAUD0dTuM8xQJpBwaEK/Zt+ASoO3oCCl5B4nBYYoRvWSLgoCKE9q4YNvP8Aegb8
uLFsl5dttIFStU6xgMN5VIgPmhL1p4l3wQLOy1QnolkMkL0eGYcTGS1XIivtRouGUh3rEntL9BMR
U4nqGWDKUzcDHJ0wkNq02hXB15l8kDZmVotARlPhj+VCxaeNgVu3M5nYF7luIXk2Hha9qe1NAPLO
WZRBoNpRtj4sstOS1xBtwyGBTm3qO8OtM7Y0j3kwYtlHBHThg6Ehhiw2ITLRjvT0L5hRllgrsbIq
WKkGIkKF9vQNWAqVG2ItHAErUcFPl70mhcwnkCEZ2bsb1z2YQrXep3CXEiTPtOSdnlUgbAtLVZy6
1Ch3KOsuBgMApMiThIBit23ofYhcBq7EKViQZ6CW9eXcoqxJhkRgsGQ1W9RGdQFogzjIYBSjPijL
EJ4hhkNpXmXOK5KrHIKZOAClXEowIAFBEoHICiDq3LJiOpRCFyEbgGBNCuCzGJ2lyhe5uRMMY29q
YDTHKITiGmO00TymI7lQ6kRI6SNqYcUtgXnXaP4Y7Ony4GqfNaYVkcTsXl8udVw+K5i3YgbkjIhR
kcAV5PLTBJHHd2dieEhqPilI1Krdh+sF9/D1hffR9a+9B7F94/cV4z6isSe5aZiRgcQ2KN7k5EWS
eK3J6hHybYY5Sc/UhcvHTIZW4t9JQExdut/1JGX0KejlPMlPBuH6gUZaI8jYlwm5IF67DJRFi4b0
v3k8idyqa7EI7ShAVpitVXGxflWgsAcVp0vKPtJ4xcDEjFVDE7URORk2BJenUjOcBMDGJwKjG1EW
RbIGmD4KFz3ogoTECA1ZNR/7Bv0VNFiPWqziP8QVbsP1gvvYfrBN5o+tfegdoKaMjI56YlODI7aL
wzbaw/KtOiRPd+VEi3Jhi7J42iwxcoSjaBjtBdlqFsAbS7LVogQM3P5UZ6YiIz0kj60SDAHcK/Wv
MMnjhgCn8yQ3RDLWLmr80Vl9SrdnCWQLj7FqlLjGbmqIuW/MhkdnYUZ24kgbKsvLuxGo+1IJhdkX
7B6lolOUoDJgD60wEgQXEnqnuDzSKDV/cgIjSBQAIlml70R9YTDjt7MYoQIEJYEGi4TQ7CqyPfVM
Yj1BUAB2gMiDLXDMHEDcmMiRkVi/Rt9A/ptqwWCkJ4SZijMSDIyoNVAqVCoFs6tCy8q4ajApxILx
UTSLryZHj9ia8m8OHIogYAYoh0DMOFw2w43LhiyoqmiIkHgcQhf5c/DJeiGogyWTL4kQJDAphEOF
zLDhjyt8+qBX9S/m7/8AuS+Qko9exH8+P1hcwD+8iLQ7Z8P2pgNwQAxRomZa2oUHQLh9ijNmDiqZ
uEepaV5c3ETi1FONkyGqpBLqUpRBnLAyDo3LvMEGVTpZ1rkDckMNdfoREBpiPCAjI1Jz+YwBgtUi
AmiNQ2pywbIJm6KnuTQFVKd6JneOANQhIhgcPlmm7ETjsKMI2IQBzEQ6lO5O5InBzgojlL0LMBXU
x1HtK16Zc3eJpP2QOx0/M8uLVgV0wHFI9yj+H5e55xoJXRwxA2ITnz8tcS8rdsnUdwLqcrMoxtx8
IuHiA3uvKFsczIB5TiCANwZ0BzVmVqUsIjil6qKVsSkGxJiWHetoy7OnCqdATOOSBhbIiT45UDd6
jZmPDiQg1ouMzIo3I8xKEcox+pCFq1+ID4kBvpQjzFqNkRHsYLVC9LzMhpYDvRt25ExGbgfSV5fM
WxMjGRJCe8JWtlCX7E9m4DuNE8SCNx9JEwD8I+srjFdyEc3crhZs9qL0YJ3riEbtypOA2oSJ+Nc8
Edm8omReRxKZAjHcnkSTvQmKEYpj4Z4oASDSwKlO5dd/CVMTLgNpI+tFy5lUk4urULgeIBP6tQma
mxGXKzHlXazhOsYkbFONuQJaoAYncFqlalZt2nDSoUYHMN6kUD0OS1OEnaiDjI1baVplSOTISiHd
GRID5FPqAHagTMkjEYoeVbOkYEhCZHmT25BE3DjkgCUIgEnAAYkoTIjB6iEjX6F5V6OiQyy7lbhe
iJZF/rwRHLnUa8LE55FNCRBjWQFVpkZAbbZMfoQE+bM4P4Jlv/dCJNI03LTbg+ylFquk/orSAzdG
k9yLUIUpHCIco2/JMbWLl3LKbir6dB2blGN66bsWpYia/wCIoC3bhYhHBg8l8Xmpat1Cm5fmzI+7
cq/6yMbojbuwxo0SEbmoCEgYgMzry7ofdmhLRoicBmpWpVAqHUhENGXEB29NojDSENOKHmwkQfaF
QpSYs4Z9ycIRxEWfuRKjOWJoF59sPOySLgz07U8S4TzICAnETjiHqmiGAyHVEjSNzHvXlE6p5sjf
tAGPtRVyH9P5OUr0aapEaQe5Tnz1uULsy5MgwPZ08OIWqJIliwRlpBuDNlqBI3BA6TJhQRRt3KFm
YqUPfHD2heXM8QoqlhtTAuR0AetNdcQnR1EQ8GSr0V9XUJNAKkqUIExsgsAM+1AJ5F5HIKN6AcyF
TsWq7MRGzNSu3JEW4eGO1C3Zby7dNIzQne4LWx2ACjLVEQAoAQvL5S2bkspNQLVzd0xifYC0RhX3
jUoTF2Wj3H+pHyoCIOZDp7szLc9FpjjsRjIOMkLc6A0ByU+XuREoA0G5abcdEFE71zN41MRFu9SM
TqjLiftRUoRo9PUgCGgC5k+SNm3iwA7lCXMw8y9IAkuwDoX+WfyjICQJdu9HVgC7dqEicaLzo1BH
1LUaMaMh9aD4EZppA6ciMQn80yGQVT3KiBkWgK9qMxSBwbFMYiY+laRGQ3E0WsQEpjJaRSOJAomi
ADtKEQXKsQPhxKl2svKgXnPILiFWwXmSJk3snBEbECMQhoI8yNQFpFsunOmO4l/qWqQeO2Nenzrn
3cPpKf1Bap1mfoTDLEoQsS4Y4tgSoRjcB1jUc/rQlIcXtAZ9i1eTrl7JJw7UHtVzL07lW2dwGK0w
gRI5LVISc4lNFwTgSEYtKUpeIsyJgNIyGNE5XlTAjKRrclkNygJc1Hi2F15Y5iJkMnZHTeidOPEF
qjehpGeoLUL8BE5uPyoPegAcKxWnzImWx4haQYmQycIiIgWxYiiJiIkDEuCiWiItWQaiFycje5WZ
o9aIXYNK3L1ha+XmYTx0gso6owncjgZYtvQsXoW/LBciIJJI7yifJEcSSHf6QpztgRt+yAvNuA6Q
eEYOtUcRksHGYUBMPB6gqUbXLjURSaLn4j1XCdJ3ps8zvTO4aicmmQ6YCfhevYn5OyDqLSnMnHcF
GpkSAWOAfYpmfd2/La+gZwuKQHaVqcNtTRnEnYCCU07kYy2EgLWZREPeeiaF2EjsEgtMrsIy2Eha
jdg23UFS9D1pjeg/an86LbVS7EndX6kxuh9lVWZiNpFFSUj2BAAyJPssqiQIyYH7U+iWna4Q025E
nePsdfdAbjJiiY2aDEuSB6kSI232VQJtwiDgSD+VCMRbJOBw+1GJmIkZGI0+tFqAe4GJ9bp4zJl7
hjxfUhM+aA9SCadoZREJ3bkc8Q3qXH5ukjxAyKaQuCORLl+2qOu1LV7LMY99VJ4tcycjSgDbiJD2
hIMomAELg8WogjDJkG0R94Amv0IEShEe4XkHQuSlCTeyXZaxd0/mAOPrRkLgk+IMaVRIuyriGDI6
ZyaWIYIgSmxxD4oxBloljF6Jo22BxDn8qMdLxPskllSzD1LhtxG5l4I+oJ4wiOwAKpVKdixUtcAa
YkKdm5SGo6ZHCqIs6TOeGj66IeeAZ7RSQWu0TOGz8q0XeE708C/UIOJWu2NOdMFpuDVEYE4qkq7M
0+SYTj6whcFHoVoJ7PkO/pfpfrNlsVcsFQ+vqUCwqeii1nFYrHp8u74vYmjaujhlQFaol36jqgVa
BEeKMvEEL1gvbNUACxzCdymLlc7TDkuZPqtlf1GUrchGXNXiJMWINySrQqhVU4KwVfREqR9BaJ9k
uuX5OJqZeZMbohh9JWonsTnEppCq1RxOS0kkxCqKDNA4kJ7kmERhkjG2Qzs6063JzWsYJpSAdSEC
0IVJ3ol6DBVKqfmQyFUxwCbocLJcR9SjbAcZkoG4T+jFC3bDRjh8w1DrRdgJR2FaIWIWziJRAcFX
AZ3JzuZkt+yylO3eFkaWiIB37XUZm0Lsw5nO7gfrQt3rLASbyLIIB30Rj5Ny3oZ/MPFXcmt3DOjm
lAoyM4cYeIJDrV5cjE5jBSF6zO/dixeI1R7qhCd2EeXuM4jeAB9TprF0S+hcMgW2F+o12AmNhXli
EbYGGkCilIXpynLDABfCtQNuOEpt603NQJuS9zLvXwTK3tJOPaV5trmBIezB3itV+35loFpTZh3I
C5CVt6V/uQa/FzgHWqJBG0dczkWjEOSUbs+wDYEz0GS7c0asFKAxepQ3I3rnggKBG7PGWA2DoonO
PQSNiIli9EQaEhiFpBIjmFaIxEdMu5CSjK1IxlA6hsUbl22IPgXqd7LTd4RPwHKnRcuiHDJjEHA7
VC9EaROrdqITbD0ARqCvMnwwD6Y5kqAvUb2kBbJMsgAgb8tVw5DALzbkzGjiI2IiyBqzMqkImZ1D
tdNGoyT6Sd6eVDsURIPpiZRfasFC9cIjKMxEE78k4Rjy1u3J8yC4U79+LyuGpOCeR0yOJH2BE2AZ
gYZFk1zVT2Z1pudab1s2TgJCo71O7y843CKAioVLryxMXwQtmQMx4gCoXZFrBlpAXnAG5C4OGtO5
aYcsbYNZSmpcz5tqEiGEZ7BsQmLcTzNx+PIDajcuSaOcjjIo27ECPzjgiLkpTOchghKzJ5Cu9aZR
YsxkhCxOJMd4QFyEZNgZB2RnQlqZMmiXGM5ZMpTj4RSPYE5XavIJ+JCsHzGxaZcMthWmcYzjsNU0
QIjYKBG9PLwjaULssZkv3ojMGoUbg9gh0ObsjXaI+JDchzPLxuWzKshbYxfuwTQtSlI+1PFDzIxJ
ygDxepAXLEoRNBIomIc7FxeplGW3Ho1x8Uc0ZShKRngQCXUfNGmUqscVEW4gag5bMlXjcA1W46oy
zBCCdES8JDEqkiddK4AIwB+HHHetNmOkDMBCNyRMhhIfavOHixQctv2SRMhpl74wK4rsTDtTxOo4
O6aIcbkTdoBkjYuREX8BUuXuHTcj4XQNybkVZGbvuQvAuDgFw+IkAJjhHxS3oSFYyqFd0lpaSjtQ
nKVTsWZ7VExvm3MDiEZM5TwkZykaGRco2bIeUqGWxarx825myja5a1IQesjgtXMyEoj2YrTbiIx3
dHYtU67AmNBsVFrn3BOsaIGpJFVsIUHjQ4K8GcSYPvTkonBSncLHILVbqFqnJmNFC5bmCWAlFw4I
X4W3ISmSJTY+ERqgO9Bs1pBcbE86DJOa7k0skxNMj+VCWnVLPTRC3L4e+RZkQGkxpLIrioHonZoj
BECm9OalHSH3IkhonJVqNgXGC5wQMg+mj7FwlpHNajxSOaGXQH8MwpHWZOgX0lO6Zj2ujbnUNRTh
Hwg0UYRxkWCjYh4YfSVqOWCiJ0tjxS3nBG1CkzQnYFK7MPbhjvKMbQOkYg7dyn+Jg8SKFnIK8zyI
xjIUJAchEG1BjkwQa1EkeGgdeYYgSO5GEbYMpZLVdAldIcfmhSMCdOSFq2HJQs2ZxuT9skUj3gr4
1mxcIzjKUZFcfISH50JflTR5a/b3+JcRu2+22W+hExu6xsYgj1hGFy8YXciSAPpQ0cydWcniXQML
8wPawLomN+WneKqQHMEA7m+1EDmccmIB7Q6AjzAriKgIQN4TgMy9O5eXOUtGU40CF2F86fZk7qdy
c8K41PqUpWZ+X5eLvVESuSD5OvI5aBuzNSHYAbSSoSuxGmWGkiTdqrLUZZPgmxMsxgtHimcCMPWj
ZvNIjMJ2Bl9C+71DaMFplZd8CxH0qtsU7WTC3hsBX3TgYhiq23iMQAVWPCMgtNuJuVfSASVG35Ai
YMNBxP1Mmt2pwMG+GWlHe7FfDhpAHhA1P2oCEBGmERqJKERGNsjFgZElCBhG21DJiT6l5ZgIEGtx
i7fooxaMj7Nwggtt0qTmEw1JF417M0CJQuAiomNDHY6BjK278VuQ0gf4kWlbjJ6Q9kj9J0axt04d
DSie0qPgjEisrTTrvxZBtBL1lDim36KnpET7sp0n+qjpjrDVN0aa7kAxMn4oyi0G7VKLSjL2BCLw
7yyiJx0xIxtDUX3unlCJj7wbW3Y6lOOk224TeYS7mQnCTN4/MMdHcykNbSGBtkeWowncBiR+5IB7
3RHnRIBZ34hu1YKXl3JEAV8zjr/hKDXZm6S/DIGDdjoi5ckRgBb4GO8OhauXJPE1AHE36YWqzcnG
MxQH4i1xnIXhKpEy5/wIi/KUqUEiYHuZMZXYwJxlqZGVuM9AFSJTI7RmmBlImuqUpRbdxIBpAihi
TMRO/UjxS4sNBMgN2KgS0ZDHQXn3uQjIyjIEMBdqfoJQiLuge05cdwCBN2AGUo0X38ZvjqBTHmC+
UWeP1p7tzTRmtBvrR+MTA7YjV63XDdnHtYqtyZltDD6E5lOXaR+RadVzTjpf+5DSJRkPaEiCtREp
HNzim8sgbNUvypvKftJ/Kn8pm2E/lX3EE4sxPa5H0qtiHcG+pfdQ7wCfpWqNqIObBfdwb9EI6LcR
2BVhE9oCZg3YnHr6MVj0Y9SnUZV9Md4K7VGcovPEdHEHWq2GkmLkBASpLAunjUdP1rhaJ2NRPF4t
mgJzOkZlAaRcnvWmXw5DBqhRNDE4EH0bdWix6lE2XS/Tt6rHqv8AQqdUA5oRu4kPC4vKu+HIoSjV
+mpTRCpROSU0g8T4ghdtF4mq4qHNOA4XPXdnI81T/wDSkuat3uW1RheuRfFwJkOj51nQd4b6k/L3
dEjgx+xPy98T3FVs6wM4prtqUdtCm+tZKpZNGXrVKqsSqgjqn0AvGgiCVK/MkmVIvlF0HyTkp44B
VFFpkGC0wjEyOCFwNIE4BGAFSKgI0xyVcXQjcmG3Jomu1eXbLQPi3/M5iUS1D0sQ6wVAjfmOLIZf
NL57UJ3LUZyFXkHWqYMDgdFHCB5ZrMgGcvL7VAWb87phjalLTBRjqgIg8UbY1TbtIXnXuQE5ykQb
t7ZtMSvN5qN4CBMbcLIAt9x/vR/Dcz+DjbLkSfzZP9aMuV5n8QYnjldLQA/RlVGd21C+HpGyCfXI
OieZ5c2rYIAk5c9gIC0gziBUzMWiO0oQjzNuUjgBIE/QgNQrhXoeURJtoBWm9Gn5tF/20iJgcJmX
ARErkZxzYmUuwDBTtz5E1p5mkmR78ApRndnbuh+AYA9qItcydMcI6ivKlbFwxxJBJKH4mMrc8y1P
rQML8a5Eh08SCNvTajE0JJkEX8OSpijkdilI9yLy4gS60xLjchaieH2kydYdqp39DrUKjEFGeZqj
I1PulCAIBkatkuHCIYJ/aP1K3O2eAxDK2JhxqBHaOjTeOnTOJjLPFW/JbQIgBsGCiY4Zo76oFcOK
AkNzrzJT1Plm6EuYnwmsYHFGdwEgmjbEYxwyRuTqZ1k2AQrRCID1C8qQDjB9jlBgxcAgYMoXrcmu
RqM18a1LUPdYqHK8vanC3CTkyDVUQIsGbVKi1XTrlkD9gWm3HTHaVquHXPYqDSjG7ATBoXFUTYJt
SOA9n1LXakWBpKB+sLzL8TcB8b4qN/knhdka25IR5nlhO3kRQ9xXkzPB7l1i3YQtVuczDLTPU3cV
GzKFu5ZBA84BphtqhO34C0fyqEjxQPhGTIXIRaLBwF5d4ROrwkBihOBJgXCIlQSDVQnbgTSuiVfU
UYTu3IEeyY1RmZXLunIhkbVqItWjkMT2lN0iQLEYEIR5q2LjYTjSS4b07e6QdPHVelk40xWq4f0Y
jAISFDEuocxHwzDSGwoxkHhOhXkXC8D93LJtiNwa7YOMreHeE8ecPYQqCJn74qT3piXCGtiDktFu
0+kVIDqUJQ0acAeicGxCnC4QPLOa8uMgTgvjxM7Y8Jjijb5PlLkeWn95ckHJHYExx39AEiwfFabc
xPa5qjZ94K4KmrdgR0GqOsYbM08AGzi6DHWNmYTtF8xIJ5AGW5mTWZRjLaW+xTndn5sZeIbF8Mj8
05gqI5gabkPBdFHWkhpjxFEQlwxwRkxkRQRyXnTwOEcloI0QJeRzKjGOADBC1alqjHxNg6lKyHAN
QaLT5ZG8kMgeYIuXZYQGAWkRAhuCohHzPL1UJwQnL40jXUcFptxERu6lcFvOSY7HK3DJfUq1bFRf
apRpqI4XQFuDP4icEYk6pbcghBqPqJ3oog50QuRiZROxTndBiJeGJVzzS1i2zntWrk7s4TiHESXi
VITJFyJbSgWfViUA9dieSLYDBVQG1OMcxtRnaOHiiViz5jFcUnkSnkX2IAUBzTgtEZyWLp4gyP0I
TBI2hcUKJwcMXQhA5uSnj4tpQM5EuaMqok4Jid43Jpg9oDhHEAe0ybJaxIGrAJ5bMV+IBeMzVSvS
FIjhO89MiaQmPpCEcZTLBCIoI4naUOY5htI8NsD9pa/KjqOJZUwRLrXLuT+oIHmp/FPhiFDy9Qv3
SNMRmM3UYStmJlgSvJsDVzEvHP3QhC2HuyxO9OI67pzJoF8U+LAYsmJquGXcg7EHEKN4W46sJUQe
xHuDfUuESh+jOQXBzN+P+N/rC4ObJ3TgCqeTcH+KJXxeVcbbcgfoK+Lau2t8oFvoUrdy9E6sATpI
PejauHXy8sCKhtoQnAgwlgclK5bHDmEScSvxEm86/U7hkFDlbEhOcJapEVA9S1mobiIDN60xrvQu
AuAcF52trh8UF2YIRiagN3oCTgx8VE8BQGnRuTuFOUC1clZuXpwt0NZFq968w3LRkA+oEEt3IwEY
Gec5RD92aE7mjViDKBB7qIG5AH3ToI9TIarL7CIsfW6APLuTgdIB9epV5WUiabZftJvwc2wA1S19
wdMOTmBE8MZymJ12BGP4W4ImotyMjqP+FE27E7MZCsPvBLduUZixK1T2Tr9YKEjbEQ7kRkDL1GiN
waYg+y4Em7gyBBhb04VaR7WRFy7GOyT6petEXuYpkzy+vBaTzRjb/N1Oe50Yw5rGus6tX7SBHNeY
fz9VOypQ1c15khhGRLfWVplzAAGEXJCAE4ge0RM1+hE6oylkdWXqUX0kRFIykD9iMWYu4jqj5fqX
Fb0RZvgGIc7woidmEhHCUG8wDuRjCwLtn3bwAL/Q6At2mkS0rPlgQb9JGPkx5emMIawT2ha7nKhz
+8Y/UFSx58NsYyH5USLIcFvLIlqQjKyIk5yMgPqRM7LNvIfscKOq0Glslh20R02idOPEB6kCbcq4
AEH7Vp8ubiuX5U4jc9Q/KhrE4A1BIB+oomAuSAxIjgn1lj+aUAZmJ2GJdUugdoITeaO1in86HrC4
bsP1gm82H6wT64tt1BPGcT2EFYjqYdO7rb/Tb/QnqaNmKGwVQAy6jXIgnbmjOxUbFpmCQMdqoWOw
4pnVegWxhmtAwiPpTuW3obNqGpzEIaS5/wDWKp0H5OadLelZWgfCZAFRIi8GxRt3MfYmvKu+B6FA
wDv0cRonJCYGu5ARjw7Vw1JXENMTtRY02JiHXORGB5Pmaf8A6ZXNg5Xrn7RWAWuJ0k7EOZvuZEPU
5KcLQGgSIC+NbEk9y1GB24Inl7xge1wntXI3BkMCnuWJEDONU0hIHYQuIunMQXQtwgATtQhrEpGp
AyTg0QCDdcCMSUIgcRxQMQ644Eb0ImgQqw2J5AEbCqRAfYFKUsdqNsS1mPqHapHWOxSjI4ImPhTR
DFccjLrV+YyQHLIwkGZUWCwVUJTLRPhj83tJiNhqvLlAG37uSNryYxBrqiOJ+1ShEy8yVRORJ+gK
4LfMyFyWAhwxornFbJ9mMviSDbHorgvcn5sYs0rh0fRFnU25adqYoZ2IuOzVLBG7C9KN6DR+ITKX
+Fl+Itc9K9oYCJa3EH84HFC9PyZWYivlAzMj2oSv8pK1aaspGvcE3xIxHinKLRD7ShC3zMJSOABB
RhqEnxi4RlLloGRxkRVG6L07cThCDMm5URlb965JAc1Y82RygHC49fL2zXiDEetRtcnzRmMRqlRl
GzctxuEU4QSShC/anbn7T5LRC6C4pVSiDXGI2qoaUcQhMUyK8uIeWwKU7ls6ZiqM2d2ZvtQoxKHo
HZSkKDMrVkEDkcWVuFscAiGQBD1cJ3KjYMDKMx8NsD2q0LziU3IBy3IHcoTGGCbA9ATTB1QcgZHt
Rc1yCEIwM5DZghKZAGOl1O9fGqFpgIHCUjtXlm1HQzaWDIW7A+HcGqAGSJuwjORDcWW99qM+Y5iM
GFIgh3+tXJSJ8syPlviyDDQNpWogSmc8Svhx0x95Pd45bSuEMBs6K16dUiIxGJKMOXAEo4zIaJR+
JASyAzKEHnKAw0kyh6lo5qyAcHAC1cnd0XMWBx7k84A3gQCB7Q2shZnAxjiCMQUbU5PE+AyxClZu
R12yoyhOQILxFKIkSBjHJskBctSET7WSe3MEDGLum5myJkYSP5QjGNswB2E/atXLXTL82X5VouRM
ZDEHpbBV6jo2Lp+Fcz2FeVP/AAnaF5d2sTgcwhaul4nwXMjuktL+ReOAxjLsVHkBnApiLj73VY6I
5ylio+Vq0ispRFSmnzBhewEcKoWuYce7I4EJwXBU/KwmKsoQiCS9Vbs3qxDFuxaRECIo2ShctARF
4PIDb0VqtIDXHq6M4liDRCU3tXRjIVBRlO8JRJdoiq1Qo9arVEnVtOC1YviEJAt9PrVbcZj82hXD
KVmeyWCNu4xIoTkQjOwW2xyXxfCKNvUrjkRJYRKNu34jihb0EE7FpmGIyKFsEOaFgp2rUuLTiEDc
lxSqRiUJVjA5YK2LM2PtVogdZlPDFQjHEeIoGyCdqjdENTlmFSO1W4yGmQiAR0AvQZdLpjkGWmXY
EBGTRzVMkwURm67BVMcENprLoHaoR3oWtQMgMEwwVzl5SAnNjF80blwiMYhySp3IhhORkPWpQmNU
XKEg/wBS0DE1PYqKqB3qUgaxYhC7HPxBE5ZLiwFVpwAwK0EkiKHEAcw64QWVQGTBm2Iv4sAOhxQo
QauewoeYWmcslqiaJh0MU4mzYApiTPLYE85Nu2JndlCN3wirbVwROgYRjkmMSGxUmNQELMfDDHeU
ebvnTLCzHM9iFzmToiK27L17T04o+6E+SN2YaA8KI8siOpxcPhZQ5e3DzbsGAIDl9yF08rciZ5mO
ATgPM1lLejclWcqRdb0b1+R05DM7k1iEbYJYBnkU/NCIu5aaEdqBNZHwgZqUrhOonV3nrPn0fEtQ
kdpiEWtmE8jEn6keVMgLZNDMPFCFwQIn4bkCgL0tEM5I2bF6U7QoC7lu4oiURqyJH1oxtiJkaMA7
vuRtXIi3MY6nBHcQgPNtgYEk/wByEvPtEHFpf3LSZRP50ahaRd8vejH8QC4xH/ujIXtQGI/9FCUr
hrknM5H1KUiZFhmUISDxOLlfiLts3JE0BJYDuKIFkMQxDlvU6l5ViFPEwo/YnFiD7w/1p42YA7dI
Ti3F9ukLwj1BOt/Rj8h2dTHpforVMAK7gn0RJ2sE8rcD2xCrZh+qF9xb/VCfyIHtDr7oRfHSTH6i
m8kN2n8q16ZahgdUn+tFoyiTi0i59a0mUzEYAkIThOcSM6H7EeOY1Y4fkQibkmGBADoTjdJb2ZRB
H0EIyhe06sRpcDsqjE3Q+Imx1fWgBeEQPd1DV21TwvAg+yZSDIiV/wDR45U+hMLrnbr/AChA27hc
eISmD6k10zEGoYGJrvR4DPY5jL7UJATNx+K3oYN2ofAYZyMDT6U0rYlDKeiTIgWohqvISD9iJnaA
I9mofsohcNuOklmEiZepAiBMpYAELy7lswkcA4I9aMRGZkMQAD9qOkTLY8OH0rx/QU4kRHB2LetM
LoT+ZFt5ZUuRPeE+uLdoTxkJDcVj6Rugy2LeS6Msz1+IMdq1wwxBitN6o2/lQMZAk7E6lIFiBTtT
yONSmQitJo+BQkC2whCF2hyTj5BT5TTEYIcrzQHmRGkS2hSkAGK8q4Q/sTXl3Q8MiqY7FceL+6VI
BxFYEoeaabE0QHCYUVejm/5Pmf8AbK50OQ3MXR/zyVCrVglxKQdGMacLD1LVcxNVSTKksaYqhXDM
ppASCa/YiXxLArhAtyOwkIy5XmewSr9S1WwLrYGJqj59m4DmSCfpTXHpkqYOhdnSEQ57kRpcZFcM
mVCFiqoaqjNCAGmebrVEjuQiDwjFPcAO4rVGAEjmEwqEwiU8u9fgrEiAPvZg4fmoxjJifWnBKc5/
NjFG7bHEmNCOmFAz1dRI+j52MZAGJxClaFqMYSqdIYv2o24w0yd/Mxl9K02L84zdzqPCe2IULUb3
4hi8rcqDuJqrVu9CN2MfELbmVMAVaEuV/CWiHuG22qbYCmCgOVhd5WM2137oOr/Cha5PnZ3pSIe7
dLWx2AuvIibfN3DlHwAfp0Xk8zyz3PcsnVIdow+lA8zCXLyIOnWHNP0XRu3zFo1+ITbptqyE/wCn
3YWoj2oHUPrQu8rd8+bM3hDfavxPPcn5kB7zEk9kUSYGzDEiHsoDl75AGBlJYC4WqY7F5d2BgJY0
dkI27gBx1SxT2pCYwptWmQ1CWSN1gHwAyT9Yk4IMoxk4ivL9iNT2namyU3LbGVqE5Ey0gnvyUWOd
XWD7lO9dAE48NqBqyt3bsdM2yzrigNyIAwqgU6Bx3IkYflQndwOEcymgNERRhn9CGosCCXVy3dpa
ukETyBG1ahMEbXVi1aaUQGkBixW/YKlPdgBH87xepaLcajZUrhjogfaNPpWqfxJZ7E0QABkq9Br0
mUqRjUpn0ctA4DNAAaYDAbU54RknjA3ApG5ZFu6MSKFC/wApKUv0MQoHmLcpxjhqBBZRjJ7MsGmo
F3MvCYlSiS4GCkR4wKKcHjIwDSiMxuRs3yZ2Jhxt7HKhf5WZhKQBMSXxQF8C7qwapT3LUovlgvO5
eTgeKBWuP3gDx20yRicqHo7VXo1W7RI948I+lPpj2agtN2Bgd6Zfh7tdIeE8wy0k6iM9qNudvVZ2
M57UIXDqsnwT9qB2FGzclqbwE7O5SjOflyiHByKfTOZ34fQtF3XZmKxnGo7wVOWmB5keGUeEneQv
w3Ow1QI4Z4t3qUcgaKF0DwmqF+Wi3qFTQFCdmbmJdaLkLhvNSEYu6F29ZlYhGluMgRTpMpDiA4Sp
g5HojF6kobMOlwhq8Mqb4leVe4gfDPcsXicJbQg3ingnAEjiYlOKABtOxDUeHIoSssd+xE3JkE7S
tYBLZo6GlIoElgTUoEFxk6EiWJqqBnwIRlq+JHbgjbvRaBz3oiEtWo9qnbtMK0/9FE8xISkcAGLe
rqVT7S8SmMSJDPJMRgHdUw2oBdlUQ7asCUHOqOR6KFgu5SjDECiAiD5j4IaPGaFR5m9flanIOAKm
qA5i9K9bl4ZOWfsJKCkMwT9aAI3qR2U9XQ21CIiSMpZIxGJDFFaICpwZRkS8ziiJBwgSOPIlNItv
WkDhwcoRuEgnBGYBlA5rUYmMQFvVAyMoRcxLvmUDctA6cyCET5QI3Oq2TvYr7uQ24KokO5eIjuKp
cBD5p9YdOJD1pzIbmTzuCEcwUY24eYcDLAKWiJF00AOFU0ayJckqJvzE7gDAmgj2BeZblqYvSqqn
9a0A0zVMEb0g9uNANpRsSiICIoAjEGgVzmZgSuCI0g5b0yuQjQUJHarN0DVDiBG9CbEPkUI3oEgY
VwXnQJkD4HyU7snOmpAQt2zGRHhePEhqkDLE9HFIR7Sq3Y+tcEwew9BlKuwDEoQ8kxic+kwnSY8M
swV+F/qIMrYpEvQbwtIiQJVjciaoyskXLeVQChbkGk7EK1KVi3O5IPKZiHftV6UrEBcEdQusNbj8
7FCXlxeONPyITt24hjxEYqF63D4chXY60zjTYKIERI2hynESNwKDxMNzqmoDYiRKTbCvhT1Xo4xl
h9SjP8VKINTZBOj6CEdHOXIyPii5093E6uQhzkxX87/MuHnZOczq+p0/4sk+8SQ3+FOOb1H9VvUC
6Dc0Jf8AL/8ASg/MRPqx/UQPnQfPBv2V97F3zZm/VR4oA5eFm9SI+HuNEQI2yNsmf6CmEISpUzYV
3aSvBGRzEgIxHYQvuomT+FuFv0tSPwI7ojiH6zri5eMaUbif1STnlQBnn9Akn/Cj6fyo/wDaO2bS
H0I/9oZdmqP1oPyuoH2AJRI7TKiI/DMxbRXV68FoFjQwdpu57NKiDY0GWJmWj9C1Hlpu7avY7XxU
n5ec2wlb4o+uiBNiUhIO1viI7aJ5WJgOzPx/qotZnEAOdbRPcDigI2pufeaI9bpvLuahRmDet004
TiWegEvqKEpQnGEsJUP0BHRCc4xxkAB9BqgLYlOR9kBvrZEkkSFNBHE60z1WzjxhvqXtCJoJmLRT
CRm1SYBwhLzAdWERWXqR4xExxEuE+ooDW2rAyBiD3lEGeFCQCQO8UQJuxL4aeL6kZi5HSMasR3Jo
XATsw+taTci+x0DKcQDgXFUZRnExGJBTRuRJ2AhaZXIg7DIJ5SAjtJACeEhIDMF1pE4k7HDrjkI9
pb607gx25LhkJdhdMSH2JyWG00VFQj1rHHesUzuiRGIfGgXFaiTtICY2oHuCcWojeycWxEkV0kx+
pGItjScQ5ZaQZ6MRDVQIROoacGIp9CEpXJkxwcBa43JQl2BvUtfmCVGaUaKUoXARL2TGg7GKkJzj
LVhJ5Ax9SMY3InZJ5Ao/EPmZS1lvUmd5e8JhvpQlCU/N9oSmNPchpjIy9rwn1MgY6p+8DFmQ0W5H
bqgaHuQMoa4HEiJohGMBJ/aYgDtQhcjCuEouyEIwE5HAg8PrXl3IaZHCoZCGiRlsDFaRGUNolRUB
LYgIQDgjFwy4pNuXDcHfT61xXIh964LkT3pzIDtKpOJ7wqEN2ppSjXIkIytyjCfaGTxLEHEFwUYz
DEZjBNvc9Bke5Oc+jRcrFa7XEM0Iz4o5PkqFOTpG+iaMwTsC4ix3grgLkZAEricdy4C4GKaQkNtE
8HI7Ex1UxLJ4uY7UXembLWAdGLqurtZah4dtEzFtrLXhHanaTbWXmHw7lgW2rW4ZPpk21qLW4ZPp
Jjtai1BmTmBbaAtbhvpRItyMRmy1iQ7M08bcm2stWoAjLNUtyPYCmJ0y904rTGBNcVolwnfhVDVH
WJYGNQhrID1USBqEsGUZyDPiNicF1rIYdiEoS0n1LTKZY5la/NMihZuMJx8MitNz+5AM74omUOHJ
NEKq29G/o5s//s+Z/wBsrmy2N+5+2VRRvM+kuoxLi3HLayJ00GCrbdVjKKpMjtWIkmIWxaTJlGPL
EykcBF/sRjzVrzJkPF8Xyd0ZcxbidRcgZDvWm3y4iR4psPsQAwdDl4nHHs6cVj0uEIXCZRTwIBKB
9gIEFytUqkoMAFMxxAKlORczJkT2/OBiago3rY7WTHLpiDkPnytUPOtxuaajUAW9aFy5aBIywj3g
LzRqgKtCDRFd4qp3bXMytzYiBj/9RLq5ftztXL8veGqRGZEpYKd3mOWjflOgLmc4DdEUUZ3oXbZu
V0vohTAaYqMDzFy1bodNYCuQ1A0RtmVucIPVn17gSyMp8rGET42NSRsZNGErQf7yVVps3pTllkCh
OYhIbsVq8qQfNPa5giZHhAKN0zEtxLlauYtE7CzIaiYS2ELVGYL71TqSPqReJc4kqpIjmy0wDyd9
WZ7VxuZHLJardCoQ5mJBiABcyLICJcH2sgndefccRtx45A0JVqVgg2gAA2CCZMfZLJ9iJtEQtRob
h27gibN3zJCpjIYrRIaZA6ZA0ZTIIIxjHtKMgCYjGmzJaohqs3YtETISlRoGv0LzrkjXI1l3kprY
1T3flTyHlwPcPylAyj5k9pFPUmAZHpYpx0xsRoMZlO3BH6U5rlELiGu8cAcIrUZkN7IoELsfDMLg
kw2J5nVvKMpwEYjxTwZTP9Puy0wFATkpQmznGRDEP2I2eZuUmDplLark4HjYB41if0grQvPKLgzE
A5ZA8vA24QDEzDMtNmNBjJnKl+JOTx2HuZGVsERmPCtJkxEsNxVy7GOq1IkiUasuxbOgczzAd/BE
4dqeUtEMgE1u5XYVo5i2JjJNLlxHeFK9Yk8JRbTsT/StTuozEWjdHFHeoXBWVs6SdykfFIBxHatM
hok9W2LVDTdA8UZiq8zlJG1ejXy3+pG1IfHtjiiTiNtUfNI48IqcNzheWX0QLBSnXQFPmpxErnhi
+SlZuwBjIMC1R2K5Zf7uRi/YigYVfEKdxiSKsqxMXwChKIaYqSVbi9Gfo3dEoHPJQjnEVKnbljGs
SomJHC4LoxZmotZbTvQOA2pi7ID2Tj2Ly7EjOJxDOhKMxr9qJQi2GaEQGAXFQ5MjcuCkcHUhq0xf
BAajG3HBaIxeIzCFyMtL4g7VpdzmepVaZhx9K4J9xTRNJYlN0GfqQnIESOIBQEcBt6O1SJU5zNFq
iIvtCaWANVbnaIMDEMRuCFuR+JKQ0jcM0CtUQ8TUqV2VKUVc69BkzjJGBJIGS2IjbQLUcBgpNiKA
lPMcQRjpYjBAz8MQtBq+AQ0DcyjB+7tTRGqQ8UUZaSInAUTgRm3s5oxnCVknFw4ftCoROJyxWu0T
al+bh6lxW7fMR7oy/Imv2DaO0xp6wuHQTuNU4i3YVqgC+dVQkFeIgFVudgQ8wGerJ8kblqJjBqA7
h0TsygTcNdQ2JjEmG2gIRhAvHFVWiOKL4lCzD/EUOXB0jHXvCuXYSN68QzswCHNExhZOGt69jL8R
cAOqlCdJC4LPHtJorl2cxK7Kp7VpnGMhKvFkvMtTpkBgtMw0wWI3oPgmfhOKF2QYSLAQADqE+Svi
zOIaUZOxXHfMh+YNI9ZXHcPdX6U0jc9Y/InsX5W55ah9oZC3ciJgkNdGDLSIa54AZIReImfZHUIw
uCsZZo8nzbiDsCck0jqjLCWRUeY5YahqcglRjKy8SKgEUO5eRbh5VqXiJLkrS5Y5U/Ip2pgGBoX/
AL1d5eUaDwyUZCLTiKkZrFllRSBLjLobaV5t24NE8BEEkqEjAxgRwylQnuUo2iBI1cok4k1PoG9F
isejHqYqi3qoBbamkHGwh1pYadjUTQiIg4gBlqjCMZbQA61ytxMtpAdDzICTYOMF5eiOj3WojpgA
/f8AWnFuO3d6lq8sAnFnD+pD4UYtgY8J9YRBtiROMjWXrKDx1tgJEyA7iidJETjEFonuQMI+UY5w
oUQYOZYzPi9aBuPcYMBI4epkQDIQkXMKN9Sibb2zHAiv7SOoEyPtvX1CiHmSlMRwFI/UpCEjGMsY
sD9JqgYGYI2nUPUVImc9Rx0nSPUEAZkiOBAEZfrLWJkuGa4Nf2rRG7MVetY/qrUbhgQG+ENH5UQb
rgl9RHH63QIuym2Vyo+gqQjfnDViI0j6nUeMQMS+uD6j9K1C55oZviuSPUjbF+Tl+H2OxA+Z5RGd
t3PrUiZi4CKSmS49S0R5qR/NLgfWhIXfJYVECS/rUzKYuasJyMhIepCEeYMmxjKgbtxWuF7yQ2EC
ZfWpGV0TJ8MyZCQ7ghEc1Kf5pcD1rVC95O0RMpfWUdd0XH8MjKQIWkc0ZbiSKIGN7ySBXSZSf1om
d0XQ1CZSiR6kY/iTX2S7etQMZi1IeKUZEk9yBlc86AFRImNe5SAvmD4RHEB3leICbv5glJ/Uoj8Q
ZgYxk8X7wiYXzaB9kEy+tS1Xtb4SMpAjuC0jmSTvf63UR5ggY4kGUn7ihIXROOcZAj8qedyJGUdL
gd6IHlnZMguFJyDpzClcBZjmtZnF8/F+VPK4Sdzj7VqjdlF+9DVdlJs2TwuygdgDoEXBJsiG+pOL
kQNmKB1hsxE6VwcUceKQdPctTjtMSCjGBaJNAVpjQ5onWvGgARRVZPHBViVgW2LVAaZ5jIomoIQH
MDVMYPghKMREj2gWK1EaztkXWqMRGQziSPqT6X7SVqjARfFnCPC741KcWxHsdECESDi6JFsAbAjE
W4kHEEKlsMcskYi3DScmVLcQNgC06I6djKkABsWgwiY7GVICq06Y6TkyLRAWlhpwZlSER2BaQ2nY
y8MR2BaSxCeMQmGGxOIhyqFk7AFcNAnID7VQtsTyDlAGRkI0D1XFXYgMskxqOjdsWAQhKIIGBXCO
xP7W1agXO9cWCDBtqdPn1eb/AJPmf9srmv8AWuftHow6KqirAS7QtM7Ea7gibUdJ3ImBdATDkp5R
qtdsDvCN0l5E4Im5JhsQtWxQ1JGJWkjiTHLrYKgWCaDnYy06SU8RKJOL1C0k1fYtd+bRxNVLluWO
p6TuZAbt6bL5xMJB0Zxi8dieMaKNuEeKRULZrJq/2DqAWwdRldtRuSj4TIAt60b92xGU2q+HqwV2
VsaY10jAAbmQlGbTlllVRhbkJEipOSNucQdOygZSEoHTHFkJGLYsQPtUZCR8zFDzCLw901Q/E8uY
Nhoo6MrfMeQMoH/3T2L8Z24+Hidx2LVesE244lmRN2JhsAqhITAfAHFDTJrZDkvipyvTJGUWogYA
sQgJFhj3oMWJGITO+1RNusGDMpWo3JWpEeKJYo3ua5u7IXBw2yTQb9Toy5IiUHe7A+JtrqzAF6cR
3qMtoQAxOCMi4EsmOKMZBioRj4hKWrtc9F6x7ZL7iWqoi3W2PoXk2bxNuXijEB27QhC5LSBiBj3l
abEdR2j8qe8fLt+7/cuGOqW09LFN1GOCpgtwqp3DjIsOxADNGZ8NsP3oyliS6eJojCYEonJUvShu
kHXFdlc/NiG+teVAeXaGERie1B/vZhyvwsYAylWUyMOxTuiMZSt4GeA3qVy3zWiUqkDw/WomRBli
ZDCaPL34iInQTjkpWr0YzmBwzjOr/ohAG1b0R8PmAOvNlCM292obuTkeXPZvWmY1x25pzEW5n2o0
Ke3zAbZIfkKBu3dbezEMCgANMRSIWm4+rAFfiY0iDQ0UREPcIQEySDiGXDWJxCJiGByRUIE0AJV1
8ynBY5F2Wo1WqJYGijcFJM6HNWDovw8QGYVm/b4LrMTvRhdHEKKVq7EShPDUHWmOmG4MFqNbMqkj
JSPL6711mEYxIAO8llO5LG4TI96DKlFrgXlEVfNHVFiMQCtEAwNHJJKs3RsY9IMW4a1RBtxhI4kZ
p3DnJEDPFMMUbtw6pDADNRtAaYPUBRhEUCBuR4TgVwHSi8hqnmMk8Tql9CJhEBNgtWzNVkW2ZLgD
gIAxqRUhEu8StickB6B+trjSSiTWId0e1aBicStAwjitRHZ09iMs2QjEkA4lTjNzGOB3o8pygbS2
qXbtQNjmBKEayjAl/VILXdmZzzMi5R3KUXLGrKS7OjRLBGVqQYl8WXxJOdgqtOG5ahSEcSdyHkgi
2KJj4QKHJSJrI0WgBwcXTkcR2pyKYhAnEYFFhU+I9FAtJqNhTxBtzylCi4Ji9HZKkvWtN6Jsz/Ow
9axEolPK2H2in1JoXLkAchL8q0DmT5csHAP1rVDmQTviyFud+m7SCtV2cpbRq/IjGzAasHNT6yjb
xnLPYEPMHw4+J8DuV6cJ+VOGkw3g4hTgZCcYh5EdiphkiAarXLEry41mV5dsmIGJGJUJzNSKuiZM
YYkq15TaNI0tgpasS2ntQ1YIcxauDSfFFStTGIZwrdsDVGLkn7Vflb8OujbqK3G5SNyhOxMSCMYy
GYQjLiBwhi6eVuImcgBRVEpbGC4jGH6UgFwkS/RkCnRhcjLSQ4kMs1G7ANKomtUIE1rMp+pSLXY+
GaPJc3QAsCclpNXwO1RlEmUIl2ChcEtNxhw5ujGIMIW/FIsao3rV6RuipEmY+pSsEHWCzSFaKQk4
eoXatUjgoyGBDdHepCUZMDQydu5WmIcRAPaERAOT7WQXb/YKvo39DTpkdtFpBrJClZVPS/XINQtU
Y6JxzAxTVJzZYFY+tMJVG1OC6x6XWAPcn0hxuTRNNgQbHNOSe1eZ5pYeymlA72QEXJOScwmBtaie
Jcekr0buivyXHqV6KdHNj/8AZ8x/tlc1/rXP2j6BuhjjtTxk43rRJtSMgXRMnYKU70Q48L4IysxA
hAM4GJRk1CmxT57FRYIEwLHMgriYdqLVPYtIiwClcvGOvejd1RGkOSDQMpQtWzOMSwkcD2L4dmAO
RJWid0RhnGNH3KnznpkFq16Y7AHXwYcWczj/AGFMpFgKkowtHRaw2E9qAmaDNCJqR4ULhIMgMMyV
KczWVU9NIxJQjpEo7CKISuQEjHDZ6lOQBEpYNgOxDRd434jIPRUmDbAxNCjOPABgIlyUQdZhIe3X
8qNmdsUPiAZgoCEDbI8Rd1ERmSMnRA4gWcDNDVExAoWzUZQmATkaFQnngnJc7F+H5U6z7smYDtT3
RGVwVIj/AHqLBqYbFOU/CIl/UuYtajO3bJMd1VpNJRp3KuwripjQrUGiwr2PivM5adJeIGsSpC3b
hqbJwUecvQY5AYBPenw5xFB3labEGHvYR/vQlelT3f7kNER2nHp3pvoT4DYjtWCadyETvkAmjOM+
whVp2rSVInAAoadjq3MYEVU4xHFI4EtRfEgQNuSYdHFnmiRmog0C05AUUJTi8jRXWBiJsC+xSiJG
N2MHg2BKFiXxbruBGunvQNyOO8IeRZenFdIdMYylLY7rXGJERk6GoabsfEFrsnzYjxQNJBNLgOwq
PlNMe01WVM1wEvGqNu8cMHRAlIwjUgYOtYDS1CJfIFSiaAZlHyrkfMGS8u81cJLTGOsHAgIG795L
LYFKEA5OSecxCRyKJvRMrMg2qNWRuecbrFxABkZM2zsQkfDhLsKNq4f+3ujVAjIqewHPFR5i3jHN
GcpF3oFCFzxT+1CAtRJNZEgEk96jz3LwFsmTXIxoK5p+hsti8yPhmgMyp2ZeOHEOxNs6OPwyoShe
t8YaoBxReBDZyoy0wxGMlqnLSBiVoiCz+JAgM9TMqVy4Q0ax3rQzNUKRkXb2VrhTaESQRIIxgK7V
pOL1WlhqNAEAG7ShG2HkcZITulqOVIRiRGOBRAFMtqjO5IRgNpRhAHhFTl1XKYYn6liRsAUpmsjg
q4Zrf0HsTGroDaowuQEgQ9UIWoiEdgV8hvNk1c2VcBirph4NZbsfojLaGU47EOiiqWTlnGYUhoAj
GrtX1qVmGERgEIyLVcobRSITCr4IE0JOCjKBaSImOwnNOabk7ttTROCAlnmm1B9nS04iQ3h1q5Se
nbblWKEOagbMspYxPems8b+0MEPNusdgqqSmRvLBaiBq9aBBJfZRl5wLRzdPmVGEqAVpmSpDSDEw
aT7FftQrFyH3Ot7LUcBgnPcvMvkeZLCOaFy7aEiMFZ5SzwWjiRgAELT3Cc5as0OWhLXBng4yQu35
uBUDABGMJVG2iiRKIsvWoKpLUdgUp+3cDfohHSKEuftULOMbYef1qmGXYq1Ix3Ba4PWhULUSY3JE
VGzNPKp31URYfXiGUdbeYw1gbUxOSmSKCP0shLlpRMZUkC1Cm5gCZPh0LURIH3UTEENkenzLbRvx
wO3tX4Lm+GUaRkclokH+1SvW7YnqpxAln7FWYPmVffsUrt2QjEDNTvAaTORNQcEJCBp7Ro6eYRMW
BGQUTsPQ+wuhCTjy/CInHtVtxxDFO1dvzW3Vr8mc9SnpyTkjbI4QaMoxydNs9GIsWborUIlserim
EiFSS4wCEXcSCqT9CbWW2JgZNseiYxCfSYncVpjdm2wpo9Neluu6cdNepRbfT19Bzf8AJ8x/tlc1
/rXP2j6QXJDUybSwAZa7LEbEwtkdiGqA0jJCGgQhmc01oGUjR2QMLZKEOaAgTVgXUTzFrzLT1oDm
hbsWxC2BjIN6l4dVcWTRAdeZZYDYpXOZuaCxoDjREW5kRPs5HtQYxgBsCYXBXEogiVye0GirCQiM
GxWcY5mS0k+b3MEOEEnALXct6YZl08YkgYllpBL7gnM23GipcHeVSQPeqEHsP9sDbt/dQ8RGaDMB
tOKMYimLlSbLBF8c5FExNBQIAimSeVQ+CJiG62mcRKOxRjp8uMfcADoG0WiBUmq025EuHc0cKUDE
mI2B0JylpLtpGPavh3SRiKv9alqILZEKkzAvqEoPGQ7CFenzN2VwwuaYmZcgMDioX+VMeIjXCQcH
sQjfuGNu5FzGHDip3IQIjLxTkCfpRuWJaonEBa49qEZH4ZdwAHUrHLW5m9coTIUDqFtjKTVOAC1X
pP8AmigR0gW4D2pcIQiDrPvS8PcEJO7IF3fNU6KIgeJPmejj4rhwgPtWEo2vdhwj1pxy5lvM/wAi
+LZuWvzol/rXm8vdPMWRjCeIWqTQlHGJLV715cLXk2X4rki7hMKxYhGzzETc5aR4ZDJaYTMicHBo
tMqxNK1C86wN5gPs6OxHT6kHDEYlC7HxR8Q3K3GJ8IdWLvNR12rvCa4HIo27LQtR8ZGJ3Ixsny7Y
zGKJczmKRFS6MJxYYnUDh3oxMJWiaeZGgfuTSkbvL3BSWLKUhDxANPJkxJhMYTC47UOZj7wpJEW7
MrYOIIJ+tVfsAqiIR8u2Q0tXiK1TlpiKCIqe9aIxAj9aLhtQqwwR5W7LVkLg2Izt3okGrSotMmO8
VWqMmhHEoxieAeKRqSo+WWkCqsDtAqmMxdB8VuWPcvPs/dTxGxCTHTmVITdxUMoDO3IMe1SIzxUo
7A6jcuWoyuWyxKEIEAxwA3J79wWiBXUWCHJcpLWAdUrg8NMgg6oHTrSfEKgrVGWxwStEiDrDE7FK
BwenTwkgblESkSZbSjqJLhCEC0AjOIdjitI7kbU8SKLXKJEIGsjgvhhotjtK8tmdC3BycZNsQiIg
R2qlZGrpyVvREzxCgiAtLk7kTjtC1Qkw2L4kjIZDYpShMmMg2k9SqfIIkpoh960wwGCB6gCERUgO
yBJLA+FebLYCy/E8jbLDZh9KNjnY+QTiBHSSO2vS/un61MHArSEBtWpqlVqURMPHEqQtQEBLADNE
Gs5Y9AJxFAFqIaRzOKDlU4YjAnatciCGoEZEMDgqZoNjJcJZqujKlKFOtORTCgCotMwCDkUZcrM2
tscYlNzFtj78axKe3ISG3JYrVEvpPgOaFmNAKy7UZXJEAYMMUNIaMduKMCDG5lI4FHDuTDwjEok5
YqV0CkRwhRIBJEuJxgELZk5zZWpT8MqSOwELzPMiYM7vRSMSDGy0RvfFkzAjYVG9agBcjk1CNiAh
SPuSBLepahDRHOcg3qCEYnCjlSvS7IvmVzF6VbhxRM/CQpXoSMtVWwRu3CBEVKMzSOEBsCERiaLR
H7w+OW/YhdlEmJIfvRvC0RAYAYlEGBg4ZlqmDIbELluIEBQAmqEbgFuGFAntVGctvS31LzbI034Y
H3l+E5p43IloyOPYUYXAKoxhEsKhqLzJyM6sxkS3rWlmMaMo3IWJygHMmGXZiiSMEdGGBCcZMWUT
tCkFA8vbBvltZk5+h1DSPEAT2lSM20scVu+cG9M/yc9MiMTROUbuQw6jZdZ5FcAHaU9ypKJjRaUI
dAAy6SU6daWc5LUfEevu9Jj1d/Ub0Trf6Rlzf8nzH+2VzX+tc/aPp2zKdnCHmFhvWqUgtFiOqZwA
RndAJOexO7tktOlt6PkxEn2o3Z8JQa4dObFlERuSnMYRJLOgbtsk7kRGAERtXl3KSGzBPKeOWaxI
G9OGMVWMQn0R7WTzi+5MHiNgLL4PAcyaleYLmqR94URuTuajlHAJ4ERh7sTVCNiJtkYykUwMpy25
LUbhld9wBDzrB7VKfliMBtNU10mB7CtNsSn2BGOrTIYiVEBK5GuTpoTBOxVWP9pKeOfDFNkhcAjI
EkASD4IvmWohLSAwyVyADCAFN6I24lOcqMgwBOxYnXgIjDqOny6CPa+paS0syjI93YiH4S1NqFsQ
EBtjircrU9B9ompVJGdv3yaJ5BxDZRSjG5dtay4hCRjEkZlWoXrnnCNdD/tK5Pm7EpxsgC3C1F4g
/nOoQ1gTuD7sjDcVK9yph5E4E3YRIaM33KYsng2ZI2x49Th8GKF27xyG3w+papEWobT9gR/Dx1S9
+eXYEZXrhl9Q7liq8UTiFKA8Majv6hltWnMJ41uSpELXc47hqTKoHam8RGZXGREb6JjxWyKnYjYt
PISNZAUAG9XDY9p9I2sjy2mQg9TgAFGcxwgaRHtxK84R8zlp4xxCe0Y2znE0QhGZkDmPCgMQ7dxU
4Clad/Qao5VQlbLEU7U7CNACBtVvl50Bhjm6PKXKXIFiT7Udqtm+xEqRBwdQNkCMdygQeKQdG3zM
BGWVwULr8Jd47M/AV5ZLxBonXBTejC7I4UIXFceJq2Y9SxJ7eitAqCi82PhKPlEFsiWRt3KkYtVf
hhPy7YqWoSqBojBHd0aoliMwpQl941RtXkwMYzjQxlTUETKQD4xBclXScKMECiDgaK/bg4MvCVCB
kZayowvP5UOKQGZ2LyvIjCnDKIaQParnLSL+WWB2jLpcLzbfjA4gFpliMChdjW5CkuxVoyohHarQ
yKrSIUY+wKyO5cEQIv4dyDBh0fhYPpGIGa0ktpFQtRywUpTNJYE7Foh4QaIwmKj6UIxfe6M4NI5g
I6YCuanceuW8p6iUsclrJdsnUpEBz4VcNyTuaIxEnI6dILtsQgT4cQtMaA5labbR1bTUowuRaQ2q
vUJ2JzUR+xeeYPJ3Z+F+xGEsDRlZhaAEdIw2spzk2qBBid/TKO0FEZhSJxcoAqEehjUZoDAI9FRh
gmNVSoWg0C0mTsjseiYZLVPLJSEC2rEos5IQts+0pujsRJXFmKIwAePtFarZ8uW7BNchrGUoozJb
TUg4p5FhIuShbhIFvpRkc1b0xaVTI7Voim2ogloR8RRhbiwiKkoxgwOFFd5nmgLkYkaAdp2owNmL
EZRAPrV21fJEYEiLVX4rk5aqASitJBcZMVwxaO0hvrWq9PuFFuGCN28dMB6z2JzSEaQhsTkPCVJj
cnhf0j3ZRJIR0zlelsA0x71x0iPDEYDotmWD5qQOL4LRIiUNhAKYy0gYMjOJoPaLZLTzDA5E5qNw
B7ZNYuwUAIwIk3EKqUJwEIjwtgeqeYsARvxrT2gvw3McN2NIyO7Io25jHFXNFycQalihclqYOxkc
SqqcbcXjMPciMihGNQc0Ww0so7RREbkObuXDGMSxiIuSoTg+gBok0KImKFTgMIn5O/zLuVPRt6J+
tRqHBaVEZmvoW9pPI1QCPq6HyCJTsn2J+hsym2oBaiK+hb0m/qt0VW5UVeivp6dXm/5PmP8AbK5r
/WuftH5BjRYOmRJqT1mxT6A6ZqbE4Jh2IkjUTmcVqgOIYBC2wjAEORitNq6Qc1Lz5SMtxUpRiTay
BQhG3o3lCN5patmKcyY7GqnY6feZPCQITGQfY6oXTLwj1LiAKJlE12FCBtRYbkTYl5QOQCJuyM5y
xlghPlxrOYmaI3ZzFk5aCgZ3p3YP4Y4oHlYm3ED2801wRjHOSjG0/MP4gy0y5ciZyyUfOtjiyiXI
TkSGwNVAzeAlgZBlqF6LdqaNwHvTvRUIP9mpAVhb4Y9uafZigNWiJJJJ+xazMyzGwovESk1FK7hO
eKPRRGRHxAKy7UGPFmDj0FkOilZHJEE1NStTUFHzKk+ClcmHI8PcgI0AxOSwoowhFxiYjYrkhHgp
ExO4Moebb1WR/wAp3r8TcuyE7sXhCBAjF/pV29Y5k3CSDO3F4gx7XVg8/wArDyDAkW+WBB3anV4W
pXOWrwWxIu2wqMw83qakkdqMZ2yLgPs/WvItXRdhiM5DvT3zLXtkXTgv0Pl0AxOOKEhJ5e10bUxT
ZFG5iA0bYQj7RrI5klCQGqcjptx3rTd+LdPillHcEbV3wmgdeZC5KAwaBYFOMWYLiYDaqRlKBxk1
AjhctT8UCvMs3ZWwam2Q6EbZJbElRgO09ynKd2MatU7F97rIyjVGMJNsBoVv6NI9qYH0qIGUW+hR
5nl/v7eIGYULN+EpXbZo1GO9aG0ziKDsQiKXrPs7QFE4XBSQVsHxiQY5otlREjLFUqjHTVY8RyWR
K2bFXFBqqUTEE5KQhLSRSiMpcROKjsJqEZDAInaqqmCE40IUOZiPFj2p1dzwQ7FtUJziJW5UkDvX
n2ocRwJLt2OuI8EqE7Ebt6/CMQH8Qc9gVzmbfgmeHsHUfLMLz7PhOO4ppVykNoUr1vwSD9icUTq1
dHsoFStxo4d1DzPDJ3QMPCcD0Snaj8Q5gVWu5EgXKF8V5TEhkYUFsetAgHzM0OYZpRPrCDM+SFXj
L2kYiAkNpTSOljQK5OdTHBAR4XyRN16bENA4TR9ikYR4n4iaoEV3qRzZCRjQ4b1qMX37FrnTZtKe
LxD0QDapMHkcUGy6SUZFSlDxSo6nCR4hXuQs2YGdyWACiL3KSlaGZenqdCMwIWxUQj9vSHDuVKUD
wnJeYPDJA5bVGO5BMm2pgsVpj60wqTigKJmVVTo3qlFGJGn6ymjGpxQOCIzGK0gVTbaoEYjamMQC
T0dqti3b1EmpCMZx0kZJwWK8yD6DjF8VEEyOQDuAjLTInOjrS1ThSqFuVsxOLMXKmTAy1BmwZapw
Ji+AxVz8VbmLVxqAOYtuUpW/MlcygYkP34KXMX7z3rkiSGwdfDuwmJYgEhvUhC1K2DmTJ6d6AldF
ye40C4eLsTS4rmUB9pWq6X2RyHZ1x5cJSbAxBQhe5Y3MhKQMT+snuwjD83WZFNEdgQhcJY4gbFqj
CXCKElRtSOJYPsXL2YXADGFS68oEyEC0rssEIcuROXtNgEZSBIGyq1woN/T+J5bhvipiPaXkXjpv
woCc2Rt3BUhnU2mNIOqD5d6latRtExobkXP0KV28dVyROpw6Gg6Tsy+pEyAc0BCuQIZpOO/oAtRM
9MqgIWSQ4qwLt6kwUg7iVQ/zVu6lfkdOo/SVv9OIZM5QiMHQiMh6AyOSc59DqMBnU9Bltp01xKbo
YZIdA7Pk1PT06rdG/wBBVc3/ACfMf7ZXNf61z9o+jp1K9avpKgFUommKbQtenUfzqo8A1ZLyxKIg
dmLIC1dlE51UtUZ3ZZSRuQYROEZYoRvNaGZKEADfGZiPyJvKlr91kDegbYlg61C7H1rhuRPenBcd
ViHG9cMQOwJpQBWoWovtNU5g0tqEr053AMIkhlqtHSconBR8y5G3COGihTjmJ3ID2HLqP4eJs6RW
Ujih5twC29ZRxQHLid+AHFqBH0oQNgWScZTJZG3ejrA9qFQtFm3KZzcMjC5IWpjHVgtInrbExDj6
FqtzBHqTeZF9jhOKjaP7Hzu5gU7Sq1JqUHBEBjvTnsEQtUSwGSMfEcAN6II1Uqd6BlngNibZ0Oa7
Apan1s+7p3J8zgEz6rp+ha7mWG0pzTcjlDM7VpZgBRGchU0ii4wzUrkqmWG4BTltkSrfM8za8y7d
JkXJZgSBQLVyXFZHi5b/ACKUxIGXhNs+IHMEblo5GWjl7YDz2k5UV21/VICYEXhLDXtdQ+GbcYZQ
o/apXeWuUkeCzV+8kqOq1rlMU8urdrI25aoGH3jh29S1wIIJYRGfctM46TmuEgvknGKbBRMZEAEO
2aE7RcZjNXJ2Q90Cm5CUuZIunidzTtUbFmWvmLlJzGQzXLWz4AXKAZhk6syPsmQ7yKKQlStd6+NS
AwOfcvh3ZGGyQQ8q7GFtqsKhPKZvS2GQATyt/D92LGLdye1M2Z7DgqShMbXTznbtjOodS5blJkP4
7uZ3BEyLk4k9DxJB2hDVMkHaUweZaunJRvxAJjLUAVLWBG7DFsCEYXbohPII87yJjcEvvbYxO8Lz
LBc7Dl2oXbZNjmBiWoSieYtRMh+8jLT62UrIAF2ALSLlE4kYlSG1NEVzK0jxZlOcUyMUTI1KrRNQ
hkZgcJxTIHYUc4XRQ7JLyrsSCM9qGrh1YA9Ny0amNQmuAGOYdSuwGmEhgh2dBIxjUK3atlmHEexT
1lxFHmebj5pn4YHwgKfN8lDy5QrOAwI3DoDYdJjKsZYheba8BwOzcrnLSpIiiMZhmp0T5c4ioRgR
m3QYzoRgVGEcI0B6RCMRMjF0b1yOgFao4Dbmrk7o8Id960R4YPpB7UY3DrA2Ly4xDGgDLTA6dq+L
DzIkYHajKDRByC1XJjzJYVwWnJRAIiBISL4MFI2w2qpZUQGT1CEbdoiDAVD+pPHE0D1KOo1lggDs
CfqFHIshreMiXBCloJlKXikdyuX5AGcYDS+18UzOM1et2/CJUGxw7dLEOhixXlSrD2doRlbGqIqS
MkDKhZukZonp2Hpr0MOgyOS1ypsRuCgGO9apBl9fQ2EhgSnZobXQjHLE9AJwVDwigTmIJ2lAytRl
LaQmFAMgjM4ZIk5ZITlHVc+oKN4s5GKIIAEsSFAxtQn5j6iYgvVSFy1D4Y1QLChCJ8qJPtZI67VD
hpJcetcIMYvihISnEYaRV0T5khHIKZjzMxL2IxLDvqtR5mPmbCT9ajpuxuHMA4Jo6Zb3otF+GmWW
BfsT6U5gg8Fq5ixOdzJ20juWkCVsDAAfkRBuSAG0FGWv1ggrzbc4Ux1FkOZ/EREZMJCjdjujG1OL
HAEsCjLmB4jQRkGH1pxWXeU+kTAxi5H0BauXkbL46YiX96NryjxeK9cp6gtMcOl81+N5QNONZxH1
hC3M6b8Bn7SMTSSui5ATtnCq8uAFszOkgNLHa6EblsTLVJdQnZrauuwlXSUYu7xenROVvGQqtPsy
GJ2hbSpebHSchu/sHh8g3p129DqUu1SukO3Q3X0DpkdgdE5DoHb0DpKc1WFSgend8op07vQV+Qc3
/J8x/tlc1/rXP2j81fEiJdq4ICL7AvCIn3gKoG9clcEcInBGUAQcohCzbtxgPfev0LTa5ijVUvMu
zleyD0dPcsGYOBUpGMYRj7JRF/gIo4FFpt6rm0gURiJgEYg0TG5Ed64SD2H0FKJrkRIb1wWxF9y1
VhLbGiIlDzCcZSqVI8tIWhKhDOvvJiZxkKKVq1GMhL95I8Sa1zZwrEfYVGNmN3zn4pyPD9KHmCEY
4GYDlAW9XMxbi4W+lC3GwLJzNxwpQ5iLafagCQfUpSt2ZG3HGZovNNwRGwmvqQEZHioCQQEDO5GI
ODkLgkJDcVj/AGCcoizDXGOMiWUIiOiMamuKBiHJwWu4XAqQMSvMkKnwjYESC0SKstTUHgTGTDM7
Vj/7Jz3dAbFeZKXFMCiL5LanOAT4yNIjYjdu1ka1RnINA4OqVJoAhABsiUZkGlcUwoBg2C0vhi2K
uaAeEN66IbVy1n3LcQe1qrVaum1MOxgWk+QohzJuSjzMnMrkiTqfAF1b5Sdo2L0RUSL6yakiWace
IVBRtaSLsSRpJAdtjrVIaQPDEdBszgPi7mftIUDcgImD0t0HepmzeeZNBMUAURKOqAHjjQOqTrm6
ecCCcGWLbkDGVPaGSJiX1hgDkd6GsTMjjOA1BCYEpyPvDSyt3A2qJcMoC7clK/c8EQfCB9S8rmrn
nbNoHagDUjA4FapinshA3iInKIDyTTjctv7RjQp4zfdpL/ShAyMCcyDH6QtTCbikh4u4jFO2uy9J
gVHasMQpEih6si9QME0VLmeWnqlhIRy7U94aSKIStTMT9CF6kT7Qjge5B+GRx2LXalqGxeaIyhez
IoCi9ZJrziJzxQlZkJxNaISL0RcUCAgE+aaVUydFwiJCmR6NBLbENQEhkSHRm+qO7JAbOiUdoRE4
RlKFAc0dRo7ADAdMonMMrlm+CJxLxlEsV5VkMDUk4lC2ZATjRirnL+ZGV+6NIgC5D5lN0V6TGQeJ
xCF20WBqCMkI3uKXvhagD5eU8kGPED61rgGesu1AHEogDpMjUbsUBOIjLIkPJlAAkknikVK3coKC
BydDl7IEZ3A8yNhVrRIRjEOSTmqnVKQqULsaA4FAzGNHKY12FDUccgnjhvXlyiGHtIWxLRbcOUIw
L0y2dA3Il6Oj+bQK3KWDOFi/Ru6HXagNv2KNuI4Y0JQuDwkO6jPlYykRQ0cEbFK1DlxbugVlKT/8
rBSuTJlKZeRO09TV7pBQL8LsUIV3TjmNkkSaIdWiYp1tW5URfHoJNQFqyTNRbukOpE0EN+PQwqtR
qJeIjYgYRzZaWeZxOxOtINBitwRkfu413IXITFsYEM6t8pyx3GSoxM8SaFkeUlb1wHsyLHuNULJg
LNk+IO5PfRSmA9uZfVkyERF6tRM2SlDODU7VUONibLKIQgYkPRytNof4kPNNcXVq5GLwAIJ7V24B
PG20felwj6UdWm7dObOB2IylCDDMgUWi4T5kqCQAAUoEuw1Pg4UibHY5IRlcMhcyiNilM3JREcME
0b5LYP8A+6BnzNBSLyKEheYj84rzdZOnAOK9wRnomGyiBH6AiTCc4jxRkCtU7AFvYxH0p52Wt7Bj
9Kra4N2KaVuUY7cVprAH2zUepC/yRkC7yOAfcEBdIt3xQvSJWiF2IuZZfSpxvScO8ZitQmPMxExQ
wND6kI2zptW30uWMtqjzMCYkFpAnF+5VqoXJREojEFC2DGHLiTRtxDN0QgPFEV+bK/Jz8gforl0S
OZwQjtqUCcZVTZdclHpmY5I9DZp8QmTp8lTNBViHXb8tZN8hp1+b/k+Y/wBsrmv9a5+0fkzJvkbd
fwj1LctUwXOwrQLcWRPLyFoHIBEznKVw4yT24m8N5wRuiflDKES6/wC4uSjAYaQ5KOmJNoYSuBl5
flRcYyNAjblaMiM4VCEIiRlsARibgidkqFAebFzk64JCSr6FjUb1wgDsXxYCfatNu1GIONES8oCX
ijEsCvLFsfpHFRgb7WY4RiGKjO1IzlH2bhcFSuC9Hl4nAW1OcvM5q4fCRIt9aI4IDf41CMZXL9w+
OLU7qKMPJjYBqZTqpxvRM9HtWw7qV3yGtRLOTX1IS1Eyl+7iHkhbINuZDtOiNs3o6hiBVaxIafee
i4JiXYX+eLrY6Uzy1fQtMcStUssAhA4CpQfBS09ig+Qqt+wL6kA6YYoSnHVHDCiAFEXzRJoBmnOH
sx2o3Lp7AjKRaOAG5DVwgeEJ40amofYgSHOIerbytINMyc15VrxFVNBV8yQpMAJTnEO1WBKjAe0Q
EOUjzU3iBwajQIQmBcOZlVkLcaAeEBC1zkDw0FzbvBWkRPMcuaecBxQ/SGfavNslifbic94Rlb+P
YGVTT61oufCuGjSw9aYEnNzv6SMdyJs0kMYbU12LSwdRDUFScypGMjEZbUfJmJDZn2qMOctzhKIY
3BxAnuQ0cxHUR4SWPe6ZxLOJVjn4RJ8txOO7D7UNFwRnnblSQVyUrgtk1jNx6lGF0i5bl4ZduBor
965GN3mnaIlvXHbe5DxRJ+pa7UI2bwzYLTzNsCJoL1sN61G3Ofm8pM8Mti1gCUZjijtG1NH7uXhK
1Ri5jn1rn4mpAaIUxy0WtjikcVpjFogsGQ1gE+tNpY7QtUS8M1qtSpsKaQBbYjI4bVXPArluT5WO
qEiPPvEMZSOQ3BCG3Yu3AIosKoFb1VaSEYkNI4FFxw5FCrIAgSlk6uXCK3PDEZb1xODmD0HsUu1R
G09L5IX/AGSHKJjJrb0G5QjE6bl0MCcnQHMarl0jimJNXchCJM7U6wkfqPWkL8DPUGjsB2oaiRaJ
oY7FrsXpSgKmBD+uJRjc5W2ScZxg6azMTli2BHcq5ZKmZ6CBRPI9/Q8xqI8PaoXI8M6PvQu3CQYB
i2bIkE6YikUBMcRyXCKxLTiiDUD6EZHYhMx1RzRnbkYmOICMpSMtroXhAytvVcMQKYgdEjtp6+iZ
O11qJcxLadxWqOyoO3qMgo28qIyhcAtkuQcQhYjgI6QrWmINycRKU83Knf0gXbXEJAVO49TfkFMH
MIjJEPRHp07Og3bxaI9ZRNr4VvJqyPeq3Z/rFD4hlHMSqEdNJDEKqYrZ0MUScT1K0REcMytIFBim
iFphjmVpdwcjUDp0RqTgvrRjHAVkUIu1sltIQhqYlRuSGqRPCoThgQPqQhBpTjFpy2VwWkYlCMzr
l9ARgI6pE7GACY7WftCNyF027hpOLGrIRclsygiJz1ay0QclIRGFG7E8oNDaVpk0hsK4bcInaAuO
T/mr4dthtkWXlXLYNskPKBfDsUCDwCup1LmoXDEQABBpQKOkxM8hKJ+hDzQBIUoAFY5ce3IGR3OE
SMQoi4AdNQWTzGuUsIoXbRIj7UTVio1RmKahVYOuOEZNg4BTSswI/RCMfJiAdjgoxECH9py6IJkS
cC7N6kb1n4gHiDVAREZ6LsfZxJROsCQ2kryZERuZElh61IB3cVBBPcVK5zU525FtGuMjqHaKBaRd
E2GIJDetabswBkAW9a8zzA+TbclCxflbiBQ3wHk3YpWjahcmTw3wWLb4spidkXGwmOEd4qjG7aFw
M4No/W6j5lkmMg72zqI7QhKVmYtEtrDEjtiiYwnK2CxmwAHcarhMrmZ0B27VGXmPqwjEPLvC83zY
iOBBx9SJt3ARHHJvWtNu7GUtgIdaNcdXuuH9SqW7fmuvU3ehfpr8g07A6AxcqMdgHoCduCfpMAHE
qIxkGfB0x6NL9ipimbobYn6B006T1t/pd6Pyff1eb/k+Y/2yua/1rn7R6d3ztgmNQuO2CtMbUYxO
LUT2SbROJii5lKZxnKpRNgRuPnPJGUeYMJyLtE0UvO829P2TFyFq0CQOAlipzvXIxmMLYC+Ny0yd
oFF507Om321WozMdoIQMpGETgZBgnhdiR2hUnE9hCcV9JpuRExskHRjbhGAOIAZPOB2s5ZCGgQ04
ECq8y9KV4igEzT6FP8LONoTxcE+paPxE5SFdLtH1KJsShy0I01RLk/QjcuXbnMluEQLB99V4REZO
xkAowEJ35NxaoEAHtClK9ZhatxrxAgnsXDy05S3YIR5qJskhwBxfUjojclCOM9PCEJm/CuQLn1J7
cwe2i0RuwMtgI+bjGVQQxC0EcQoN4RPt5dqJJwH0oyOJ6K1RiA+kY70RtQiM0+SBRAYnLa29OqrS
7jKO0o3LpqPUNwQlKsj4Y5BGcjwRw3lRtvWbA7gmakcAsae0c15Vo1OMtgTxqSMcStReIFe5RsWo
tGBJ1bSozIfSXZDmCATgTkyE7dRIO+ZK1TpDM/YEAA0RgAtIzyRv8pcNi9mB4JdsV5X9QtmzI0F2
NbcvyLXFgTUXIYH1JpjzuW27E9qVc4GhCM5UiMUanRIVyKAt3AJQwjmoC9wxmGmcnQnB5WpVA3J4
1ThATALYOgGMDiSMSo3OR5ohy+nUQB9a/wC55YXoikpaXcf4V8S1c5W4dg1R7U/4wEOzaSD9KgOU
syMbbcenV9Shf56w8o5Cg74yRv8ALQ0+dUnJasXV3lbgeOMSpctI6rcnMD7pClytw/EteF9iMR7V
Y7pbEbcw+RUrvLAkCsgjEhiMR1I3GcA1GDrTK5btOPCCykYTjMk4gjpqqYdBtvRCzMvCPhLIXDFr
Vqplv2KuIqg2GSLCgxK3hVwCwTxQMfWmIAI2IwlAHesCInBYsy8qfEPZJyXmMHxfN05VaBii21RG
wdSodiocwZSANTAMzq2QOGDU3IThIGOKtWLUxM2n1mJdicuh3ps6a9BtXA8T9C0SLMeCW0LzbXBL
2gMCgSIkGhpVCcBwSqFGUQ5xQJocwsU/Q0g4TBGQYyNACgG4QXLBkbkgNbUBU5mhu4IxFdRqSjrk
GakURqYHJCOjEYomAcy8TqVshxKrLTACI2dERtkmxJCuS9svpCjcbhPCSNqOk0o/b1SpXZ+GJREc
RknmWQt34ynCNIyizjtBXk2IGFuXilLEjY3UnECIajyKMZsSfCyc/wDoonaid/Q5REQiTgFIu9uJ
aAyZeXOg2hfDi8TmafWgdUOx/wC5RlMHyzSRGDFfUnT9FOo8isWiPpTYBaAAwGKMLddslsGck0cO
nzJeKWCb2jQBGMiJSlijciCZ5Amjo29JcmmQ7VGYvC5KApAgtTep2rgkIvplAEggrzIyOnYce8oi
OJxkjM4DNTPcjamWE6d4XlXSRI+Ce3tTAawNh/Ko25tGc+8gITkZEjCqmSaXBwHJ0YXnEhtUblki
dwYxJb61pkRF/ZjxFGNuINwCpxIda9TPgCUARIaS0o7QtdmEjFgZBjRRjywaLAyl2qXnEDTSEsC6
iDUkq3GJrCP0oC9GQkKa4Z+tfBjKUtszh3IzuS1SOauHJ1qlhEIZ0651tpzdC/yFwRBLyGxA6Xk3
FsQmwEhmAncuuK5IttJWNVXpotyAPC2BjQoxFzQ5+8IOr6CjO1fhJ8JCRBH0FCbCZPilGUan/EAh
O5YEjgZQYlt4gVqlybxjhchE25d4qgJctqzM7Lm5E/nOjO5blIE6dRk0x/gAATiV24IDGAECPWQ6
Dcy4kXDRJujdQqRHNxnHD4xMZx9ajbtc8OYjHG2ZGB7mKMbXNxL/ALrW8h2EoytTo3huyE3O5DSJ
G4DxRnpFvuRErflyHhjGGqMv8VVGN7lhbJ9tiR6gUTd5bUBhMCUQfWCgJWYkSq8ZYdqc2ZbCxCac
JgkOGY07inaYjhqMafQUwmX/AESvvo/T+RP58PWnjegf8QTQuRkdgkCmcP2/IN3o6/Iq+llvoFrP
s19A6EB39QSRBiLPO2w8cAJEfWjEjTONJDayAwTgrTPHamNdhR2IlPmVUJvlz+gp1m6m/q0XN/yf
Mf7ZXNf61z9o+gr6J/nHVK3Ey2stJAZPOBkMdJNPUm0aWwbJAXb05RHhiwAQNu4wjhBmj9CE5TiI
j93beIKeLi3lCEq/SjO5KcYezGJEij5uoEeGIiTIrXeshsgxBKFydqPEaQieL1IA2p6zhHNeXfBt
HGtUBG5rJyiCU8LsaYglimF2BOwEFcJB9LxxEu0OtJiNOxkZ/h4GW0h0YsbcZYxgdIKAtW9ExUTP
EfpUrl/mLkycg0R9C8vlrkLcXc3CDrUZnmZ8yY+wZGI+tSAui3H2Y63b6FGFi3O8fbldbT3VClPm
QIkeG1CGp+8OotyflA43LriKlG5alOcc7QePrUTzMDZE8HLn1BHRrMY+KenhCH/cRL4AYrUL0G3k
BPbnGfYQUz1+Y42v3mPYE5oAhDF8U2QThUxRtijVkTn0EDBAbOhgSBEAy71RM9M952I3Ln3jP+iE
LxHAKwj9pWiA1XDmvJgdUva3LUayKIoWwJxUhOjZb1KXh1UiEbjVBZGMhQ4qVqBeIr2PkjG2HIDl
NfEiDQxFAN6fkDIw2OzIQjGRiMGESnNkn/8ATf6lx8qHzJhILj5Uesj7Fpu8oSDiNQI+kIz5WN/l
LhyjplbPbElGHM2I3oGmqPASP0S4Qv8AKRvctexMWcdzFGHOCQIwuRiWl3L4ULkz2aR9KnzBGkSw
jsZUXlX4m7yhoYHGP6JQ5nkZ64yGpmy37EYyDSGIKeKDoi7MuG0xb6k0ZV2GiPnWYzJDORVu1E2Z
SsF8BUdyP4Lm3iS+nUY03onmrEpgYT0uO3hXkc1akG8Mo/3qUYc0RGZaIlT1KULUnvYEgg9rp54u
GKtczGkZ8M0QTwTaUTvRu2ZAidTA0PchCQI1yaQK12OC63rRhfiRsOR6wEiTHenGB6SU5zUfPiJw
UY2YiMNkaLVNgTm1UxnXIEJwRISxZUwRH1okim3oYhwnitTLhDzARjKhHQbUy8WYOtVyOuB2I3LJ
4DkcQmFao7upKJzCnHG5E8IQtzxkhbt3Tasitwgmu5G7yd2XmRDmEqiSMZUIoRv6zoOKxNCpPgiN
hVfFDApti2qoIOw9QiNJHAoTveEVLlG9dIEDSAA2KVyUnbIiqnMwAhGkRuU7cgNBNJIyB1QGGaYR
Z808XMcijsicU4AHZ0wHaVg6cAkEepGEQ0Hcvg6Yd56sp5YqWjxPRedfGgRDCL4q1CZ+EKzG4Lyv
w8AGbUBxetTsO8YsYnccOn4Ycyoh5lsiWzamkBGqnEsRFmIzdajku3oIGKp3lXDHGQ0jvTvptx8U
zgEIctATasr0vsWPrVAjKfhzTRkJaQH3dFc+inf06LZwxZEyJLKoYJzQes+pG3ajpfGRxKEp+pMO
glE+xHHtTywCN0AjIHYFMyJIapOC0gupwIAiYsJ/nJyQBtV65ZrbJoRgWDOtEe8qiAegyyTnxGqu
CXstIFRuzBiRWMgWIZazIyDOScVPmp43KQGyIWxaZAEbChbnZcZSP5U4g53klDSwGwUU5Q8c2Ber
NsQnGL7pYlC4LZADag4TRNA0THJThImGsAhsAUddzW1I5AI3ploxFN5UrsjWRw2DLo3rVGOmJzmd
KFosZYyIwdE4OnPWdGI8I6K9FOtRu8rjugbguG6U5l2IAy4TiEJwLg9TX5cdfvaQ/rWudsGeZw+p
A+WISjhKHCfWERoOrHW51J7ly4SAwIYfYtAvMHcSMeL1uuHmdUcgTKLeolAQ5jU3/wByVR3okTlM
ZASjI9+pAiBkfajKMW7mIRErQhIYA25cX0oQu8sLcpUebxj9IUhzHLgke1bAlE95UZ3eTjC3PwzA
jL1gImNg3APEYRIA+kITiYxJyiZCfqBRjG5TOMpmP7SeHM8WDa4lHy7sg+fDJA+cQRQGMQPXVGIv
RIOEyDqCGjmAYDGOqcX+lExvO/hAuSp60PikkYtMfQ6PlOY//cMSUdQlK6D4NAMP1gh5tgW4nGQi
T9qP/biYHtCMg6E524VLeWCdY7XQh5DHF5SAHrRe1PhxZj9qEpQuRjLAsD9q0iNwy2af71W5pOYI
LqlxnzlQesoA3oV2F/qTxuQP+IJhciTs1BYj1qnyavp6KUnCNcF5jeLq16SidnV4SRIVBGIR1l5b
dq0TOi57MslouYZHJk4TSqETGo+pMgn2/NG7q069Oii5v+T5j/bK5r/WuftH5dv+b+IA9q1izDVt
aq4rYB96NCiZW/MkfamXKMuXmbL0IAf60WuTMzjIryRO3GD1mBxlAWLvDHECRBl3rWZShbj7EJky
PrRGm4IDwihPeSjO9Kcrj0tCLjvIQblhF8ZGMmC8q7CNyeZgWA7ypEcudEfFPVRajCYG1qIGRlbf
ASia9i0+aBLYaFN58H7QnhOMuwhYj1+k0zAkNhqjGEIxBxADLzZWISntkH+tNKBjE4xgdIPqQ8q3
5cxhMFyPWpGV25KUqklvsCHl8z5VsUEbcWPfVNyl4GRLm7ORB7gHWs8zK6QKRhJg+/UgZGoPF4TT
cykPwxkB4SYGvbVC3c5eIkA85NIRG4YqIvcrKE5YRf8AzMpeZCcBHGQ4o+sIQOuEpVAlE/YgfxEW
OeScX4NvkAtUJxkNoIKox+TkzmA2T1Ur0piIlKru42KsmiMghuTdDktvRkC7mgzQGbVGxOhtzTTw
FWREA2r7EWqRiVruY+xbz7SvMkNNqOWcitFsVFIgLyrVbkn1z2LU1BXt3lUxwWqE+LchqPEduW9G
3K8JS2s7d4WpxctyyC4Lcgc3V+4BwAgMdwV65pGUXKeFqIOLtgNpQtwwGfQAFpGJXEAdxC4rMD2x
Cry1s/4QnPLQ9TI2rHLCAjjIEl1ouW5RmfCQaLzBEytRLTOYVu7y10XLEwTp9sEbV5dscRpF0Y3R
q00lCVQoyssLMcbJwIKN3k5eXzMQ87RxH5QjavDTMfSnimNFERLEmii8uJqlYv0MT3I+dy8ZbSzG
m8KX4W1GzbcsZyx7GWuzcaQqBEkOV5NzVIAgk+LudG1zFkbRMOKhW7co8cRWRWq3cEhnAl2/Igwa
5EuHTHEKULgBORzRjIcD0PW0nLoJRPRokaZIW59yAyCEoRBGYKYvb3hAXJa4bRitVqQK4qpxRCUa
l8ChMAwJyNQu5PE8WCF6Iyr0iMjTAKVwcWoNGIq5KFy9bkM9JGSL4Sw6mNF5VwsLmBGRKN2VzzJ4
RoynbcCRLgKV27IRhEOScFdnHCUyR2E9L9JcOtM9Mbch4cStAoJGjqdo0IkUYZSClHYgR3oyAqtX
UkHYM5PYhbvjTG2ToIwPahbtiNcwvhy0k4rROoK02aDB0IgvJaYggjJE3HMHd41TRjIjbghIUEg4
foh2dBR7VTqEqXZ9aAuFpE1CeOBwULki7UkM2K1efHUz+X7XqU77MJUiNw6jynJ9ronEJkAM02zp
dQtuxJf1IWdXw3dgovSUw8kwII3KtTsRGBkQAgLT12YKImXmBUreq9LDFG9enHSXeBPEUfLB0nAb
kXi5GTrTbtPKQYsiZ1JqOoLcM0IhaTLTbj4pFcLShvUrVqIi7iiM7wdvWjc5fhasgVHl48xOlJRJ
OkoRB1TNNSqhEYksha0nUo2z4sZK5IYyAiEIbIsrcRhKkuwLVLINGOZUrvNFhMtbtjFAkM4duh1I
vgKla5B7kqgHILzbhqFdu3D95WTByIigULtuJjEeIkMSyfHIKIk85yDiDsB2rVM0HhiMB0CEA8ig
SBO9nI4DsQuXX0nAtRabMTIZyyWqsiCHZC4TQhwM18OMYDbMp5StzGYBYoSie3pLKvXonOSIYFOC
QuIrFNmMD0C1I8ElTA+mYgEb6rTpGk4hqJ42YA9iM/L0mXi0kxB7ggY2/LkMJQoU8jMyPtOPyLSL
sgBhwh1qF98m0sPokiLV4Y0nqlEpxfMhnEXC5/WCIOt/YYwI70BO12yERJv1SiDZ82HvaJR+pGZt
22BY2zq1qMI2TrlkZAD1owvRlanHEBpD6EJGNwQOE24frQPmgk7A5+pMbluQ2TYftJh5PZwpxatn
sQPlCJGcXifoRBtAviSShCRmYDCBNEJ2XtSGY4vrRMpyJlizBeXG60XoTHi9boGF2oyILHtqnF4R
HuxMgB6kR5muR8NwTII7lTmNX5r17kPIlIMOLzSCO5EcxOQi1DaDhEi1GYylKkvU6EtWu49bRi30
qINgW9XtSLhGJtecB7Vp1PTajAQxjcLS9S4rVwS2NT1qNy7aMbc8JAiR9SEYCU5GgDMUY3dVqWyQ
/I6a2ZTbHSHWoXYjcSx9RVJ7nYt61rM46dr0TQuRMtn/ALrjkI9pZcEhLsLqpAO89bBP1h0GRDtg
qvCGyoTRiZ96+JYkN4qtNu5OER7NWX3wO6UVXy5dhIK47UgPzSCmOqPaPyKlyPeW+tPEg9hTqW/p
foot+aJagTYHIryeZDw9mWY7Fqiddo4FUxRrQ5IasFuyQHzVj1MPQ83/ACnMU/8A0yua/wBa5+0f
Tb/QN0v8/sajYU4twfawTXbUZDZgqcvGnahcOsSGBEsOxHTKYnLGZOqX0o2xfIganhGo96j5V6MI
R9kAjV2kFSuiQuyPhgJGMR9K0+YQdonQDcFGFmN24PbnMA13KU+YJjEeCAg5l6nUdPLAE01SEmHb
gjbv2/MmM7NQ/ejdu8ubdkFtRNfUhG3G5MyyjFEyu6SMYkVCeF6Lb6fWtMb8DLYJBOZAPvCoX7PS
NOMZdoBWmdmBjs0hEeREOGJDgt2oCNsjTWPESx7CpEzuEyLkkj8iJ/EyhFmEYxYAetRHL8yI2441
kJS/SYoCzzRuOXmTNu6OoFTlGQuv4YkiRH1KUxY862BhMCMnzZigZ8ob0WeYhGUdPfJ0Dc5UmBd4
weU4ttCEjZnG2TpctqcY8IKMgZ6BQz0lg6+/FMaGj7diBHMW2OHEEDG7Ag4HUE7j0QEPuIljtK8v
liQ/imzepOal8SticGMjuxRYYJ93QXQuA8UmocgjM4nE5LhDMhI1GYR/DxEJDMrS/F7UlrmHA8Md
pOZRuTrKVT2IW7YFMdyLBzEVKMpCjuTtJRGS1Y7FxliFKVoaZEU2oEgsc1NgTCJodhV2NyIHlZhS
maGciUIxrrk/95Qtwq3ilv6Ng2qi3rcqV6OIgELVcui3LEsQxQhY49ECdQw1DBCUoEicWuiWBO5G
/wAkAY4ytGtFK5yMfI5q14+XJfXvi6MbwOuJaQliqBlGX9OfzhiRgInHVkytXQXuAccnd5Yn6V5d
ykx9KeKriFGM7hMI4B0IO8ZZKqpihYvE6jQyAcDtXnWwOKrhEA6p+6PtQD1uFnV2xKEbgtnxEB1I
wth4y1FvdXm8jzGi4Bx2ziJdyaEzOIrQuP8AmQHM8uCwrQgprsJ2TvDj6FoN0RIDgsUREuBgeqT0
aR3ox6HzUbkJMY5IE0K8ww8y1mtQtiYOMcwvM5O5pkMbclpk9q4M8itPNCmU45oWP6fbc5znQLVe
lbnnpBL/AEoW5OJxxBTBGlckbd3MURj7Jw6QJSbTWPao35yEwQ0nRu2gwNWHVheGMSo27UXLPIo3
HLjAjF0/9Q5m4YyrC3GRYDe6EhM3LMy0ZHEbj0P1DIVbBedPl5SiQ4kQWRu3aTf4cd685mmcSEDv
VM+himFB0GZqBkpThcNsjwwxCnbvkVo4xqnc1wLowmWgCwk7BAW7gnA4VdkRMHWBwowni+K1iuku
jEbGKFl3tyLEHBajajqFXboAeqjPOJ+goMu4ojf0P0shHbRAiJnGQcEB0I3aTqWzGxHm+e1ETJ0R
BagzR5vlDJojjhI6qbQer2Ltr0AbFqlKMRmSUJAvE1BHRK7dLRGG87FK7cq+AyAQuXPDEjh2oxai
ZsK0WoHSBkoWJFxE1CPkBvLo+1C41TiE56XOCJ9kLiiCBgtIAD1IFGCDhgMStMKb805qek/QjOXi
l9SEIVnLBQ0wlIYlg6JvgxJPDE4qVy1y85RGBAcFSt83GUZSLsRUKcLQLNUlSuyy+1GXqQgM0Juz
EMUDObrTGpOJ3Kzb96YdC1C0ZkipUrUGiTiHrFE3QbkxgXUrlyGm5kZF/UqF9vQ5NckXBMSvMEJS
1YBkJXXt2xVlE2Ji2Ri+YWu4TcuzzOHcFO/OkLYcqV2XtYbh0VXnEcc6RO5C8eLOmS8sRJhmCAR9
KAiBGOwBkYWgblm4XcVCMDWcRV8k5iCd60XLYrgQGIRtgvblWJ6JXDkERItAYRHXp0FEJz1YyGIK
hJ8vlWKej7V8S3GfaAVpNmDdgTeVw46XOn1OhO1E2ZDOEvyuifMuPLEuC/0LSLsg2B0h+9CQvRYU
0iJiD+rJSMLsZP4SZSiYqnMPL9On0hRbzNPtHhme5HVblKHsk2694iUTK3AEewYzcoCVlpmjOR9Y
WnmLejZKJ1Im3C5MDExGH0rULmkbJUK0i7EE7XH1p/Mi23UFwyB7D0sQ/amiBHsotUoRMtpFVpkA
YmjFPG2OzJDVbiCKgxoR6kdUBM7ZOSjK29onEQNCmlbckVlmvK8wmzjoIH1prYMJZF3URdkAIBho
DH1lRnbkZ6cY3C4+pGZn5T5WqIykPxESKGUmIREb2h8INqZTF0TnMnhnE0+tECY1ZP4vWnJvee+F
dKfTESGYcy9SjKF2Vy4fFAxwQl5Ig+Jd/oUSJxuiWMYio9SiBZMZSYPOgUY3oxINXgXQjatykZUG
rhC0cxb0E5guoxtgzkcBh9aMbtucD3FaZRnucJrMuEezE1Wm4CDnqFfpUJQlKNRqiCdJHYgTESBD
sQ6rZhX81vqVIygfzZfldfDv3IdrH8i4ebiD+eCPypgbN8ZNIfbpXxOUl2wdvodMfOt7np9KaV/9
aKeNyEu9l4X7FWBHaEzFcRZMKR2phl0aLlRlt7kR47JxB/8AWK82yXhmMx2qqwURKjIHaPllfQV6
KfIeb/k+Y/2yua/1rn7R+Ut/YtiHGwp424gnFgFqu2YzO9NG0Le+3wn1hEaJSepeRNVGRuzEY+GA
AYKUbF3TKeM5jVLuZlEm+b0o4RlQHvqpxHNCxA4W4En8ijHltRbxXL0niUZXZzuXH4IWg8O+ihK5
CPL2/al45HuRMOUlcILebIGMUbfMWvMuAO1mofZVCXM2TZhIsKvL1I6YXNEcbhHCPpQjG4Zk+7El
CcuYgAcia+pG7C7HQMZEsB609m5G4BnEutMZxkRiAQSmfu9E4ABOJZeZ5UNfvaQ/rRnLl4EkuaUf
sRl5LasQCQD3OpSGuOsMQJZbKomF6cIkAaWjQbiyaxzctDMBNzp3hiowtc5K9bAYCUjBjtpircRe
N6IBHw2iXycyZ1bBEZ0qbY1SJ/OVvXy0YEgmUi8yf8McFDzeTNucgSTMkAt7v96Hm8rdhcZ5CTRH
cZYoExuCTPKOnwje6BF01Dnhlw/pUojC1zEZTuD2S7A7VwyiS7iJOKDNbfGLuhEYqUpViMAmAaeQ
ZCRFN/SxDjYVKrCjDsFUZQFJ0HYESQwOBURGPhpMtvoowts+kF/tQ08UXNPe3rzJgiPswGa8mFC3
EcoheXYjT2rhWkVfEpk2eSb1ttUpREYwhQzlUBG7buRvGNdDEO3evKlwRd5QFA6NjlxGyGpJn+sq
/wAxclrnIExf6ygFbhbDznESnLtqgAG+tOqpgt6c0AzRt8uNchjI4BPGcoQOb6Ir4vMOTiA8vrVa
y2yf7E8Y2jDYKE+teXfkLE44iRomsGfMyzjZiZfTgo8xyliPJSjXzJyecv8ABFwO9XLfOTlb/qUi
4vy8MzkGFApDnYnl7EDWVDKf6K18vehc5K4GuxnSbZfSrPL8jGXlVlc04A5KVu/E6Yx1cVCUbN0E
AUBKcJjkgbktLihWmIePvISBorlyMdcidUULdz8TYuRDSjAgwJ3OtPLwMbR9qXikokXNF6FREGve
tflAyJ4pPj9Cjdt0ykDluK87lZGxPGhIj3ELTOcZAe0TFSlzMxduSDCIqAhejaGiGIPDqHag3Lys
QOZqD2KYtisA5RBoRj0m5AAQGZWiVCE4HAcx1MEIDNA7l+FvwiImgK8/kyxxYYLy+YBtXRhPIoR5
mPZcCAtHVA7E/Ly8qQzeqgecukxBqTgoXLUxISyCEhgegF8EbkRxRTdAIxUbZpJ+tMY0ojK3MQkK
S1KNt9ZBeRUZwLhlHl3Bu6tWnMDrOMUbRukWzQgMKdqJasc8UEDvUT0iMpASOAKMYyAlsRjWtC6N
y2YgbCviz7QF5dmZ8tACsftRJBfJCRDGRqtYzWiPDuCIlTtUboixjgB0wlCpkVKM+KQNQh5IkxoR
LegSpDbXpp0RG9W4b6rRGTkZDojy5nGN23RiWcK5ZjMTu3BpEYl2fM9R8+hs9iMz4QjLLElXI2Qb
lp3wcry4y1R2Gqc6fUgLppHABYoS5eEZRkcSy+6if/XavugBhgjOVuIiMS1F5shxSLoaeXDGuGKO
nlhtNEX5fAbEfgVYEUKP/b5bCtJ5cClU3lM2NFW3hXvQeGp8VECxuwKc2Gq2al8DsxTeRk+aBNmk
lrlbcA5KtvJeZOGo7Ef+2Livco67EtILttUI2v6ZdhGAALRIiBt8Ktj8BdtPWM5hjPsojHyZAkvI
5MELfkyD4Havupb1GQtGlRvWnyZQnEvSo70DKEhE5j7FphYnKZ7ENNmQMR4jiFW3I0ocyjKVuWs7
KLggTtMqoynak+DjBAW7WgZuHJVbU9WYotTGMBgFqvajDYEI+XIAeHCq+6nq2UURG3L87chG3E6Y
hnKnykIM+MyckS7binBrsKfV/haqtxDkxDEBaWJB9lCMQz7GQiXlIoADUTk6MovrnUh6BARJMs8G
Qq8tygRxT2AhDOR9kELRb8UsRitnodJzRmDQ9LnoEggoP8z4ppxEhsIdMLUGz4QiRbEScdJMX9Sb
y9J2xJ/KoxlcuER8OFPoQnG6S2Uogj6GRlauQlqykDHT2MUdPMEyGHHIBDSJm4PEZSBgUPOhGMDj
OI1N6iiI2hejlMAxdSM7Qho8QMgJdwKEjC4I+8wb60CZkA4ODVNC4Cdhp9a4pxj2kBOC+8devVxV
MV9ioAOwJpgSGwrghGJ2gVQ8yIJGBwK1CAJ2mv1oEx0yGEo0K1SjrltmdSJhGIbGij5RMSTQjahG
6YyEcwOL8q1RaWYD1Xl3okgezPFACWiR9mVPV0tIONhWkNIe7LBtxT8tenaPuSkdP+ErRfqPz4gg
96a5y1mW06WP0Li5cx325n6iqT5iB3aV/wDk3z2gFcNy9PtYLhh3mq+zo2LFGMqhPA8JxAXCHO7a
tMgxXDioieLKnzO6Hpub/k+Y/wBsrmf9a5+0fQt/aaodNiNmSJhbjEnFgAvMuWITntkHWm5aGnZH
hH0LTbteUWbVAtL1rggfMxFyR1F++imbtyd2c8yWA7gpk8xKAI0xja4Q29QnG6LUYPxQrMvtyWqz
dcmTm/OVW7Kozt3ZX5ltMtWiMf8AC6jclcM2jS3Yw1b3ULlwCEY4xtgSme3FCR5eNmA8V2YeTfov
9iMrfKFwWN6YMYtt00Vw/hTduQwnGlvvVzzrBnO2H+E8o12nJTF+yYyjHU0Hnj3UR8+BtUeIHET6
l8bVZDEjVVwP0XTynK1Guk3A2pti1C9ojkZgxdtjomN+IAzlwj/mRFq/CWnEvRGNm/CZjixBRFq9
CZjjpkCiLVyM2x0yBZEwkJNixBVC/Z0VD9qBnATbBwCyErtmE5RwMogo3rtiAIFTgG3gI+TZlGYk
dUtXCQ+xOZGJzg6eNyYhixx9acX5fpDxd6J83h94kl+5SnAgyjhqq+SlI2xIZCWI7AFqnYdzSRDf
8oqoyucuYyOZpFR12ZxJqXp6tquTMZAECMZNtChb16dLl5Bge8q5pkAIhwTRt9VfeQd6bShDUSZR
FfsRnLwjBEnxGgJyG5eVDM1K0xqcyic05T5pxjuVsYTeWsZvqOPd0GXLyMBOZ4RgR2ISn4RhvVwD
CTB+9RtY6pCI7yogkRAADmmARM+YtndE6j6ouns2L94ZSjbLeuTJ7XIyb8+cYn1VTxsWLf6UyfqC
JJ5aIGNZ0XkxnZhbzMBIP60J6uXmTUCcZP8AQuK/ZiNggSB9K4/6hGHZbj9pXF/Ubk90NMPqUYXp
Xr5NTKcyw9SLWY3IHxOHkOwlAA6uWlhuQnA0NQU0g0x4ZDFeTz0TO2PBeGPejeN+YlrcMaCuxWy+
rhDyOdE7VFAVesxiZG2WMhtzQnIPawEk47imkgHogHRNwvDYarzrMYmObioQ5vlrhjcgK1euxG/z
PxJ4tiSe9XoXLfCRwwZyCpRgayyTTi0hliD3JvJgZbMFwcrafaWWkyt24e66jMXtcgW0RBZBuYGu
YeQMKdiN23V/E2Hd0k3AQJYIjCqMJ1BR904HqG5LLBCDoHPahblLVb31QkYhyKSGKLDzbGzGieyQ
JjG3JSlwxuZAUK8rmoEjKS86xNxkF8OZIGWKbmLTtmKJiTA7wpQjcjUYFSAwJcN0xkMM1GQLv1W2
q5rpA1WgDTF2BQt8nOULtzMEgAbUbo5w3OaNZaxQntxRs3A04HTIbx159iCBJzUJbloYjtUpmukO
ylOMdIFBXBQHmkxxkRRkXOqmG0qM7vhm7wDLRGGhEAiO10PLuayPUo64vmUDHHYjIsSC5CoKGiMn
YjJDgEIbT0tKek4xcZoT1CLe0PCVxAEjMZp0Lsayj4k4qdi0norknUhb8QFFERBMnr/erdmwCZXJ
CNN6+LzM/NkKsxj9K/D3GJFYzFBIHooVXBPlkEfLqRVkJGRjNuKO9Rc6q/dx+1CxYDGXiHbkmNbh
rM9uS5i1eteYSfhyGQKPLm0JTrpnmHz6GbhHiK+HcnDYxzXw+ZmKuAXWmN4Si+JR5W5MERPFpRkL
Pm6iznIBcXLSAwDI6oTi2FMUeMhtoVLscHYlPG5EvgxFUS9StEcTiUIjBASiZXMzkEw6nlRxOJQi
MloB4Y4larIBtS9olmVvlbI8ybMTlRR+DIv4pxDxbtUbU7cbmgMDgabUJ3WEYeCAwCjCIHCK0zKG
s8QrpGSZnWpnbBgpS5mcrd3K2Iu/0LQ8bcciQvNJBJ8MmqewKUzERMhxHcowDY4AIHQC+5EeUIVD
yIxUo2oR8tsxRkJQINy54IkDh2lSvGQEfanIgBP5rtiAfqRsw5WPMWxmY8XezrRetw5eYwjcAb1r
zTC3OIwIAP1I342Ya8i2To3DCIIoSyMo24kSxcLSbECD9ChdtD4cqMwx3JyA+ScB5mgohPmSIjKL
DUgDbjI/nFBrMQRmEBO2xyIovJjbA2GOJQN6I8w1O5D4YpsTeXCMo4Mw9aIv24CI8JigOWtgyGcR
VDXEx7elnr1tQxC0mpFCqEMnMlToZRiNqjAZD5wZU6NVy3GctpAJTGzAjsVImDYNI09boSlcuGYD
A0/ItUZRuxIbTcDfUibN6MHroDt9KlrN43fZ0ngXxLD/AJ+Pr0qM5+XOMsRF9Q7UIxhKUzQAMvLn
CdueLSD/AFKt2IOzNPG5GXYR1adZ+l+gGBFu7CsZZPvWjmbRheFCYikt4KF2YMLMPDktMwLg2SxH
ZJauWl/+nOh7ivLugkD2Z/YgH8uZyl9hW7oYhxsRGI9yVQj5M/Kn7p8PrXxIkDKQrE964vWqEHow
VB01VSmK12TXdReVzA0XhhcwftVTqO35Ju+S7lh6bm/5PmP9srmv9a5+0fTY/Mlf7GVTMGOSMYwi
InEAMCpC3bjDV4mAD9qlO3ZjGcsZAVRveRA3D7RD/WvNny8Zz/OqB3GiFydl9NIxciI/wiiE52yY
xwtAkQHcEJES8uOFoFo/RX6UJGc/Ki7Wo8IrtOKEvNnG0Ha3CmO2WJVL84WsNFvhJf3pVdEQ5mdu
z7kAxP6Rf7FphzcoWvdi7nt4mWn8fIWsg8n/AGkYnn5C3kAZA/WiJ86REeGEJS9T/wDugbnN+Dw2
7cmftky03eYBNqmiBaL7zmtRmCfdGHetUdJMsqU9aEaT2kDBCIhE6fb/APRRBtgCOMmxQlbsaYjE
s5KiTyvBLCIfUVpnyp1HwxDv3oxnykjcGQdvqWqfKmWnEmrfQoTnyx0jCZFe4KUrnLS1kcIIcn/D
kpS8iZOEo46exS0wlUvGRy7VGJ1RG04FCdu7KIHskcSLcxIRPtFypxjdAzc4nsKhOMoEAtIe0TvU
YyjEwlhGJ+1SErQePul1E3bRMpFjspvRtuXi9STpp3K5csXgI+1E1iUNV23bE6PF3+lG7KcTKPi4
qnsQtRuxEvCBgOwL8LIgW4AHSN4eqFuEjDSNRmMmQHNTu80BndmSB2AJ7FmFsfmxAKbpnDAzIBO5
WrXhjckIhvdClORcyNPyL8Ly1KPOZwiF8UG7LOUyR9AXBE2pZSgShy3MHUD93PatY71OyfDPLYcV
O1L2DTs6DauhwV53NcvG9akXhMuRE9mC/EyHmWqCIgal9iPLW7JtUMg5eg2spaoHVNyZPVypctGp
i5jLOqMJ1i+K1RzwTFaJB9hC0mNXfcgIye1N5aIhy6u2jExkZuRuXnTDgYDemcW5M24rVy10ahUx
jiVG5cGq5JgNS8zSBpNJRxBXx7QuD3hwyXBelal7twOPWE8y8f8AqW2kO8FapQhzFv340kO1k9nP
2JflWiUjZkTWijO7cleq+kBgtMABCIoFKrsUAml3FEuCNyaKYhj0OmGPQLUzwnBMQCCn5eWi5m2C
EOYeJGFwIQeM7Z9oiqF7lZie0BNL4V2O3AoW+btRIPtstUIAPgYp7dwxO9a5T1gdQzv3tFyJGmDE
uF8GYJ2Gh6oIoJUdRlKURZcHU9SOxaMOFo9ycq7O0RpJZxg468uxCILOmu+AGhOCYyrbFGwJULko
iL4AbExDg4omIaOIiKBURDOol3iaRCEZYYlkZQlU4R/KgZsCcI4qBnSM80BBi+aEThLFGURQ17FG
UKCjhRxYhwhqxzVCnjMWwzSJDoG2SdBqDR960XQQTg/RWsTiELliWkgvpOCDY7cn6HRV29ck0Y5p
xIDtBVm7cux0wkCf/RUbtqYnbkHjIGhBUIWiJeVFpSG05Jo4qocHBPIaRl0OMVroXyGKNy5CZuyB
0OGAKPMzrXhfbtR8yb3JeyKn1I8xKPkxIDCXiK0Qg7hpSNU9u41o1IIcoQtChxJxJR+lFw4GBXk2
46ZyD6tg3IQg8pSKhbxIxKbEhcUAc6hNK1HF0eBn2FcJlE7QcEdF2QbB0ZC653qEjOEvsWvyxKrU
zUtdjDBRErRD1kfyIvGQOW9HQXKM5TGqW1EQIJ+pCyIUkayKYGqs+bhOTdqEYhgAwAwUTCmsPIBC
3nmclzF2QqDTsRJqSgPX2JkZyLRGKe3b4LdXKEW1XMhkAjG8azia5AomR1SJZ9yiwEBsdygDc0xz
G1aouBvUb8HIiG2rTG2SdgqpQuz8rmRhCYMT9Ke1KIekhi4RvR4LkvFShKflrrbolkbNy15sTR2r
64r40DoHiiF5cp+SCPboPWjG3ehNzi6tWIHUcS1cELdsOT9AT+K5nM5di1A+Xb944lUEj+dIt9SG
i4X2BfFmBKGLrUJCU8HiHWkWzukU3mSA2At9S1apGWZJQOo0TOgbhqM1GQLhUxTTofeCrUbU46XT
HBaoeE4haojtCYBim6fPlXZ6JvmSnpqUXHbjI7Wr9CcwaQwkJSB+tGcJSBNC7S+tSNq4CJZTjh2M
iI3g+REpRbuRB8yd0HhIkDD6VqlAzPtR0GneEDKEZE4xDiQ9aiLtqUNWBBEkNeqD4Ej8jrhuR76f
WnjKJG4hOE/XrXtr0tKo3oiQE90qt2FarEmPuS+woQuA6R7E3buQjI+XPf4fWnBcbegzjmtJDxwM
TUepPa+FM/qf3J5g6cpRrFfEpvC1RkCFWcR3qkgexCcWiCnuXNO3AKhMzucr4VgdskNMQIZ0p61E
x8WZCESXIHUfrn5t5v8Ak+Y/2yua/wBa5+0fRdn/AAAEY4lOJAnYmNCMUbsgCVKEJR1xxMWJCMdQ
JiSC25XJjGMS3qQHv1kib1omIDRIP2IXQOCB8I2qcZARiWqcmR0FwzghPKWkbShEF2zHSUxqqgFs
HCNwxD5yaqk8YxEqiIAqd6IEQ2UAFbFyIlPaylGFsa5eKTYLSIBtrVQmbcdEA2GJ7FIeWOPE5jsW
snRdgcAamuxDyrJlKVIxqf8AlCNvmLU7ciHBkTGuwKMwCZhxp1GveyuXpyEdTlnJ7AFG5djpjHuc
KYEjqlgcohfiYGMRAGZ1nTJhXAAqN2UzOdwEycuRlUq9fMZA3CBGRFCBsWnMrRPuO3qSiM6hfBLc
zysnAzKhZPLxE7dZTOoDtKFq9OML5pwuPpQEy4NIzzB3pruJwiMSoSmNAtl4gY95QkYGbltgHaVK
7rjMAOdJeIOQBVwjYH9fRRG3cDxOIRt8vIy5cnUxyR569OGicDCMYlzUjH1LVqDDEurv4Xiuyjpg
1cc6KHniUhOQBcHMoWy+ggGqcJjRM+Kt34kxIpJswvKtiR1x0sR7SnYuDglkVqs3hDbEp53NZZgA
ETGpjUBTEi+7tVVpEt6IEnAGP5V51ja045F0CAyE2445qL96I1eLLcicUGTYy2IyJqU4LFB4glHR
KIu+6U04Eb8lxYKioWKMJcccnUrs8TV0bbAWo4yRnZ+LZzCe1LRcGNs4LRzEfLu5STgebY2p7E22
wK0S4bmwqUcXClAhmPSDsQMJGJ2hRuWpTlE1FXXHb1AYvFfGs1z0/wB64hKHaH+pNG8NQqAaIScS
lGgAQg2mWTUKPLWOYlZ5YD4lw/s0qjdjehzVu2ePT4o9oWkGox6B1G2prgeJ2ZL4d2VxvDEoxuhi
cWqTuUYeXpGUjj0ymAScmWkR4jmqVlbdwvLZia03IykdMsgtUonSKPuQtEaYjAu7IzNwTYYbF2dA
NwYGm9AXIEW5eGUahaoXSYbsU1svmVpxdaSWONFplIyANCcVTFOfWmgHKicNo39FFKWxGb+OQDdl
ejTAEncHVu0btyFsAaoPID1BfeAdqGi5HfVMbkKYOy4rsT3hfexHegJXAR+bVS0wu6R7UI/lQtwJ
kI4ajgN6ELt/TABtNoafpR0QD+8ay9aAyComTZHBF/aTBAXA7YHNAW4iJ25n1p03RToJzJboqnNT
v6ah1WEaVwCJ0AQBypVGelmGSMpSMQfCjLzmAwYVR5axcLjxTkSwCErfNW5G3WBJIL+oqNqdm2ZW
6TGlyW3iSuX+YtapNQAMAAom7YaZ9tTgISiSO1arhYDLF0AbjGSMI3YGQqzocvy2mYPjniIoAsYR
8MRmdpXwoDzG4phRD1i7kK3G3Fi9dpXm3HM5VDYxQJDRepmgbUomAoTkE8ZCQOxPGIBOwL48RIe8
cR3p+Uu+Zbyt3a+qS0czCXLy2yrB/wBILVEiQOBBcKi84xiZRx0lpepEwt8O+qAt+W5DnU8fqdSt
XITjOBYyjxR9aP4W/GT4xzQldsi5EbKBDzrUoQGQTa9P6QIRvxmZhqAbVKci0SaR9AYHEYLSUxqn
tlcdCnEgdxxT4dDInNPAsVpu8EtuSeNRtHQKUUREiXZ8gf0D/Iq/Jd3UouIA9q0yswI7Ag0CCMDq
NOx0JxnONwYSLSHqQnrEtsW0g+orVZkDDO2JEfWuIzhbyZplGN+BEfZnKBr6kRG3Bxi5IfsdESBj
IYgcSZ5b+E0TxmG30TaovkHCfpp0MahaTESHuyw7jknsHRL3JGn+GS0zB0j2ZeE9iAn8Oeb4Jwxi
cxVYepZo6SQ+RrHvCMgPKl70PB3jJORqt+/CoWi/YEz72J+lfB5aAORZNKRiDkKBVJJQiInT7yDs
+ZZyuJ5fQmiG6nZ8y7uivo36eb/k+Y/2yua/1rn7R+RbP7cGWmjM6lcAeWERtK/D/wBTmLQlUTAJ
07iyucv/AE61O4JU86Z0x7QMfqX4i2BWkoiRDhcfLU2xk/2I2NE4SuRNJMpW4uBA6Q9TRMKneqEx
OwLyvYJ45MgBQRDJySIL4ZoNzIzlgAiIg0zUjngqpz3J8sIjaVJzwjE5dy1kPcl4Y7BvUZEvOWAR
as5YprjgluEZIWwCGxEgxRJIEfpRlIvHYFDmNL3741SmcQHoB3KVu4NUTijy10ky1GMDlQrQA+18
kIQYDID6yhfv+AeEHM7URcqJBm3Kdy1ZEteVziAH5r4KNu3EQhENpjgnZlpNRkVxYbU4qN3Q7V2r
UL3k8zHY9VL/APiErtwA8MbIjpkPzi4KHMeWLNnXqiNkRgFIE+LB0b0yZXMKhmAQsWAPMlUyOQUr
nM8xKAFZNxBTtCIJjWFwDTqG8K9CEdUhjEnSwyNUTIASiWLVHSYyDxNCEb/Lgz5eR47exQhy9yUb
IjgCRXerN2cjOczIknJpGLJpxBaofaFbjP7wigGxERBMRiNicVTFMS6jdsgGcMiMQvxMIGFy2eII
TcCQ8QO1GUWI2hCbPAnJCQ8EwCEGwlUFM+CAGe1AwtgRlWUk7cWxSkzRG1M3AjHUqByqU7E5qelw
WK4q78CtF5px2HFauXlpOcStMw29b+ggFnQtXbRgCWN4VHegQ04SwOIKN2x8O7uXk85AmIoLiFfN
sHLMLz7F2Nm57pLVQjeHZcimkPOt5NipXWECagIg5dIA2oQyiqritxPcE8rIB3UVBKJ3FG1ZBlIZ
EstZkLBy2/QpHlf6iLYesdUsfUVK/wAzzluc7xaUYzlrl3ME3ifEoSGa2dDSNUwzQjkMegGJAI2p
tchLOUQ6rIy2auhymiKb0RAgFjhtVyRg4q5ILetCRttLAkGiEhUHBaB7Waua+IAFhtTjhOxGUTqb
FOF8SAkvIbgGG5SaTg4BO3qT+pEBMMVtKfqFHerFvMvIpmLpohpSoHRuGIlIBmXFZG9cVoVxoCnE
Ih9sUWjbBO4BHhhTcFoja4YlnDBQtcuX11kBluVR8SVZH7OnFMT0UxTGhVcgiD2oEHBCIVMUxNUQ
MUYsZHBRHr70y+vqi3DxHFCIyRYPGH0lCzMcJ8ICZx2I2pNGdyLat+xGUiBEZq7ct+AlonaAnNFE
EeMsOwKMLdmMjejxagMhkgIiJBNBsUTKFQKsWBUmMwTgXwWm1zEwc3wPqRj+JkIx8IJITi6SMDVc
I1F8UDctAytYEBmQlKLtsomMDqIruCFoA1xJwQgLp30opNdhFsiW+tRhYkL0iaiJda7xx8MdgRjI
CUTiCHCM+WuSsS2RrHviv+4s+ZH/AKlqvrijO3plcliMx2gqsQ+1Ey8RyC0CJgJGpAxfeU8rbD3h
Q+sLXZ5gkO2i6NQ9a137OuGL2jqHqKlJhG4B4JjSUTAaYjAdNemir0AqnQwLLiFdqcYbVQkLxJ5A
ELQS0t+C1CoOxMQvzd6rjuVE9suMwc1qhSQxj8qf5wxKeUYk7SFxWoPtZvqR8sytiWIiafSj5V2c
XxdpBSDxnqwMnBH6qIBE4nAxnpb1o+ZKYPsiIEx3oxvw0SGGqJr6lIi3EacidJPY6LwLjEBvtRYT
LYtFPGYH6VPrWkzjXJwiARKPumoT2j5cj7Mjw9xWmTiOw1iUJSgdWYj/AHp7U6+6cejS7GWK0WgZ
g4sjI2/Lntg30xW0ZEYphGclq5jH3VpgNI6jfNNPR06nN/yfMf7ZXM/61z9o+gf5DX5M39miTktc
aEVUrfOWQLESR5kS5p+ajzPKzNy1eGviDGJzimFB0wLgDAd9FI3Kz1HUd7q2JGUbcATqBapKnKVI
nwjOm1AjIuoSuhpTwtipPchIFhIOFoIDKOnwnFeVbAMpYug57e1GIkHyCeRaMUf+XYEQzk+GPbmV
5l0vM5ZnctUm1D6NynKcgJQpHvRnrjK+QNGg+FjmhK83BBqBnTVIFaICI4SXJNGQ5fmiTAeCYDsN
hCflzK5I4UIA/WZRuytiFuJcF3JOLlAYn6SUJ3fuwXlv3BAAMMgMlg6xocgn25pk85CI3lcF6NcR
4h9DptRmM2iftTaLhHYPyprHK3bhyJoPtU+b/qE4wkTS3EuwGVFcNq2Y8TW94KhakXNsAE5LyLZI
twrOY2qdu4dflyMRLcpxj4q6e2IdQI4gRUbTm6+HAROFEJX4mF3DXAs60WaRzJxTFk4jrnLwxCa/
aAjmY4hCUSDCQeuxDm42hdtgvct4fUox5WMbdqONvDSvwsAS8tGoVDktRC7zNvXcMWd8Arl22Tou
BhHYVIEvB1qFQcCmwRBAJkGqpC4OC5QjEIyjJ7M8CMA6NqNt4k4EsChAxjGArIY/Sja9oVgdiPK8
zT3SVtBwPR5PMeE0BRMb+qJrpavrTCPBE0AzO9EzYEimkMpEYP6HhLLTeAmN+K1WJASx0ppxbf0a
JQcHEJoAzsSxtk4dilctWQLMPEJVPrUbhi8ZjvBWqxdBifYKaXw7v0FaJjzLW9a+Wkx9q2UwHl3R
kU1wMemqlDb1YXfZlivLgTIjFk3I6oXDRxU+pXhOT3oRM7s7pNAFKGNcUBKBEQGBHSJAgS3o6Xnc
2vQJ5Dt6BKEtBHiGZRNuTkdgf1LRN3wqnTHArjLCVAHV6QpxMAVAGVIhpDatWXu5JgGAw6HNQDju
VAwlnktOnS47XRG0pukod6J7VXKqbYOq20qMRmWULQ9mMYoyBpGoUZS9kE+pXLh9o0WA9SrEJ9LH
cVQkKRBcRWoBhLAKNwVMS9VGfvAHpoUx6NJUoplRU6XAUXNXwRhIcQkfUVrcMRn1iXRuyxlgmj4p
UCa4NRlipXLAJuEUJOHYjzvOmRBkdEQSMFK7ywMLoD4kgt2rTKcjH3SSydnOQQE+EYgBQhiRh3qH
lz0XbeBJZ9qGqpHS8JRjCPim4QHm+ZMZbexSeBNqPhACELMdAJ9S1gmezeUZjl9UpUfL1KV7mQDK
QpECgUxGAlKWD5KNqUtF0Vk1UY25GIGaeN8wJxDOEIWebPl4sSQx7kIw5nVCPhlqb+9VGoR/RYpp
8u4GMdJdar3LaCT44uJIBzO3kJ+L1ppW2CFuTggNHt3o243I6oigKNqUoTGcXCMLZeL0D1VQCTR+
jemHoN4VU/QYnDo2nYmwGzo0XBrt5h0JctIDbF+JNN6bVToCEw52xGaA4wc3iadqAF6JJwqmcetV
+dKfJqdTEp5RjI7wE8rUCdwb6kTb1QfERNPpTxlcD41H5ETAwuRIwmK/QifMgAS+kOE0hKUSMdVH
RhP4lvDSQ6BswlEnGJw7kJgGJFcF4ImW4snmQSaM6M5eKWXRWvXb0FfmTH0nNfynMf7ZXM/61z9o
/OdP7KgbSyZmiERCJ0lxLsRny/MwhKBBlGZAoTVQhamJwZtQwfpdOF5pt+XG6HiHd8imNWwVSVRC
9bAk8dIlI+AjGijGOADBOaJm4cAjc9oUCDey7pyMXc5rSPBiE7OTh+VCZYnN8yq8Uz6giAalQGnW
ZjiFXLphExIrpOxEAEFswhTGXF+qjQM2LYKd22AJxBIJqKLXOI1OzB6okjiOJ/IiZeEYnZuUYwDA
LSEPIhGV6ZIeVRFuxPeticTiBEim4hR5riNsliAHIKEORhqJ9sj7F5nMTlPsr9JojCYjGUc7kj/c
iDK2CNkQfpVNJ7B+QrTBz+aJEfQURbtQnKOMJvq9RX4eVvyL+GnDV2FRjZ4IktcOYCjbhiSDKRzV
+0aSE39aBylPVE7d3epXv6fPixnys/sK8u/y1yF0ZNj2FC5zJlYtR+7gPFJT5iZjbtjhs6gTO5LJ
h9qE580TWtOHuUY3PEMJK3bjHUZmQkcKDYrcjd8y0aCETgMU162YQlhI19ale5K6ITkDqgN6hOzb
1C1cjKRLABi+aEgatgiRxaQ7DbsVzmblkxtEkiLYBW78303A5CcKqEZetNpFy2cYjFPaMg2EJB/r
Xk8xEW54CWRWodxWmQ0z9if2L8LzQaQoCUQMMkx6JxdjiCULRk+XpHiSCtHMRExg+a12JgD3StMj
VUxyVyzMNKRRhGQlbL8GcTtRLkklPKbTC8u8Nds7di87lZNmwWm+Kj2himmPMt5SGIWq2dUfpTEN
0AZFCQz6NU5CI2lEWwbp3UCbS0Y4aR9ZQ8q2ZvmEZkDzjQdqle5gPe5vin+iclajy0NN25Iym5ei
pgn6HDE7FITgzYJjwE4HJOcNuRQKEhhmhej3odFsjJRnEYjiG/ajrbc3RXplCdYmoXlSFBQJ8OrE
5Ox71IHLBDepOqF+nagdihby/Kp3oR1B3iXDsEbceInEAAKVycdIkNL0zQDHt6nZiiThiULcA0AW
pkNqjCNuPCGqAmwGzpcdDJ1qyzT7UC9ESuzpGAJxEs0Xi4NX7VU9yKfpxZafYj4lXAI3RRzQ7Fcj
MkxFXK0RkHUuXmRb0HgJNCCpRhMTuyDRjGtUZSKLRYx8IXmTEjP3DgfWpX5xbUeEbFqOAw6ZxvVi
cBA8SMIwlaicM39alcNZSpFxVNIF8h2oTlAA4ntUYajCWJZaYRlIjM0TTaEAzNj61otHDGRxWuEB
K5PG52p5cUymjGWrOUckDMkEmkpMGj9qrVgmqTsCjegCAAxBzXn2xrty8UWrE9iPl1HQ4K03HBOb
siIX9E8tqlKF0CWIk5LrTfuGdcy4Q6H9AwDov0aisVsG7quCxXl81HXHKY8QXmWJeZb3YjtHS7sc
lG3zVsTbCZDlaxEN+YWH0J4SlbG6pHYSgbfMybIzJJHcKLhMbgo2EYtvzRjPlnGyDyJ7DgtM7U7d
Wqz/AKq0iZGWogiPrWkXIGWxwsPl9PTv8kf0bdGi4HitNuLBNcgJNgdi0yjTBwnOruontwrtNfR0
+XUVfRv0P6Pmv5TmP9srmf8AWuftHprj1H+eqf2HdASjQe1tXEQAdqvm1aM9ZeMRirdycdMwSZRO
IrRCYz6ad6idINrliXk/FxnYmyKtRsHTKcqncA686UtMI1l3LXHB2IKfI4MnKlImgOClsU38K1yD
mRJERkN6bHcFqn7WATzLJo+taia5BW7t0ycCmnaEdIwxlKiN2UjDlwWDeKTbCpeQS7eGZcFG3MaN
PDKNOxSsx+GJAAkYtsQiC5zkd6EQGiKknIIQthgPp7VVOA6j+JEIw/OkBIvsCjGzMTs3Rqi1Qy8m
Ja3ckJS2DTioMAYnwj7ZIi5c8qyKPmewBH8HYlOP/Unn9QXlyvWbcvdJqj5XMWp6cQx/IhK42sEg
kJgCOENMbVG9I8dtnlgRvRtXi9yHDI7QcCpRlWdkt2g4KPNAfCucNzcUA/CawmMihC/I2r8fBdjQ
S70fNEeYGUgRCX1MhMW7cJR9q7c1/QgeY52BMfDER4Y9jKNsE3ZRDPGLD6UdEfLGQxmfsClzHNFg
zRjsBX4kXRbsAtEtqJK865zELtpmNvFicH2Iyjy+gAfeQrE9qM7c9N+2aA+0FGNh7V8lp7uxXzzF
wzEANMTvREg4NGKuCYAtx25Iwty1wOQT/SvqK0vXahG4xCePDPJSt6yGrCT0Q/ESAY5kVWmYacfD
MIWuYDw9ma1iYIREaEbULQPCMWWo4ZemcFNcHeFqszEmyzQ86B3hPaBIIwRgLTHMnFOccgtVygWq
BJhmELkcWqETA0zjktcBolnsK0Xo8WRRbDJazgq4xRFkCEdpxTnUfziaJ78jcOzJStwiIghmCv0e
VuoigZtFsAEIWuYl5QoNUYybsMgVLz5yvXJHUZSLldnTsW3oNueIFEY7D0GJwZAdEoEPIVj2oQux
MQzEfaozjJwa9bURUZo9WQzFfUhdhUGh7U+YUbwDgsCFqtRMQcY7CmJfoPRK4z6cF5UmAlQMiQ5n
P1pvJnpJrIRJDLyp+O3j2JzkiRgExqdi3leTA8RxWuQ4547kXCpTees3QycBU6STRDUQBk4qiLrm
4/AR9vQYE44I2pFmdt6Y9DDE0CY+I1JQtAt7xWhhOIFVK3y0RAnYo8xftid+5XiDsFKdq2LdyAcG
IAFNvQBOInF6gqb8rKNyA4ZQkz/SvPlccuwtyOqSFuOJQiOgysx1T+rehdvATkaRYVRnzII90YIC
DPGjbFbsWpOZ+OS0TloA9pG7IuI4E4q5c0a5RPDE4OiKQ1VkwwUoWpAyJrI4qGmZ1DLFa5GRnsNP
UoTuQHmAIAFgMVorp3Zp4xjAbxVNcNuQ/RRFoETOGklT1bVh0UQFyq0wuyMWYwm0vUUSY6jtKa3E
R7AuNiN6oAN6dxp29V+jzJYnBGeR6BcuSIJwiPQ6rcmKa4Bau++PCe1cQeJwkKg9GK0znKIylEt6
18K5C9E4CXi+xf8Ac8rLdKBp9rJpyNqQxEh+RPauxmNxCdq7UZTtRlI4kjFEsYh30xLB1qt35QLu
DjL1unhzEZthqoD2xZNK1Ce6LAEdpkmuctKIFCA5l3UVROIBYkxZitIuxc+r1ptQfIOPQv8ALH+S
t1H9JTob0FfSN6f7Omvy7mh/+05j/bK5n/WuftH5S39rw+D1VMMlETB4XOLYqV+1KYus4GomNNxV
t7ktF2ht5UoUbc8D0mQxOKnag3l3mjcBD9hHRECRhOFYyxRsTk4kCDLD6l5c9JY0Mdm9B6kGiBZz
s2onBzUBVQEHZq9q1O8p4FU7ynOCZ3KjAGsqdiJjxXNu1SiZAmeAyDLSABSrIcu48y0SJDcS4RJL
AYlRnZlUzJkBkHz9SErmOUftKjbhh9DbULcB+lLMlcJ7l2IyxClc5iQtzA4BI1PYCymJS4IjgJ7a
qHLQBibpoPa0jbsXkxmZ3JVd6RU53i9m0dMYnAry4ACjBgrfMgRINJUVzzJNG4xhLAOMlK3c/wDx
7peM8gVGJkHI4ZIxjfkYZxAcf8qlK44MmG9ojNczcgNTBgNpXx4k2y4u25BgxPsrz+UJvcqayt+1
FMGlttyxCbl7jD/pXKx7im5rl9J99tUfWE8bYmM9Ey4/wyWmZuwOYNFrsxAJDxnMutF0m9OJaNuN
I9siiBe8u37ltyB6k0eaIOYLhEl7lk4mJLf8qImRG6QxEgxftQN62SwpIZhH8HExvH70TLlC1ekI
3DkKsokzPkT4gR7St3rNbT8WTIt4Di2CcVdNKgyKBOCBBcLiD7XWqydEvdOC03BTZkvLuAV9krXb
kw9wq5BqCiJlifkNQf0gahcWm/DfSQT2ZmEvdKBaozWwpjUIxwRi9NicZqtNqeCd3KOqQceyqwqj
clHSwxyKEWYHBDolIh/MFB2phEsM1HlyAAKkjEqDMRIsdtVKJwenWdE7eg/oodIMAJSjiMyNikfL
Nu3HGNWT9VkxPTsTEuiNquQbVDUXiV4ZRkchVMTI7injPTHZinjXt6XZwronR89i82c9ZHhDMrEL
mBmNXYhbtwEYCjAK3zViOmNwfFiPrVDQ4IREXgcSvMgHejBGUxXJHmLngicd6ERngtR7uh+pu6a5
5qiHTXwhFhRb8lSqfUAQomRBBwPQSjdl4R4e1F8cghjxVJU53S0B4XzUrhidMc9qFrmwQATokA9C
cCpWOVBkZhjMhgB0N7WSrjJPmtcvEekRkSxLMM0BGgGSldlxZADapHSIfnBDSdU5VdGdyZNwUEcU
ItRG9CgDBtpUzzBHmn2TsR8thq2CqjclI6Y+8nRkckZmgUichRFsuiJi7GhlkvMGfUBT4pxwlVFN
qYJgtqcDTJUGobQqgjtQADk5IRMSDvThCMwG2qlUbtzwxqFTwigHo9J44ZwOCewdM87cvsWmYYjI
pswniW3JjAXJjIlijd5jlxdtxoKtGH5VaiIyjCYecYHDsdf9pz8rYGIuAs+wk8KaPl80M2lEfUy/
7jl5w/QeTfQFS5o/TGkJ4TjIbiD0YrXK3GUtpAdapWxWpAJAPcETEygDVg1Owmq+FzU9OWuRcepM
JwuAYEAB+101zlH3wcg+rUvjWrkDmGdvWyHGYvhqBC4LsJf4gnBcfMe/5Bh1G6lPRv1K9V0fTN0M
qdVh0P8AKKLf6Dmv5TmP9srmf9a5+0eq/U3/APAAW4mm8OnJqjb5SENZHiuHh+hC7z/Lw1wNJwYi
vavI8owkKRkc1pl4o49MdWAIdEZJtq0mJZEOpPF3+goURDMAnVaIZAYlARDb0ASiBnmoxh4iaLyC
WltwcrXGpHh7Vq8w/o5L8TakbcwamJIIZajzE7sRlIr8QYR1jCme0ry7edP0k1DM+KQ6KY70GXlG
3K5cZyI5AqA8m8OaDRqzfWmNZW/pzVjm50hIaSTkhftkkFnY4qUfMjbljKGJfeELV6IjKXgkPDJR
tgccXL9ynaumlsuw2qVm/CUrXsvUhCXJ3Y3bJwtXKEdi0/hQW/Op9a0zEbEM2x+1C1E0zkcyq1GS
F6MmFdYGB7l5to+Vd96P2hab1oczbHtDFkTbeBHityUgLIABIjp4TTOi0XBrHuXPF3SRu8uTOwPv
LMsYqVyxPy5XAATsIUo3bgm5aMUTLSJ5NmdiiYfd3HEo5OEZRHl3PeGDocvzbztHwzxojf5U6Sax
IwKlIwlK5I4AErl4GOiUIASgcQQi+eAXl2bJlGI4p5OpXGpEtKJThaZYZFM9UxxQWm4BIHansHSf
dP5VUFhkcO4o3Jx8u5mMQVIjAfIqUT57VjqjsKaQ8q5tyWq2NcdoTSBCBQkKo9IlEsRmtF08YwO1
CEnMdijciWBroOPQBbuzFojUGJAHqUTemZ6aB1AeGMvawHepAS1DIjBAjEVVu7iZRD9YscEU2JUj
kzIJuu+aqjEY59YqZ/OP1oGQBBoXVwik4kGJ+sdSqlLcjGMXlIuVrAYihCAtAyuRL8NWUYXeXkbz
ZEAH1o3bwDEMIDABGzPwSrbl9ifYhGUoxfavJiQREsCF5esaz7CBkKjBDYMuhszgt/XcZp9iLVbF
UzQiM8ej60wTs+4oUAAwA6BajifqQiKAIzl4LeK4mjsEkQ9DgQhMASkXBPYoziBGQLUo/Q0ala54
nAL87ILXPwD6VT1KtV9ahG7JpeyELUYmUpMXGxaBGhqRKtF5cRwQyiM1KReIHCBuURoNNobvRkxL
k0Vwi3uhkV55ukT0vODZ7lGEjoJLVqtJmWzBdj0RGRNUISkIsNRdSEQ8SKHf0nXQHwRzJUvML5og
dRslQpjXcnj6lUVVFToaQcFebbLbk04iW9OPV0+SAANufpXFDtXl8w0gMJnELVaOqBzCLoSicFK9
y0zG4Q07egzjI9yEefjP4lAcCCf0lqsTiRsuDU/anlYEwHLWpaG3xqgDfvWGymDMd61auXvk++BG
Z+gIE8vODf8ASlqB+tNG/ciB7N2NfpdDXCF0ZyidP/r1IeaDbP631L4dwVwcGP1qhB7OnYq17U8r
MH2sFSBj+iS3qK+DfuQGYJp/y6V8LmRMbJ//ANzqtqF0ZEN+UIi9y0otsf6KJjG5A4nVH+9cN6Nd
tPrTxkCDgQQR88P1262701fSP1a9ZvQ16K+ip00XN/ynMf7ZXM/6tz9o/LN39rRLYnq59lOQz7VK
3N5SkGjCIc/QoczOeiMPYlEg97rVZnEkGsQXKEo5oSGIqEZnE1QkfaD+rFGEJaCfa3YqAjIzYeI5
qLY5phtqVMiWlhRqoOXJ2oyOWC+s5IADhGa3qRJ7EIQFSiRW6cZfkVuNmErko10xDlSjzdqVm2Rw
yIoDk5CjGy8p4SNNPcjbsyN7mQ5NuOOooyvDSJSJbaULcKvRhnuCM5MbhoB7oXYp3Z+G2NRAxRly
1uFq1HBw5Y+86AvX42wGHBw1PYyMuYuAyFTKfhJO3ajMExkS5MaRHeSF8M6gKGLgkhfhgfvQ7HGJ
2hGzZkLkBQO0m9eChfvHVcqb0sq708qTEgbe3FC5bYSMQXO1l5wINwlpxjuzK1irYrSSw2b0BCR9
eCjautQVuBaZRiR9XrWkVjkvyLA/Sm0yO9l5xifJu47ivMixf2s1plUHBSYfEtYj3ooXIf8A416r
bCuGMpkfQjcgdM3oJkMyEnEhB5OPakdiGsMSXL70wiJWWqDVASOrl50D+yVDmOWLXobMwrlyXBft
RLwwchDl7pHlEGRGbDJGJgCDiCFchbtgRm8iGzUjEvbJqNicVBTHuKqUzoJk0g42FGFkaZSxZP8A
JtPneWfdlgnu24zj70Vqt0Owo288kxCqqKqEghcdkCS/RLVESlLhgN6865PQZB4xAqmHjsz4juPR
ptwM5bAHQ88GM4mkXyQYv00WmRbVQKRyODJySScmTzYzyiozOJxZDruviDhl4W6N560jsCPb0ROZ
iAe/qMm2qE7chGbOXRgTqlKsjkr3N3AJz1NB6spW7kBhSTVB3IjYW9S0y7QcwULd8PEYXBs3oCzx
XHfXuRk7HMhCQkXFUBKJoMqrxEdoQa4Pq+tOJA96fprXoZEHFMckwqU4zNUBij2UT7M1TqFyjdnj
LDsWiOMkLEcY1md+zolAPKMgwGzetFmUbkjWUJZL41AMIxwWmFAjLTrnsUb1qJ0kkFkJ3ziaRFT3
oCFGwCdOSwQgJjWaiOaBkA8cS+S8uxbGiNDM/wB6e3dEHPGHYyG5RhMAbZFSjY0yicztVRFzm7FG
Ex8QZqNvSRI7BTojdBAtwqIipMkJMNTY9AjEVjVCV14XYBgQHBCFuFa1J6AMBvRiaGB0gncjEin2
I3bPFHNkQcepisemiA6WNQswsVQrU9NnyB4HtBwTXB5N3KQ8J7U8ovDKQqCtJLCW1RvSeNyOEonF
COqU2zkX6GNRsR1WYucxT6kfIv3Le53A+pMJW74y10cdwR/E8g3vSgH70QJysuXY0+kugLXMCQBc
PxfVJDyJRgY11AsT9CJMDdap1YN9Ca/y5YYiAJI9aaWq3+mG+1NbuwkdgkOv8SEZthqAKY2ox3x4
T9CBt6rch/iHqkms81MH3TSPc2COmduYyjIknuNEdfLeZEZhon7UBetTidkat2uyDylAHAyH5HXD
dgf8QVC/Z824ejPySvQ3paejp8m5r+U5j/bK5n/WuftHpr/wH7i3QZAVOJVwTA8JY78lD4chGX3k
mphtRsz8JND0iBI12plsHMZf3pp1Ariowsy0WogcIwVTU5rTHFAGu1Sldlwk8ENy0jA4BCLNHMoB
NbiSM5Gg9ZWucoxB2ESH0IzkRK4c9nYtrK3zAA87mDKU5Z4kAepGEwDGQYgo2IRErRkYg54qVwSJ
kfHI/YhCNPqAQuyHGfDE+yq4r7VKE6wkDqdXrUCNHM+D3hp3KPKgeZeueISqI73CFq3M3GoJT4hH
dAIS5zVcvSDi2/EB+dsTDlzEDExNQrV+3ccWmOo7NilKRBuYEiJJpvWnlrBmciRT6EL3PFwKxtha
DQihCuG3E6ZnU59a82hjKuqOCEohpDHf0RlIPEYqTFiGYd6jA/Eus5fwxG9A3eZFsnCIlGIQ8ueo
e8NMvpXlzlqn7RLyP0I6CblqdGx+taIEyBOBGCAuz0yOSMrBEoCI1EbVoIoSdB3grTCGtg8yA5fY
nuW46vZg3F615nKzMJ46JFeRc+FIYkDFkNN+M5mkYxfX3hSjP2sO1eRcy8BUp26XGqBmvxAvSHMW
3BtEAAhCV6WjtVw8pIEyDRlkHUbnmG7qkNZd2dRlDitnEp8Vj2JnYpy8e1cVyI7StHLmMjnI4Ikl
/lD25kbnp6k1+2JfnRoU9u6QfdmuOAlHaKqhMCntyEwuOBboMcgmNW6BAH4NjHeQmApgArtvmaWr
gP8A6CeQ8q23BEy45byrs5yE4SqNKN63ylyVmQocHG4FGEwYzBYxNCOpGUgCY4blWoG1SJmYECgi
qkntQA9nHuQHXOahHQwJoCsQTm3U4inCYI2bHhHjkncSGa+hW4Q4pMDLuCYhit3Q3cojeo2mpQak
ZYAVdSjpM7U/EBT1KUeUtShKVNc2p2M6rmq47FLYBgUSwqVqiWBxCA06pSYmRxdDTcIL1dcdqF+P
ZVNetSsybensX2Zs18PmTSoBdUmJ5ZIxnaBbGn5F8WxIbwMlxCUTm4Ti43bRUnE7ahE5IGDFPgFE
H2iwRfNasseqIeyPEnNAFLmZY4QG0rVIuTV0HDjahftxJESgbkTCQzFCntXRdj7sjX1rTdgbJ2mo
/WCcESjIYiqMbIka+EEkKXnCMYHCIFXTnLBBCIgZk4Nh3oc35kSBjHMDYrlw8UZPVQjaAldl4hsC
t64A3p5mmnsUrkpNGOG8qWogSPh2oWtPm3DUyI8KLRAvwDmTbFGU7xiNVS7BnWqDmIpq2rB00cUX
mIntWnlgZHOZXl3CJSJcyTYnYFquUGQQlE8QwARuztgE1dGV0g/mj7VptsAMhgtXhubcisHG0LDr
v1mQGz5C0A52IwJeOcJVCE7fBLOGIPYiLfMX7EpHwmLQ7mR8rmrN18ImTy+lfF5QXBlKJr6o6kTf
5e7AjFovH1lk5uGBGUwR9Tp7d6BH6QH1p4kEbQX6OO1GROZAdPDValtifyr4PNa/zbg+2qe7y4uR
22zX1VWm9blbJx1RcI6Y2SZY4A/SjpM4E4EGifl+dlT2C7fWU5jbvjOvF9iP4rlpxHvRBb/mR1TN
o7JhvqdGVq7GQGJdPEiQ2ivoCZWoE7dIXgMM+EkD1YJ7N6ds7aH6mXw+aNwbJExP/wBSGoW7gzMQ
ZS9VFrvcucWEhQd4qQj5gMNOYeQPeEwlpJqNQ0utMbkZS2Ag/MDdFOtXo3+gr1R1n6W69PQ4/MDK
nU3rmv5TmP8AbK5n/WuftHqU+Qb/AO2zihWkmnYqEg76pr73BscgfQhPl6RdiDXFXLV2QkYVBAam
KBOIoegHbRAnJMA8pZokiuxGXuhAbaLREcYdiUZXpYFv/ZRiCyjcvEiMsI5tvQgIhhhHILTaB0xD
EDammKAOTiVwRB3kF15N2BuWQaaaGO3FH8PblqIxmwb1EqN65ceMakNQLRbHY/1oXblc4xP1noYp
o4bFK7KJmfDGIzJVrm7AqPvIbBsUpsYCZpI+JhjVT/qXMeCJ08vbzLUoFK/p03LpJeRZgELlvlwL
eU4wcetHlr1sR5iApIUEh2Kcxai8jxAjPa7FUtgDYJflivATt1SAH/KtWu1A95RFyMLoNDoNfUVK
7yZIlH7y1n6kIsQR4mohIDU+C4T6ig4GqRBOa5h6hxXcFKYeRkaMKlebZkRH27b0IQ0gyuX5NCI2
KcZHidpg+yyJhxHNk/M8AjhIFihy3JRMok8Ujie9eSSCLTxDYOalcxZBYSaQA3rGpRrgsWlkcU0o
xMD7TB/WterU9XRlA8cC4QmD29q/EcsdN+FaZqELsTDywxAdidqBrqM5O6L4MoWpAzncwiNiJbTA
5SVTTamiO8ptRW8pwU5+V8MiOwrjAmN4VXtn1hcMo3BszRe20uxNGhkjDJSIPHKkR2rXLx3OIkry
uXHnXdg8Me0oc1dlru7B4RuRMn8wmi5a3fFCQZApgGAwCtTgAJ3IvMDdn1Zdi04Ap1KxL2sO1GJo
qKtN6oQezpqjG3q4KnTihJ/LlChEse1CQoMztWSxDDNAGel8JKVeIUByK03CIRHtFeXCTk0lNRna
lRqZuVO3dA14uM0blSxoMqLg4htQOfRRDtULO3HvXC3lk+J8lDlolidMXULELMZADiJAJJzxUL3L
x0QuuJQGDjZ0O+Peju3IifhH1IvLzJe7DEKVo+z4XxZEHEIGQeJwRFyIkCKuE8CbdXovh3dYJqCc
u9AcxZPEaEUoEwGgzpKUtyeMhIHBi644RPcq2wHrSieJlHsXw78huK4bjjeUKCQxydDWGlAuyHmW
5C4MxgpWzLgltxQiLoLKkwSMQ+CpV1TE4BVrKVSV5Ue9eXGlu3QDfmeiEQSL06tiAN4Ubd+DRH7y
FY96EjCMonEheZalKDmgxCM4z1wFNJr9CFwRlb/Og4QuSum4R7E6f8wQ86JtHb4o+sIStXIzG4hP
IY5LVgNqZwTsdeXy1t3xmAjcuAEbMSvJAIlk+5GMogiHs7VE+UIAH2U9uJBapK/D2eHXQ6RxS71K
F0kcwG0iJBjXagYTM7eds1CaVkH9Av8AWtNsG2czJap3BM4sCh5dwDPQEx4Y7VEga5A1RuWpkxlj
EjBarh1S+hEBbAc1QJhWWxS5idxyRQDAFaL5gSfCRR0RKEo7xVcFwdhonFRu6WAdaphuq+ZwRkcT
8icUKGsaiMJZqJtPC8MsihZ5qcdXuz/vTxhGOyVtgh5HM3INlIuPsTPC+NjaSe2q081yIiM2Aljs
YIxiDbPvcUWPbKiP4bmpxkcGII/5WXw+b1DZKRP7QKrZjejng/dpIWm/ycxtMX+0LTMXLZ/Oj+Qp
o34OcKstMtMwcixdVtCJ2xon5fmZw2xNR9C9i/H/AJv/AKU3M8vO3LaKx+lUuiP5sqFPO1buA4SA
H1hfDErJ/NL/AESTcpzUmOMTwpyY3InE+KQTX+WJBLCbED6l5d23KE+5vpZaTcEJCmmX9y4ZxO0O
FT0BiQDE4go/CjXc6HFMRBcRBAbsOKJtXyH9qTmQ73Q03fNiBkIxL73Q8yzGW2UXl6xFPe5e4D70
Y8P/ADJpk2zjxYesOgIzqcHcP2OmcPseqbqV9I/odnXqqdDfIq9c9FOo/RX5Q5DBVkB3qswvGF4x
61SQVC/pOa/lOY/2yuZ/1bn7R6r/APAZ00QXzRkSmjoMR7Bz71evXLJF4eLGWOxa7NLVx2ByKqi2
VUDtQkA8hRPgU5Nc08BQVdcWIzUxbDtWiFy64two3vHYgJnRDMjKi4JPEHHaEdJYjNSslgZgtI4O
gZBscMablPmLhELYcuS1DgrgBOky4STQDNMKvtxK8ydRlvTnDNOQpXCWhAPKRwAVq9H4tu57cC4A
Xl8tL8ReuMYiILQ3ydc1bnImBIAlGTVIqrlsnV5YlGJd8QrfkNK9ypkJW8+KWLdijoMrfMQPim7S
Locj5EpxjSOqAmG/TCtSMY2b9wcUhUgguwxUZXeY8sjGTBigIQt3m9v3uxiE0uXtj80kv9a1abdo
ZOHP2p78I3IDxGI0zG//ANBQu2pgXGeE9oORRuGErN3OVsiUT/hxQt3LwiRgZQIJTzvU/MjI/kQF
oSqK3J1I7AjbtuZSBOrByjKduJhmGY+ta40jICXeyjOE5EWy8ZAHhOTKV2do35Gnmx4TTbFmK0Wr
UoP+ay03rhJxIkcBvyU7oAndHBGQNHOzaoasZGqhdNBMaSVSiecnifZwCeBxxCaQeMkYwPAa1Tvr
i2QbuUrXsTqOgXrYAuxrhiyMLhFq9bHEMAQEbAg9sS0uM00KCAYFM9N6pXp7EOq3yqhZC2K7Sapn
8KIbvRg72bJeRyomiTb5WNKUM+zctFuIiN2fapDMVUbkox1woZEBwyGiVQXBBQs2rA5m8fCz+ssh
e/qFqVucsCQ0W2BDqaScaIlqAoRjUnJeZGjEV3ryb403RQSyK0yDoR06onxUwUpWTwioXF4mqEdM
T2ppBijccgnYo3YSJJPFHEqlCnyVaRNKpjIyGLMiDITiQwgRUIkggZOqqpdSunDALzJT02TUtsWm
Aa3h0k9HmTPDD8iMIUI25rXKWlqhA8xqhcjQtFwVEwiYWYeEHEk59EDciJAGqPlgCEi9AhEU149i
1QLELzHeWaBcxkfFFShrMgcAtJLlbQjKNVb1w16/C2SInbaOIQlauStkOgIyF2IxBQjftEE4tsQc
6CaVQ0SjKJo4WLDNSmT2DajKWJKHn2jJxkiDbBAdnGSA8sBtlFwzkE8b8u/YiRPXoNHTyYjaAEXi
JTOL5KM7dry28RA8T9iErheMatvRu3ZtOVACmE4naHXmcvc8ueNDwnuQhzMKY+ZCo7woecPO5d3k
ImqkeVgY2ThGdWO5cdsdooibUzDYDVPbaTZxLFNK5ciB73EPpQhKcZxzDafqXmQtAuPZmD+RGMjO
OWmIcIxmDbtRxOkklG7ES1vwhjUIylbMTkQGQaDQZjqTQBlLYELxGsA8UHYspxtW4xnIPEkVB7So
WvJja0RMCHAEyjKdwQvQ/dRq4/SdGNqY0GrTZA3bemGcoJ7d2UTkJZLzGFyGWmjr/ubM4y25LhuC
O40TRIPYnl6ujyeXxPtbFEWgbmdwhR5IH4kjUDeoRcyvPVsAokHzLgDyB+xEMYSGIXDL6VgT3p5k
RG8poDXLbknOOxV9XUbIfJQDFyuO2CfeFD6wtVicrchhmEPL5nzojIgA/wDM6+PywP50T/l1JpC5
CecTF/qWom1MnHUQD3utdmUrT1+GR9BxXweZ1jZc2fSvi8uLg962f/dNdE7J/OH5Kp3t3JfnAP8A
SnlZFcdJMfqWq3OdrcC7dhKHk83OQZh5kiG9WKDGF0DOIDnt1Mh53KMc5AuP+V0DetXYE5GLj1lk
JTlEEe8NMh6lr5W8I5HTJ/rKHlcx5sBlJgfXVAXuXEvzoEkfQCgJRnCRxiYmi0m5CcZZEg/QiJWY
h/aAYpxdlGWRYU9TLVCUZ7zI6ie9NLzt4tgsPsRJuCIdh5g+1EG3C6MjGYf1B0121cg2YiTH7Ewm
xxaVFwTjLsIPoWNRsKI8uLHEMFWDgYAyLDsDo6JmMSXwBI7Cvh3pTiAw1SMSPUoxOi4M5RGqXfgj
K/yxIymKesVZabo0FnccY+hAajEHCUg0T3phdg+TSHymvWf0dPkNflQhAdp2LSAJXUXk0dgVZFY9
GKpI+tUkfWvE64xq3riGlUkAU4IPVbo5o/8A7TmP9srmf9a5+0f+BhEqasCiAexVx2KcoQ0aosJS
w71EznE3IEyOneVGeZx7Uy0HGBIT+pbFIg0Ga1AV3ZIykW3qhNaoylQezEZIC25uk12AIEnVpFTv
U+Z5gaoRLQgcCcSStMrEBsMQAR3hGzzE5GILRkNmTrSeMS9k1daYRAc1AXmTLQzO3sQYcOQCndm5
EIk6BmoW5ANcPBbzZHkOal5c78QLh9kPvU7fKc5ZnYm0jbJ1D6E0p6BLxaJERPrC4bkX7ZfW6821
KWlxqjGTx71//EuXmYxNToLVzVmBhK7MxeUtqaMTCccYSUoWjpvDit9yPIcy8XfTIgl2xUfJnKcI
Gv6OYUzZkbkm1AbGGCieSsi5fl4pSGohshghzF9/OYxNqILFRt2+WpHAykQfUmnYPZrcfTFQvw5c
2bkcZ4g9zIETlAnEGNEByd2WmIDRGBUTem8o18uO3eUJwHDFzdnlJ8AEJXA0co7U0I6QnFyQ71xS
lI70S8uLxAZoaAaF5VUbmUWf1rytVDxWp5di8jmoy1wwkA7o3CBCzkcwjKEtRGWaMDhkmwORRtyk
XC1jGJcKMxmH6JTt8N1ss1OdyYiYlwDiVVOyZlqdYt8gf5EbssTSKJOJU7kZNdlwxAxqo8tpAhKe
q9MeOe51o5d2gA4Zm6CDmrtm3gS5bYqE6AHkjcABnIsTmrwugECJIJyIXZ1GZztwRgS4IZSmS4Ao
hatERjA9tVxxAnnIUdaZtciMjj61ISiYAjHEIyExJ6CMaomVsx2yC/Dkm3JuEjatDuxx7E5UbkSw
OQCJiwnGtM1qGWKAuHiwC8u4TGUfDMVVTGZyICMdAIO3o2RzK0O1KBREDrhlElwESQ1oF6ABOxMX
Z+jtRVyMDXJC5dGmMMnxUBf4rFqJnKO05BeTLl7elmBEQCO8K5y7uLZYHdkqVKJlHVvzRlqaI2qE
8dIY9FVrtXhG5HASo6FQZA4D7VHVFji29P7OxMz7kJ4Ng2IWpzI4ElNEE7SFi25MzugLlsMMAKYo
G1clAgvi6DXdUSa1Q8wU9kYKuAQjrAkBUHenBBfp0R8UsEADhitPsRWqEh5cjiTUK1yXLDVLAKMu
YvQFxnMRH6HcL8PehplHAgmoR0zlE7XogLV4sBQHIo6Lwk+LqU7OIx0nHuQN6y7+0Yt9SY2XERxN
inlbkC9AFpsW9WZMkSTpBAJGT7KqOiMYj2iB+RNbk0RmCiLZBltZEzuxiTViUQZOBgmnxbCU1sGW
1slSyYzkOOU6upQidFuOJGS/D2pGQ95GV4aycBn3IQtwMHwcufoQjajrlnFndW4yteVIniCacNR2
omzOVrcDR08pRvxGIOKInYEZHYyGmemR3/lTxujSPeRuC0Z2o4yiF5l4kW89ylG3dAJGJotfmBoH
iuivqRvWp64xqQRVCHMW5ahQSizFCcYCMGppx72XFUJjJtzqhke1UGlU6hJKMjn8kEYhyV512ksh
1WuxExvCcW/LlkY/kNF/2/NXIh30ksPo/Iv3d4ZOXLf8qP4jlpwIzi/rr+VNKXaJinrqE8YxlsMJ
Yeor/t+ZnbbANTvX7u+M3YH/AOlf9zy84bTGo+llS5pOyYITwkCNoLrTciJjZIP9aadiI/R4f2Vw
a7Z2gv8AWieW52cDsILfQU8b8Lo2TL/Yv+45KF6I90P6i5QF7lLvLlq6QB9CiLHN3IhvDPUdPqQk
Llm7DKQIBO4vKiBuW5Ro5LxburVDTdi5yzWs24TJ9ogF0We2CXMYMB9S+DekYtQTkSyD2rdwN4oQ
eRO+qBv8tctXJU1A8PrkV8Lnp2iKaJ/3FPau2+Yj2h013ln2mH/ooC7CdsnIhcN0f4uH608ZAjcX
9EQwY4jaq2okYtVvUpaDKEZYxDEfSo+VfmYj2Jkt3GKIt3o3NxqR2GSJla82AGZjGT/4SUBctUJb
TF9Q7kWEzp8VMPpQkLsA+TgH1J4SEhtBf5Fv6PrR6z+n7fltOtG3APKVESW80ipUpkvX0dCuGRTE
uuOLKkvWnBB6ea/lOY/2yuZ/1bn7R9A3/AWEwXcV7QVvUoyqCC6t2hdlpkTHQ5ZnbBStnKo6CDhc
Dju6HFTmpkeEVIRMa7V5YGkHNQi/gFSdqcgmAGD1KleuSeUqQgK03qRk4GQ0upcvfOm3MvGRyObr
X50JDIRIJPqUZWbbRlJ5SJGAoAyGohxEB+xB6WxjuQjHhhHABAGhWuNYjGO0IX+TueXfh4YnLsKF
n+qmxGQDiZOm4d9E/K2/MkMbknZ+9TsylojbzZyex8FI24CcwKaqoRpa5glpW4g4HNld5O4KQOqO
xjiuXjDCr9hVvmYB9BaZGwrzLJIk2qMuxW+dtPG/UHSWc5+sKJnamR7RkzI2uYlp1eARoDuUpQe3
dkQIiEzxPuxRF64LYiBqZ3O4Mm5GEWA4rkgwH1I3ZC1fgcjEfQQxUhpNsxNQDRM4L4ExCbXo3CLI
kk3JCpBLBcVvRKJ8OSc9FOntxUoxwliNiPKc3F7YPDI+yh5duN2GUiXKEDDRAZDD1KN2I1Wya7wm
0m3LKRTEu+BQmMRj2Ib8VK0cYGnZ0i7ENLNlrueHIbU0QnATSFE4RHQLkQ8T8uEfWhCPhgGZMjci
DKEG1ECg7VbsgO54iDkvLswERmcz29Nu4RwyoVTTB8gGRNtpwNZRKly1iwRaFLkoPIn1BHpoqiql
dtEG57UVK3KLTHc6LjGo6W8p54anXF8MHE4lC1bqPeKIFJg0lsTyNdpWo4DALhPAMYnJO3CcUY6X
ByRnAVy3ImQc5kVVAX7FSJWqZcbE0KnbsXmSLtRYepNLw7E1gz4vFGR4e7orQ7ESpyuGpwCe2aKN
6dYkaZxzITWtc7hFIaW+nBSuz8Uy5UycfZTPQ4gqTY40Wg+IBjvX5pwKqeiMQIh9gTEVyQgPFOjI
RAeQxKAfWCKoQhTUpPJtK1zNNoRuTJlGeB2IxJJIo4FChC2CSc5UCjLmajejdsRIgwIdAz8A9RUg
AYmWYQNq6YsKKIE9QcvV/rRN23jQNRGVwEMPUuCXFLAZrQBQljJDiYEKxzF5qyYS7c0CC42qMbJB
lbi0yNpyWiOOarU5laIeLatWQz2lPCdIgvXArTFxN+Kcahl5N+3G7bicQNJ9SndhACEaMMfUgbci
zsy0xeT4BHVMQo7vRRALieO1abseEZLgiIB8Aou7Hco2xAt7RzQjGZGkVcIxtSELb1O1cHFI4MiI
1nLEnJebdJAOWZTWhp35rVKp29B2oxj61Wp2lPKNTsURIy1YiJqAFETti5CQ4DGhHayt2GHLwkak
hqnaVB+Y1TZxpwKvcnd5ufLS9hj8Mk+9SiFm3fs8yJhpCA1SbfLBab9mQH5ocrHypbJhitUZCQ2g
qsQVSic16jDFOS/yUTvxJiM4oXbMtUT3EehZEztRJOJZj9CM7cp2ydhf60TY5kSj7s3+118Sx5wG
JgwPczoRvQnZJyNfXmv3cpHaNJ+xarRlaP5pf1Ovhcx5kRgJ0PfQpr3LiY96D19TprgnalmJD8i+
HcjLc9fUenanuWoTO0iqBFvyyM4f3rTbvTAd9BPD6ospG1eiRLGJfDcSibcTOIwJk5HYBJab0ZyE
Rjo+skIC7YL+0QfsQPm6CciDRAiUZQlhUEFNK3GmBAAI7CjKM7kJn2nXwuYBAykT/wDVqQjdsC4M
5RBb6H+pNzHLm3PdHH1MV8O4bUh7JLH1SRPL8yT+acPtR8F0bCa//Svi8tIj3oD7KoC49snvH/Kt
UZcO2QMR3ak0ZAnYC6w9DitU7cZSOJIqgRAQkMDChRMLk4TObhPa5nSwZqse1yUaicsqx0kfQUI3
eXETjqrId2l18a3OB3B/rZadRicajLudUuwP+IJxXsr07/QlV6j/ACuvpN3pW9SPMcwQLsxQFAPQ
/IMVSRVS64ornGxjyXMybstkrmf9W5+0eo3/AAJlG3Joni0moqiSXzLoxfS+YVyF2xG7dsO0mAJG
OajzEY+XGRIMei3e9yVTuNECNlFgC60TFJ0J2BEGGuEtiLDSDhHErfs2LR7mWdVCrxx71I4MXIZs
+xFhVgT61e5eYBhAPGmC1yoTgNgQtwDDMnADaULcQ0Rnme3orRMBReFijGcBOFkCMnzKiRACc20x
ag7kYWbcbvNTxAj4fUhc5u3GVk+LQKxQvARlcHtBhIdrqRfSD4pEvI+pax4LeCuRxgxKnESY6mHY
TVWbhHwoRkWHvPVSlavQFyVSSz6dgUzdLgS+CcyyM5z13YjUXOQU434PcdoADHtKjGHLGUDWely3
0hPauS5e5smCB6wSiOci8Z082JTipGByR1Al8wtYEpTyBKdgOzp3Lf1NUTou7ciqWzJs4H8iHwJy
u7Zuw9a08xxSJMiNhKkbTjThvRhccShhvRMvCQzIwlgcFGZwnQ9qcYdERk9UA1AqdLdLYwOIK12n
BOMcvlsrshU0inOaCnycoA+Z43rqVy3btxgRLECrdTUMY1QMiQAOFR80l5lgTsQt2YADMgVJ2lWu
ctxEJzOmQFH39NU30Jo03hC7KRmRSlEw8WzNFhrG7FMcc3WSoHO5B+FvWg8hG5h2rVObR2krVYa5
ppJefAFiHptUp3A4GAOJK8yERbkU5nXauOOreF4ZJoQ9arh0ROZ+3qkok7ELtuJnViApSujTqwBV
y7eiJx9kGoWuzDy5x2YIP0ncEJQLEJp8M96cAHeFiW7Freow1ELXaaU27UTcHFkhCUtLgkyTx4mP
iQJxAZSBqNqlbpHSXi+BUZiZeXijGoTQfTvxTjHagCTInao8vdmIRZiSvKszF2Mc40VsW4kXR45H
pEBniiDEM1UZHhtxoAnhMGr1CMdRlpFFa1WjPTgCjbM7tuAxDlkJG5pnOjS2qUYSEgCtMAwzI/Kt
o9qW1aQOHBkYEaXzH2oxthic9qM5RErvskKUhGRkcNNfqRGmWkeJgmAYgM71Qu3JGZlVkwoLeSYR
bacyom6OAFyNqiLUIksPLODFCV0a5jE4BT8sGMDj/cjHzNJ9knDvQGBjiQXBHctcB8Ptoi4JhHwx
GClOdsmIyARnct9gAZCRi2rYg/DqwQDh9iJjUqU7nHKNQ9AjfnB5xGRZ+1R1WzMYW7cQrV7+p2dQ
NNM40iO/Fedy9/yYeIShIMFO9C7HmhFouQwlpQhzHLhgaiJb6ENE4W5EMIYet2RgbQqPEAK96IiJ
a9ruyA5fmiSPYnQLTdhG5vhT61sOw9DyLbkThHIdLfI9FiLy9SjbuWCA9ZYj1hRtRGAr2+macRIb
CHXFb07NJI+hPy9+VvNyHP0ELxC+MmEQCN+BTX+X8thV5D6NqAnISJxjKJLfQtcIiuEoFf8Ab8zO
22ESHCcxhfAxcgHuwX/cWJ29pAJA9bKlwRfAT4frTxIkNoL9TGieVqEjtMQnNkA7Ykx+pPbMobA+
ofStPLc5JvdlQfauIQ5gHtkR6mT83yzQ2xBifpWmVqcY+9Q/QtM7gmDlKJ/ItXwwDhOMgPtQly9+
UCMCGY9ulk8bgu7iRh3t9aPn8sWGJi/94Wm4dBOUg49cXWmOiYyiD9VU0JygHcaWcf4mdERvmROE
pEk+rBHVGN0jABgCO2ikLlry9IcgkuW92lVojGZkzkaTTtVbkQdhIdOKg5jpPXoU0gJA5Gqraj3B
vqTCJgMQxP2unt35RozN/lITW7+v9I1bvdVti5HexP8AylPdsMxYxGoHtqFp0zBzDD8qcTYfnBqr
hnE9hHUp83P0nq16o5i4Gtw8O8hStRkRC3QAIa8gyoCVUfIuf/8A6fzn+1Jcz/qz/aP9ia/2EhOB
1aXcDClFG4RpMw5it6veYTZndpGUovHBRtwui5KBOpqYqMtoqpw2hx2hRrqIDEjcqLW5cOz5lAuQ
DsNHTxHFtK026yOZyULFk8Qc3JkUA2lPG/PzBmwZ+xHlr0o24lhrI8Y2l00CNMhUx27Vcvh5zuVe
Zc9qFu1gc8u1CIxxlLMlMDRbk5oN61c1eEAatUluyLq5ds8yJXIg6I6SJGWVJAKN2/dnOJmJXK0I
eqF0VjGOoN2KfMzrO4TVFwNOxE8vM2jsxC+JzEiMwAyEIDtKMRQkUU+V5gNCeBy3FSsXB5lmS8y1
elCBL6Dj2OoQd4s4I/OKMxwi8dLnIAOVDmBHWJe3iQgbhexcGOICacYkSwlGiJjxWsTE7NylB62z
wncUycdTb0bU7NuTEstMiBLIbeptQvRzxQdi68xxERNXoylEziDiC4QtXbkSDQHUuGcT2EIm5chF
syQmEzcIyiH+lfCttvknMm3BcVQth6KKoqmueHNl5lmQlDfiF5gAlHPTVlWnykRGaFqPhh9fRqlR
W7gLVYoyt4SAJJpVNcwOHTKG0Mp270BKVs0cLTEimDZKMb9uc54R0AEy7lHzeVucty8fu9YIfeaJ
+kyiHIXGKrRINvWqFE14P+cMVSY7JBeOPqXifsTWw2/NR1VD1KMJxeGQ+1ExBY4glCVs8B+haLVG
xO0rTKs5Z9qMbuO3ajEVD0VVsTbVtUCg3SU21S/9ZqMZyAkck4zWicmfFEWpaznQgD1pumR20C3K
ioSOxVmUHeuBKEDJpN3K7buUk1DgoyjWQDHtQjepKWDIHzC5LghCdqWoZnamkGOxMCyxdNGL704D
nOSjMgvsCYBiOkozIqfqXlxxl9S0gkQBZkASzhQjAPO4aIGRaQH0ox0AvmyjcuweOxyNXqUo2iRq
OG7tUI2j5lsZOw71Mczy44fCIhh61HzYShM4gBwO9eXbvRMmfSCpCLFwzhDUCwoylC2QJRGD1U/K
iGnWUnq2bIwkJRmawl+VCHMx4I4TFQpzgMTTsQvXs/DFa7uoD83AqNq1HCgGwIQicfEmjgELUQTt
UoMeIVI3ZIRiGGxOnOCIppQJHCMAUDIYKOnHMuwXmiYIkKxUo62JwjF81IHieoB3qU7VrXOFCNxV
63zNlhE+HSOHvRJt0GIDupGAMSM0ZSLnaVRCFu4RI4PUfSjK4HJxIKMoGu7FAy44jEKFu0NGnxk4
rTIFveC1QlrH1JmTssE6cUKf5AAKuhOQ45V+SVqnnBzg4ofWEDae2Rk5kPrTWeZFyPukV7tTom9y
xnHKQx79LrTde2f1h9C4RbkT/hl9hWq1cuWjlpK+HejdiMrhJdAXuWMhmbYNPrQjPVaJo8wAO+qa
F2EjsEh1mmBIbCHQGnTpwMaFaoDiBqJ4H1MjOLSBPhiKx7HNUdF29bgSwyH10Um5mEm9m7J08rML
sPzK/USv+45FtpkA/c4QY3YH3Ig0+tPyvPAV/eSMSO4/kTny78B7Vvic9xCEb3L3Lczup65MqXRD
dPhWsabhw1Av9SjciNE44GLD1ugYSIYuYkvE92S+DeJDuYkDDdIuiY2xcAzJ4vVFCd2BlqD/AAwT
p/SfBRlIG3GXhlJm9bpoTBLt3+kxXHCM+0ApjaA/ReP1FaoymDtofrTWuYMc3qD9BQAuC5HOoJ/5
gviWBPsr+ySgLtqUdWzL9ZkQ8g2Lj8iGm4K4PT61iPX16dPZ0P8AJKdfYq9G9N07/Qxswz8R2BQ5
W2QCAzDsUxAGU5SIAGKHNf1IsGcW/wAqNjlYCMI0oqqvRT0/P/8A9P5z/akuZ/1Z/tH5xp/ZrtwU
5AsQKOoaS+pwXyOIUYzkIXAGMTu2LUKgqMI1uagaYhXSISYCsmLGm1GJxiV2q/YOEZOB2oGWGaIA
8VAdi8sxd/oWl8cBsWiJ36ir9mZad2MTAnPS7/WnREJxnKAEWiQTqdWzdugCIYgVkT2KItmUxLxN
Qgd6hGzZuczKYeU4hsMRAGpZTPK/0+dyxHB6S/xM4HYjKHLRiC4eQZuwanQtz5kQhPxESGodpjEf
Qh5/OynbDkxrKPdGZI9aIlzNwUZ4tXtd1KYvTu8xdnEWxJgKF5UG5GWxfh7pe7y48qT46fZPqUrR
8VuRBCp1KlNd8WTYprN0acozRs8zARujAxzdNKOmdp++JUuR514AObc+1eVO6OY5YuBbzbc683kz
5tidfKliEYnXY2xIMor4sjchpY0MadpUrtq4I2ogRI3BYvsOSELl2MTsJCBtzE4nYQfq6KLVGsh4
YjftUfML3G4m29DyLAbULsCSYsYAZr8ReBld9mD0AKBkNJzHSylDaKdq0Qjq5iNCDgFqvXCRlEUi
O7ppOQ7CVxSJ7S/WERmuIufoTGD7wjpcbkADQlCNu7putWOIR1R8u5lIYFNMd/yiV+WJpFEnEoIx
lMROQOa8oUJ8IP2KMbsJRnEVcepEzkSDt6kjbf4gOChGJkZE1CldmBKcGEXyUrV2IMJAgurloYQk
QOwHpxVeiuIWnow6KqMoB4NktNwbhLFOJHQdmC8q1Anec0Lko6Z7E5JIGQXDFgaO6c4noaIeWxPc
7gmwCLk6RVlqtnuTHEY9GxAd6G8pzEyBFCKoCb+ZpzVvmr9uN3mbw1GUwJaXyDqXN2bcbVyDGRiG
EgaYBOtMalcRWk1CeB7k0gzLHuUdWDqNDpBCFwVWk+E4og8RPhjtXmXrbN4S5T2ocEAzrS2qBxCc
nSTkU4uADtVZOfX9Sa3FzvoFF2J2NRGWrQ5dhgEZRnrq0j0t7McUSaAKV2f+ELztPHiz0XmSDwan
ahMDiAWkhkT7csBuTkuSgCKn6kAAwC4gCgbluJIwoykbZlbnKoINB3KI5TmCQ/ECdP1JrsddqArI
ChHaVO5dgdJLtHJRFq5ICR8BOSjIyDAORmjDlY8MPECWCNq6GmK0wqhJ9dsFjbkaIwNsxkcGLget
NPmQLhDsQx7KprZ1PiXdaY+I0VayOJVFv6MarXcwyiq4LyrVLY8U0LdsEwDNLLeo2oSB0jiIwDKP
lOxYSmcO5C1GzFmqfaPernkXCCWIerg5FCF+YMR7IAAUpMBEYq5ERHhNcgpR2FnVVSm9RjZlqntd
x9KBbRd94Ly+ctvsnELVaIkNmaLBlIXpadmxaoSplIKh1DamuQEh2MUTCh90ogoxmAY708JPu9OL
1wcEa/KKdDThGQO0On0mByMSfqLoHleZMWyk9fs+hVjG8MgGA9dERe5eVpg5cj6HZ0DKUCZZSFe+
iEoAiJqDA07k9jmZR/Tc/ay4oxvbBFgG7XCa/ZlabFyD6tqGm9FzkafWnBBBz6rZLTKEW3Biibcp
25H2hIo6LkZ08MgSPpKB5iwLmkFyGjIdjFAzsmwHYXAXr+cieV5qcbpwIOkfQEZC+LjV0zJkD+tR
SE+WjpGGgRFfW6JEJ27poSS4G/hDhAG7KQBaRPF6tS03bQOwjhJH0haLgMJM7gicfXFcF6PeW+tO
Kj6EfhxrjQLSInQ76XLA7lI25NqwJDyj2F0fiynDTw4A6t6jGcIaTjKLybtQjcszEidIkzQPYZMj
CYMJxDmJD02gxcMqTDnAOx9RVPSbk8rcT2hAGJERgASwdPakQ4ZpDUPsRFrmpH82QYdxcpwBMDET
kC53MyIuWhGQwiBIv2FAXbU4SNKhh6yyMTIwlsl/cmFyJ71QuN3yfGvVfqb/AE4iBU0Cldn95IOS
tVomUjNgApc7zQEr8uJz7KvCBaEKBE7fkfP/AP8AT+c/2pLmf9W5+0f+BdqECxjISl66KNmMqkvK
I2NRGFw+AufsU5sDcgwG51pPepThKdq5EE6oEh22qAiBN3EjLH6EXp5gdOSyjc1ARuRqcnCfzYvs
BdAyMpHGIA/KmhZlI40NEJW7IrQAvn6kbUbcYFqgwBbvLrzJ3TbJPiFA+5kOTuc5ej59PvJSB+lN
K7LX7WnBRgIylLtx9SF27y8JXMWNRHuKMrFmFuUqExiAT6k4p2dFem3E3DblakTEs+O5R5aM5XIG
IlqIA7cF5fKRjKV2JEoXJaBLTWhANUb0+Qui3MNcFoxuHUNgBRj5crUhjG8RbP8AzJ5gwifaoY+u
LoSiRIHAiqp0apGmxGMS0cickL0PvbVJNmhrrKPDJGzdtxlCVbZkMFK3O3ovDAgkgjvQjGkcmTzA
PaEYxjHTmGWrmRpj7MYmpPYoW+VjK3bgGk+J9XQLlqRhKJcELzDw3LZ03Rv2ogHUMXFUPLk25Gci
QR4iVqEjKOTIwvRGmukoDlzVsCtZhrhLEBOaKdxqg6YvmVKV4Z8JNHHRShVwnM+jaAJOwJjbkY5u
CmMW7ELduJrmVsEQjMGuSEL0ddvB81qtNctnGGYRNvEYwOKYio+SiIzUbUPDDHtTJyanAIGJYjBl
a5gFr1shpJ7ksIuT9iM5ihLAHFYMCnHRC5nEoXYxjEyHiAZa7REgfFHapW+Q5OciQ0rmIHZgpQuw
lbn7UZBi/WkETEDvVYv2FVBCcF+gQgalaIF5dwR8yREY1xxTmPeKFaoEkFCERiHQfHNkJReRzbJc
QMIpoCu3pbIrTLDIqM44SoeknYGVuO0oRcHdmtRQtcyDptjhnGtNhC8nloyFonilKhk2QC0xxP1J
hjtWqWPQypiE0h2IUxQiS7DNGzcgw9aBtECJzQF4AyHtIRgeFkY6xp90VTdLrVIUyVE0sNyJtvpk
Kg7eimOS3nHtQtQqTigAS4LaVET8RAdWxGMgJlzNiAy1eWSQA7YKUrgIuM7GgCJAJ2IRkGarKmPQ
6eWWHQbsgSI4gLyo2pW7csTtREPAMRtVoypE1IAoArnlPG3Gg7VO5YkNRGGJPYjK/Gcrp8RNSoi/
YuSujxOWHqUBZAtzGDBg29ao3IXxH2MC2xBvM5Z6O7xWqxdhzERtbV3uv+55eduPvAOF94InZKhT
iQOxMMclqnjkiTgp3xEi1AY7Vc1+ECjUCnKMtI24LzbkjG0acOMlG9y54IbcQjKVgmQo4IAJUr93
xSyGAAwCN2VAfCp24EStyxcYry4hhLEBSnYPmRNdOa03ImJ2EJojFCRGOxAQLNmo6pHSRkV5lmbd
68rmgH95cBBfAFarYePurTc4JZiWC1WwCnidEx6lUMc1QIicNW/YjKFRmPSxgA5J+hRtRyx+UU63
EBIbw6cx0yw1RoWQFicoVcxMjpPqRMJwuA+wdUiOwlHz7E5QA8UIEesEqIuA2xLwmbN9ZRD25E46
JMT+qnsXJ2jsqR9YXBdjejskAD/6701/lTvlA0+1VMoH84fkdPCcZDcQesXAL4utBidLuwJDHcvh
XjAuCJMSaPQ1UtQFwg0FBEjtxBUvMtGAh4nkH7WzC1EQlMhyTH63CE9Gn/TLA+pDyZGyRl4h9JQN
m9rMfZkNIRnC4zV0xmfqC4rcrkRjqiT9TLTfsEHPT+QogmUCMXD/AEh1wXoHtLfWqEHsKY4bCiDA
EHcgTGoDAglxnn2r4d6UBsAZ/wBVkfKuAxegkcu8fauKyJgDEYn1Er4tuUHqB/7snEiMqgpoziTs
cP1vs6X6mK0zAlE5Gq+6ge5EcUY46Ymn0oeVdlEjI4H1Mi18bhWh7wVWHmS7tLdyAu2tAlhLL1oi
WoEZgOECTpBwkaBPrie8Khf5a3U3dSqbo/E3RSPgBV2NqssAyPOc0AbmNclOFosJOAjF/EXPyTn/
AP8Ap/Of7UlzP+rc/aP/AALuExMCGoc2zCiZ3Y62wMg7ZYq5Kd0O4BI4hEdoRn5pFrCUwNQPcp3I
wmRbIA1NEy1e6Kox5flRpuEwtzeUtW0xDB1ouwYQJNRp9aGmYY0YZepNcuERGRLhebM6oggNuKEp
kknDYFpMX/OzQiIBoDhcIERjEjB8V8WPGQNVQA47lrmDIDCJI9ajzBkBCApEeyFpjj9SF++OOXhi
clgqKqxWK4iyoX7KrFcrzAGIlAnsYq0ZSMIiQ4o4h80Ia7d6PszmDGbdsSEJcxGMrUv3egSDdtVH
8VZhb10Ag8T/AMjIcz/TL1y1HxSjCZlGQzLSeqFy1z0tMg4E7cJfUIphesz/AEoSj9UiuK1YnvE5
R+uJRj+CjLfG6K/rRCnb5jkbgccIhOE/W8ormLc+SvgEggRAm3axQtjk78RE0JhxE9ihdjy98Thj
8P8AvQI5bmP/APH/AHryp2b4nsEQD+0iLNm4d8yIn6HWr7uApqNREblOEdRESwfElCU4EQOBQAAA
GxczeETpvtG2+bO5+lRiCYGe3Bu1QtzlqAzCnZjMkGpdaYTLY6UYzB1ClUIXQSDg1FwkN7oyGSYu
TLLNHVCoPA9e9RjGXAPFIHMoyuziIQ4QXqUZWpagKFDmYReB8Teh02Lcrh/NBKEr9o24nbU+oLyY
25GeBlm6hzHnxEZgS0EGj7UJXRbJPuuCpamYUXl2JcOZFUbcI6x2VQJgYvtXBqG1sENXBc2hfEi4
yuRWocUdo+SSvyFTSKJOJTkOcgtc/Efo6BaBYyGK12bnmg0nE1C83mS5NREYBahYMrbNqiag9iIw
D06JDc6FrURCBYMibhJtuACVGFsARACs8ywF0S0vmQgel0wRW9YKqodO8Jph4g1JpReZGIAAYAUQ
vRfWS7qUoS1SzRfLYtRFdi1yjplh2hPEttDp3Vanq1QbJMqInaVCEcWVCfMBwQEfGWB7Soi9aF69
MA3JzrU7Bko3eWGi1MkGGIB3InNYOmw6aGq0StmUTgRkVSD7HQEayljLYiWGs4lkYSLhF/C6k+Qd
/sWNFToZihEhu1CP0dQrWcBgjI4BG5MhzgChPRHWfazRvXwJwtx1CJ2rRpBj7rUV2zCIlZDODUAS
qpEwBtwAhBs+1eXoeMixlgz7FIO4AcHOu1HJs0CuzoAGGZRFuIJzBrRS84ESJMQDQIy5mRgIF9O1
YUwXhYbGQlIlo4RyRJAjmSAvh2wW9siqPMaNYFZAEBu5TFu4XPhQlGQm5rAVLd6idBhqzi/2LypD
VAZTDlu1EXbGiQxI/uX/AGs5O9A+C1xkLhO3JNdtEEZhDU+h/ChbiIygRWIXlWY+VbBpEDxKE70O
CIDWxj6kOX5g+W3hOSly/LHWZtqnkBimEtX1JhipWLngkCBJsHTG4ADgRmmhUnGSxWi7AS3kLVZm
D+YVKVQMw9EStMg0hgU4cJrsXj72xarMvNtbNi0y4j7kse5GohLYaL4V1o5DUnnJ9+K1HhhnKVF5
VjiPtTWNU4Xuz+gppBvRABC9MccsPkuPpyZWoEnbEIiMpWwS+mLMDudPZ5mU4tQTkQ31oC7agR78
dR/ZQ86E7czQDSWl2IgiJkMYzDSHrTwGk7YF/rdE8vzJr7EgG7sWR12hdgM9QEvoTX4G3R3DyA7a
ICE6nAEGL9j9auK1TgCTQkOHcNkvgyNulMZAdlXQELgul8ZNh2H8q1X7LF2aLnvzCJGoafESMPUu
C5GW4ELVpDnNqrXO0DLv+pEw1W9wqPpT2OYL7JU+pNCcpN+cD6nKkb2kiOImAJfQyY2RLaIu7LSY
TEtjD8qpdETsnQrguRl2SCY1CaVuJ7lIxBiZAiTF8f0nQlG5IxzgSR9MSF8OYMCcMSB/iVbPmgDx
Ch9QdCV6Okn2YvIjtoE8dWn3iGiO0phciSfzgnx39duuy+7BfI4epCcB5ZGyoPcUTbvTiT3RfsQE
ZjUK6jIkHuQ1xiYnEwBkiblmbYiURRu9a5AiJLPSnaMUOMVyXDISbZ1jbm5IxIwRGvR+lRCUJCQ2
g9D+ndNKQdMHTO3bRUr1IwJ0xfiRswIaIRldFJmgKFi3SUlEDBgU3yTn/wD+n85/tSXM/wCrc/aP
S3/AEnzYUx4hRapczARLgTLiJbFpYFC9KUvKJMYz00kRiwxRvQsylbBZnGvtMdiuXbViOmFdJctH
bKVB3K7ctiMIwrDgbVHdqdyrt6RvWYkCVpwxO5gBijcvatU2aJkZM3aaIxM4iUwHrQNtVuM7hj5h
4wBSmxSF2MpwgeKIOkkAoeVyw06tWgh46hmRgV5kbUIzwBEQ47FenZt6mixAFSVA3ImEpSJY0IYq
inbNSQonNmPdRMVCBqNQfvKJxUiaRAJJwopweRBPATsTCsz6yd6Fy8Hfwg571UkwP/Kq12FP9Co4
6DcvS0QHeT2I+Rbt2IZTu8UzvEVXm7h/QiIj7F8HmDIiui7EF++qib0Iwly59nA6qPmgBkrUObi5
MI8cciQn/pvOnR/05Ex+qiFznZm7MUERJ/pKlG7LF2i+3JDXg509j9JjeuwtFnGsgfWp3bl4XSDw
RhxKcrdq5cvTxdhH6yUZ2OThIbTGU/pDKdyUvLhnEEQAG4CqAu3iLBrq1E/QUPNuXbpzDiI+gfav
LFmMrsxw24vcuE9lVGUofhOWFYiLazs1NgoXZy8wPq04mRQN+MrUjjGJYUz0rXMm41RGWCiLQiIQ
DEMzDcpids24xwc4goytnS2T0RlcGmNa71Q1RJwyQkGfCqaOCa9HskhEMzMFMMZ68AMlKV2XGHIB
LU7EIwZn4gKk71Ozask5cYRLMDXqiNm1KZODBCXNyFiGcRWSHwRdkPaucS02oiA2RDJivxlmAlE+
IM6PNwuNDDTjXYjYL6gHMjkFIWtUZH2iXqrlu6QbkDlRwiNOmYz2pqPsK2bVtC0y444NijPlzpln
bKc2zCfZQ/IhEYlRsxwjj2oALzLo1E+GPQKcIxQtWzxZkZBAy7jmUCmBYbulip27sRKMnIBC8uLR
A9kUWmURdjEcJwPYjd5nl7luyzQeEhEetDd0hs+jf00TFVKdnRAIAbDMo+WTGQxiUxAHYGR04jAF
HWga4Vy9CXToKA7E4gNQzo6rTeojnJ+TciACWJEmzDKELD+TAvqNNR67HP1JwEJCQkI1MQhcjIAZ
oyNdyBwY4LQfC7oeWx94L4hAGQVBVYsFj3rW7g59QRGJQAQtRxOIWkanfhZQgY67pAoFC9YiNUfH
AnEbEYQ5Y+dsJo6uTlDXPma6vdKMrpeU6DoEZmk6OVKUI6oGTtkQU5Gk7FjTo8q2NV2WAXmTIBav
arItAmIk5fNcSZuhzQBGEC1sYlNGgRhEkjOILDvVBRWp2neZYtkhE45rVKETIZkIxtweZxTCRjLc
gY3KHFAanIzxKEjnsQBBHfirctcZkGkZ0HeVo5gC3HMxl5jdoiFK7y09E3cCfC4UYWpAxapBxK0w
FfqQJGu5tyTSjCAGYDFCbORgUGNNgQLVOCNahEjxFStTL6sTsRPLnXHMZppAgpoTIGxPORl2p4lt
y4vh3cpDBCN4a4ZTGPrWuzpvxGIkOIepNcsCExm5IWnWP0Qnge0FVBiduSYNIbE+khaLwplLYtQ4
oHAhUr6DzLUHMcC4Fe9Rt3eWiSB44uYn9Vabtu5buDxR0uyBhejXKR0n1STguNor8y6bgEonIovb
aR9oEv8AWhKzf8sxDBoiv6VV/wBUvhwiBH1oxuWNGmpBkH7tqBNxhL2SC/etUYRmNoJp6l8G9K1m
4Dn1ujpuC+2GpogjeF/3HLmIGJiX9VF4jE5iQIbvwTwnGQ/NIP1dXFccBJ8XClKA0GQYsxH/ADJ4
XjKHuEkfTX6kAbYlAnxYkfq/kWm7ExqwIr9FCgDIRJwEuH604w3IagD2h1qbRP34HSVqjKYuO7yL
gneibYtgM2lzjtcqJvQmZRoWdgBhxB1OAuSjMVFSSOwghaY3/MiMRJhLsGtHVajci9KgT9UfyIC5
ZLZiL6o9oIVDJ9jIGN2PeWP0rgmJbQCCjE2xpJcjInayJgNOrEMCPpUZRuzlEGsJSIDbAY4ImyYm
L4FzIdhKc2zei1SBpL9jlCd2GiJLMC57wiYEmIxlpLDtK8ca4VCcVHUr6DYqrHuT6Iv2BCTaJDON
ETrlqOYLIiEhUuJ11IaZiYGLeL6UPNgTA+0QxdNYBhtJxTyuSLb05Lk9HBMx7CtN8ahlIYptZj2h
CPmhzg9E4II2joboZblTDrRs2ImdyWQyG0qNmRe8Q5iN613DoBw2pxdIKeE/MH0rjEo+tDVX6FQs
d6EYcUjgAjCMjblHYWKN2/deFup1HFCbiop2KQd4xLBRMdnyXn//AOn85/tSXM/6tz9o/NDdav8A
Y1nrsXlzvQjMYxMg47UbZua5xGqQtjXpG8jBQtcufPnIE8JYRA94lQtctaJuTJBlcpCLbw7qMLHL
xGqegXySYlsdMaOjCwYRtGQgLrcT5sHUhZlejYpFwCSdsniE9vWLLAadRgZjMyEiKoR1xtW7YazC
TOMNWrSFovXhGApC2AZCG1iSMVKM7k5agxYRjTZGlFoFosQ0uI8Q2FCEeWtiMcAIhkBpDDANgoRB
4ZSr3B0JSBkqqFyJfRL61IywkS/YVbFusdIrv6L1y3JhAAPlQVRF+QlGEuEgN0HUGV6w/hk47JIG
WMog+teaC5hXTQClckBORNuRZmfSg5M/MFINitMYgSOwZ7AhdnEyiDX8gXlyAgcv7k1zihtWgVhK
sCqp1uRlImNm3SIGPcvN5gOcoflRhdtWoQOGoAOhe5S28iTUeEDcrsbFiUfLtho4kyjV6J1yUbBM
bsoaJHfDhKjHmJyjGTEa4sWOdQhM8/YESHfUBL1YoTv8xPmpD2LcT+1Jghb5PkzICgEpN/yxEl/2
/K+WJYHyyG/xXCyfmubNiIBJaen6LTBedzXMSk5ag4i2+TozHLG9aBb4kjX9Vlaly1iFskPQVrk+
KPLSuwjZdwRWVVGxZcxOMjntJQtQe9dFBZtDXLv2LVekOSsHG3bINw9ssB3IxtmMZ/8AUd595xUo
3YiYYvvXm6mOMYE1j2IGR1RjX85NpIkV4WGZKfGtWTCm4rTN9JyTRk52Itqi+Lsg14CTeElGJJLU
OgFz61bs2rEpjETOJdSa3G2RJicJAd6kL/PCNoh6FpdwKgbcrnNyAYkAgDdWqHl2I2oxk+qQLvsq
rn4i7KU5B+GgdGxf5eMoiLC6RUqXM8pIiADygzssMEZ80W0kNF6HtzQu2bMbZPuycHuNURSLBojq
mMg4OIR8gny5FyMkecu3IGFyOlo1ZG7d5m3GID+IOewYq5zVmUoW3+GMKDapEn/uLdSNoWmVJChW
iY1QKMrFyRGcXWJBGIKBMTG5tCH4iLxOBRny0qe6UTQkYxzVH1DGLKob00r8hX2UZHEo3ZDDBOfU
gZAgZDMpgPiEUjs7UZGpOJQbGJqgerG9AtIDJPqlrJwQleDyhEEA7SjbuREoyoQVfswpCMjp7Oo/
Q56zoscVgmNetXpxVOhhmm2IyjiBRQBkZCZYhW+XsB7t6WmKEOY5i5O6amUWEQdwIX4eZ1BgYy2j
rsgMggYROv2icFh3KJaiYD1riDfSsPUqDpKMTh0koyOJw7E5wRuXCxkaISABcOvNPYhKNN6JzQgC
1QzYq07kf3dAvWpapwYmGdERqlEkNGJNHRjdgWwFGRkeGIxdGPLRc5yOAXm3ZuI1wxJUo6tOmpI2
BXbekG5HwyNSybo1PReXDPFCAxzK0QqStV0ucWXBZ1xz0s4R0aowNdMwxB6PLtuTmdi2yOJT55BT
ukHW3BFTPNAR1UEZYoStuL0/AAVHmJ2rfOxOENIcd8ioz5nl5cnfdmoAQNoipTsXT5Z8QCN23JhX
QcT6lOMQTIEmhaq1XISlCNJR0vhtKkb1oQHs6TX6QnMjCGZmjG1MSGwFGQIJ2YozlLVI+oJ3qalE
Tuxg20h1qt+DKWaBD6XYlabkCS2Ua+tG5CB0b1UV6dJ4oZxK8zlp6J5wWjmo6Lnvj7VrgdUMpBaZ
E9qymDsWGk71wnUFxQC0s8DjEo3OWO8xzXGGIxp1oxAckqNuIq1T0DzbcZt7wB+tP5Wkn3SR9C/7
fmZWxkGb1mJC0xmL0dpIP7WkrRzPLNL3g4H2hNMTtyzBD/sunhdhIdoB9RTio+ZOIA9tUJSeMxhK
NCiLFycJGp4i0u1kTblCUW+7nKUvUh51kyHtRhCTj10RYXABiTGg7aoSkITEhSVHP2rVAytnJi4+
lDyuZMxmJU9T6kfNsC5EYSiatvZ0Y3gbUv1h6whonUhwDwkjvVOpRVr2o67Y7iR9SHk3ZWzEMP8A
0GQ0abshiZHHup9aM71lgCxjES1duxu9HSJACpJjQNtXDOMtgBB6GuQjMfnAFCLGMQXiImg7i6Js
X5CjAScfs/kTztwv1rLGnYNJX/cctK2BhctxIB7RJNGVy0ThKY4Se5CX4mL+yAXf6kRCczseR0js
4itFzSTGhcB+7B0ITsGYIBBETE/S6MpWjGILHiGr9VDTcqfZYuvh3ATswPqKk0Q8qSbMHayE4DQR
liPVJ09q9KNXMH0xrs04Im2RKnhnIyruNENUdRziImJHfgVNrUmth5OQCB2FDjqa4GieMgQMW+1b
vQP1tF2OoInl2nDIOAV8S2YjaRTqVVAndimjOQ7CqXJetfey9afzCmM37USLhG56Ksge0LihE9lE
x+HL87BPEiQOzpnf5k6bRtShq2HVGX2I3BFrUaW+wFbunjAk+1PEGB3IRB1QlWJU/wCpXwXAIhFX
OZuNbsly52KVu3MmIzC0SlqgAwBRmcSX6zgUTken5/8A/p/Of7UlzP8Aqz/aPQ39sWBqvLN6Am7a
XGKMJTcxDy0jUIjeQowsS8+Ug/CQ0R+cShCxbJm7TnOluHeFctWOXjKdsPK650gEO7f3oW+VlExn
HV5lyIBj+qrP4ed6Mi/nXCDIduH1LRK3KMtZkeYkdNyUe+Tq9bNwWbFwhxNp3JNtIUj534e1KIiY
WiSZb5GSiZzlpjHSIwAg4/ONVE+XIiDiETIsHxwQbl4cPhBDgdjoCMYgCgYD0NkvwnUO9NKifFsF
KAFMYupCWwFW7dutMd5qSiBIgnNXYX7fnWtRE5O0jVWhyETGEjxAvj39J5q01qw2mVyftEe6BsQn
KQnGDCRjSne6e593mAQSdygOUhptxlxGWJOTKJuF5CIHYwaiBmWfLYEABoAozUXDFj7wWkl4/SuH
AGj5JyExoVJthUI+6DI9qEiajFXL9yImSdMIyqAE1i7oj/07gePrV6ceXs3JaCxhPTqpkGKL0Oxa
wCbFks+QMqqz/T+bfnDI1vTGk23yicabU8T5sRlGf5EI3OTjGY9qb3B/zIfh4W4xy0AD6lQogmhB
B71LkL4q5lbltC8yF+Nk6tREqAlEDnIwjgZRcD6mQjGcuf5iRpVoA75FAczehagcbNgkP+lM19S8
uyIW9ugVPaTiuLibaUGEQdqkAHBoHWs0k7UwQZzI0RtsxFa4shblejGUg4gSNTKUxfE5RLaYhyex
RjZtTuxlTUzV2B1O1a5YCT8BPs7mzQvXZCwI0q31VVz8RzNyVvGIf2tpdPeuEiH7wnh71ct3LsJw
uVHlgzFNrOja5SyQ3h1Yt2Bf9xdNu3IPIQodwXm8zCd6YLx1PpPcjZ5e1GOovpgMZFaL0REyqYgh
3QvcpLXElpRNCyFwcdo+KJxHertyPhlbJY9iicagttQOgW7wFRHh+pNYvCcf+ner/wAwWu5ykoXB
7drjB7kLXMHTvkGKErVyMwdhB6HNBvTRkCRsRF+UYwPvED61ctcnLzbVyhkATCD5urkjMXrkzSe5
GcqDahzFq5pMctvcvxfL8NyH3lvMJiK5rYmuBpe8FqjxwyITS4oZxKBty0SzBKeQ8ueUhQFPdhXK
5FAz4ojAhPH0giM1GxDwwx7VGEcSWURJhT3gyedyL7I8RWmxHT+cfEnJcnNMpWZ+GY+lGBwVQnie
k0qKqF8246wKyIzCFyziKEZEI2+T5UyuNWYqAn5i3K3cNTqDOuxN0VTj0O7qY9GxbFisUVisVGIL
ueiQlg1XXm24gSyVjmDFxakJHsXnQvQ0EVJIDIeTLVbtDSJDAlYrH0VFh1iRgen80Y9AtxxkfoQ8
uJlEjJW7ONw0KHhjE1MifqQs3CaCmxEoB8KntUZw8UQtMwxCeQJBpwlihdi8SKvJRsxi9yFfNyC0
SaQzZXJ2YyhGQ0gkvXaiLd7zdQfUdquR9uYYBPdAjflQB8uguhGHgjjvUr8svCqYp8ZHE9DgrGq0
QqSt+ZW05BClcaqEI/eTAJGx1HURclcoI7O1ATMpXWrIH6lLlqXBiImmO9eVzNowmPutDSj3yxQt
WTKFyZ2lu9GwIjmIxAqIl27U/MQlYun3hRfDuiUDjpaqMJWokHGgRjF4RyrmtELzTOFEdMhqAwBd
0TO2ZDAgjBHSIxMsSMgvM1He4/Iox5qfwcpZd6AsTjIbAQnIdStEPE47kbcbY/SzRhanrbEbExHQ
4xWm+NQ97MLXYl5lo4xxR0fCu+6cCVxAjYULfNWx+mFqsyLHMFwqXD3h00uKj0XC4K8u3S6dooe1
HXEwL4AU7lSv19IX4k1kcBs67J7lqMjtIr61qgDbIyBceqS/7TmjGOcTT6vyJtI5mO2mH0Faea5f
y2xAJMu0BUMmzJiQB2oNci8sBqFfmXVO1EnM4P2sjOy1skMYkao9o2KMLN03Ig1DDUBu1KUuYsyl
bjhMACXeHURcBtavDIkGPrGC0G7bm9NOoF00reGBBNOyqazdNsgvrZ5dhqzIvIXSPCAIgSH0ELRd
5cwLP4gXbZtT+Yx90guFqgRKO0F+sTO3GROJIRuQixIbSQJAbwJZoGHMGUPaB4T3YhcVqM4EsJgu
QPztP5E10kS3AmPrICDFnwEuF32OqV7FXA5KtqJ7kTa4AQ2kjUB2OUI2bxnEYxLAtuJR12zctAYn
SJfRigb9pogsDqeQ7qFMTB5DHGXrKjxCcY+GcQBL9aKlqGonC5qkJDtGBQAuSEX+9gHl6gUCZyla
fx0B7waoC7GM4GokC9N5gtUrXA+QkC3eE411wp/enEgJCjSofpQk8TcGBBD/AEIyYwmS5kM+0FPa
ueVIBuEBi20JzMyuP7RGhm9a0ShCDB3Lse8ICVueolhRgewkppHyyKtPhK4JgnYD1d3WIkHicQcE
xgIH3o0R8u4DDJ6FVuxfsVJxKJtxEwMdJH2p9DbiQCuK1LuqquD01r1XtzMewr4zmQzCbVpOw0Uo
QInKQYjYhPYUJxwPVtnUNduWGbKHKWI6IxAB3o2RJrZoycnrxG0oRMQUYmIYoyt8UVWJWHpOf/8A
6fzn+1Jcz/q3P2j/AGre5IQH5xZeZO5EQOBd37FIyvCBiHIlwltwkyjcuRnG1ItGbY9wqpTtWx5c
cPMOmU/0Qtdu3+H5eNJXJDUX2AIxgIwtRqblYynHZHJ0Y8v5tvl50lKQMpDsOKHlRlY00FzwGY2m
ro2pzHlGtZudW0tFyg3Mgj246S0u1pIAzm4LkjSK94KMTbMgS8nkeI7SzJo8vbbN4gk9pK024RgN
gAHyHRhIVid6IkGILEJn4ZfQgbZbYQjGUjMigdW7U5ajAM6M42zM7Ap2+Xuw82R1SiTXFRsXG1wB
JYuHPTYFrwiIHfmrhlhpP1IcuJny/bg7gUWqdGqBkFrnn4Ibd5Tk1KDn19GKfpaeG1RuZOYlcBDG
uGKkJQPlyqQ2HYnjOJ7aH6VEFjGA7Q65kRDAzcd4dX+UhMRhcAkQQJYUo6JNkSJ9ojT9S1SszttU
TtydvWmJ/GWBi9LkR2rzeUuMR4hmP0ooQ5iOiR8MvZKYUJVidwiNswIhI0GoAvVRhyUZc5ciS4iW
tDtngn568LQxFmFI/wB6ha8sR8scOjh1bcla0+GMdJfFwnppOBZPemIA0cyb615d25EyFSQS+1TF
kTlMDhhpxVu5Y5bUHaRlUE7AyItxt2oNQMDHD3tqM7vNPOR8OokIG7KU/LoQS7lT0WwTLEGrDYFC
UBG2I4AhgKoSu8xAyFDGB1EHsCAs253t5dn7FGEdPLiNeEM+6roXubvyuCbmYBYOMEbNiIlcJfUR
qbvXmHim1ZYKIYOPaCt8vCNAwLVJT8uYgirDFEynoIxJxKEQdTZpiHByV8RDAQLBWgavIP60L/KU
kMYheXzIMZimr8qYSEuxabsBMbSAtVqAk2Q4T9C0zjdgRiYzkPrK0y5q9Eb50QNu7eubWkR+RNy/
JCUsjcMrh+lCEoxs29haI9QUoTj5kyOGQw7lpvQMdYpq2bkI26k5KN22dEogAge1uKjztggg0lEZ
HetWH5VVMajYtdvhnsXECGwkMELfNDVHB1qtEXbRxgUZ8nLTP2rRWm7E25BORqG2OPqXDjsNCmkG
PoZX59kUZHElCZi7+FDXBolCQw6Wj3lOaDbmiBxTA4TJG3etiMh3LVYkRIeyc0x8Qx6JR2hlK1bL
NLFeXMmQFXVsWx4g5IzKlcmOO3IaDmtnVxWPUxWKz6dirIKsli6oCqCmxUYfSsW7AqzKqX70ygd/
RcuRxQtXTqEsNytcny/ju+0cAMyhbuTmbjeMEY9inytyb6WMZbYnArIqo+lO5CoQtq2J36D0burT
qb0wLk0CbPM9HmXDQUj3I3BhkUJk1iXdR0zEJRDGJoaKFu1LWYPqIWuWGACc5oxI1W5eKK12CJA1
04SBQJjJgahlGJnoiA5B/ItFukHqRiVbAiIwn7XtI6yKGgGaIlHTHIoRdaGE5jDNPKgGK0xPi2Lb
LIKUIB78MQMcXQAD3XYg5IEHt6CHwWiFXW/MlAM8jgFxjXL2j7qlzggLsgHj2oyiNVyWRwiFG9OL
ycEzxDoahKM/dZ1O/duCxIgC3GVQQNqNuzIgHxEsYnsTQLzPinsQiSS2JKa5CM/0gCVKVi5O1I4A
Fo/Qhq0XYDY8pN9CEBblbJ8RmGZcMhO5jQpn1SOJREhwhG5Zt+ZbkXEQaepf9xpsj3BUoCxxW8+L
7CmtwnAjGWmnrQkL2qWJE5t9C1SDx2Qi6lbhbEJGhkSxRiKzGIRBAJ2EoTiQCclWJ7VUJ4EjcuMe
Xc94YLRzEfNtHA4+orXy0n/MPiC4SQ2MTghG6fLub8ChIYGJYqmKAz1LTOOu1LIo3eVLtjDMJpcM
t6YihwOSiZnTE5rzeR5qM6eACne5UpczaldtgVkBEEHuWm78GTODIgx9aPlXIzbFi/oastVy2Cc2
JD9rIysy8l8YgAjufBDyOYuXYgUjKekhRF6FuUffAlM9+lAXuXuRmaOA0T+syL3BbkMYzoVqtzEx
tiQfmRzagXxeIRETKEZF9IYt2OF8G/O5bApEy0yfvDJ79qMx7+rD9LSjC60CKiQOqJQjG4CSaAuH
7HQlKA1jCQofWjOBlbmS5ILglHRdEoNSJ2/4gUI3LGqJZ5gEAerUtEhKJNBR39VUGkK5Gh9R6tFp
mNQORqEBKGAaJBNOyq0WbvlkF9eni7CXCl5kjdkPABpET25haLlnQQHLyx7KJhIg4MxdPEgjDocw
iTtIBK1AG3LbGn0I/h704E1Idon1LVblGYasZyd+x0fNsgafZ4nPYcF7QkfZ01REJA5GJoe8FMIB
tjlvUtQDfmsCPpQlbA1DEGgPqUqtLGIBBL9pC0xncMnrGTj1SwUh5gIiH0yPF9LOjptayD4WLsh5
sDAywGf0sjblUjKUX76OtMtDENVgULbkwBeLM47JCqlLzJEEUMiQYnuxQhb5iFwZs2r1IgwFyIqJ
hw+5qqPmWzET8JHEOxeJu0ME4lEjKo61OpSi2rFlxwjLtC8GjfFcJ4NpTdYwixcYHBeYIBmwiiDj
s6ItknYttyTTkAdi0W4mR2pgNMVpck4koxgHapOxaHrgm6/mSFMkw6a1XFEJ4lk8ariiQsCB6Dn/
AP8Ap/Of7UlzP+rP9o/2AZN8xPcmIDbIsjd82JtxxkCGQjK7ofCUgYxPeVPyx5sYeKQkA/6L4qML
MJAyrKVyJYd0cVIw5eMnOmNwEt+rin5ebyEXlC7EVOyOlfFs3RclJ/OGrhG6IXmyPmU0x85gW/Ry
QNy4LemojrJD7aBPzXMeZJ31aXl2VKM5znJw2kNENuYKJlaMxDwxlIkDuQ/7e3TANT1IDSGGAb5V
+IjgWEjsODp5YHE5KIdoglmVHJJ+lQN+Om41QpGRZgV5j6LlyTxIpJR80mU5RqTjgmyAcnsTxjXt
KnPlbmnF4kOCRuKe9e1NiIjSO9HmIQGouxO3ajOZaAyzkUbpOOEdg6GkVihVbkxRFqTSOBGSkL1y
RMfFqLiSlKcWhclIjsGCNi4NUJUtzwZS5cylCQAYkkCTh0PMgCfeZGVi3bIArKTsF5tyIgbkQwiG
FKK3GBaUiwqyjG7xU4s0eZtHRMYg4SUJi3ouTdjHaCyHMcrPy+ZGYoJdqlY55rV6AeUZkRBAziSh
D+lvdh/1Loa0Owlie5Af1WZ5gYwt2zohE/o01d6ly8527crFNIaPD+iFK4JmcosNDESL+6JMrMuW
szlOcsZRIjHTlqwPcpixy3wpkS0yBIgSKkyCvHmOYFuzKhYAvui2AUY3+YnKQNCDQDcCoE2hIw94
u++W1SnGxEHNhioCMNNsFhEUZUHmW5FmO9AxuCDDVKOr7FE2YynPGQZmG9CNm0LMZ+GZcEDaUf8A
+Ic1KAhSDMRXM6U4h5xiGBnUEnNkZxgLMTgIhvqXlAVqz7lK2IESjVhVagGlEtgvKux0E4F3BUjo
nctx2YIPbMDuLeuiaA0zdwTn3p4yeEsVAxLSXEKbVfYmMhAjChVnUWiJRcnAVX4e3deeAORO4p74
EdksD618XnRZ2BtX7JRhy12V22ParFz3oR5ex/iYyXxpCMdhIH0BPzF0ybKFPpKa1aYZE1J9a1Rl
piOxAO7YqUJdxR5PnaN9xdOR2I25UnE0IzVmHParVqZAdiKHMIQsx4JYPUyO1EwwzC1R7lVPmcVp
uRBG9G5YLj3U4JiRkcEDc+HcGJFHQletC/ZlhMYhPy0yB7poQuOAuD/mVCYH3Zhwn8sHfA/YmNDv
6wiMSo2Y4RFe1RiMCQuXtwaIjiUdRHlgNB8yowzboEYoACpxOfQ4xXxIgnaKFCUZyi21ahicenTc
DwniE1tog5ZrRHTOAwjJ6d61XbExYGGiMtPa6I2FuihfqYrFeJVkH3KhJPYsFSI71SirIqslXox6
WCpEnuXgZZBVkAgTJ2PRdjPAo3I1lk+Stc0Q4g4l+iQxXmnmLcYb5AH1Yqd+1A+TECEDtAzVQR0Y
raqEhYuqh1WixVG6Meo/U3rWe7oLYmgUDbDy2KWrxkvp2KUpzEIyGLPivMty8wR8T0KfSBHYyOkM
dilA4xOB6Hi77kJWySxzQmJiNy2GuRK8q2dUm8WSEMZJ54DALTGWgjB6B0Y3JAQj61ITLyFHWm29
dq96WxDW51GvYoczYkYzkWMDsUZW4wGocUiM960kufoVFohUlbScSmFZHAIxhISvGkpe6E5q+ahb
5S4DekCZwcEN+VSskaOaHjjPxHsVw3iBEhh25LTaoPeUTda6IFwLlQiYxjbgS50hgOxCMMs+h8Uy
qTp2BarkBJttV5lqRtnIBlKVxrg3kfQyAuPAyNaEoaSDE4VQlHPFOcAmiAYEVfArz7cayrKH5Cms
S05mEyQT9i0XgYDaR9qpAE+8Cx+hCdwmMszLBRNm6CM4iSHlcQPiBqjKdnzDtAZlruDQNj1WppGJ
zC8iUoyGT0KN3k5GldOa8rnIESFNYFR2rzLUtUNsULc5kgUDoEFWpe8xRF0OR4ZZqdsz8k25MdpG
1RA52Eb5wiSCT3BHl3F6EsCMkITsyIJocvWhHCcqy6DCduLSxoHWuxduW7oDCQP1sjOEo3o5wlIy
Pc7J+csSjD34RLDcdS0wkYyZwJjS43J4kS7C/otNyInHYaox8mMTlKIYoyheuRmc3H2BHyb0DFvD
ImT+sLTdsyuj2mhpbsOa+Jalbi7OSHB3xWnzQ+2retAggg4H0VfkNfQGMgCDiEAdWiJeMNVA+xSN
u4ZAjhMpyEonuURMQuZG4BIn/Eyl+IiYacJxBMShESMZS8OoGL9ia5ESzDh0TpMJHGUSX+lD8PfM
GAAixALZmpXFGN0v9FdjFHzrUrZGPqfNtiYTAOyVDVPEgjdXqshKQ4o4SDg+sI6HhIlyXJBO8ErV
ZkIkRbR7JO2pohKUTckKThpYjeCKFGRtmMYliXBI7RimFyJ70GqFRAzi5GBwI7wiREiZ9oEk/Sgb
N0QkK6qgntTSEZBqSjHU535puZsygdoGPrTObZy1hh9C4ZgjcQqhzkhqFRgcCO8IX4yJjgQS5dPE
kEZha7vxITHsyYg45p3HlmnvEDezI27czoOLEt9KEoEiQwIWqUjOOYkXCjK5FoyzFapyARLMA/Yg
YSDR9mh+uqkYXCH8MXYBDTLVL2tRp3Ii7oi1alnXmzcQJYEVTi4w30TwnGQOYI9CYziCM3qgeVgA
+LlaLjPur1TM4CiLYqzPltQ5uQHnDLU3F9KE5RErbtQ1WnlbzAh9JKNiJjpJck1Ndi13ZkyOKeMa
jNbELMY6rgOIQ1t+Iv1fYCn64fwjNCIyTdRukah8GJ4j9iFqVmJADCilcrAnABHy564qo6vP/wD9
P5z/AGpLmf8AVuftHr1/spxzjE7yAnuXoB8ACCT2AIiV8RMcQQQULghKVslgQRqP+F3Qhbtm0GfV
eia9gijct2ICrCYlU9kMUBaMxEivmQMq7tIUibM5ykeGdYsNwJordy5dkTAUE5Akd2CkOavgSkXL
Ey+igQN2+TEUEQH/AGnVRM1fFkHtajg8iSW9a+FYhDeIhcIA7B8xStzDxlQhSgawBrvBwUCC8CSw
Vu5PwCQJdahUbU8pyictJWnmBHmIF9BMWZkbgti3K2WkAiXGmQKB1girNuVyb6i8iIDNxSqkbnCJ
ychGvBEcR+wJzSApGOQ6cFQkLF2XEG7VisVqu24zlkSAUI3H0YNFgtESNwNChcEgDDCWbL4V4MMH
kR9aEOYviNr3YVfuDLlbgiRCD23ljh/coSdiJCqPL81W0RwXH8J7UL9/mYi3LCM56QfWoW5SiNJY
iAMgN7hS5b+n2JTuPwXTuzEVdnetm/zMW0zkWjDdGOCM78mNvC2OGTbeFWJnnpSlAATiaSi2UV50
5TuEYQnUPtL1Klb5BpAR1Ttnij2F1atW7WkQHDagKA9yuG+WuTqYPkvJF0EHxEAkP3BfDkJx24fQ
pSlciBCsq4KJtk3ScRGjDvZGzy/KNGYeErgIb845I3L5MQ+Ao2/hUeb470maQmXeW0qOi2IRlUkj
Ed6loAEy7uoW4tqDCZ3I27R8OL5EpxMgfQoyAB1U1NVAmbGVFKQAkI4hcRNMFIWbh0kO2S+OIkQI
cnNE2AB2YBS8wiUI960waMo1AGxVo/qU+Ts23Jjx3Dh3BOhyxPlcyS8L0jw9iFu/fBhGgeRIbsAQ
jdhK/E4eWQPrRuXrBtxiH0yLkoRtRIAxDYJoPqGRDKhxUgMY06jXH80/dRj4pHYhL+pw+PANajJm
bfvQ5fyI3RbkIxMhgVCXMWY3JsDxDUA+wFS5mxaFuUhpkIinayMTW2Vqj0P0NIMdqeAeG1StjitH
xQl9iJuSNi+MKYrhu6ojACpQhcGNBqqEDMi3qwMS49S9m9HaKFHyyYH3SuKGqO0JqxKeJdMVK8cB
QI3rnDbJrIqM7dxzCpcFa9JmQKPgtUmLYCJw7kLcpVwco24xlKTtVk8283PcnyGCp1sVG6MYlNAs
ArJvYGrHNkzU2KflARE4iZA2qpVAVQKgWLdy8RVT1K9FASnjAkLwFVIC4pnuCqSVxCnag0IiOdHT
wAZM1dnTRMhuVyZDuWAUoSGmQDtuUOTt0J4py92IzTCUxcA8b/Yrli74oFn2jIrBNKAdGUniNgKM
rUyGPhOKykqwNMwuIH1dGKcFUkVi/anI+lbF4lQ9vTQrSMBigBgOji8MKlEwZ4jDNG2Q4zUuXuBr
IwnsO9St2ZeZKYamA6CO9CQwmPqVVaI8Vw4q3GGIDl80ZCWkXpOaOxdSjIPDBydq1246oFqoTtz8
oAYZoSjMC6IkuT7W9a/BcFDKO5CEoQlM+0zH6FK1fu6C1BgrlkSEhE0IQmfEfCPtUYk0jgOg7Vpj
UoMMfEUIRrM4BStQL3TSc9m4IAB5HAIahvIFWUb9iUo3oFyFEAab49uJ0yBGwoS52cr9vKTvIdoT
2SJ9n2rVKkBiUIxoOnDqbVqz2KNucNWZyb1IS5aZsyHbL7UDKP4i2MwAFpuxNn9Ij7ERakJA0oVH
IgViRj61IX4M3hOfcyJOnmLTMLc/F3IkxNmZOEX+paojzobwx+laJSHL3MtXD9LLXGcZD9LUD3KQ
5ixrhmwYj1owuwEHzIC02pnSPdLMpRt3AxLgyDovKU4jw6SwQt8zyxI/6grL1ha7BMoZ/wB4TXPg
3dvslam1QylGoQliY4OtMIxiWZ8UZSkTKWICi8TGO0qEjbMhEeLNaSGbLrGMhqicQahGPlRiT7Qx
HYjPl7+iQDACIr2uUTdtSvQA4n0xr+ayEb8fIJwJLjv2JrV2M5bAfSazbiZ+8wdGfFDV4hE0KErE
+EBtFwkgdhUIwueZEeIxi8x68UZ37BlF+GZaP6wWm81ss4IOqP0IRtzqcHBD9jps/R1+RnqGMg8T
iCuKBIGHFJh2VUvJuAPWMpOJR7CChEyjet4axF5d4oiOasSjslEEP3SQBJt6vDrGkHvTCcZbnBT+
WIy96IY/QntXZ2y70b7GKJhPzPdEiGPrH2oRuWYiRzBIHrqEYSJjMFtJBNdzJ4SEtwx9XW1EB9pF
UZweJlSQjge0KMrc5GINbZkQG3acETbAlHZImUvsWq4HDO8Imn6QKfTKIJYSkGD7HVJRJ3EdDSqD
iCq247iKH1hHQZQfsLL4d7XEZEsf+Z1x2zdt7Q2r6FOPlMBV3Dht3QYACcCXYrSOCGwLeqUTYg5K
DR8smta1bYovpgYj2BQ9rqRMJC57MokGJO90I3D5ROFwVHejES80tScaxdCV9qUDYJiTpGWSoViQ
mlMmHbghchc8yRODhgGzWjTEBnMqsnlE0o4FE5BiDgZUTag4yfplKZFA7PVERaMMGz9aBOWZx6jD
NB/FKpUtbsRRkZahatO5nJf9xzdycvzQw+oow5Tnzbu4AXQ1e1oom0fxFvKVs6voRjdg1wZGipbA
HejDS0j7QQ/qPOBwKwgfrUoRPBbpEdcR2oMKnop1hbgOAeOWwIWrYYALXLAI6ThiEXNCsFUMqFUq
ywXP0/8Aj+b/ANqS5n/Vn+0f7JVp2pzIAbyE0+Ytg9q++17oB0TCFyTbgE0OWEnweX9yLWvKpw6B
rrvdERLS92MCJHvZA6b1mGEtMq/80kTK4DGX/Uk8u9AXLsbchXXAkn7EZS5iUzLEmIJ9ZT6JGXva
iD9C4rQukU1XOM/SnhZhEjMRAVA3zUZHAB0bsqAUAyQiKCIcPtKCHl6pCJqAaIzuHQBi6jyZ1eY/
DIx4SSNq5i9zFwT11kTSICMeWOoPSO0JrsJQP0Kbk6TWgqmEJmRDbAEIxjptxwiPtXhJ7lUEbaLH
6FSQTgunTLBticElVeiZ0K0RgbjQlRiafSgJ3g52V+pcXmGEXIlGLiRCnYt8vptRkJ+ZqqG96lEY
zmIsrdzmubEYEahGdAAc5SULfIzuTEKS5i/902egEOozvPfuQwcMH7EZ27UI3Z01iI1etSBYRlB9
WDHaoTu3oQlJ/GWBRnanHU9ZWy30LQL1zSd//wDciIkQMvFcmXkUZyMI3LnDbMzipQvXJXpvWNsO
JE5alKHJcmLUhjMxMjEb3Chcu8xG1dygXgR2svMm90ZvVztK8idmIJLuQ7nvVDgmLEnF1phERGwb
VIYmCMJUi+JDrXGQls7FO9OcQCKxJq+5fCjQZGrr4REYk6q/YnvjCgDVJ3KMYDS+AfFTt3IvKIcb
UdDi4fzmULs2MbjsXc+pA3B8OVNVosO8KV+FzgGRXmTuR1mgjmEYO8Tg6uTuxHmQi8TmowwEiA6h
IzMrcWJEcXG9RgSzigTxLHajaOkxmeKRFW7VOXLwjERIYtQsozvXo0GoREQMV9S4vBcxW0ZHo8mx
HzeYPsjCO+RXn35ebzMsZ5R3RCMo0uxHCQi12MJCQkRPEsclGN2YhOIAOqmCMLB15uK9yM71kxtS
NAQXUY3QwkHDrVFYrFx0NKo2I3IkwObI6b3lt74YetGfNc/b3QtjVIox5eMm9+TOnEiexMJuPdK0
34UGJNU9mel8sQnuWwfz4p7M3/NNChCQ4iWURzRMeypfsTwukW4YRIb6l5Vq6BEeyxi6aVYkNtZG
OEhWO9AyAEhngUDcnIhMMTiU3oJAoXJeKNCBmo+WQDbLxZcVuOtvG5b1I35S1TkaojqUBKpAlOYi
I2lCRkGOYVZriJKwXhHqR0gUWlYdvQy39DYOi5oiczmtioariHRIZGqmBQg0KNyZ1SIYNgyjzM/u
5RMLm4HNeZLmYaSMNVfVirt6AbWeEbhQIyEgWyVRXamTinRRMQgJAOe5ONQ7E0blN6LASbYuKBHc
quFl0ULKksFVisGQOcqnoJ2KdwFiS9VO9cbBogHFTv3Z+Xy8aUHETsUp8pOWsVMZ11fR0ttQ0kGU
Ms2Wpi2CE5wkbBNZe6dqs3ZTYEMJAKwG83UXBCbno6DHwyict6iLcibPhGpC6A0ZxD9oWqEiCgZM
OwMniSDtFERF5XblAFOF+2fNBrFsNidmkcX6N60x7ytr5po1kcAmIrV5K5dhbIE5HALSA3MPXUPA
EBamTIeM4uUYShEmObMUfLPlzAcFG3I+aImrVKEo3TY5gezKkSd6EOahoGAuQrbP5F585jy8iDj2
IQ5aFQamdfqTWz5IbCOPrXmC9PU+Ooq1fn4pCvQzqrvsCcDiyTkvtWCOomuCPmQiXo5AdH8NKcZ7
XYD1KMLp8+3HB6kd7JuZBgRgGJdSuwLW3o+XctEoebPbn3L4dwacoSLy9aA5kOT7FyLHuKE+TuGL
+xKsUY3bIjkZxcj1LSJRuSlWTYokGUbm1yj5N8Ee4z/TJfGsagPbcD6k03jLMEEoeRMEnFmda4kD
3gShO1J4HZxRX/ccpblLOUeEpxy0n2PT61/29sWhkBUr8TzkTCB8NWJXlanjHwk7FKQDROHojrtw
lqxJiEDG5cttWIgQG7KJ7NzzADjcmSCNhitPMWQA7GMdRl2jatIkdjmJAfY/pWNQdqMPLiIy8QAZ
1XWYjCOqg7F8G4ACXFwvrCAlM3IAcMrcAXP5ygL/AC5FyVNRIET+RS8yXlTjiDV+xk1qYlLZgfUf
mJpVBxBwXFZieyn1KQtzMBLaNRHYXCAhfN23nQa/+b8qJ5ixKUBhMBi28B1HzIG2J4ScGPeU3mwf
9IIawJDEH8iMmMZ+8DVRNm80RjEvUd7p/LF2L5Crf4fyKIvRlEkPSoD+pPGbZDU8frTkhtr0Tggj
aK9NEQagrToGl305PtZS08Ik9GBZ/dUWvSuWsxQSH5U0rQlB2E3q2WpnQje4SQ4I4o+tDTLHBwQ/
Y6fLouSlEFonEVRyVOivREzjw5E4KjMmNB9CoaJluRnbZxXTREXIvE5p4YbOkNI6cwCQPoTym8DH
VFjqDHeiblpwDR4kg96BvWWajZt2FGQiY6wxJAkPtQa/pepBp9SM+XlU0HFqKnq4jMNXrRjAOXyQ
gS8wKjNWI8raNvmIiJmH4RQuC7vVaJFyatsC3IXR4LjSBG/JWTYuGFwxBLHctPP2IcxH3gNM/Wm5
W9KxcOFu7UetRu+SL8Il3t8QQ5c2pWZxGnS1FO5tJPXiXq+CHZ0cRquCBI2riiQqHohYt+KZZ1GE
RxZnMlV8IRrgixon6m5VwXNzGH4LmQe+2VzP+rP9o/KWdEbKfOPHcjHtIC4+YgOwv9SI8wybZEr4
Vqc95aIXwrAB/OJP1Ixt27cS2w/aUJWo3gMywY9lEDKUpuKi68YjsXxpAg4cZYIeZzQAGQi7fSh5
lyUpDAgABPolMnHVKh7gnHLW33habduMBsAAVKfORA9os/0oMayAftVZAnIhAHHJQ0459rqcZkAE
Zrk+XsXA4czujIDiXlytC4MzOpK5fmuXaFmJMrkOzBW+X5S3EAAm5dNG2BThzYeMQCCCiTZjKOA1
VPamhy9sf4QmFqA/whajY1PQaYglVsRD9y+7Md4KPl3JRXBc1BcUYyHay+JHSBniE8XmfdZPG2Q+
ZFPoQ+GIg+0KpjccbqL4uqcdhJRkznOJqF8OIi+xXbWGqJwTHFWbnOXDfkIjRbNLcRlw5q5oAMh4
Qm5iZjbnmYmQDblos3DzPMTDRlGOmEAVcNw0hBow95G9zUmhBjIAsIvko3uXvm/anE8EGkQd9QhC
1y5Al4JSeMjvUuX5nmPKgRqJxfcGqo+fzE7xPhIcRA2ASUZwiBKHhk1VS5IHchqa7E5EB1DRBoEN
XEFSMaTjUbwoxvAmW0BC5ak4KY+tG5AVJ4mTzi8DIOMHUo2ToIAIA7ETeYSj7cSx7whbtS12n8Ww
IzsnTcjlke5eYRx2RWIGL5q3K4DG5q1AHHcvOidM8wMVLTAuMXxCjO2QTmDX6EY6RbMKsKOVG1Jz
GtHzRhGAbHehDVouCgE1fAwFtnyUDhUVQlG40W8KF6dIQwG1kWwW5EPioXDXRwSG7aniXjJaTjkV
ov8A3YzOQUhybwsikr5FT+gPtRtcsDCeJlKspHaSvLu1ydUXn2OG7GtM1GFt4Xnaaucxelrnq0h8
gFKFyIkGzUYyaMh4SvLllgVqj3hMqdBOIWkyeWwUARJq/RwlkSalYU2J7nwyMKlarc/UiLmintOx
QnGTv4dSlb5pwBR2Jr3K/wCQ5B4h+igI0kpa6mOB2qMbsoxADscfpQnARaVAU0TrOZwHd1W6eKTJ
rYc7U8j3BTli4cDehCQbUaMhfnETuSwJDsFcvaBG9aGqMwGNMk70Tkkrw+tUiPUmAHqRiDXOi8sg
GKoWHQ+SFGfJMc0RDHEIE8JzBUiQ0GoVIvRqISubabWTg6lggSMMF9aINBkmPRWpwTjEqcjsQjbJ
gJVcKfmEy0FtRUrPKR8BYlC5zVkxgwAmGMX7k+a7cujUe4BOHB39H5pxC1MwOAT7EdoWiVCz9FVU
D1LSLYG/NPEkKkvWsAW2KsSOiJOzo05yUeWMmOA3lGGCly05CGo6ok0BOxSlcnEybhiCCZFGRo5J
9fRizITBYjNaRMykaRBoAha5i9K41dFdIUQYuIlzEnavL4o6fAcYgoG7AXbewFnRnajoDuBsQs3y
IXY4E4FHTWJwIqFX6AvM8qWkYyZ2Cj+Hu6ZRqDgXXn3CLl7MyHcjI4kuW3pgtES637U3tHBSuyIn
M55BG5ZBMpnhGQdGELUjI+O4A4AQixhdj7ftOtHMjhyvRFP8QQnbkCZl1KBgDGQrRz3ITlOQ/NwK
44aZe9GhWiN+ExLC3OqnKXCCXEI0g+4I6xLzMmw71Xl/MtS9qVPUV5c4GGo8Lq1aOMR9fRSi0k02
oNUPVcEmKAkdUk+aAcmWMnTAsMB0R860DDKWK1WHtRPf6gtEB5kRhJhVabkDCRPFKRGKuQuaLsBg
AtfK3fKfGB4oqVvmDpl78BqijzELwsyHtR27wo3bkhe5fK4G+nNAyeBORw9a4UxHqR0xbeqyJ3Fa
rZYZjEepf93y1fftkxPqUZW78o6sISMXULcI67sg8ddULQ5ca4jEFoq3rkPLMmMIinepa8GofTGU
4Ek4sSAe1itdi5pGWoyePYhGlyIFJRgZatxUY37FyF01MRH6arzIXBpwqWIPYnFd/pdM4iUdhDhG
1K2BH80ALx3NQpGTsQnhciJAuLmqTtsIRjPIcOiBlGSjC9YnG6cvCD3lESl5c44xmQE1u5GR2A/K
z1mTm1AnfEKRtny9WIYGPchHl70pRGMSwLfmkqWq1K7aAzMdX0LVct6YuziQJHbHFCUTqB2JtLg5
SqEAYARBcAYOiIcIeoWq1IgZjJNetl9sf705kYNkRX6F98B3FUvR9apdgf8AEFSQPeOkiQBBxBwQ
tSBNsFxFzTsU3ugRI4SZcUT9q0+bG8AKEBaX0xOIC7VXDoZ8EIxqTRSgZHTdHHChB9a1iXDHG3KR
AI3FEWp8JH3cpa2KFyURct+1bOBCJ5Wz5IOLl6oAXNIGQTmZfanuSMonHamwJyKJCoti0jit5wP2
I2gTaMvFEnhUbsQCA1GeJG9GUIC2SPDEARdAtKN+JoRLhI/91LzRpjHw71gsWTApwHG1bexNEEnc
pXr3L+aJAhiNvapX7EfK1EnSN5dCVdYAftzWqEjEnYmjcLK3Zuy1QBDIh3EaDogbY1SegRlabTKk
oSqO5S/GcrbuZkxDSX/a3zZuf9O7g/atUYebDKVs6k0gYnYerE5uh2I89cjwy+7G7avMvVkasuGI
AXFHvWxfCn61PmLuVIpskZHJGMTROTXqN0b1zr4nkuZr/wDplcz/AKtz9o+nf0WkAF6IROMhq6WR
vjxPH6SB8ybl8W9CHaVqPMQI3FysZke9pTWbUp75MAmt8sCdxMvqQ4BaicHgR9aJtmWnbFohGFyV
0E4ky4frWqd+MpbC/wBafmLuk7LdfrTm5cI2UTeQD2kr4NiEd7P9aB0RcZsPnjzL12FuApqlIAKd
jkr8Pw4A0ztgSNRtqjG9PXzFo8ZOJBwKIZySG3KMjUmmnchKMuInA4qBdtp+pHy5mLqcrkzJhQHD
1K3zhiIwiTQ0JBDLVKYh20Ur4L2oAnVt7FLm79vSJzItxNTpGZWu3wTjV40cbE2xVC4u1SM5whCG
B1ivcj8XURjpiT9iIhYnJtrD8qlcjy8YWgzSkJVfYSynM3NPmYRJFB3OpWzqDEiQMs0Phxl2guhZ
5nk5wlgbkDrP0sUI8lzghLHRcox/xMuK350fehj6lpuA25jGMwxRJkGXCCZYAMtFuJjLEkVU9c5i
LVpRADEqETlED6E5Y2x4gSxU7oPmAkNbABKjLlLDQPilIM52AhUNuxy9wVMdJ0gjfxOrlrmL0pRN
RGo1by6ELlvVLVXVU0UY2yQ7CIZm7tihdArGJhI5bla5e+AOEiBFDTamkjEYZVqo6/ZwQ0Wx5ci4
IGe9CUw0Ig6j2hRiam54cgE0ZOZeIDAKoBfNOak5IGcXAPCvPY6m4h9qjdsRkYGQYjAg5FEwGmZq
wzUbl8EirxGKJtT1AZHEDYhcJYe0OxXZTiwgzHah5Y1FuNRuCOkS4oiQoheEvIutURq57F+ImxJH
6J7gtcAWmHiSgZgG49GV0MzRZWo/nBEAsYlgybzXJwDp7xA00DVdOC6IeoDsjIsJYMcwjK0Ndo4x
zig5JuS8NoVkShd58NbFYcvE0G+RzT2ZiMR7JoGXmWwCQSNxZGVxpXiXpku3o88QEmrKK/7SAtwF
Z23zQ5Sw3mTOmhcrVzFyZmzBi0R3K7bMtRBe3L81G3PxBOMEAjSmRRHtHBGRPTQJiHmU2kwl7ye1
cgTsNEZXJQjbGZmFquh4g9xVu5b4dLMDmrkjEabgZk1qGtgxfBkbkrAjI7yypHSMgMEbk5SF/KOR
WgVA61UxlXYE0OEbcSnkXO/o4QT9SjC7hKhbevNBMiMHyXlXImdvENiFKzYhKNuXjkcW2UUhsPTT
o1AVWw5oRamR6TKRcn6FqjMxkPUqziWzTTLncmEjEJy8jvTdGazPRiqmiwRIFUE20srdnmNJkA7O
xcoW7ERGArRXpQbzTPj2s1FdjdA0aC79iboqqLDocgFOnwO0Ksgdi1yrLciYuJZBA3KSO1MU4ruW
0FEZZLDoNKoAYAdGh6IXDIC0JPvUOVlIwsR4pAZgIW/JgYs1QD9KlbhSEgJxGx8unFFnL5ISLgih
dSlqBEjRjVMSG3bkNds+dIY5dyph0tGcgNgJXiIG/FSFu4YwkGIkx+wqPmHVA4SH5ejGi0RxW/NC
MKzOAWqLzvSrIDYvw9w8cz4dijbiBKQrKRyVryQNJiC425q3IAeZIkPuC0yDxOIOaA5QkXXeUJHg
PZsXlzibF73ZnHsOaMpFgMypW+RacxQzP2KRm87kjWZr6lUGNs4a8GQnauATNDEihWBtvkz2z3Ky
eaAtW7LnVjGR+xaokSicCOjFmQiKk4IGRwTzLAbVptBh7yrdnGWROClG6dRlE6ZbVGYiZyzbJabd
kjfJRt3Q+boiI4gKbEJTpLNSldDjJqI6DOJOFaIzkROI9kyNfUiLlsQ3xDqQ8qJkfbHDIepaIS12
8hMD61WHB7pqO4rhJhP3TUJyHjtFVRVTqj9ijMny7goZD+9QuQ5o3DAU1FyE+mEgzEAF1xcuTCeJ
l9ajpvRMyHEYyY+pSgamJanyAebATbAnH1oStRFqQ2Bwe0J+Vv64kvK2wDfo6nRPN8vKVv2bgAB7
2otEwbMiHGsjSe9EW7kZkYiJB9PpuRExskHWg24xGRiAChLzbmuNIkMG+hHRN5O4mZnDeEI3NADe
IRlIFartqYm7BokA+taC9ueLSYLTG5EkZAh/T9vyLwxP5zB+p39ehKpckO8ql6f6xX30/WmldkR2
pySe3qYpqJgjK5EvEanbAbUZku2TsU1iGkbZVKJkXJ6wIoRgha5sVFBMfavNsyE7eLgjpomjMsMj
UJrw0y2gIwsahM+1guORl2l+pQkIs1c2TGm2QoV5crzxNAJJ5imIKeMQYnhrgiDtNOgyOLEDtRO3
okLo4pR4JbCpzBoSWTijIkZr4V2URsenqWnn+XhejnIBpL4PNS5S8fYuMz9/5VrtXrd+3iDE1TEM
QiJxeZwKiwxKD7Fylos0uEnsTZLjKaETJPO1KI2sVCIu+XEkaio2uW+JJgHC1SxzWoGhROfVp0An
Bc1H/wDacx/tlcz/AKs/2j6c+iEhiCo8xHGAqN2KEo0koiYcTwKiBhIOF2mA9ZHyvBPKQAG0p5Xo
AD84JjfEzshVODOW4R/KvhcuSPzix+hNy/LgfnF5J7eo/m6M+5Cdy0ZRIcQ8I7w6MTrsxkakzp6g
UbQ5qMokuYkYnecU/M3jKWyFB9KfXcI91wtP4eJyciq1WbEIS2tVVAPaE2HZ89OaDaUJXrgiC5GZ
LdijK3LzzIEgQozbXQ//AIXykpyiHuGYcDYzFRu8zCPL2oY2DKVrzNru6lajpIJDRhPzAHy1Lkzb
cyvQPmv74EcPWrcpUsz4LmxirshlFwUbml7pH/pkJMxegKAIciRYbk6mZYOB3OgYeFqNgyNu4KHM
YocobgjylnVK+CchgNlSvLtXoNEs0OKp/RdXeS5OExelwzuyDaQR7Pchbtm5PeJEn1KUrd2QgG0m
4TGR7HR87mp2ifZ1GfrwRAvgyOZi32lNCVu6Nkg/1po8pbMf/tgD6itPPWDb2gkH6CyI5cSu3SGh
ERYA7yV+MnHULjyJ1B/U68PaStWW1AuZD6u1CNrmZwtnKMj9SlzXM3ntz+7kayOn6gpcuJic44sM
ChdFjXbynEJp8MswaI6ZAk4NVCcYPMYyyQlEtGWWLDeib1wkdpCII16sSfsQt6Im2zaCKKNi0Bat
yLu2p5bKoXZyFqYHC5x3IDmbTXh7QJGr1Iys2WkKajJ010jcBgEY1hdtH4c9q8rmnBw8wLzIXNT4
EJjUrSQ42EOE0YDsy9QQuFvMjgMELXMh29pA2iJDLNEGhXDgcU4xVKJyXJzRtyzwKebeYKOMx0EH
Aq5G1B9BYUqyjG/ESnHAHFloNpgMJdiaJ1aaMaAIW5u0csQyhzNzwSwBV0jsIVrSHlqDIyI0k5FY
nVsRhHxDavLExEyNdq13LjNjiXQtxvRA2mi8nkDUHi5qVI9kRmpSnE3Lhqb4qfUtEL7HZIEL493h
ziKP60bPIiMYxo+H1r/uLeqPvBCUcD0N9Cnd5WkZgiUe1WualaJsxkTKZwZahINtdTmOIRDBquUN
MDbnixW/MJ8k5NUSS/SGCZtUpLSSxNQM1xOfoTxMot2FNcMSfzgfsXw/Lbdj9K1zLlaY47AtcZxk
JeKANWVu3YiYGNZvt67yLDeiIDUdqrJhsC3rfuVBpG0p5cR3pmonAwUYQDyapVsGmqQjJC3CIEQG
Zse1C7y4EPOiSQKcQUScWqmWgxZjTo4iwGKJB4cgerj0S5iLGMcQ9VxS0nYVisR61QgqnWJ+hE7F
C1HxH7Vo0SM3oaoRtn4oAB3r8XYveTdmOKE6iQ3hkeV5siDYxgGEh29aqyVGWqZWuOG9O1QvsTkd
DSVCzYKpxz6KJiKqUSGY0T5BG5IOZYIXbZeJx7UOcgAZwy2gp5C5G77ml69uClzMhpBYQjsiOqze
paBFnUoRzr2Kdq6T5kfDEqUfKLbME/ln1rjgRvxQ04lbZbSjHXoJzUuVukXAJPGeJVFpj3rfmtMQ
8zgEI+2WMlGA8UgPqWgy1CReUtm5C0bEdIzbi7XVzluXukW4mgIcVrmvN5iZnPafsC1YRGA29Bjc
GqOwo2zKY5R2iJflQt2gRqoASyfmGnMVERgO9fYgZs+TrRIAQ9brVaPlyOWS1W9Qjm1YntCA5mBj
L3o1HqWq1MSjtiVT1lGUi70iENZc7E0D2rbuQ4Q+IAxREYgyG0VQ1gxj9CECKnNGGgxA2nFbAuKS
OnFUcriNNiDcJGaaBEorTcjUd6+HEieRFELcSYyOAlgU9/lxMZytlpepExM7YHvBz9CMIAyOwBaZ
UIoQQzIxEi0qkZIWxWUqB9qY2xqGIlUFC/zfLQnyk5hokMQ/uqRsSFm4YsP/AGk4XmcpzTk+yaA/
TILXdsG9ADi8IPdo/IgL8Da1ZjiA7c0BC9FzgDwv2amVK+naQcHEGqMTZgHzEQ6Erd6UDDwERAPe
c07SvF6gmOghGN6wbYjSR1AkdyDX4cWAcIEVBwI+QabgEo7CmFqPaQ5+lMLmmLuGiHHeuG5O5AYE
EA94KAv2QRlcenfpdab7RzEonUEwmz0BIIHr9C/pH652qqr3J8k4wKlur6R+ozuqJrlWDgJogFg2
mWCETaLj3S4R4JAbAE3oI2ZYE0KiIWxC6MZQcgp7kC2RRlLxZA7FVq4DoqqeglZugy904qEuUuEw
lWUDjGWaEtoB6Z25UkA8D9iZCQyLqYiXBDjtOKlHYUNvUYTLbHKrVBRctvQ4NQGahCRlGzEuBsKP
MEyMcHyoqOQuGLS9a0TjCdvMELzHFm7mI0+hE8tcjfhnA4qUOYtStXG7keMG5EmmaJdU6odMAua/
lOY/2yuZ/wBWf7R6rykAN6bWF437EwmB2p4kSG5cAAO9fEhqG5MDplsKr6NnZdmaNqRocAjaIaJ8
PYgMWwVqjGJMfqUYSweJ9SkI4AqREuICgVcfkDkgbymjOJO4prt0A7BU/Qn84RbaGRFmJuNm7P2I
iPLzMhsqFGdiYjcJ4oygREDtK+Hb12xiYBn71E8vYu2AzEazJztLshK9dDnEGRLL4vMREM9IcppX
pGOZADqsJXD+cV9xGTe9VabdqEY7AAqADu+f3vXIwfBytFu4L02MtNsgsBtLsoRtWbkdYJhK5EtI
jICNSrYs8uIxnEk6CYkfpSkKK3HmuY0RiDr1CMqnZGOPeh+InK9pjpAcxFdtV8K0HZnlxFtlVphE
QGwBkYTiJRORDj1J/KJtCWqF4DVFhUOIuQrPNRI86xIeZB6jVwn6UC7OoXcXtAE740KBBcDMIxum
LCT1omNyIOJEa/UhpE7k4gagBpAfbqRtWOXe5KorqH0BHlgIkzb4t0NG0DsLsVC1D+o6ZnV590T1
mT4CAyQlfvHy4HXOZETOXbq1D6FH4gMBxMbknJ2kOrFvl4C1YjEiDBjKuJVy6ASbcWp+dRVBBT3b
kYNtIdECRuEe7h6ymsWQN8nP1ITu3ZWLM/DKLRB/VqozvE6ZeK5ImRiDmXU4WIS5iMjHQdJjORNe
HdvRt3Y+TbxlOZcDsZSmb0JwiS5wjTaSrc7vJXbXKyJM+ZhEnWMtGqgCP4LnQSf3d4aD68Crdu/D
TZMuO6CDHTnVQt2787dvPlwTONcy+CuG1GN6V2bAQDmRbJH+n2rPlRD6pzHDB94xRt3Iyuyi2og4
vVHmLbAvp0HcvJvw0tswTwIkFWqeJ7k0kGLdiIuPNxQk1itF6I1gcN2ND3hRuxBnaxEhj3qF64xt
yJYAYd6jGxbDFtUjsQjKAkZFwMwMl5lmRAl7KEbw09tQnBAO16IiWAwbNabd2Os+yTVNdD7JYLVy
8y2wH7Folbj5m00Wm/Fg/iCE4ESGRHUdMnU7luLTniegyB+JmAtcY8DNM70JwbS2S8m5c1RHs0cK
XLW7vx5RHAAT6zgFb0h9J1epRuAtrodxWqPE30owkDCYwKJEtRC0xBuy9q0HKN7mTEG2eHlgGHbL
atA06R4YgMyJGCJgNF3LevK5gEHASR0SaW0UTXj5sMNMtiHM2K2ZeOPulCJJJOwLVEuEYyDg4haw
55a4ajYhZ5ZjGhftVqV4CdycRKRIepChzcYCNyJYsGBRuW/CTULUc8kQMOmo0xzKEIDvVPEVbObs
n6lCVXr8Uu4JoBhtOKeZMu1YprUDPsC+KdH5qoK71gmHRIbqKTS0yidijGEuIF33p+YtmUojEEB1
5xABjSMNkVwlggR4hinOPRh6umoWDJnc7AuG3IhMfhjfQqVoCU4EvQ5rVKJ1fSExuHSKME51eta+
XuO2MTitNyhWpGhDLTpPb0gInaUbs8I4BStadMgHG9Wzew1RJiNgNVGdsvCQBBGxWYhjciDq3A4J
zQIgZIsXZcRTW4sNpR1XWXBzDHZJwEIXJCD4ENVStcxc1gBwMXPcjhG0TwviAvGPWnDHf0MQt6xW
HqWFU4oU8sdqoUYjxSoFAgiE8ao24yeZLkjeocxzY8+dziAnWIHYrvNcrajZv2YmfANIkBUggLbH
YqFP0N0VWJ7E5JB3LxSKq/rWkTMZHB6hG3IVGe3pNVpie3ofE5BESrd3qVyFsTuxwk1VIxliOKUv
ZUb1oCekvKYy7VqNkC41GlQlTlOJE5FyWwWqZ1H6AqlymHQI3oicRVijDyYiAyZGfKXaf9O5WPcc
VK3etHz44CJeJfehf5iQFvGEYl/q6AybF1WOmR9qK18vI0qDChWi/EXGpXhn+RSL6JgEiM6F/qRk
YlhinbFCIxXwzERkGJkHZGYJlPIBCzy8NH6PFI+tWxz8zF/DJwCO1DSdW93dND6EZEsq1TlcAMlQ
sRiI8R+heGUj+dwj6ExgDc2NT6UZam3RwC0wnrh7NeIeteTzdqWoUE8Kd6ErtyIzMZM6lPlSRHe3
0LTCsjtKF6/eEXHhbUoczb5iA8ziETbb6XQszlbu27g4dArTtCucnztsaLB0w1RYsja5ORtyGw8K
Z5QjtDzgvi29Y9+1X1haQYXAcYSAf1FMIGByMSadgKB5XmSCMAXD9tSF8S1G7EYyYGn+Av8AQmv2
pQLsdNfoLFDTeiCaNLhP0qlfTvOEZEZkAoyh8EkMdADFRHLX7krYxhqAPc6fmOW8yL0nqDt+dpWm
6fJnvLx9YXwbkZ9hf5FpMQY7GCL6tJrpEqIGzJ4imm4T9BR8uULsdlSR2OyBu2jO3nLQY6UTpmIj
EsKfSnhcjXaWP0pxXeK/IalhvTeZB/0gjIkadqaI1b1Q6exVLqZGLJyTIbCUROBrsRjMkA4UUoCd
JBnKIx3+hcdDHrE5t0EjErhDkhuxHoqEQEYyDnIqgVQ3QJwLSCc0kA0gNyFm2dd53O5aeZtucpwo
e9GcS49fUqqeg3xPWGmRCEtufWogoAlhmoSgdUDi6IhEGEslPlblsG68mcDM4qTDPoI+lC4JIXAX
OYKJlCJGYKlbuTNkHDYntSFyOTFNOJHaq9GKxwQjs6Oa/lOY/wBsrmf9Wf7R6hlsCeRJGQVQq0Kx
QMZEDchhL61QnS+dU1zgmMJBeTcOoEsCtUTjgqjp4qHanGHS+AWoB2QCIhjghGdDEqN6BaUKug5o
KFR04GqBGRB9SMiMSSjLAn0LmgWk3ovsdGcrkWGw1TCMzvZE23lP3SGXgt6e1aeVtGMs5RGr1JpT
uiWwxcLVcuMMonhWnQBDOYkyeVyQuHPFa714XI+7py71o8kDeKFPat12yL/WuKII3gJohhu/sEIm
URI4AkOpCc6QbWQCRF9rKZsgXI22cylo1PlAHFfCsTt64vYjGIuSkfz9gUdYDyixjIStQtnbmSVG
XMX5RIiY3JieuUhsjqFArZuR84WwRGMgAK+82KBtWowMQwIGA2DrVp2p79+ENxNfUpXbUieYk/lm
MJATkNrgArjgS2bL8LYjHyQ+lo6pFy+1GcrptgA8Akx+hf8AcT1yicY79r4quq5mYzNH7Ay8wWYC
bniIc/SrYpEGVWUhIUBZjsBLIxthhpcsNpUXk0ZAag9ACM007sXzEeL6lG5aeVu2GjtOeCuDlbfF
NnMxgyP4nnI2WxiD9kU1yd6/I+EwDP8ArOUOVtcqLl44u85R7l5E5Dl+V90gaz26UBdnO9MBtRNA
NkRVQhCxEiPhcOVpMzKfuQx7yjy9jVO5gbVnJ8PMuGgQ5j+o83ZELTSjy9o6hq3g+IprVzzZZ27l
sAAb2ZGR5G1EQLTvWRplI/m4/aomzzY8yL6rEn4K7FZHO29VqOM3PFLLUHQuaIiQFJMHA7UAKiWf
YpeZZiTLGXteteTbFIk17VCPLgaQOKRGex1clde1dhJhIYFfGGu3kQmdjsPQ4p2KtQqrVE12ZFAE
C27mRFQ/YvgXaZMNLomcBcBxf8oRa0wiwMcTXeozvkWwXAjMs4ZPZuaTuLprdzVHYVbuNxyLnNij
b5gaTmcQtRvRbYEw4w9C2C/FxvRjKVY22cknJlIA6sy1Qmuw9S4JB9nS4CbpkdTk5bFMzi8ScAKr
HTFmDlipTidVwFmiUbkgz4Dcib7ASgRFyM1KzbOkSLvjVStX70AXdzID61KFi1PmLowlbBEX3yNE
ZG5GwD+6tl5Htkox8qNuYpI5y3krzrUPLuZyBNULwkTE5gv61CcBQ+J9qJOIwIRjMNMYSzRszpOO
BOYXFIArhIuQnSUH2oCMBKMgSIog2fLjtd36DbmHBTXbhtQnhIB1CzeeQtxERMZgLy+XjqAq2aPn
3Igyr5YqwTxoO1MegPUphQIEIvko7vREk02po8R3I10g5BYplQaQcyhreR3rgi0QEx7UScBgq0CJ
aqxx6LhiPEKdqjDUZajUIfiH8q3iBmVKfLw8u9CPCQSxbajE4ih7kC7KpfqUTlGUzpguAMNuaeUz
FVnqB9meCPA0YeIP9KYmmwIAx7wtcC8ChKJqviRYyw7UCdZiMsk1uDbSUPMLk5IdiJRKfYHVzUdJ
J4Zb0b12YlLCICN66Ht2gH3k4I2+VuSsyA4WJZ1LzSTMEiRNS4TbMlouChwWqxcEQ1XWqU9W0oxt
hoA03rzb50xOSjaFskHE5stI44BzF8QVqETqNQwDhajHVAZyJJTXIUOcSaITtyM7MvoTnqN0EihX
EACc1qNAjzLUBaKuQvHVGIcSOS8qR1P6l+F5l4whS1cAcNsLK7yXIvOV6JhK6xAjE44p4nXbzOYT
xNVplQrHrsonagxelVinWmOGZTBEnJG7dLk+GKN6UtciToiN61Qty82WNMAp2LxFq4S4MqOrolOM
rk4mMIOCSSnBlqyZfFuGUj7L4JmYDDpdOSjY5c8ftSGSHL65ESwAp61qvATuSqXWqxKVie2Hh7wn
vQ86A9u34m/RTW5jUMYSpL1FVWgSGobENUw6LWtctuCJlI2/zSNSnOxzYeP7sg1TThXaERCJjPMn
EoQnLgGEUCYGNs+0UJcvIwvwrrK+JIzIwbAJpA6fWqg9rUTROonII3px0QG2pU7nMCo8AOZR1SIj
lEUCaJY7arQXJGRxWi9YeWRw9a1Wo27cZVfEri5pt0YhG1cvSunFjtRvDiiCxGYRkTRfBLHaA6Eb
s4ytmhb2QrGm5G5ERDSioecABbwJ3rECDbkSMCU2S1Qe1c96Bb6FquWxfiP3lt43O0prN8XG/dXu
GXYhHmIGxI5msfWtVqYmNxRFyImDiCAUeAwEqkRJAfsR/DcwRH2dRIMfUhCbXQBwyjDUJbjsUYXu
WnG6zyFAO0OhPzYxf2ZFpDuWq3ITicwX9NuRE7MC+ekAoSs3pWzHw0w74sVwSPMRGDkEN2HiWnmu
XNtq6hT1CSBF0RJwE+H608SCDmKj5FiiZ2oknEsyEo6oEYB9UfUVI8veDyqxeOn1OEOE3SPFgQ25
qrRcsaSA7l4/RJOZG23vD7U0LkZHZ/79FfRGZ7lpgJXpnCIpEIyly+kbjE/Q6zD4hDYvLIJ2kZIT
Z9WCmwIIxHpqFk2pcTEdi92SJPRgsKqJejsasgYmNyJ7TIDuZPdiZBneET9IKnKzaumMAdU9NBSr
lBquOjRMdhUpDE4KuKaXcgI1WjGee7ouGMdXmR0iuBRnMvKRc9DxLLioVT0Zicx12OMes6BQzUIy
MoAjxZJgI3YxwfNObflH3RRGLExOIVIq9a5uDi54Sz1UpWbcvLdwRgmnEiQyKI+hAnMoTtSIotPM
QFwbVwSNqZyOC12fiR/NxWm4DE7+hwq4rmv5TmP9srmf9Wf7R6khuRYVVVVVVChK3MDc68u82pvE
CpQIMoxNJBCQoQremenStF0PvVICY2YJrsTbPrT25iW7NB8F5lqu0LWA7YpjgjGRpsVyQDDUWWke
9VRuQNSKheQNlSpDJ0I+7QIQ9osEAhEyHZmncAJpXYvsxWrzYtjimt2zMbUxty15BnWEbccgRl3r
y7MYyJGIxT3fMP6JcfQtJuThE+9QIGE4TOb0T8xcA3Q/KVwznE7XdOYebLbOqD2IU3LTaiIDZEMn
/sK5oBiURPmLcSKkGQQmbdyUJAmMotVu9CVm1HWcbM4yEojIk4IRsRkeYlxa7Uh5cYnJpU9aNq7e
aE2lduXYgz17IshG9I+WwEoQkRrIzmoy0azANATaQiNwT24RgTmAB1dV67C2NspAINe1kvpEIyOp
sWopXbfLzNoHSJSMY6juFVM2OVAtxHDMRnc1nIR8KhPzJcvibkZaYR3CDCUvWpf/AMQ5kT1F9QMp
SA2R1MApSN24dQaQGmII2Ui6YcvEkRIjqJlp7HKlCUvCSG7FzB5a75V23ESAxEnoaFSjzVjXEODc
t4/qlSEZgE4CXCX70ddyMdlU2smTPpAx9bKHk2SZAOHOD7WQjGMIERaVzTXseToy80xBx0HST6mW
oSNyUjxanp2lCVuwWrqlKrncpwkfLlAsYxb60DeuzuXZ4wgNUgxo2xAcvyQiMfM5k5741Q/FXzIf
9O0PLg2ylSmsW42xmwqe09Hl3L8IzGMSahG3yVudyROnzMAexqoWtJjK47xrCMN8yA6PKi/5HKN/
3EoEPcO7YETzd2N8tphAA6Y7+B3KvT5eVu/5jEuPiADLiqn0C1cGE4hiPUjpOi9GsZxpqRldDX7Y
4xlIbUYSfQcgSFC5ZnKVqLvakduxarhkDsESVrtzEszEYjuRjKIIORCNu3ECIOHaoi1dFmIqQXcl
X7HPA6YNoltfMLzbdzVazdcVN4qE4IIyW9ULjemNFRYp5RD7cD9CfUYy2u/1rguA7sFwiTD3S6MZ
xJjslF/qTTsv3kISuQAtZjEoSsEAd3/1FaLJMh+kAP8Al/KhAxjqNS2P0oxuR4thoVrsS7Bn61ov
DWBkcU8C0vdPQ4xRldkIwj4iSKJpXHcPwAz+pTvWLJgxafmMPoFVKMJS0yDaYCjneoG8fLg1DIuV
DXcM5AvcODoRhb8uD1mHJ+lREIeY/incLn+5cPKa7eREp/YVG5+FjCRqRIaiP1lwADcKJgahOcU2
RxWkimDFaRECOwIsSxyRY1zUb0fFGhO5SvXeZjGcRWGbrVZlxD2ZMPpTx1fiIgiUCrUOVBmbofTk
jZNsRuRpLOvchG6KFaD4/ZO9HlCCJgsJjYrlmB1X7p+8lVhuRuXTrlKpJTnDILVhHNaYJjj0uKp8
8+pj1OKQCItjvKeZfd0NEGR3J5lhsC4Y12nFVThMzkogZoEx0iQeL5qqcBYJlC7iHYgoTEYxnOrg
VUoyk0J1EsnU58v8eek6YW+Ik9yn50TCZLmJDM/RXowcomIaW9G3OJMwaMhK7R8AvLHhhknI4YVK
b2ckdqlZMnMhVlK5OZjcgOEHAokVOLLTOLDAgrQY0d4yCphghOMixwrQqoYbXURo1QYN2oOGPQ20
qW0hkLYJhHEb0Ll0nVVnzAV03IGXL3aMA5iYnFEmWq41LbEEqU5UMiSe0qRg/aNq0kuRtC4sCjbm
W1UB7U5Or6kJA/CjQdqu35DHhiVbnauAyljEbSvLL+bIMGwJUrZrBuJ8FciMHp2FC1LE0A3LTdcD
IrhDwbFaYlz1HI4R0NMtbj4jtUrcYiVuNJRkFKxy0RaEsdKje5iAu3ZZkYB1PmeWgLU7YeQjQEdi
LHTJEEY4oytY7FpmGKrUJx0MOoFEgMGZOqphSOZTBOcFO/EERj4VMXSWJ4SVCVwPcJeICtm0ATIP
KW1Wb4DScxLZhkCRU4ApoiuZT4k9XyrXjnicwEdRIgKymUZeOZwJy6HKonvgasiKS9YUrXJ3DO2A
4FwjX3FCF4+VcOUqP34L3kJQlqjsAZNCDbTuRN7SZbv7lchCERCZxaqEICpzTXfiXNxYDsTa7mjB
nTWpSlHZLFGdyOmLZomIcDE5BTuRusRkBRRMgIRfxRWmYjdizCjetGN2IFzKIGHrWsaQMzI6voXF
OUiMgGC0eWANufrRjch51n3mecU9i5pgcDjHskMkfNtuwpOGBU7k3EpF0WzxG1Ng+SIvgtLAgLgj
G5cl7ILHvDo3uT5iVuMR90w0kDLZ9CN7mLHmwwM40Zuxabk/Jl7szTuOC1W5RmNsSD9XU+LAE5SF
JesIjl7nnW87V4uG3Li18jeOYkTA9qcxhzVr37Z4u9adXlzzhPhKpUHMdHxbcZthqDqMojypxwlA
Aetf9pfmCS8oGWmJ/VwUp6Y3rfuSkZyiv+6tEQOE4RLDcRJAazbMvDrGkHsK4ZCR3EH0zXIiY2SD
p4Q8qQwlAfYaInleant0eEd1WR8zl5XrY9otq/5UBfgbGrAkgjv2LTG7CR2CQPUbqv6NpgSGw1RE
rUQ+cRpP0IHzpgxDRwopGN2V4NwET0kHsK+NYEx7+LdulCN+IaQfVbLt2uqTO9waJ4SEhuL9NOpG
UcHqrkfbxB3InbRQPl6oSzfNAC2R3rzCDqxZeWQCB4e3Yp2dAiTQ71KBGBb5DpvVjtzXw5CuSoqL
UFDiBIGGdF2qUI8NufjiCRE9owWq2NAFO1UGoZEJzEhkLc40NNSfS4yaqeGIWmIAJoSQjKZeRqqC
nW0yw2pwsPQg7F216xiTiER1goxjUk4BDhLJ+XDTFTqLBWo34G3z8A1zEEyHrdG3KgLgI6U159Ns
A6dtUIRiBBmpkiLkRKJ8M45dqNy1hsCEig9WVB0PauEDYcFpvWxqykAnyQGzo5r+U5j/AGyuZ/1Z
/tHqMc0SMCsOlhJmzK8vVqAzCGmZB3FGxzA1vgSpFhES4ogIGRAiMk4ckIDSzZhNION6JtPCeRqh
8WWkKNvmDqtSIGohsVXwzH1p7VYtUBMaKYBxqjKUm1VLomV2OwAlTlO6AzgKRgJTJJPDVGQhIzGS
ibbW4QylmVK1ZsiU28cUZX43CTXhcpjOdm0cdRyXwJxn+nRa+aMS2MYpoarTe6VqjqnLbIprluMh
sIXwrcYfohvn50YyuaiMREOqa/Um1mPaE8JCQ3dfYviTETkMT6ghC3c8yZLMxiB2mTI+VYq+nVqE
h/hAqVONuZESdMSINJ9mNFJrM4RI0ka5CMvznJC+PeEbQDQBaUh+qwRldBuEtwjhjTcEZQtDWaGR
4jTeVQNuHSdVyMWxeQCOu8JacdAM274rRCFwzIcCQ0Bt7rXb5WJMi0LTylM76BG5cjOzbb4du1pi
X36nKief5qgqbbmQO4iOkKcr92lxnhaiIBhk5coSlbM9IaMZSJjEbAFExsW46fC0RTs6jzkIjbIt
9aJnfiWDkR4yB/hdcxKyXtzmZQJDUlXNa7ZDziYmMg4Oe0KUr12NszLFiRANSiY3IyhkXBJ7k1qM
ozOBfFARtyrgWw2VK0XrkYBwJF6/QvPtGz5TA27eqZi+2VKo3Lk+X866eIlyS1BpoFOU7kbUbmBA
1u2wPRE3ebunSMIND6gpyPE8ixNXqrUYxETpcsGxQF+5G2ZYaiyaB/Ee8bZBAQHKWTWgiY6ie8FR
lKcbeqpIkWgOxeZO7K4MZAs8jvKlzAsxBdosGADstW2q0lzbAeURTVuQ0NZh7saFfHOuEQ7y8Q7J
CqdyD7EvaI3qNuRjqiQARIYZuo3jFmOP1jvUZiM/Lnm1B60COKMg8SqhxmCF5lqBF+L+Hw12rTZ5
d7uyUmCgLxNu5KTESFHO9Ooj2mLqFq3SA4jVnV6PMx+GAwhKpfcjftTFuIyKYgTGZimiXPQ8SmkG
3qhQcrFOTVb00og9yMGEbcqyAGJVAYbGKlKxdenhIxXk34mFx+GW1C1FhMeHevKvQ1jDYXRleIgA
KuahA2Zm4DnGhj2ujahaBLcE50beVC5zPNw5cRrIQ8THaA7qUbEbvPm4Hq8QDtODq0ZC3ywhJxTj
7TipSvTlenKT8SNm2BCccAKd6Nq8ddlqSx6PCD2hGQi0jk9PUpS5aJNs4htQCNq/b0zHtDBaonBO
M8eja+SqMVTFPLE5LhPEFOJxGa1SJ1BEjbRDmLUBOceGcT7QTizG2JDQNNdL96kCHLVkcX2rVOZ1
AuZSNAEYwe9IZxwQPlCAHeVr2FR0hzLAb0L8ouBjEYhYKgAG1PmmOPS3VxTROqWwKh0jYFWpPQNM
WG0rVfJkdiEYxAAwITFmTRqR9C0VB3pgqqlAomRlLTQOqhgt3QwopRNWDhC5ZGsDhIJZkITLzPi2
BQjACoBJ2lW+YYC4JaXzIPUIJYnNarMwY9q1SAlPbELWTULWRKN00LVBRgCwIZ0BIgRjgRmnGyqA
mNR9khaAWi7kKlU5PEU0w4TgkDYVprOMTg680RnbfGJrFQnCokHBBptUTtR+pdlVCJzLlQs3BGc4
sIuM1RoxApkFau2Wl5jylIbXUrkgBO2xic6laYkACpJU2EdQLA07yESQNWBIDYppHvdNEa5jIV+l
axNpD2RgiKRGJEQ1VO3gYFydoUTGOoM0nxPYh50tEHOkE4BC1C5ExlmK/UpajpkY0OSd3OZ3J7bS
OcCtOjREepa5CoOSbo0xxOKpghCHilQICEwLwqxwLq9cmRO7IUhGoDL8S4Fo4CVB3KPLf1EGBBOi
4KxIxyUuW5Z5G4GlNmAHeuHEICrrctmyQWofEhmRkniappUl0VZU6hJoqeHbtQAw6K+EH1qM4gMa
MoGQa1LwgUqUBd1TuEcRfBGxGYuWJcVsSGSs3rwBcl4jwgbVvPQU6ZMVRG/fPmXDgMgEwpuCZOcU
8iIgIw5cH9Jar0tUzhHNCMYaCMCDVab2mUhURK/7YExzhIuO4oW9HkSyEqv2EqUdQlMioo1exbSV
qulvzRiibcWfMqtU5oF8GPDnI4IcvIeYQeI5I2ZvCJFNAqnEmiasfEe4LXCJkSXGrwgb1GV2YAJA
Moig7RRC1zHK25wAbz7cOLtU48hfFyGQGIWjmoGAnUHL608SCDmOgyHBM5jPtC8iNwNjwrTzFsS/
OjQp7MwfzcCoxuDTXNNCAF4eCWH1Lz7c4mds8cATG5E9y8qVwSmSPhXISjPuKuRuwlYt3RqaQ4dW
4onyoHV7cQH9YRnyvNztyakWAHezJrtg8xEYyo/dpWm8DZnsNR9Ce1ONwfmkHpMZDUDiCjPlbs7U
9j8J7iEY8/ywnDK9AgkduC1f07mz/o3CD3VwTc5YMB/1IHVFPauRluevq6aIxkBIHEGoTG02wxJD
dlUDY5g22Lxlp4vWCiNUrsgeEjToI3vVeXe5cQmA8nkz/ooTNzST7LOR6lrtSE47R6YmVmEicXiF
LyZC0JVbSCR2GiGi/K9bAyiDLvTc1y1wHKUYsD61pMvKn7txolOCCN1fkpEogg4ggFSEQbevxCBY
HuX/AG90xIrxflCBtHgzjGWqvZNEXrIGnaCCfVRCV2BgDShEvoxTG5pJykDFPCQkNxdMSATk6uRv
SEYkeLYozgX0nLMIxMNFsYPivKuh4kuO3cvN5eWqQroOKcghqEFRm72547kJjHEHaEJgcMx9PpHV
A6pFVgVSm4qgcJ5dDutMQ7oEXJxk2SOi75sfZc179S1XbDgY6Xcb80BceBP+If8AKqXIyGxwsAxX
wdPfw/Yo3dILlmBf1p7kfKujHIFVi4yliPWsqLTAJz1mOCpXocLDrxlsoesCnyNetFfjfM1XoFjb
yCqA4wXmWToD1Cjd5iAnCVJNlvUL9kaYyY+tA7VG/bpF2l2JrhYHM4Im1LHLELQaIvgjHZ04dBPT
zX8pzH+2VzP+rP8AaKfqMVwh9ieQ6ASSBmy82yXkMXRgTWJYoXPMiCMio3JXYao0YF0xmD3J4zDr
WA8dtEeMDtKxiRmXR4hPbEICtuTg1QsRBvSFInLch8OEtXhiKlDmLdiVuJqzY+tNZ8+N2WI06R9K
EudumNHAMn+pSs0Jesjio+ZMmcjlhVfeyD4gLyOXtic/amal15t+zGVyVah01uIiNkQ39hjy9osT
4pbliqSXvBCVskEYhAE/EGI6DIziAMS4ohGcpAnw8Eq9lEDZsiYchpS0S7SCERrjZk/CIGMx/idS
pLmpGOBjw/4dFEJRteSW8eoiT95NFEc1fEhH2BEGJ7XQGg3CC4MjmdwZNatxh2ADoechEbSWREuZ
tuMQC/1LybVu5dubG0hu2TI/g+W1zBbQSZSL7BBC2bchD25WreljsErhRFzmyJS/PIYbGgELnMSG
uIAAtih3y1O60kSuQd9Ey8SdpAWqHL2xL3jEE+sqlOponftxn7pkAU+uUrYLG5CJlB+0Y9ylHleX
N4xIAjImE5avdjpKIhyw5bDQ7TG/VJSPNc3C1bYMbcjER2mgD+taua/qEuYMqaBISlJtjapepA2L
JndPC0tYiB+dqohzJ5f8LbuR0xiMJaKEigQALklkPL4htotdxn2KkQJA4gKIqVcuHwgkNWp7uxHy
bjW8dMjqYbqKfM/e3YB3mclG9dMbb0EXzfertsPOcoltOAcZoHe7oGyJ6QOEWyQB2ksFr5y/U+Ie
M/TggfK80jOZf6MEIGIttgYhvoTnihL2k4qFIEfClhL3ShC+DKGUxVGcoHRItCWf6oXwJzcVkJBv
tKa4TU8Tblcu2LkhIYx3KJMRK5MGQlOoACuC7KM7Mw2kZnsyQ03pQjGotyLx9RQhGdudu2GBiXbt
Z1K3btm4Y1mQaDvQvWJAmYeUJYgoQnAAwPs0JU46DKJYxjs71rs8xKMAdMYGIkW/SIQHNWyZTIe6
C4I3pwKZKPvMXUbfMylGIOpo0rvRuiWuzKLRB2qfMg+SYBzLJeXGXm7BmtJ4Ze6VgniWJXGKbQsV
2qpWPQKKUp3ogQ8VQ4QFuMr92R4dAIII7WR8rlzZEKxlceJDdtEObvXBK5bm3lRiIlsHJC8zmZHX
DG2eJydrKNjkeT1RnWM5hnO2qMr9yNmb0gPa9SnavWpTmabSGURy4M7cwwiMkRMGMwWMSgTTeFC9
yTicAx/OQncjonhIb+jd0MCxRlKESfeFCfUhGMdJzWFNqogq5JwhjTbijc5aZgcJgllKyCZ3D4pH
an+pMFIDAh0CyuSunKg3ojURbyiKDoAUpQiZCOJZQuXoAcoIkPRgdqeU4mJFahSu2YfCkcq/UuGs
tm5OO9PnkmND0VKqeh5HuR0Bt5XESfo6GiDLcE8uHcE7Od9VQMVoGJDo7TgFUozZjPLYhqAKYRAW
HeVWp6zK6AHeoCFudRLIIWTEShAeLYvO1g2rQcWgPp6jZnBOMN6kZAE7kAKk4gDBOaDfinJYDNUD
gJiGCBjgM0+xNgdi4hxDNYJojDMoticSsVKETpbjD4b0YE6os4W8qUsjRG1EswxUTcAjbhJzNxUD
YrdmGJkAQM8mQswnTHQRQEo8reaMI8Q04EbVwmrNLvTgOVwx07a7Eas6lESYnE7l8OOrfitUoOix
Yk1GxO7pzRCUSxGBUbls/EiKxzTG2ZE7EBZaMiPBmvjO5OlgVSgRVMdi1SxKMivxIDaiwOxXpX5H
RDCZRiHLbiyteU2gxHrXE2rUNG1+lxQrTLBATPDkyIZ3xCNyyG3JpBimJonB6lTRajSGW9Uw6Bbh
4pKMNTSjXVtQ5a1EztwPFLMlDmrtogCsIbB2IyvmVsxFaO/qXmQBjagNMAce1G9KuJZG4GbIZob+
jsT5p+jeU5TyIAWvM+EbVrulo5BaIREYZTxdNAkzOaBvTOuXshNAcXvJgAGzRhajG5PJ8uxEzGmQ
yTXDpltTioOBVaIWwRAywJ/IibR8y5mcGUjN3NTV0TasCUBsDH1qA5m15YHhBq/eqwMpkVk/1K5Y
vTEQfCSVcaYm4oIl0OXt3ZCBoCQC3rRlEmN531g5qMOati9AYSGIXmctd8qR9mlD2I+dHzIj2ogD
7VKbtpGBUrhzNOzoZ27EDcOonCWYWnl5SuQjkR9qPMQuSscyMQTwz3FooDnbNyMmHiiS/wCjII37
EWswpbtcy8gewHBMRLkpnMcdsoG5bF+3/wBW0X+hfDuAS92VD9KJuWYkn2mY+sLXy3MSskYBvtDJ
jbPMwGJJH0ZrTzNs2C7VL135prN2MzsiQT06hHyrnvwoUWbmrWzCbJpA8tf/AFJOvhTHM2shPxMt
HMwlYubCC3rWqEhIHMF+qBcjGbYagD9ai7wlHwygwRFi7MTJeQlJoy9SlK3oMGpbnIz9SEb8NQPi
0Rlqj3JmmIu2sxaLocca4VFfS6ZgSicQahEm1pJziSEDyd7Q23hJG8o+bAXbeILaj3mP5FHzYStm
WxpD6Kp7c4y3OH9Xyd524yO0iqEo6oTjhIEnuaTqUrd1zL3gQx/wlTtc5K554rGUC4IyxUoGcpQA
oCX6JTPFF1j3FVcHJNMgS2laJRF2xLMDii68snVZPgls3KQAeUOKKwrmCsFQLBYLDp3LaqRJ7lSD
dpAXEYj6VxSPcGXhftX3Y+lfdj6V4AFRx3rB+1NEAdifoxZVtxI3hGcBoJDEAPH1I+RzMhsjIUQY
G7IeIEADuKEZWrkScjQLSZMRQwnRE8te0jO2eKPqXDJpnPAOnu2yYHCYrH1pkCc6jofLqOFVeIJg
X68oHtHXjPMUPQxXCdW4YqoIWKir0b0xAybS+BQjbmJdiKbEhW7uYp6lE5iiJtSZ8QvI5scMqati
l5ctUMk1yktqMTUEUKMdvRUdJRPRzX8pzH+2VzP+rP8AaPQHGKjPKQdYpjRMVVinjIdjonAI2rVw
gHYEWNd6d1V0andRfDkz7ELduQMTSpRPMWZTkfaEnCkbl0wicIA/lRlG80NpFUfxJlcmDRyw+hCQ
tCdkF9ChKxZjCcN1V5JgBvQmS0QowEneBJUuYmKSlwhGZGA+tNGsjReddi5xc7UdJYbES/8AYWdw
4RDqVw+0ejFUktqE7ZMSM0Z3Lty4AaRAi304rXDkjxDxSjpj6lGd27CLeyCSw2DYhO7duTm740+l
E+VGUjjKQBJVAIRzagTTvwBwYFz6gpThquiHiMQwHeVA/hm8zwAzBPaREFSnG2bNoDg02jIyP+JD
zLsrDnieQhTcIVWu5ejpAZparn7SFuRnKIOohxESkczpCYWYyfGUuInvK024RgNkQB1HkQAMXLIi
XMQJGIidX1KQEbk5RagAGNRiVI8ryPmRHhnqLF+0RWoyjy4IrGQjjubUUJc/z0oGL6pwmQS+0yoo
xucyebiJGUbTibyzJ0CvegeV5V7kqhxpgCO1/oCErHIaDL2pZD/FpQ8y/G3EniESIltg0xf6VE8x
zl25IF2PFEfr6kAbOuQLmcidRPchbhARgMItRcrebwSlE/4g/wBiAbvULmoMQMSyNQpULHBPItQ4
lTtXJtcfXFg4wz9alGwBMENrwBcey6MrgNm1OIEYghyWzCjHloTuyIqZDTbD75MpXbkoRlEgNAEk
uc5KzCQ1CRBINRtTYAYBM3aeiqaJpsyT+HbmFougB8Qago3eUuC3mYSrBEyMbhiGg3hidrAKgIlE
NXPegxZ82dCRiJUqY4TjhhuUeXuny5QfRdxDHaFqtQ1wNRKBcepeVomZmgBiwHeVHlpBoRL3JFwH
2ltgRNqMp2DKkgMWAjnsRjCWqeTZdqlcNWz2kqUbkfME4kRAxPehAXBbiMIhiWK+/lXMii8q3e4D
gaH1Onujzg4JkfFio3rUhKMg4QgCDJ3O5eXcOmEqszuo8wCDbiDgGcminzExoMPaC8q3xbInNabg
NuWwqi1DhO0J58UR7SBMvUtEIGZyO1SjbjG3KPtZncHUJ3uYIk3FGNKbA1FKUx5gl75w70Ix0HRS
MbY1SUrfJcoTH/qTYV/9b1GXOcy0S2q3Cn1UREQTOXtyqyFm6HtCkSBgNyN23xlnivxFweXzVtiD
gSHQlzB03RIaqYgIc/ZuaYXjwWxUkDMoSGC0ycHIhaZEThlLor0cJZNIOcinOPQxsSnbas4rVbNc
wcR06hxXPcBqjzE4+TAYPmhC25nI8VydAApTM/MnMabccq4yTHAYqcx4Rwhapl5ZBaRV6RiELlyQ
1H2VqQjGpJYBabgeVwPJGEvuLhcHYrfJ2qiUhAyegdeWIjSzHehdhTXEgxyRjINCRonFQnHiCY0I
yVTRNGqrLTuCfNVXDFh70qIeZdMichQJoinQzIij51QIPxI4MvGK5pydctuSwTEOU7Adqx6dINUw
DBaumEph4Gkl5gaM5YOXx2KcISGolxvCuznWIiabaKThnJIB39ODoxj4zhR6FMfBuZDPatu5aTQL
TkiXROAXamaiORW1cAeWxapgjciDE9qjJ6EEesJ1JB+1SuWw85lgTkrkbuMGqNhUPKt6hZnEzG50
JiQANallEWSJC3ERMhmXQBxVaJgtycsQh5cjGI2JhIk9jIn1pooOalao4bEATpD1K1icdOUiVavc
IEZPrjmRkpSlStY9BqtcsBh0arn3cTgM0QIAwjQwkAUbFtoQOUaMrYjEGcg5kRipcxyx8uUGcNQg
lsEJX7hmRgMh3dRiHXCXCIIrvTNwlYV2o8OqIzCeJTSoejF1qnSOQVOhx3KJBMZSqWVq1IvMji2h
RusI2xIOTgZJ8kYW6QuSJLbsVK3ImJAeJ3po8QrGYORG5QA7aKtUYxPEMUQ6dEE+tUqnK0isjgEJ
X5An2YArXcxGAyC8u0HOZyC4jqhLwh3AdMRqvHGWxOT3qhqtOJyATy4BsGKY0ORWiY4ThIKoItHA
kIyuWyZe8ahaoXHJLgNT1qFq3cevEyEb13RH2bf5SpWQBG5E13oO3mEjQM0RO2JSAoQWR5i8Rrn7
IyClAhzPEBGULFI5s5Xw4F85FcS1eGQzCaQ8y0MwKshC3BjI1LMVSnQInA4oXbM5eWMhj6sCtXMW
paY0Ny2aD9KOIRjGMLkZe1HH61q5O8SP+nNaOesm2ffjULhMbkDiMfoRnyszy8zs8PqTc7y+uOd6
yC/aWT8lzPnx/wClcpJu9CPN2pWJbcY+tarcxOJziQU921CZ2yiCVq5e7Lly7gQAodxoVwynzVeH
w6SN71Rjf5adrT4ySKd1FHTegdWA1B+hr0BLfn60TyV4kD91MAj1oW+f5cgnA6dUSvN5O5K1I+7I
6T3L41qN6378CdXehET0z9yYIPoHlbxLkAkAnsdGVgxiJU0yBLdlVCML0rlsY6YgyHrdE81YlKL8
NxtPrCELo8mRDgyIMT3rTC7GUtgPpWnbjJ82r61I25StmXYWPfVDyJRnEYxMiX7pIjmrUYacQCX7
Ri6BF0AnIggrVCQlE5gv8mJm0Zx8Es+xa51uS8Md29aowEKVAwdHeUNYcqk5wO/iCaN23MfnPEr4
eH5k3HqXEw3FlqiISlsY/lXmCEbczjpFCe9ebzEtO5kQAQMitUsM1wSB3ITiYsd6eU/UFVz2lUgF
wgDsTP6CvXp0MmnES7VW1HtzUhCel6iTcQ7KqWi7G6MKhpdy/CvKdokvqqQ+QXxQQ1dJoSi4bd0V
wGA9MN9ER1pQ7wm6HFCq9EQqoPXoLI2JRoXIKMclrGBxThNKoWqK3hAjagZZrcnimKbNE9HNfynM
f7ZXNQncAkL1wEPnqK1eYD2IRsvjiQhavAuKAqF+7b820PZwrtqrUOUh5Nu2DqnIB5SOQbYhC2PO
GZYotAwjsGKaEJucySmvXa7HRMZAxKJnUlYdNFw3CO9ML0m2OnJdMQ43IC7Ex20dfDcyK4pm3P3s
lGMronA+2FrjegQRtCJEntk4k5OhGBGiAbvUQCxmQe5OawBaPrQtwABavahGuqVVUdBlIgMiRgT8
8OaBETua5j2IcRXweXptuSb6kxhaG5yuKxGQ2wkhEk2pnKf5U4LjaOhpSAOwlkXeRAciAMiBvZTt
2BOZlQ8JAHrTbOmvRpB9atTnIACVScAE8bvmkByLY1MN7I3LdiZtuwlIiLncFEWbcRGWMoxlNlIH
XAexKlsdpzUY3eYGp3lPVKZ9RovNu3JSuswMQIAdjJxaEyakzJk571qhahGW0RD9Vrl2EDsMg6iI
zN0zLR8sEgntwUo8ny1y9OJY+6P1NSla5Xl4RiP39wSgH3RlUqMZ80bEG+JJxKUj+aIgN61CHN88
HgSeIxE5El+IyMkTCBvxgxkYkzDmlagKVvkOSMrkGaOkB32aBJG3Z5eFoNW7IEAE5AS/IgL3NmEG
aTFyTt4dKibt+5PSCKMH1Yu7ofAEtOGomX1lCMIRjEYAAButeLObZjMd0h9nQLd3Tqg+Jqp3BYJs
ggEhkbluzK1AkG1a0EmQOcije5m9bFyYqZHSIfo7VOVuzf5g22iZg6Lc38WqUvsQtX5QsWYuI27E
Q4GXHIfUFqhqeFxoykXkQoSOJiCrdt/vJ/UoyHsRNe5lUuerRcQqjCMjN/YjVSEY6+YnTSMA5+xR
uiFAGlIYOouWAUrHMnyiSTy9yVYj807ivIvPbuD2AeKO+J9oLVy8zes5aC0h2xK0XbhicxOLH6kB
rjLcWA9QXFdeOGmFABsxVLcpbnDIQFvy450p60RByAG1GpiT2ZIC7pEgOAwx70AC4OL4IkVtyeh9
khPGiLSMXxESYn6ETanrhnCdSFCcCHYao7DsQjEiRd5EVZloukxtyqAMaYYqHMGcTCAJBNCO1Snq
jcuRpEQ8RK+BEkO2k1QFwiyDkzspa5mT4F/sWkCvvYJ5ASA9rMKjyOwBDyYC3E+1ifUUZ81zEpjB
onSo6LcTKOE5Byq9B3LyrgcHNGJ4rUvpQuQLiVVIygJFtyMYhxIu7O20Om8pobNyaYNs78E8Jagc
x1KqhYpz0agxbIq5zdmQhfiXMBg21C5rmJam1Dwjc3Qbl3wk47FG3YlC/ED2jipQlY03RlqJCf2j
gNgQsWRj4pIQ9rIb0JQmAM3RuSOu4MZHAIRiGbFM6N29PREYbU1u6LsB7ObKUJBiRTcVW95VyEnD
hzRCF2GqQDanAfejctRcigZgOxyhb5yzoEzwSFfpXlSxj0GcaEYqp6GjFzuDp5HSNgxVIudpxVQy
3nDoD5YJiHiaFORLsdabcdMUBgDmUGtm5BnJhXDsWkQkI7WYIxsh2xlkF8W+X3Asmhfc7C4XDxL7
sR2kshIcVz2iNqE7obVkpWzQDLpI2VQuW4m5E0pkrduRa5mRk6jE8ZaplV/WrV+0NJuvGYGDjPpc
eLJafWsejtWwMjE5LgY7lxM2wLcU5Rke4Ik55J4u52quBRAU9TOPCN5RDsybMotQ0CFnmpDUS+LE
FGNgNE1JxJV/mmErgkI7WzU7cw1C0sGTnI/UtIrLahqxKFqOADyJQD7mQyJoiYlmTmkty07cQnbh
GKHCDEdqBtEwkDULU1RjREgqrl5UCJPf0aB4RiUwyQs28TiVGEbmmca6mo6nGyfMuyxOSHMzveWZ
hwJVFVDlOegZgUt3YYEbKqPK24G3akXlKWMkCQZCYxFQuCGneV4gD3LXciZWzjIVH0IEzAfJRt1M
pYAJsJZFaZBhtWuht57U+RRu2s8YpjQrcvNnV8AnCdVUZXfADQITDTMhwLRceM5SqfzUICBiWpIE
upcpK/PRCWlgSzKxXhaXrR1GUZHwzhkrliUtWriD4uVCE3kvL5e2Y7zimNbsqy29DutOO0oAZBaT
MCR+hEQLv7eS827PXc2HJEDhjnJacTmU0aDYnNSsW2BYkDMolu9Ek4I+WGiMZlC2IuAayId0ImGs
nwjBA2IkXfaHshGHNTPLy98ViVG9ahC/ZgQROBeQ7VE3bos3IhpQli42LzLBPlTDahQp5SldmcNT
koGRYn2WVTqknYycZZLVKVMsQelwjImgDlTmPC9BuThMEL0JmF3LYtP9QtebDKUGp2hwjGwDbvRq
SGjLvfFG9dgYbL9gVb8+Oa1TAv2v+raqR+lBHSY3YZjMdoXmcrM2Lg2GibmrXm2/+pDFNbm0vclQ
/SnEfKnlOFCiJxjzlj3SOIet18GZ5O/7pAFe9fEt/ibQxnBhJuxNq8ufuXGiU4qNye7bjMjAyAKM
rJFkmhAgCO7BRjauzu2oimmMdQKjHmeWuC6cwwifWUZRkIkUlGRAIKaQE4nbUIT5eXkSGwUTXrfm
wFNduJftOKaTSkMi8ZfYtXLXiI5QuGRA7ERzNkxA9qAlIduC+FMS3VB+n0BjICQOINQiPJhXcEAL
0hCJeIEQ470TbnO6PYOoD1iijG9ZgCaa9dERdmLcx2mJ7CvhTEt2B9JxAHtQIiLcxhKAAP1Iy5fm
LkZmpjTST2UQ+HcvQA4xIh3/ADWRNyxO3GNJEtTuQlC7GQOwhP8AITO4WARvXKWoeCORREZxEgMS
cNye5MAblAQ8LOFVNCJPanvT/wAMQiLTQ34lcbSO3NQhHHEoAiqjCUniMluRBkCCGZRMTpqojdX5
VIQI1Fw+xPGRlN3fegbjTbDUAnuWYszcLxKIM9AyeLn6FqhOM/zR4vUn0luw+ldRmPaHWiJeE0KE
xW3OsT1GiHKjqFIoMOl9q3hEbUx8JWqPFApx0FAhRfYnBcdFUwTg1OXRzX8pzH+2Vz4I/wDJvOf/
ANSSDhzsQjbtHVkQEJeS8dshRCUwCc4hE+UbnMkDxOWl9TJ4xA7AsFh0NIOi4x71UMN4XCsPQYrE
pjIp4yIO0JxdlE7QUPNn5jU4kJCyH2oSnBmxUdMtMhtTwrHcgCMVvlh876704wiNpRjysDel7xpF
abtwxicLcKBPateXE/vLlEDzPMykcxEU+lcWqXaWWrl73l3B7L6j6gjB/NiKaqghGz94DSIkcFbh
Yvz82QeceAxHqWnmzO5rrKegan2CRKjL+nWgBPhMTqDj84iTKVq9ptk+EWpNEdzOpWtWpsz1MFVD
YvxF6MzO5QjUQCOwMtNuzbg1SWwC+EAIigJw7gFqmTInbQdw6XuTjEfnFlx34ndHiP8AyujC3bnO
YDsWjQ9qiOX5KXFmRI6e2gH0rGNmRNG0gAd+oqUud55vMx1E8OfC8h9Sjcucwb05cMTGYMpZMI21
KMOVl5kPZMHP60lKPJcjGcAOGUJ8Nf8ACEJC4OXBHGLkIsD+a1UPxPMyBiTqu25yE5A9tAtNydy9
b1GeiZFZHORABK1W+XgJbWf600Qw3egMpERiMSaBG/O4DbGcS7nYGUxCFw34+C0Yl5HuVzkoWrd3
zrZ1EaoxtiviTanILEg0opwMiIwAkY0cntUDdhEGJ+HAB5H/AAr4VscrZP7y7W53Qy715l0yv3Pe
uF27BgjIR1WrjGUPqIRFuzKNxvboMWdeTzBHnM4BDuTVGnYuXtYs8iO1XrhGAAHVMjQBaeWg+2Rw
Xx7z/mxwWbZ1I+pabRhB8i8fpRholbkKGUJ7U0ybkNkqS7jmgD8S1PDaFG1fBvWMbU8Jx7JbkBb5
mMtlu+NMm/SzT3OWLH2otdiV4IA5uDH7FpjbjInKE3PqLInTQHBi6+Ea7Afy0TzBhcHtRpIdozQ8
/jtnw3Y59qEoAQjjHahbFyNMABj3pxESG5k921IXM3JFULYuxEogagaGnapAGWrZF+LvWrk7coAl
ix1RPcrMvNt8vKIGqAJiYt7wIqjdPMG/GvmBw5O+UUTbhojczNSw2PgvNIGmA4Gp61K5e06/YHtE
7k1qEi5aMcXTT+CBsDue9Cc5G4TWRyTRGneKJiPMjtGKDFBb1RVzVcCvLuVGR2FHlrm3hKPeiY4y
Zjs7FwR4WwJclYaJbQnfVbOzBCcT2hOFXHowTJ+iUYuBcDSlHELy7N97ZOoCTah3oDghlqJxO5GF
+Iu2zi2KMuUvGIxNuWRUJAvPAl18a6w2YrRZjxH2itUi5WrlWpijalMQbEAMStRck5oGcWsxrIkI
RFpm2J42y+8qkREbAhzFul23UjaFbhZGmbcZ2Ll5WzqEoAkjMnFRE4vISBivMhiMQgRmjE5o24jA
tVPcL7guEDofYqGuxBssiqhltWyO1YlOWHaVIW5hh4pPgoXeVmLspBpROQ31VyV+Hi/9ME1yQhHK
GC4JajsBWm7FsnXlklzWO5aZNLKoQlCMG3BeZckzeoL8RCUpP4hFacCNvQQ2KuxAcu8ULU86ghCB
h4AxuZIc0SJWbQpCOI3noonGBxTtVMR1HZGc5CITW4SlvNAmtgW45MHK8Ql2gLSRpuR8UVGMMMyi
JZYFMJIn1IykXkalkd6A2VULYqSURG2ZCRcSGCFl3uiIGrevxFuUQbsR5lubnVswwU+W8nyJEESk
TqLHOOCnMBpaVqli/etUYkxZgEZEgPiFrnEkRzCB3tEBaIh6DUov7QomliFrNIn6VsMvDHdtKAI1
SliEJCekGh1VZAvqf1MVEAmJyZVPrWiFZGibM4lUrI4KE4g8dZEKVzmHAB4XxK1zsnyhV8lanabR
KIZuxW7eN0zeLYgDEoeYBIDB8U0RwjHEryLYAPtEBOXBQIOuxKk4nIFXImxqhlKtHVqdiIlTins9
afNESqDkUfLLx90rTOLHYU0hwnDNGnYQmOBwKj0i1HxSQMCAIiroWowMhbGmJGZUeY8uWvEyIoyB
NqRmcAGb1qd6fjmXKjalLTeh4XzVQ0weEla3Ny59CnauWhCPvmrHcmheF4SArEMx2LW51bFpixOY
IVQAjM0AxK0WuG3tQ88kjMqULRe2C8XxXxOGAQjCOkBYsqlkdHDH3inNTmSnNAEIgiL+0VIwlqvk
ODgArhlJycS7gMtQGuvDACjKB52yRCRAEsg6ELUBoI2Yq7C1aF2xMPOByfYpR5AzsxkOO0SDE9iE
ObtyhP3yKHvXmcvcFyJ9iVQyAvWzZJ9pmCJtFxnP8i0gmRxKibYbT4onNfFDwkaFmIQlEuD0+XEt
OdO7pEDgTVNZ+JAYRpgpPIi9LETYPuxQlcjoYeKND+zVNy8xzFr3JsJAbHTzjPkr+EpYRfftXmTh
qOXM8sWl/ijmnLczbGMoUuD9KBXw5iW2JoR2grU3lzylCiOiX4myMpVkB9aa9qsT2SFPWsbd07Yk
ah6qp+UuicP+lcFO4rRz/L+VcNNZhQ9kgtfI8xqjlbkxiR2rTztiVv8A+5GsU9qYmNxr0absRMbC
gLcI2pjCQD+tTny96BeptCNO5Ec1anKAFbkYGLHYQUIw1QlKsRMM43LVKLTGE4kgg9yJtTHM2/dm
Tq7lovfBuZxnh61rgNEsYzgSG7GLImEo8xD3ZPqC08xGVmeAeMmPZRaoEEbR6FpBxsKMJQjp7BTs
QJu3AY0iQQCPoRFiZnVxclM4bDFRjfhaiCPE5Yp7rwn7rEv2JoE6vdIIP0+ke5bjInNq/QhK1Hy5
DKpie51LybkZRxEHp6pKMeZtcRpQGJ/5qLSZ6J5xlT6cE8S4OYr6TzCHJpEb0eZF23pFdIOG7BRN
yWr80UQFmEzayZGV+Xkk4A1L76o2bniiW7VCOYCo3Yqmm5Yr+5GUmAC1HAYdijbsgaoYyTmpzXlw
xwouI1xWqIYxqoueKFD0P1t/yBz0fe6I5xAxRjN5SPtuxC8kUjGus4lUw6Q05BsKlA3YibYnAn1J
pWTEbYyc/StQvt+bKJf6EBYvRuSJbQeCXdqQAtmJOGogV3F1pux0PgTUesOqAy3xqE4iSBiw6xHt
R+rrAjEKEJj/ALm1R+j4NomPvGg+lCfO3xH8yNURysXOcinHVrmhLDop6lw0Owqqphmn2LQcukkI
k7Vreqqua/lOY/2yuffH8Td/bknMQZbU+gepC04hEVLYlW79ktqlolE5uCX+jrULJqTkdtUSVUdG
XpmBYJxJCMpONhQMQ1zciJ0IDhV2/IakAJx8lrMDvVbsB/iCreHdVUMpdgXhn9CpCR9S+6LdqAJ0
E7U8ZCQ3dX4t0GXuxqUY8rAW4+/LFeZdveadjuyM7/MCxajiBWSjHlgOYvnCRqfWaBSt8vyRmY+1
qGj1oGN0WIj7wyEWHZiVp53mJTEsLnly9UTgjLlrRJFGnKQnIbgF9wLdln8u3IAv+cSFKItkyiS+
wLVqjbAqZTlpCAjPU2wuO5V5m1asx/6pBl3RUwTelbFLcrEfLid5JR5nm7wJx0kmUvWmGXUYKML0
IzjmJKNuAAAHDCNAPUjK8WjiI5d6BBNyXYh5VnUcqv8AUgYRjCRPh00A7SUZ3eYBce8YiO9ooXeY
543LsKmeqLD9Z1qnGV2U665RlIS7MIrV/T+RjcLsQNMbh3iIBRjDlpctTgGnW5/OJZlE37tsxZjb
hKVsA7SRircp81dN2HvHXEPlESWmVkXJGpuS8RPaMFptW4xBxpU9pVKei8s346gWPug75YK5DlbY
vwsgGdzU0a+7SqtTs2rvL8tMHjiBKcj2ASKtz5q6AYF42JxYyD4zP9ynLmwJ3ZlwIGQjD9FE23lO
QYzmdRYZIiMRF8WDLmrYidFuZPcaj61ONu55FvSPMIDyI2DYnhHVIEE3ZVl606EjtHqWqFYkPEgU
bZguKWkaak0zqrfN2WNm145Ese4LvUgMLcR9K1j2pV7uniuAHYjcjIGAzCAjL4IrI7dy0xGmIwAW
iIcpp3A49mA1H1o6CW9rV4o92xaiHicJioQ13ZRILnNS0PxVRkcYyGh+1ESx+pPZuTtn80kJvPMt
0hGX2LTfsC/bOJtwacd4MU764t4hwzHbFRuRk8ZeGQU4XgPMgHhNAkA250kDh2oCN+PlQcERLyi+
VMUbVq3O7p8MSNPDtLOjOxHygDUxLv8Aourcrt55tWOGn1YrVKUjLMTq53lMIiAl7QrRGJqIlgWb
vQhf4rZ8M9nahehf8kyoNJMn9Sjbt2J3jEaRPASluC0+bCxbHjlCh7BV153PXJX5ywJJFB3lQvWx
wRBEY7Cp3ZW9ZiwiBi5XkQgYXD4YlaOYgYS2pxUbVqFJbk0g494IVW7oEgUJmGuJxZHmdOjy5NHs
Xw9QfIUCciXrC0cxAnezFa7RrmEYzDxNCF5tkmVo4hCcD2jYnz6rRREbZkBQywCMRSQxiVONyLyj
QDMbwtcZkRwjqogYgRnnIHJQt8vcEp2vEQcSjamwvZErTckLkDQkNQ9yMtiIIV25dhhQNRC6dRY0
jkhGI0gZDpqExwUbt6L2rmcaEIW+Wk1kCglxD6V512bQwjTHsR5yzdNy0/HAioUdYYTAPr6MMc1R
UxXGX3LhjVSGmgo+SoAqCnqCe5XcEwovLsjVcOzJPfmbkj7ETT1qtmMbZyJL+tSNq1EWzU6ZEn6V
b8z2hwlsEObMidWAGCEreMQ6iSKkcRbNNZm8cgMUxifUmnlUqekUmXD5BcExIGpinJELkcmQjcix
OB6Ii4HhKkkWIFyWDlyjG2RqBc7Sr5uEPKJjEZklMcR0xr2qvUlcmWjEOUbkaQBoTgAuO4dZxIwQ
nEa7ectnatIAj2IXIniCExwyHiCxpmh5RBGaNy4akNEbTh0AGilLuRuxD6GZCzcYxng2SsR0mQMo
mQ/NeqjKBeMg4KhAVlGDT71pkGfEoTxGxUiBsdMWCxBfELzIag9aVqjIy1ugQHIoyJNCE8y4jl2K
U5Cpw3IyIcy2oQiCSTgrdq5IAAVPYjo8IwdGq82filgNgRJyQumLylWIOARuXAzUOxGMi4FSrcjE
S1xcvsUfwMpxhd9iJpEjGi8yUjrIacZkk07UbcRTKiaEQXxC86+wnMuIhflWkEXNdNOanbEdeoYY
MCvIuarTF8SRVQE5eZZueGY6ahpZSRjMcORyQgXiTsTPqBwKDYCjdBJyQuw8UsH2BQ1cJmOJRMq2
4k6XzktynCB4IFg204rVhFeYTohH2pYK1PzBoEah8ShKYJD0GGC8u3EkClMFgnzVMd61XCwC921H
AbVpjjsXwxhsCncvU00behKVEa0CMbEdX5xwC1XDrnvwWCJiHYOTsU5XCZaMBgFFrjkFtAGHehah
J5E8RGACedzybZ8MS5fernKcyQLv7smgI3KeshzSIzdRsgCBHt4lEaiZHE70SAWzZGHkicY0eRKM
rEtET7NSPpdPzkuEYasChdsXdMz7ILgoRlATjnMOShESBJ7m9aNu4NUZDAqVvIGicIk4BSk/BGke
hl5sg5OCj5Twt5zC081ajOUi+sgavXip/hJi9YetqTvHsXl33sXvdkCB3Er4kI3InaAUZ8ldNmWO
k1idyfnuWY//AOxYoR6kJ8tdjdkPePl3h/iGPetPOWpm3/1NNR3jhK1WLgmMxmO0KVuYFQ2pqhC/
bukRdni4I7WUBy/PSvTmOOBGoR7JFGErxkD7zH615xveXclW2xp9BU+U54azal5ZmOJ+0LzOWl5c
xnboQd8UWP4qAwBYSZabgNmeBFyle1OCCN1egxlUHEFOYHc0pMOwOn5aUQxcTlKVRsIUo34QGlhp
Gp5b4oRukVGBiXj3shPkb8tBqIGTx7ixQhzlowPvxIMSmeN2Jqzg/UgeWmbJGTy0n6UI3Ii9H34g
v9CAcwlgYyBDHZ6NiHC0XA4yyZOTMzykZFx6l8IxiXcXNUn7wgLxtxi3ijGRBQN6MoT/AESx7E0S
RLHTIaT6itvTXr6ZASicjVEafL1BiYcL9owQPLXzFjUPoJHaEY3LZuQHhJGon/FBPftytB9L417M
UDC7GW5w/q9CLDjWOIUfuWj2cSELsxwD6UIxDRGA6LPMRFZHTPuQHRSnQ5NAtEPCF5Vv72QqdgTn
ErRHxHFapYlORROB2qVmRaFzDrbvk0bowNJdvoWJJAwcr4NwgHEGo9RRuRuaScQAAPUyPn8sJ3C/
xIyMDXaAtFzl4kZSFJetNGUz+bID6whGyTG7kLhGk7lxWogbYkIRnbJBwITTtPqDAOpQMdNaDoEY
gknAIG40XwqmDk9i4LREdsqD6VM8zfg5H3YrVG5GEZ7BIOtMTojg0aJ5ky7Uek0WCqUzoSGCY4J3
XBGu1abtCtxRkKjJNt6HOC8uB7Vv6ea/lOY/2yufbLmLw9U5IROBTzuRj2kKE4SM5QL8GPrRtWbc
pc1LTpjMUiQxMnBX3QbvXHa9SAlLy5bCteoae1EWw+8qkmG5OS5WHTgsFsWLLaqjo0gVKAkQSQ9O
uAA5OSE73ABUDMqNo22tmgOztQu2+IkOHRt3IsScFq9n0wjy8WJxma+pH8XqnPdmjCFggZGUvyKp
1W38JQMJATzj6ZyWG0rjvRfYC5+hHy4yn3MF8O3GI2mpVbundGi4rsj3lVJPeqFY9IAYPmU5aUtq
bSGT25yh2L71xvZMRGXcxX3Wq7lEGiMTPy4n2IUQAixPtTK13yZAB9R4YflKa1o1YcBkX7XojbmC
YSxEWBPeowscp5Ng5gRlIrTdvC3EGkBEVH52ll5l9pZaIAwj9BQItvp8IJMgOx+m5zMDwzxiyEbx
MLZPERirk7V427MCwJDyP1J4jzbpxuXGJ7uiVmWYopW7gplJP1AYliFrJMycyixZ1iVwS0oTA1QO
wsU1znJwBLi2OKPftRnzQ86ZDUGiIbdEoC3aB0+HU8m7NTp4wjEnMAD0Y867G2TgJEBCEZedckHE
LZEi2+tE/LW5+c7S1xOmA2lles2PLmGperERfYz1UIzlPl7YfzrouPKf6NSvKN6fkPqMGDk75KL8
vAiAaIalNyAAYCgAy6Z3JVlGJlUsHAUrVy7CyJFhcEPBXvU7fKcxLm7cgJTu+9I+JX9VC0R9KaIa
AogSPEAowmWx+hCXME3JkPpJOkdyjetRaJLSi5auau8rOMYiByd1GOLOSr8vzm9VFaG0E+s9Os4H
HF01uHlnMO8ZBCBPlwk5OJr3JxXYVC1H2xqnLM7kZxDNkhOEtExmERdgQTjO2W9ccE5uEdtv8ioJ
3DkABCP0oCcRCEPDAbU5DHNaYHTGPjkcIqpnOW1wB9AQqLVmFMUDF5gluBtXqUTy1gnV7UwQH2Bl
ACHkROGkgtvkgb/MG5EeLS8T2Cq1aTMgcMJtIB+5T8uIjbu11jGJ2HctEgDHLYnAYbBknfSBiSok
zF0SoIwIkVH8FZMoSoRIHiO7sU43bmi1LxQj9QCMb486E/aw09ijC1GIgA0ABRlqtk2p7Qha5mMi
IUjOIdwjO2aRxiaFR/ER1vXScKqPMWxkQPzSVK7K3rkKRAxco2bcDC4z6SvLvQ0SwJW0J48JTTDb
wgQaKpotE2I3prcdWkOLYwJyUpXhotSNRL7OhpDVA4grzuVJBGMVxDTMUkFWoOIXmWC8DUxTxLTz
ienUImWwBGPlHWKgOvJhCYuyLbGWi9IwlEMxBr6lG9aJ/uUOZhQnxBSsykwJoGoomwHiKEO+C1kN
O7kMAvM9qBD9hU79wUmRojtZG3OOgnDNG3By44RGOPaV5ZL3MZnJypNt6plMsMyVOLvD3j9iHL2y
WJ8Y2K1bEvi2RpmDi+1GySJSnTTsC0iko4JjiFGcY6qsdyB2hCQGp/o6KSDrIA4sqCu09LRDyNAv
JgfiS8ZzURLBHZgF5L6ZZS3LyLsQ8RpjPevLuDVGQOku7I6SxGaeReW7oxZ0RkcULcaWxXobCeRU
dWILHt6NQxjVC7bGuMvafBWrMZkTLAyBZCFyPmUqZ8X1oeUGhdGoDYelvV0UPTKxAtGOO8qERQkO
e9OVK3KsZBipwdxEsD0GdqVM4nNRmYsJByFONuQ8udSNiDOwzTzL7Edyc41Kufi5ACdGK87lRqMv
aJf1KMbrE24mel8WXlgaQzUor1kScQk2o1NaoEmoLgozmakMwVKLiLpximk25UwWuJpmicRtRLUQ
EW+1GMg0jtWo+I+0nZE5Ba5eCP09AN2Wm2O91C4CWHhlE1X4blSwdztK1RPlmQzzQ5S/bExDwEHJ
Ccgwj4YjJeYw1bQq1TCiFuQB0YOFCFwjRKpO5edB9sWyZCRYEBiRmoacRGvrUISFWBG4v0V6GIcL
XDEHBGJrGVEQMMR0Rtk6QfEdyiTASEQ8SjEExJOiMMmUY6pawPEMHUrF68RCNH2jKqnIjxFwcXVs
zYRjEkttClbMmEpDScgynbnxGGBIojc1ARwUoQi0hV86IR0NElgViy2y2IzmWhHELTbHYtVw6pbE
fKMYkezKjokAxo0okvVaYlzlELVePD7gTRDKlScgoxuCsvY/KmMWgY1EcaotCQkTSO1PSBkdUjmy
IsPG5EUJNCd6EL8xZuWxpLihbYn5esIhhLB96EpyMpDMl/rTRDPmj66rUKHY6nzDaLgqa0PQbF8H
SS4lHEIfhpynHOMgy8uXwxmAjKYcNkjKxMT0+xJ3IRjzQla1HxM4RNqWsHNGIPHcoOmMciaphQRF
AogREHoKFGMplxsJwTpr8BNsDmO9E8ld1wFRamadiFrmIHl7hpxEaX7VQgjdVatOifvwoV8OY5m1
7shxD6U161LlOYxMogwI3uPtQnav/iuXO2IlID6H9anbseVKRx0SMZdhjJEfhpTEC0tIq6lZ5izI
XJB4ylEkscQYqcOT5i7GuryNMtD+qiNlzbvQOqPuybavMlqADl4S4XPcFGN8edHOQ8S0yEZTzjMD
UEZcnelaPuEAxKP4ywZRH7y2x+h18OQ1DGJofV07V5hMrcyGkYNUb3URyd64bXtW9ek92S087y5u
2iWjMkH9ZnC+HM8lexo2k96+IBzVoY3LTavUCmtzaXuypL1J7lsEnE1B+ha+WvkD3JuR2YrTzNkg
DGdsEjtXw5PmAXBbv9LouB4p5CROUtRcIjlpgVeNwyOpthCiL4tmHvxBPrZfGhKBHtAOD9qGmbE1
Alwv2OqF+s92YhsdNa0yG1UtA76hExiBuxTc5yokTjOOP/rvQnaueTP3LvhP0/ajLl70L1vGMX1d
wf8AKojmYiYPiEIkSj2goCNwjtBp2oSiRIHAio6TOchGIxJWs3oGO4upTt1gaAoTlS2Md5QjGgGA
6YtWL1TJqqnRpjQyp61GIxFT2ozJclP7RwWuXd0MEQVC9GhgfoULgzHW3KnW39IVFv8AQYdqlanh
IMDsO1GEwzYHaPRYKgTQgVqvybcEPIBl+a+a1yszEAWcF6ryzGcpCtdi8vy5a2fUS6HOW4F7fBdb
83EphDVIoShbNuPvy4Vq57mdcvcjim5PlwZe/NabpNsfmIEXq7WZPr1jcQU5idIzTkUOCNW3KtUz
LcUQcAiiEQQgDtQMsFwlMcfpTgiUdiMcCMkHoxVC53JgWinQjGqAPRzX8pzH+2Vz0bLuL93zH9/X
LUjpJEtyeUiTvPVohHWdOx1v9B2Ki2JisFXr6iABvQMRqmM8kJSkyErN0Evhgo2r2IDA7kJBGJD0
UoXH4SyMPZyQAHo5QHiFQpW50IKp0C5bLSCFvmTpntT25CQ3H0DzmI9pZNr1nZEOvh2if0ijpMbY
3D8q47kpPkTTq4rHoxW0dHGKjBlxkxH0J9InvwRFmAjvKJqRmclxSbcMSqnTE5Csivg2iPzpGq1E
ADcaoQvXLhhmBIrTCcpTjFowkwY9gZavahJioSd5AMetKEpxqNoUtJEgDlVF5xtyd5OUbsbU52ol
tTgE9gd0JWrBsWGc3LoJJ7IxRfmLXLxfhkTGJI7DqIRnPmY3ZgMIxJuOdr4BMejHoqmkxGxEWyyr
gnCEDI+WcQFGYLx2+hachEnByAhGcjKcqiEBql9C12Sbl00Fsgx9ZZSs2uUhfmzxMJsA+11ARnds
XtR8yR+6iMmyKn+I5mPMC4AJznE6g3u1UdEJRmP3gkRJeTai0M3q/aSmiABsHUAnMQfB822BREL4
uzlhGOAbaclEclyxiSCZTkQQG9Q9ahavXH8pxK3bDQd8SQSCgZSjbGzFFr8RDvduxSiJ6tQxZlGU
XDVB7VBm1RNVy1yB0iXiA+pR86Ys3G4oyI2ZFfh7RF0kuZeyO9HmbR+NcfVVwH2K5M+zFavfkT6y
rcdkR9XSYuYk4SGIUSLpujAwOLd6M7dSGo1WzQNsONqELtuM2oHVIm2c9JP1FAg6rcvDJUoQm0sT
uomoWRE5AS2IiETJ8wPqU+VsWBqlxGRcGO+SlbvXpWrZDElnO6LK5C7dkYgvGDnQWzIUfNsRuNIy
nGQB1vs7NiGh7MAOG3ow/VcJoO2ci4R0sc5EZtgFriSZCsnxK1v8E1JPs7QUPJkLhlhDEBQlYsmE
COEMa70ZX7xnbNZiMjXcApGUNcji4oBuQhZaIGERRASBEzhIIgjt2FbbeJGY7EBGTk9yY4IyHDM5
jBD8RLRKIqWJBbsXwJPGGL0J7igOYjrOJDlnQ5i17QIAOMV5ht+ZcJaIGJU7duJjOAcwOxaLkTCW
9YUWq2SPzclpmNKCAJwzWL9AG1GY4mxCjztpg9JxUr0uG3EURMC7YrVA6J7kBdj5kdpoVx2pR34r
hhIlSvtpk7wGFEzgTOUWLd4UbxlxlxqNT2oCU/Os7QKoxBNuw7uaI24WROEQ2o070IzuaDiYxqvL
83CWLbe9S49UQQDM4OuCQnpwG5PZgZMeKOBXlcrY8t8ZmpRBeUsShA4mpTDpM5loxDko102h4Y+8
iTl4Y5BATGurgFG7ERtiPiAoxU72rXagWpmVJoeXdiCYTiWqNqhOWRIknjVRt3ABDAlUPDtdUNOo
yMJy0kbV55+6FAdqN6NYTrGWIULgyNdjIjSbkJnhbIpnJH1IGY1Ngo2CTpyRfE4J8CmjDXI4ISlI
RiC7BM/RXBPGRByUYTLCVJbERiBgUQcCFK3bpJ6HYmmdWjiEk+gxMaE5OvxsZavKDGG7qEABtpQk
4c1YJzQqU5GkQSjOVDKqtyMuHSCQgDgEfWoyjTUKqqjEB4u8juTRoBRN6yg9diwTDGRWke1RC7bg
bkZYAYheXepcL0xZ1+PjJpRJGqR4ZROTLjskyl4Zvwup3J1lM6pHemZkWwCO1bAmwJwTGtESAxFV
ukUz0QAoM0L0WltTiGqW00AWo4lOvLj4RiUIgMAhZhn4ioystIYEPgo2Y8d0mrZOjclZM4nAjEK2
8hCcABOMqEEKPll4wDatrpzQBOCmBbeq0RMKSbFar7kjCtFoHCNrKWi69kHvWog6ZHhLOFq8d3xC
PZkjrh5cIipKBiXBz6TV0TIvGXs7ECKhAn1IXBAvI4blDlb0j5oc6TkFHm4WzLSXDAswWqRMZZwI
LupSHBgwONNq0MLkQcTnuUzdtDjlgMAD2KFoRe04Mi5zRjCYnbu4jMKPLOTESDkmgdQkSJxuxeBg
fCQoTtTEZjGOboxlcw2YrRZFVO2I/EdztyUpEaQKAJyqFhtRjacRzmU+MjiSoicD5ZxmNqM7VLYH
jKnwOAKE4ugPKDxrKRQlOOixSjVkoXBbE5TDmUg+Kt8zy4GmYL2zhTEKRs2TGTMZSIYKccRKmrZv
Tw75LTFCUKsGZHzGYLWKNUnJDlrNIDxS2lMt6AGGZTThqemoharMtcM4ZqjxvYEFNMaxKpeq12ZG
1Ib6IxuTE/LoCMOhl5pFZYKNmYecqCjgLzC0xGtBVNAMDic+oYXYCYO0Iz5K5Ufu5ChHcm5uMrZy
lpLH61qgdQOBCa7AT7Q6MuTvGy9dJcx+tauasGEx+/sAuPzswyuz5K7Hmbdx5MxE4/nCJU72vXzU
aASNW3BaLwjdIDSEjpn9VVM2mtXrheUZU9TqtR9CMrRNqW7A9y+JA1wuRIZ94cIyccxaiajEstMj
5Uz7M8PWnEBA4icAHXwrgv284SHEOyqEOZtzsXM3HD2uhKBEonAguOljUZgoxlZjF/aiNJ+hCXK8
xolDw8LP+kR+RE8xY13AamEDUbROKPlGV2A8Vu5EvDvxQEpeXI4PgewpxWJ7wjpj5c8pQJDIm3cF
6PuSBcdlU3MQlaOAOksVqgXG77fTNOIkMwQvbDYcTget0Ty03f3pGJB7qIG5GFyIoZYnt4UY3hh7
cATH6VwzoaAkED1p0STxkcIRuXZPsHRuVVRVQ0SNs7YkhW5TkTIgOTV0ZwJtSkGkYMARvDInlOYu
R/MJZ/qX3Vy9bAGrWzvuIyWmdi5BsTQso27Mnq8nDdERepbzIXl8u8dHsyGXUj2p0xJ6C2Ki2VVN
9qDKvhiqYLh8RQlLEoEIjEEMp8rI1iXj8ip6LReiJDLaE9q4Y7iH/IqXQe0MuExl3qtsnsY/Um8u
T7GTz+GN647hPYEzE96+7B7UwgGyovCFSnRes3B8SRcbWZqIiF0yskuAck94+ZZuUMs4lG6AKh3V
2xZnpt3XE4s4c0Up2oxlKQAJI2Jp3CBsjQKtT0OmiokEitVtQE4hxgjIgg7sESJmuGxA0lLcg8Kn
YtMgxC0QGqWwIi5wyGINFSQc5BGrb0CZ6h2qsg3aqyAVJnsCAiH2lW7vLkPIDUOloxI3oSlWXTzX
8pzH+2V/Uv5u/wD7kl2ojrgugRn6J2WCeIp1tilC5IsziqqHAPrQ8uGkANTM7UCT3KMrd3iGMVGU
qkYoRODI3IihLlGRHEE/pBzEAwOPVa3clEbAVwcxMd7/AFpzeJ7UY87KMIAUltX32rcA6+HbnM76
BfCtRj2uU5uaXyiGTzkZE5kv016d/WwWHQIYJwiST3JzQZpoCgpGIQu3pOTksAyaA71WTKhE47EJ
sYyGSE4eKY4hvR5eR4TguO6DL3I8UvUF8K0TvmW+pUjCPrKbzNI/Novi3pd8kwm5RhLwywRge5NE
EppSLbynlxHsWmPCNgT5qqcF1VeJUqg+KBzCZUQtHEID19Lkgdq+LeiDiwLn6FC3ZjO5O54Hjoie
+SuCxyj6Cwk5kCXamkV9aieUvStgh7huQAiP0Xipw5qULpmQfPlIuOyiibVy5C4MbjuT66I2ox1i
XjlJjKT7StNi3G2DjpAD9XVeuRgDg5+paYXDemcoA/SULXJcodZxlP8A9MhDmb0bFkF+Bi/ayEub
uz5n3QSYgHuKjLk+XP4eAJkAXrtqXUbV64fw8HEYihl+kc0woNg6HyCiQHcomQJOWxaBVkbMw87U
TKMe0CqJMasclMs0TEjfgtN2MokEsJAgt3rmJjYwVq22JC7BgnJqu1VQaxOUvZlUKHmjy5tSeIPa
F5tkxjI98ZKMJxhbg/EY1p9K03LgAPhBLKdm7LikARpGpjtomtgzjgJGj9joEAQfD2m7UZXZ6tun
hTkPuNVqjERO7JRuQeMR45ZNsVPCE8SCBiDRCjSdtpDlMz7CUJ8xKMI7ZFkI2AeYJ/6eAUo8taHL
2xh5kSD6ypHn+YnKUi+mEnj2nUEDagLsBiZAGS0yaDDBmQFjimfaagWgQB37yqliMDmhG7Uexc/K
iLkSDtxCkbIMxAVI91CXmhjkQygx1UqeitQtUPh3BmEJcwfLkBWhP1KI5eTgFmNC5zYrTdgLm+VV
O5ai3mDh3DYo3BbMrsqDTjTFS0gnR4olafDLYUxFETAuNiY0O9ULICXrCDYha41yIQN+yYEhxxBj
2rDVCPiiBSild5UwjCQfSAzsuxOMTkrdyc4jzI6gGoO10A8TI5xATykS29NbiJXZYyIwXxJDT7oA
CiI0rkjWhp3JoTlaOBOMSmleptzWid0CJOok0J9ajA6pUeMXxQMrcYnIGVUwhFzWMo09ajZEwb0g
4bYgLY0kDj2mS1HNb0Agb8q5RFStFpxbGETjI705Ln/1gmAcn6E+JOMkRbkdEvEArtq9bkOXmdUb
jYSRFlyZDFmA9aHLWrRuMHmcqrztJFslmWoVBTajp2Otu5A4dBbECi1uZ6amIwYoSlYkZjEA070L
duOmEaMMFol4R7Mqj6UZEAAYMGVQ5TyIAG1cEn3BahEEjIrVMAHJ08jqAyjVHRAlsyFqlwphMHeU
wr2Lw4KgZlanHxOyY49AN2IlbniDgvIsQEXNTEUdG2ZASJcPmFeYGZlEgAVx6dyOfasOFAhStnCQ
RhMEGNKqEgcmPcq/R0ARiTOJo2KEr58qOz2los12yOJVV4XCC3lDYA6oHMQ7KEdTxJA0soyA1XJ8
IA2qw7C5GLTj+cvLl45SGgdmKDYnathyZHSQwDVTxLSGIOYTzq2ATu60Go27EYkYhG2cIFguLBa6
iP1pnYbNqxTDJCEKyOCbGRxK8uFZywQiBKIxBGZVqfMuJkYHEq1G4GEpAMc0wDAYAKJhQyi8m29D
EOFTBOctichlq7mKZiVpbS/rU9AqHPqRjbuASHsyLHuXkxno5m2drVCMeb4waMMR6kIjAbUWKaNS
tUqlYIRiHkcAFC5McVzA+6oRunjNA6kbkgdbMTk+ShCAAjGIZlbuWQBcmKx3jNXLV+2BMkESFEbc
x2BC3EaoRpPYiZObMjQxbNXCA4Ao+KuzZnmad6iJYRoFEGMnHjOSl5A4QAO9OzRxlJG3qMNRqRsR
i+ovitI4pHCK13if0Uwoy0xrI5IU1OcqgKIMawGCMYW9Up7qR3qEebiTKVdUsJHYjanEaCGZSsWx
G5aiSICWQfaEJXSBLCIPDEK7YMwJkUIqFquyfcE0Qw2dHYjqYiNKYKdkS0GQoU2jUNoIITzkIfT9
S82MhciMWoVEjPFeTc4AKiWC02Y62xIQ1EiRGAxHeh5T3rY9ks6lpBjcNNJDMVvPRpiHOwKNu9E2
gGDsWUbsbgmcyC9fsREJCcpBgB19NyOqOxPydyUBnDUWfvdC3zdoxkMZggg70DEiQNQQmOCpE255
SgSG7kL/ACpEwCDIAkS+lRtc1CVq5ENqnw+qSErU/NgKgSNe6QqhC1cNo/8ATvF4d0k3NWzD/wC5
Dige9archOJ2VWuHw54PEBHz7WqPv2g0hvxQuchdN+1/0p0LdhXk85bPLXNp8K9i9DukEZ8rM8vM
7Kj603MWvOgPbtAmXqTQkYyw0zGkvs6plct8UqSMSYv26SELnLNp9q1ORA7pYrzI/cE+GMtQj2uE
LfNQ0yPtwOqK+DdhM7IkP6louREonIrVZJtSzaRY/SgCI3Ye9xGXeyEZPC4fZMZfkTguDmPTUTEA
jYQiOKMJFzCJaL9iErE5yiMbcpsPoURelEaqwjKXENzosJCQxDOPWEzV2ZpiG6aoMAyB8ds0MShc
gRpO923dEeU5Z/Mni2xSjO7qux+80ZHtkpGLs9CdnSZ2ZmEjRwtV0gziWfM9ErtwtEB0LUYiNrIZ
ps1h0ZqMsgcNyMhhMOFxZLh9SMsWCNy4OwJgijHYoXo4ZqNyGBr0U6lfkrp+hlRV9CJ25GExhIFl
o5uIuwwMh4l8GfDKrGlSpWCeKI4TuyUhPI0VelyqYLat6G8J1IbE/Q6eVZZsEZQ1B9gUrgka4OgC
cVpTRkQNiqSVQJyVSpQ00C4z3KkUwHU5r+U5j/bK/qX83f8A9yXQJ93oGVOjd6FijSm3q16MaLUJ
d3RAW5NUVUdRctVBsQVubBGtPSytkYiilbkKgqoVOsyxfobP0WHQ6wosECsMAzDFG1eGgHCRRlGQ
kMU0cF5ksBgE7PuTyDbAq47AnNEzsjqk5GaAgDIjNC6KPiyYCuZW/o0w8RzWqZ1HeqALWDWKFwVI
xTYEdB6PtWKeMmKZxIb1xAdyotT4IPU7FqZk+CPmcII8RoEZGM7kHbXAOELdiBtA1Ny6PqARlZDR
NIERjFxt4ioz5qQt6cYykbmuW8OylrvSlGZ4oQAiDuzKBNgSIAA1ElgO0oQtxEIDCMQwHWBv3Y23
w1Gp7AgLcjfkfcFB2koQ5HkZRf2p/wDoBAX+ZjYhmIN9OnH1oS5i9cvHN6IStcvCMhgWfvr0EywC
lftRBnAhgcKlfgocuLMrwIN0TJIjmwYfWogYAdJjeAqaELTOsIDJfDGmBVaAnBTt2ZGBiK3GeMQG
d0ISkZSALSwqdwRtXYyn5chEiLAKN8DTGQGkblMZzkPrVqOUa+oIsHCZ9J2YrVceRNA32IGINm0z
mU6FQjcvgAsYyiQTMjFmQESL5ZwBSMfWtNu2YwmKvHE/msoylMgEcNuMtJH6WCjPmLxuXWYgNIAb
sGUYz0W4RFSC8j2shptxmQGE5gE9wwC4pRiNhACJ0wubSKH6ESJz07AH+lOYSnvnQeoMhAAQgMo0
C0Q4iz6cEReuxtlvCTVC3y9m5ekBwyABBI3Aq5EabUMpMYSO6JUfxl+5K4MYg6otsGpRlasQEo4T
0jV608UHxKL5JzEHewWkAONicH1JgFpkzHJNJ5WTgdiEYl7V14n1UUZADVaJE496lEPKES0onGK1
RkGR0SEmxboY1CjzFn4d2JEgRgSFqvxlGQxADoG1IMaAZgb01yOoswk5cLmASNRrqPuqUJW3tgtG
ccS2aF22RdtbsQmia7Ct694IVQQfFMZkjeUNiJtHTqxifCfyIyJNm7kZVhLvQNy0aFwY1iVbN0k2
4jhDNRPEg7s1gjKTBtqIgNUjn+Ra7h4jtTgdgQeUoGOBiU4lO7PIzwCufiZTleAOmI+t1GNotcjS
3LLsKkealGEhSpx9SjCHEYjTqwd1GLkXol9WPcnqIDAHocp5F7h8MQpF9V2WJfwokkl8StEO+WxA
RCqjFXI3h8W2Cbb4FW7NwiUbkm0gYIyu2xOcsSfsRhLhtTLxG8o+VxQTjArcq4IEFBkboiJSIYvg
jEQ0vVoBlI3bTSAeD5omVnRpxJoFpBcnEjBOfWmmBLcQibbQnvwTXoMDhIVQIlwEUITP2ELRMvEU
Yp2xyCwMS7OE2rVs6HKgwfQDTYnOPR2YqN7zQLRqQcVZsgjSNh2LS1FdhaDRkBJhkT1MX3JxgUHo
SiLsBMHMhGNp9JrpOSfBUw2qLCu1AkMUwoENrJ9VFVEKU9pZSu8x4Mgje5e2BdBfOh7FZ5W6X1Ey
MTnpDrTGIiMKUV22ZmbMxJcgEOy4k/tHwjehMVepKriaMmwJzRJkBFGMSwOJzQGsUG1GQwkapyGt
j/mKAcdip0EDE4I3JjjOCc45Bapw8yeb5KF6UYgSAI1ZK2JnVCJ1FsGCEBZiBHwkDiB2urnK6YXN
LaZl3Yhw6N68dVyWJyG7qarswDjpzKjo5cmLsSSH7lMCJhOOMSRgqsETEu+JTEAwOJOC82xbeWdS
PoUbsZHRNniDgSqfSXTBGMK7StpzPR5VqJlLbsQtxjUSaROLryjETzAOSJj44SfFoxGSN641yB8U
45dqFqNwSgMNQchebdlqmc1G2ARL3gpXJMQWaWJRtwpGQaoA6BM+E0kNyN/lyJW51I2FV+hXWuNc
A4IgPqKJvQImMimpEbAjVabQpnI4J8ZZkpyWARL6QMnqexQtVjGUgGCHJRucAZpDEjYULk7ZlGI4
IE6XV2xciLXMRLaTQtsRjqa6SPLbF3yUOW88yjOhwdu3FapcUhmiJXBE70DE6gcCnl04hipYVLiS
0hpHYDVGZoBiELpI8s02FHQXBxGKpDHKKfVIf/bKlcEpQOwFnULlwtIYJ0Yw8MKdp6TfkMcEz8IP
hxdcAlbmMHcg+oFC3zFoXYDCdtz3UCAhcaRxhIEEHZVV67TiJDeHRu8tMWpYiLU7Fp52BiM7kIyY
LVZmJjd0NdtiY3iqM+RvGyT7EqxK8v8AqFjVA43IB4/SjPkeYIJ/dTYxI/RkEJX7UuWnlessYd4W
qJjzlr3oECfqWky8uecJ8JTxGiRqJRp9SFu/bjzNv3iCZfajP+m8wbF3O1IU7KrR/ULUiPZu24uD
2stVmWpsRhIdoRN22DI+0KH6E/K3hOMcLdwGve6083alaPviJMXWq1ISG7qmE7cTGWNAtVi/K3IV
jQOD2ptP4iI2AEEd1VovW527o8UQHH5V8K5GR90GvqXxIuRR6g+sIy5a5KAygSdIKbmrQMR7dsSL
/WvhzrsII+v5BF/DpomjIh1EXQJ6TSWEvWEQROBNQW1D6U40doJB9RQ1ExfAkcPrCIixI2FNIN2p
lGE5ytwlTVEsyeR8+17Ms2Vy9zHBqt6YlsCua8ziueZMxfAglaZxEJZEUBT2502FPAiUdqaUSEZw
FQcYSaQQ+I4Hs3Rj/iUYm3otxqdJcErWaMgVUd6fJOnUYe2CwWmVvRIZJ9IiQMlVNkmHQ6DBgEYa
n0nD5dv9MDixxzWpxG6NqYimXTXow6ADl0PtTSVKoCB0hVrvVOjUcqqXN2qxh4hmqxMpZumt21Ua
QnuF1SNU3X5r+U5j/bK/qX83f/3JdBj6Jxgt/omlUJ4U3JiK9WifoiXaqidRNBiicUf/AE3p/wAT
bHHHFEENOPoiPoWzqN0bukDHoaIc7BVUtsNsqKsovsTyg4/NqiSDqyotEvENioSBsTKKfo4foTyk
YjYFUOdpLqkWRADKVomowCb1p00Qg4cLZ0EIwNBLBNkehuhuo0alcZDrTEAlaaSl7sXKcQJkcAf7
kJTmIk+zEfaXUp8wZSJwqo6jKcRUW5Hg9SGm3ENhQdcC9ehbJwEpALy4GXMTdj5Yces0KFvkOSlC
JbiuxL19QCieY5mNi0MY28PUEJ8xcuXyPeOKBtWIRIwLdZhgcVMcw8rYDkNiyjCPJ/Gm4t3nbRX6
VGWeBHRVSiYGIGBKIlg2KrmoFlcPMnRb5iLeYcIkVD9q8w3YiDPqcMruiei3ckCJN7qFkVEOF1Yt
e/Ik9yncuSESA0X2leXOZMh4tIMtL7WTcpAXm9qR0A9jhRMOWlYYcJA1Gvaow5i7GQw0ksPVBTEt
TxIGoGhfHJEmEdIkRENlHMo+VCnsk0A7kCTGQzhgg5Yn2RVE+VJ8sENRjAOwEakoykNZ9omrDsRI
ItmNRIUPqTAiMfelie5EXb4IHshabFmcpy8JkaeoIC7LyInGUCx+gujcvTlfOyQx7U9iMbcpYtsV
VVYppzAlsCEoVEs9yId4+8tLuVSNDmmEWTxbvWoweWdU0ocJxGKjOBaBL6d+5XuXfhvQ1jd/6KF2
3IRu5vgRvRBiYSOQqFEsIGFIyZqKEbcSZD7wmuo7lGcQ2rLoYhwclHmbQ0mJOqGRBT3JES93SXU7
fLlxdHHIYscgjM0gM1rtzkBsyl2hNzA/D3fZuDwntWqXxbJwuxwRMC6fCWRTTw95Y9A+1OtZrEUE
X2rSJmEAOIOSB3LyOYjC5y8aW7j6SpTjcFuALcX2EJhzAP8AiKe7dJG4E/WvgW+L35rVIu/qTmia
I9aEDMA5sfoVyfMByKR2l1HLVUoWozidVJTJqCphuIFoSOY2rXIvLLp8uB4szsREZaicZmvqTZLj
eNoYnMoCEREDAhcJo2KFa7CrdsgccgGXlxiITiGgQEY3rPl87DCUiSD2OpRnzMbc4FpRkCC+7ard
6ZPkRHBTF81cuwL25RYghmRAOqG5OKgrd0ApiWfAoRsxNy4T3kBW5wgYXojTIHBTkZxmJ4wqVqw3
JjhmtNykRhNa4zeIT3WNsbkYWGaOSbFViC3rTiLAIxmMUA9FxY5LXarH2lIkMSanaqOUH+lEbQja
gZCANdP9yN+ZlERLwJ2qNy8wMhkhz8JGQnJpg1Yrd0vluQZVWDJo1OachN0NId6IxVexeW4EmxxT
yRIRIxZ/WhKEhG47h8CFKV2QlclswAX/APEIz8vRLhOLnAsvM8vzRgZOwB3qV25WdwuuPhapRh94
RR2oqSZDWdJwcBUk+YIVJlthqtdyssk8QwXxBQ4BcIq2AVQ3QS686fhHhC1SwC18zMRGUULzmL1a
JpJWuW5ctawbYBmoXrF0ylHx2zhINgFKMxLzY0MNJd+3BT5i4NImaDYBQLzLpxpGOZKMYW4gezIk
qU424yGIkQxbcERKfEKEaRRG7P4k5YvVCRNXaUcxvCuXC8o4QJxKwpvVaBGOkSiNqjK1GIkAdQwc
JzQ5hFiiBSOZTBEnBGYBFoYyVyIiI6RjmVPmeWsg3s5DLepTtHUJ4zlQDsV7kucGi7NpWpHCYzYq
6bxDGJAG05IdA0msagKLigPEOxRnZiDaapGRTHFVXwwRsZW7F2AEonxszjevMhwyCMpHVLamcmRw
AWu9QZRH2rDSBkmjWSjF3uSDk5RRkHltkUJAGJiaOGX4fmoeXdodYqTvCjC5di7cE3AEm+1Xb/Ln
gLASGZAYlRN19YOLu4VB3ok0G1QIiJl6acwhpGkMKJgehyViAF5QBkTShVswJi9TqyRhaHh8UjgW
QsmAIwCeY0S/NZauYvNZiaCNJKV8F4ikTLEoXZtISqImunolJ6mgHaiTicU6jAZlC2LfmwAppFfo
UruviIrbOIQctN6J0ZzhxnGUSR62RlauebbjhGci5GxaOYj5J2l2Ha4Ce3MTjtHXMZVBxCN3lLk7
Nw5A8JWjnLMpxdo3YsfWzLXak+RGY7ejcjMRNqfvQp9CIiRzVkYRk2pu9ONfJXnYlmi+9D8VZHM2
v+rADUE/I8yWGNm4xbsQjzNkxl70WMVqiRrxEoSaQ9RQYfibXtCR4vqWu3L8Lf8AzTpr2UTXYjm7
I9uNJtvCaMtE/cnSSMZASicQUZWDKzcPtRka/SnnGN+G4kyH0LTKXlz92QI9TpxUHAjqkztjUfbA
aXrCMuXuNHEABpDsKaV2RjEeGUTKvexQHMWbkJnMReJ7EfLlUYxIaQ7iiZwqcSCQfoR8i75gGAuk
4bF/3dvyiKPEGQT2piX1+r0NeoLkayhknHiDiUTtRiRgVpOWCAGAxXDIhHXCM394fkZNLUNjFx9K
Gkxn64EeuizbFw0v2VG1ca7GNHBqyj5TwtUMhtXmWpPGYcgZFUWlxIb8UNVsHsLIAWjq3ledEhpU
lEh4t2LjtW9Mg8REMC6AlqsHNqxWu3puDbA1Qeu2MhpKo8JZiWHrTwIkNgTgMViOxbNitCceOIEZ
SOaDBgvp6STktRwyTwi4zo6ED4bn1lU+Ub+tT01Cy0z4u1cNDsVCOmpCqVmVQUVT1adMrdu4Rbn4
44grVLiK4Ygej5r+U5j/AGyv6l/N3/8Acl0dqOw19AOjd6J+gyAqFh6lXBV6KCqDRdGZGCBAaOLn
YhaEsBVk0aDb8gMZBwcVOdm4IxljFlK1cFR0sMOkAJ2XFRarcXiMTktIiXGZoEZXJRgNpLInz7dM
nr6kL2uJjKgArI9y1QtlstTBUgD3r7lxuIX3J9YQeMYjfL8i+JdA3RDriBuH84sPoWiM7Vs4MCHU
Y6vNuT8MLZBKNq5YuCbOBFpk/qq4APw7eC3K2ZTl2om8RqwLBlIjDUVF7gMj7IyQlABhjtQE8Qt3
TVYU6RPDahMYFMtqoOprGIQmMQt4p0UR6KJpER7SmgTOXqj6yjpMdW0VPrTzaUvekSVqBjXYiH71
SkdpRk/qTivU0zuwidhkAULUfMuEkgShEkEjIbUIcpyemD18wSw3yoAtfM80LNt3AtSLDcIhvpRv
Xrly+TUiZxO84rzLXL24TFBIRD+irmjEihDKX4s3bs7dRbt0lVTNiJt2CWEZVIT9Qg5I3ZEvAOFE
yDmX0BASk8GLasArVzl5zjbFJaaCVckI27QIlQQLifeojmCNQDxiC4AKM4yEACBMyFf8KBwAxaXi
7UOCLxpGgonIptQIeJ98ZFQN68ACPFGOoH9I4BSt3R8C4XjJ6etG1KEoTFIyhgB2oSvcxK5GOEdv
e6B0DhwBDjvQN2UrYykHI+hG4Lg0Z6qB+9MZ6rkgQRbGoNvdQ8izHRENEmNZFa7s5CUjUB2ioynb
1tUm5JgT2J4xiBsifypmIbAHNeXLigcDgQsd4RAIDeytLGQkMHwR8wnUfZyCYCMrrtxVITAgtjsT
CICbEpjTopgm1BMmkARkhc1yjMR0hi1NiIvB4nAjBMDpORBYhSFzmBG0cZMDLsyUY2Imd33zie4I
Wb0THQS0trp4kFblquSEBm5ZG3y0BcJprl9gUuc5i8LM7lIQMdR09iNi3MC050znwx71pv2zB8JY
iXYeido3P+2jF5WzVydjrTbnpkamIqyBmGBwmMCniXCe3TcVpnQ71iiD61XHIqoocVIzhrtkMIja
tV0RFmR4IYF9y4JgbAszHbiFpk0Z781piz/SuAtvkWXFcj2u6fivzyxEQV5t8vLEQZA3anYaBCQt
hxmKD1J/V0MVK5MgCIUiC0SfWnKEpjhyCZqbEW7kwWizAzlmRl3rz+aYGPhi712rbuTxIjfj4ZZ9
ilDmuY8qUZPoMXMuwuo8rCY1Ww2lw7Dcp8pZkBOdCfdChdMxctDxs/2oxxtkp8QVuTPROvOsyAkA
1dhVvWGJFVRUQc0ktMqgjFSEAWljsXkyoTl2oiAZ8VTJb0TmmYIEsNyeX0lMJAPkgAKyVOkzIeMh
UIW9AECWAGKt2ZcN2IbTtRt2YuInXLbRVxFD00qgHZk5K3dR1g6wZFatIfb0RhkShbGJyQtAygY+
GIQkz3jEEjeyhbvxIuRMjqZgQS6nZMhK5cYCOeOKJAXkwNZVl2IbE5PET9C4y0Y57UZAlohORq3h
aoSB2jNVookgGODpnyQPQ5+7H0rYAmlICA2oytzj5UquTUK1y3LVLMZdijLm2MJgx17DvRmZARFX
V+5brGRDHawZSuXDSIchNIARiXhFRuXxquYiOUe1MFG9bAefjjvUbkbZEhiHp3K7OYMIFjpGNdi8
uIYDAIRh9CGmBlHOS+tCUgKeE5rFap8MNm1MA25VKibwIEqiAxIUbYiDbuDAZL8NycTbgQ5kamW5
RjctmFsB5SArJQjaYaaGO9cuZH43mPEDHS3F9iBlOU4nDUSep9idmO0URnAuR61qvH/CiLIAIwCg
4bmBI6moGRJIAC02nAzmtR4pnMriLI24kRjiTiVCEJgWz4a1dfDJvTDasad6ENOm5DG2zN3ZproB
G9RNrhuRLgryeY4RlJRs3JtA0hPI7FIS8EamSFwkCJz3LhnIxHsDwlCIiaGrYIasdy+NcETsxPqR
8kuMzgqkiIzVUTJomNYywKMr3+EnE9ilcnwOWi1FrJPFkU4DHahKcNExQjb6lEVMDg4cBAgMMlVe
UPBb+voYI35CuEVEWg5zOS88XJWrgPCYFgPUEDzUZX7RHjEnIK1WzhiMx0mFwCUTiCjPlLsrVzPi
4SF/3NrzbeVyJcjtonhciJe5IgH6euYyAIOINQhOwBYuDOLgeoEIR5kRu2x+9BLkepA2rkST7LjU
O7pa7CM+0VRnyV6Vs/8ATlWJX/fcrpOV+2w9aiOXuw5mHuypMbsUdJny10mpAbi7U5j+JjmXEZfU
hG98O4Mplj3FarBN6AwiSMO2i03oeTdNHLRk/atXL3PxNofup4tuK8vmInlrmy5QetaoSEonAxLh
AXY6mwOBHeEDyd2UY/8ATlI6fqKEebsgf/cgXie5kNFyJJ9lw/q6um5ESG8KUeXnpzEZYetauZ8y
zddvMgZH6aqMuUJvgeLVc1CQ/RLFEcxyxt6fEdQ9YBQMLsJA4xJD+pAwe1IVjKBZkDAxvxAqJE6j
2KI5j4UpZVIHbRaoSExtBf0f4vlg4xnELzINqPijmCnCMSgRj1HgTE7QWTXhQ+2BUdrK5ItOEYuO
xS/p4jbiJl4mceIHcVK1PGJamHV8klpxrA/YjLMUkN+1FiQQckI3sTmQ4Wq3elafYNUfVQr4X9Q5
cyyE9Vs+pff2JQ2xuyP1gLTKcTtMaoXPHIEOTsUb0A0bkQabUDmED6+h8Fp9kLcjBlAx8UZAjsUS
cWr8ibpfobpwT9Deg7U3zLzX8pzH+2V/Uv5u/wD7kuh1GQyofQMgE+Sp6CiYoDamyzVQqocY7GVZ
1RNvikNgQJsnQcJGgKHmNCOwYoQtjTEevrN169eN6MaZkY9NFwh0BbiZE0YIXOZnpOOkV9ae5dlc
hE+zEae9lCNqyLgJZyGiO0sjas2YGIFJgkR+pF5yhMmkhPhHcozu81cuXI4EsYjsBRu3I+ddljKY
B9QwWqNuIkMwAnkWG00XFegO91KUCboh4jAOAtVqxOb0GSgOW5eF3UHI4hp7Sh+KMLVv3LU9J7yX
UpX/AOpytWpVFuEtUm/SdR5e9KZsz4gfLGuXe68m1yotCIcXbgIkexmKnLluUhCZcC7KRJPdKv0r
zbsrkJR8cjIMewAIjxXJb8N5RuQi4Ga4qSC0T+lebb9S0yoUBEdpTpyGCY1VOiqMJYjBVTp4qvRv
RUrcjwov4ScU+pEQ4k7MDnktMAWPtIgTLbg30ps9pDlOS60gsMgAsS200TO52BVHctIoEDjuQMiB
HcjGMJ3LgwgBiiLHJxiCKTnIgDuYOnvc+bQPijAA+rBkJcxOfMEF+Oj9rVWqxYhbl7wFfX6YE4Jx
UHBW7jiEouJFsYrmpQt3LxoHlQA/mshE+E+E/YejjkI9qBhxyyAQnatEnPVUD1K5EsAaCVQ/YFxH
y7UfblWq/E8zcF+zGhAjpZ88Shat6oCHhjiCUOEAk1LVV3ZAAK3SkySShIYJyWWqZAiMSV5cbZIN
BMFx9Sa3CQh7OonD9FTt3IiUMdJx7AjKEJwmPZkS3dgjcuTGiOJFStNq3K5zDsI4Edu0KUOXti1G
PiuOa7g4Wrmr5ePggRqA7cEbYt6yamZLE9ihdAHAKAUxTwLNhA4IebHzJRoQ7DuQ0xFcswvhyMfz
cQhOYJ2SjigIFh7O3vRF55bRFNOcoDAOKLzbNwXIHFi6fSHKBFHyWq9IQt7SpS5c6hChdF2PYtiY
ZrDqMQ42J7UtB2YhabtvXDaEziG4rS/mkZRGpPG0bZy1Tb6GJXl2ZtE4CAc/rSWu/MknEO5QMIQi
fflxFAzDkFq5ko2RbF2dwUsxAHeTkvNnKN4sR5Mg8Ig5RclECY5VzSE+Id0qKweXu3IXYcUpg0mB
tG9TncifOmPvHcqXKT4xElpYKXLwnqY4EUdNMaDvW0FPEuNhTSodiBxQoy0ux2q1L8R5kY0AD0CZ
Mzvitds6JPVsFG3dm0pVTW4AfnGpTamXjK8R9fUoG3pjxzHsgomZaGUBgFXE5IXLgY5BU6PMuh9k
StFqFZGgGAQtxrI1nLaVsVSgXcbVrjLRej4ZhGMrmm7gJvpovw9y55l4nVKROL9quSxOksNpXm3N
WmcqxagBQD6oEOVqFQVuWfYjE54FaZl28MlxFqtRaIORvWNQEYgViWIWqHiaiEx94KlNKY1ChB6G
AdMSIg5LDvKbAetEzLjJiyBEjpBwQljwhkwHSJs7Gqje8t7hqwJLHsRkPaHCSiZeECqvMNMTMmIO
x+nBitUlwwptXECE4LjqM+KMZYZHqE+7ReYRqFvJC2bAEjhLE/UrHLzDxuTAkdkXTRFMmRMCBqiD
IbCmBdG5MsQANJUYWmJAeTYdAerYDIIiJZ8WVCarhD7UIQi+2iFuNGT4JtgWiJaOZQjEMAjbt+EY
natEYSJBcSGDKPL24m5cIQhfsm3cNQZD6lpbhNC6kDOYg5aGosyJJYYklShEtAEjtQu3WlcNYg+y
jpoTmjO4RGTHT25KUbktZke31IRwIZ3QIl5k5gEz3bAiThkAgSwdeVAsMZFECWp1ogNUjkFruF5Y
gZDoMYDVJRt3Y65PQ+yChc0nXANw5gZKJtWwOWgW0mn0q3PnICMMIDEetblcNiRiHLGJZDmJSMpw
oSS9ChL27VJd3S/QWxU5kvKVG29Om2NcjktV409wIABug353HcHScgrtw3tco0oMQjdJrM8ETkja
t3R5khU6cVI3paLsCRGUcRIL45a233oFO8KJEjdBzwARiJjRGggI59qYjzLZ9g4jsKHKmRjaw1Z9
hRsBrnuy2BGMC0iM0bMSIticySrdmySbl1+PYpG9A3DLEl39ahcAEtXitlarcQInEDEJ4GUxsGKM
LngfAiqFo0ANGCAPFIYRdC9PGXhGQC0aA49pO7AYqBtxJhmQce5OAwORyUpnEhgEZHE1KdRtjMoA
YRCJnAwApjigwBeooq1ByKE7bWpjOIoe1mQjdj58B7cQQfVVNbnxe6aHpZapR0z9+LAoDl75u2xh
buM471o5iyYSGMgXHchomJPgAeuZwkbNw11QAxXxB+KtZyAEZD1LTF4SOEZUfpY1ByWuI8q570AP
qTCUeahsmGk36TrTxWSMYXHlE97hCPM2nGVyIcOtfK8xLT/0yxj/AHLTznLCQH7yFfsTcnfBh/0p
jD6l5POWO8hx2goz/p/NStg18s8UU3OcuTEY3beHqXwrg1e6aEJkZPK3I4mBAf6FwXPOgMBNgW9S
0XbZtHByXCOiQl2HqMQ42HBCVmRsTGBjhXcoi9ZjzcQKyYakNERYug+GT6fU6kbF2EoGotkcPchH
mbTk4mAI0+t0bcy4wImKITsXJ2pZSiXCDxjfAxLtJaLoNqX52HrTxOobR6AxkHBoQvxPLHgJrEZI
TutAlUOobU0R1pC5M27sQdBIeEx7shl2ozjwl3AGSjOP3tvhnv3rRLqCUSxFQV+Ijnw3Y/aiPZkH
BXl3fDLAoCXHA4HNao0mMQcCiRwyzii+KkAFOMqXeWLjsC0d6Me9VWiOGZTBMnAzUbkg4jXvTfJa
dDdTBN0P6Pd6dvkfNbfwnMf7ZX9S/m7/APuS6TD0AdMMBmtI6GVelk3Tgh2oaQ9EaJwFWJ0tiUdE
SQF5F/lBLU4M2fFQhYjotwiAwpXqN6ZiQO0oa5VOAFSUYSjPXkNNVKdmOgjwxmC5RhMW7UZesry7
rPtHQFGdi3G5PAwYmToSNq3bh7gLFGd+5KMDhbhIrQTI28dD0WgRjGIyomlcjHcP7kdMzMxxERgp
XbNoyjH3ixPYE9jlTF9oJIUfKuTgfb1NEdye9zWsxqbQefrUr07spXDjCIbuUblqxGUZn7syJke5
Q0ct5Fn2tIBkpGFydmycNYD9yuRlcjMXMZzDyQjKGuQ9smqEox1SGBkXZOYgns6CcScEZydyc0zl
tiOnEYrevLuGm9eZbTHEdLCiaVQnj0DYVqGCAT4JupRCRNDgExxVC23JCgMQuEMBkqyL7Fgm0hBo
rjIA3lNEAnaFSCGqIjvQMZFk0w4TwgATic/kZlMgAHE0U7s70NMcWIJ9SkJ2pxFyGrSA+o1yClDR
pDknUKxqotcJkcCCwAXFcJJ8RNVp0BszmgIRAi2SiRRW4Agzi+sZuSSrtu4QBMMH2r8dG4NAYCI2
syC5iYwMirMMxFMyBMTJ8gpT8szjgYYr/t7c4yxOpxGK0G6Cau24JrVgm5MfDGLoQmTbiRxzAZtw
UvxEtYZhEEs+0qXMcuNN63IEzxJBovMZpj7yI27U5DwPtDBaYzJGEQasjAF8PoUpQaJILxNPUha5
u0YmI+9gibF7XHEagxCGqUIjbVPdJlBsRSqJ4oghgjI57UC7k7FARkfLnIRnHIvRCMjU4J5kzuDC
EftVuGgR0vKIGNdqEjJoXGjcG5CQlqjKoKdqps9qrguKcR2kJpXovsBdNATmfzYlfC5SW4zIiF+6
s+uRXxuamRnGLRC0Sed0YAHUe9Ecna8q37zNTtQldmCSeIkqNu1IaYipAYLUMOgWuTDygXnd9mGX
eVrgdcp+K4akno03IvvQuSkZ8vIGEhnHYUGuhzgM1K8aRkaBRuWyzyeZOKaQYwHi7EbUZCYBZlT1
FVHeqVisa7OhguIsFQ0Xe6FwH4gwOaGrxR4ZJj1S5GoZIwiQCcAMUZSLkoRgHmUJTGu9i5VQx2Ia
YEg7FrvFjlFa58NkYn8i02oAbTn0YKV5xOcfZBQhfiPLNHGSBBBfYnhwXo1hJW7XHC7apGUT4kbv
9UnKRPgjMumaivRtRexAsABioXZCkw5GxOKut3RWoKlakGq8ZLTKklUrzImmfagRgm25LXA6JYqI
mXnFAyDSTPXJaZAkbQhRjmCtYkRTAJouX3KEpxaUKEblF/aquxUUo7QiRPRKBYvsULEJgycPJ6qM
fFTE1Vm7bAjK4CJAbk3SxVKNsVVQstqYUKZ0DI0Cco0dPtFU6lI0dyp/ijwzo2aN/luIyFJHYpzn
xeQA/aTRGNmWmYjwlSuXjqmZcRKwwUg7FsU06ualDSBICi0xAdnRADNmt5WkBpjBACIiRidvT5dv
E4lCI7yjatBzmQhCIpqYxQjM8RFQp86IicwwiTk6MZRGFJbEYylQE129EbEDSVZtuXnSDwt/SVUV
TmilaMeCJ4StQxCN288bYYufaQtNptxDAlERmJxJoy18wae6j5dN6MbeGclSssynJZP4Y+8pTEXJ
pInFC5y4M5zPC+AR1w1XThLJlABtVdTbUTcIBcaX2uvL846fddOp2yHEgVOxIsLgII3hGByPSyjG
GOMiq5KprsVTogfWVwiu1VohAS06qaihHVpNH3oWLRMjMepQumInKQxKt83ZGkxk0o5FBgTM4ACi
uTvSDzkZMDgjy0rIlGVNzK5Ll+KBxt/kTDhIxBxRndL5okxZswoiUibRoJbAjPlqyZjLcgweUsyt
HMASjEPq2L8PzEWtEtGbepERma1AOK12iZWjiMivNcQAxBxdabPCPezVZllVaImmQTzJMpZLTjMp
h0C0DSGPb0MEb0hjgjG7HX+bitEbJiDgclqIYCgHU8yJlbuYiUS1V8cG9aGMwzp7ZrnE49TiAl2h
0bluRtyNeHaibsfNtDEhsFpB0SOAlR04Ljd1iZQ0zynGhCMonzrQxiSX7UYya3MeySnBcbelrkBI
bwn5WflE+yaxRlcgRXx23Y70RcIm2IAaXqXAWkcDHhLrgIvxGMZ4okGVi77sgdBX/cReEvaiHj3r
XZ+Hc96Jb1rLmLOWZCa58GYxjJCUS4OBC4gD2rXalK3LENg6e6PNtjEjFaSdEjUCScVG7qfEgCdu
a1cvceD1jJeXzFswkcyHCMoFhLHQzFD8PfuSgDWD17kfPszuW/Znn3qMb9sw1eEyzQlYuStyeoeh
CrbF20TQvUdqaUtE/dkniXG0daUJ+EhG2PDh3FEwLGOGnNGFwPLb6ASHhNJDaFGcZaoHB+q0vBKk
gtE625VidyGk0OA3oWLxZ/CSnxbAhP4ZjA7VxxrtHRxDgvDRIdqlAYCVOwoEZ4oxGJTLemCedGTR
FPk79WvzK/o+a/lOY/2yv6l/N3/9yXT2o7/QCWZT5JurqknGCfoZYKQNsmI9ohPIOsKJtI9SaIA7
E6r6WpA7U85gbkBOMw+FERaibcWcSnE17FIDRCGU5CpQhc0xL1uOXKiZzIlH2zKqciRuHCUg5PeV
LyYiBiKSmCX9SIhCFuHvmjqsYxuRrKbkykjblbLg7FGV3TahmZEOgIiU4DG5GNH7UPJtzmTQPSqj
CzGE5zD6Y8RA3qPnyNu0fGQ0e4IzPNC3bjhCU39aueRPz5NxmIDR7HRNvk7lyZ8UjmVE8rajG4T4
DBwO0lDzpR0A1hDhcIzvSNmLNphIoytXpxEvFVye9aRAS2mQclaoW4g7W9ABK0LspUAOAWtm1F2C
kUTvTjo8qVRktUaFaZY9BKOoUTw9SadJISdADABGcsHTDDpxTJ80YZjBcYc4LUI9yoGCK2JhmtU1
QUVIlDV6gvhx070DMutcx+iPkDok3IgDHiCjHVKeotExi4JG9THJWdZgwaeMiS1AFGMrMrVoivl8
Jc7yoTv86ZSidQtyrHvVy9zMI3rtw1IDAUyARlCHGzapVIGyqu3Lha1IPMAcRO5WLPLxlCQfilj3
oTGePRw7KKMdmKMrUjbORBRE5mZBMQSXwXMWb0zIWjwvtcq5OWEYkonEzl9ZUcwwYbE5NM1KM7gI
qKZ9iA5O3LVecQg74ZlCFy5K2J/ezenYFGU5G5GHhiRmdqN6MRARqZDIBDTIA4EHhKfWB3hGzZBn
Enim1KITg8D2YoCb2yfGIh4+patcPUQVqtSBAxMcfUUBMahlILgkdWx0LYGiYDEHPvRN5gBtUOW5
d5gFyBiSo8zc0kxP3T4A7UZkiEInHeoWhMTnPIZDaUL1yfn3oF4W44PvQJeMi7RjWSkbtyUdRcAi
rZqcA8ojVAPiwRMjpMcijZtC3psU1Td67lx8xCA/MiuLnLh2sAE853JnfMr7oHfJyuC3AdgCxA+h
cdyIbaQvL5cG/c3YL4kvw9k5YUQnMedc96WHqTAMBkFG7YnpnH2cihO5ARjBjMk4gZIXLUhbsSbU
9GZRt2DKPLtxzwlc7NyFuMRGLVCePFbJwVC0thRkSwWqyXbELWRomD4ghY5iXFB2kcwS68uzMTOZ
GSe6HjLLcoczysfKlCQMxkQvOgdBGaMZVA9oJxUFbDtCq5G1OCsekwODuFc2EutUsqp24diExgtM
pB9ia3U5lGEKz27ESS5OJXDhmUS1dqpmWCrkmgTXJC5feNsVriUIW4tEYBfGuxjuJATedAntU4ec
KhgIF5ITuzJtzBJgallImBPm+ANULTLXKIFG2oi7IWz7IOK0zpdj4JjavLY+dbI0zxBihZly4tge
K6HZNpFcaYqFjl7WsTjqlsCleMWiCxCfat3RWoQDrVImUSpaZtN6RC0yLlOcdqAzCcJ3rsTgqqac
XR0A1yNU8ogKIiSZSppCdnH0hEZLt6JCyS8xktcxKMBiSjbkHjbprUrwJ863EmPcvs6GGaquEsmk
UyxXEqFBjiq1VOjTmVp96ihKyQ5HFEoWJSeVXPap8x7N/EHEgYFG1pJvSB07Njp3cmp7VipgYswV
MVQIlnJQasiuKssyqBG2YnUcD0aI1kcFWsjiUYw8WZ2IQ8vVrLal5ptxjcPtZo8xdOq2AHClPk46
SRxQykylZt2/LkeEydyOxVWCndjNpyYiJwWkS4pFyBgqlC3CTTkW7ljqYIXuYOm0K6c5IW+XjwxD
BRMoEacSELh8Z2qh7lqm4hkNqAiGCrjsUYXzQnwhW4Q+GY1BC/DcvEmETxSOat2SBGBIcnGuxRt2
4ARA2Yow5eRjCcRLSMigebuSMo+EHBC3bmYyPhJUrV3xRw3jo8234ZETj9qt34+G4H6jkstFkajg
ShKdZtV+iqFp2DO2ZUIuWfAZKAjhHAnNDXESnMOSVC1bhrszBIjnEjFQ8yGi1EuIDEneo+T7PqXm
Scn2i+JKErjRPsgIh9IFS2aAkNLeGQxTXRrtYeZHEdoQjEievADJaYU2xyJRDGGsYHA9iETE68u1
NE6QMloMI8Q4pHFRF74kP3c8xuKJPHEhwFJh5cnNFpIqU906dwTapA7U44pbSqY7VXonMmrU7UZy
qZF06jEZlRB4YjEr8RZOozzdaZkcOQ6zEONiJtDy7nvBCN6Iu2zhIez2oG3IPnE4jqnVbAJ9oCql
Llr2r8yQxTc3blDLWBR09uWraMx1tMogHHUBV0ZWJebAmsCMOxab3wrmDEEP2JwXBwPUcx0S96NC
j+Hn5kXcAlpDsUY34sMDqJftWmUok7MVqsyIBxiS8UCYs3tQevctNyOoChkHThtWUo0IRly9w3bQ
HgerrTzcPKlFgSntzEgdnQRKArm1UZcteL5QkHC/7q1ID3wKOnhKuYND1GnESG9a+XkbZbwvR0Rz
Nt4j245poTEnxjmuKD7A9AibZJHs8RBCEeYta3oJvTvXlcxEW7gqC4+grVyt87ZQMnBQjzFlwfbj
h3rTGTS9006uFEA1MgpRthmAEm2+hFoVEjQIRAale3qmxI/EhW2fsTSxjSQUdUNUR7WYQu2Dqiam
O5CMuCYxiVh0axQwOoK1zccJARl2owuHDBOFVMEJHD5M3X39L/LH9A3oua/lOY/2yv6l/N3/APcl
1BLZ6BjkmC3LBYUVA6D4IRCqmHRghE2tcjg6OoASngBtPpqlu1POcR3oCQkXwpitNq2Yj3pCiIgI
wt5SIZGN6QMpYzJNOwISuSMpD2jJeZckZHKUnI7kIWA/58hwhG3YtRmPfYgfSiL+nzJe2JFh3J+Z
5kymK1lRCUpebPCINfUhahYOo4BgEbbRtAB9ZcgIsWsjOLAlXI8w129gCTq09rI3bl8yl7o+oBRj
KvKwPDaJAJ7ggeW5UWx7zAltyh+HlchL25yDBRuyv+bOPszHCjd5oC5OWQDRTwt6doiWBQjCIERg
GVA3pHu3BHtKFuA+HA+I5oiOG5SR6QQW3oW3lKR8UjgvMt1zotMulnW/amBogWelQvMAEIijJo+t
OUwWKfoJBYlancvVP0P0VwQDYJwAFxSTRAfaVWTBQtkagSzoN6QykWiKklaBei+OxSuWrJuWYS0m
b45OAoeXbucvZkPHp1SkckI3r4jAl9DaZSG8qV3mS2pmt2ydPeojSbmjwCZcRfYnt24x7AB08PqW
vM4spROBC181ePDLhhbDyZTt2H8o0iDi4HQCCWBqNoUfLyai1Ta3KVRE1ZXIXqaCZSIqTq2KF3lY
tGQ4izEneuYOekj10VqdwPGJcjsQtcvbidYJD+y21NfuGNuvmESfWo3C8xB9ETgHWqEAJHMBNnj6
lolLSp2JU1ggo2pBpRWNBmvLiwArxZuvLvaDvZsVIwLgYZhB09uRjIZhC1dOm6cJZS7UfMmIyHsn
NGIth/ZkcULnMSlb5fOWbboqQtgmcqeZPxIy5q8SSXYlfhf6fb1aQ5JozqV7nLp8yVJQiala+Wt+
VZ98l5GqMxE+YOEyNd6ADudq86FpzIs5wriVdEA0ZtKPfijZJpeDd4qqdBvNrlhCO2RRlO9KIOEY
lgEL16/PiwiJFP5lymJMitMXlvJp615l275dyPh01Q5XmAGkWhcFH7eoNdZy8FsVlI9iF/naRjW1
y4wjvltKBhQxwbYsHtmu8J4l9ydtJ2hMJlthQrTNlodm2raF5nLy8uezIoWOdl5d2NHOBRjakJA4
kYMvIAeJqVdlL7khgDtXAXgcAVslmOh48J2hcdRtCFVj00PR5cSNKJlUtRGMJdskchtT+zmdqZmA
yREPEUBGFcHP1oWocdw4negCBK6cXyXxJD9F0eV5AM1JXBj3LXzE5EmtalPWQ3rQeXGse07o85GO
uXsg1RhqEdQLyaoA2ISjAXZTGqU5FmRPnShI5CojLtURyt4c1blgMwhyvP2hanKkLmIdT5e9AAwB
NqYFDsVvlp8qTfkdOoGnavM5iAnOQzyQhbhqF4lojapStQI01lEr6wnyVE63Zp4NAnMBUnVablRt
QlGqLpxiFvRExUI7HWqVERZkHOIRuXRxxHDsQbNOMJdMbkfFErTbOmIwDYoDmKC6Ae9SjboZRIBO
9Ts3g0gX7Qc+pRapFgM1wOQM0xqdiLBt6dwYlB8QahbVuK+pDYKrRbDyjUDeoReVJcT4AKFvlomU
5lqK2CGIiAR2BWbcSDchqMmyBaiYYp5HDFNA6bcc9qAqXVA4UZAAxkvMEWdb+lhicFrnWZ+haR4j
gjbuy1XJ0RvQJJBeMcgVO5cmRYizEKcjcM7BAcn2WRnG4JzI4YjMoyliSSe9blimHTKZJFxmickB
MebexfIJjIUwiMFit60W6yyAWu8Xls2dB05YnIKcpHXONa4KfNxJMXfQNqe/bOgDhZTv37YAAcQ/
KtXlxFy3xQIDYLVbgLjUIOIU7vMwMpSYBshsUpz4YPwjYFGcZsYSBLK5GEyLloABsDtUp3J6gfCF
G+KStY9hRtvxWS7bselhxHYtV46Y+6miGHQ8iwClInhh7KuTuEmQfSclKZGmU6RCiRHBaJiMoezt
QeOu5gIjJQgDpPujFF6lHU9MkL1wsPZijO7FjLAbkTPEUZ0QTwI3YBiCSCME18aNs2ovw8OO2KRL
VULoEpZyGcd613QZwlQTzHah5cxJw7PkpW5NpFYvgjefVb9zPuRmMZl27VplSW1G3d4oij5havMB
icGVeoLMTSPi7ehl50hTJcVzREGqEI3ARHFSlDA+gZeZAm1c96FFpvx861HCURVkPKm5Psmh6phM
aonEFPyxNq5tBLHtXxIxvW9oNQgDLy55wlQpxUbeq1wVGEhiFq5W5K5bxNs19SFvmYSs3c9QotUS
4OBHUa5ASfaFqsXDZkMAAE8td2Ix91Pej5Qykc0TAiQNXic0Ty82IrjVNzUTIe+BV0xAntBpIL/s
7pi1YxJZkI8zbcYGUjntTC5ES9104TSDg4go6BokfaiiYmN6PuVcIxvg2pDaCx7FqgdQ2jpqiYvb
ka6o0qv+vbH6yYk25jGMqJpASidtQmlaiD7wxRjanEF3jcqJdiFq7GB2TOEkRejG1djmC3qR/CX4
z/MkXQ/E2zF8TEEgdq4Jg7ul1dkc5H6/Q64msOJkZyx6onEsYlwo83b8MqXBsKFuVbdzPJitXLzp
7pQhzFvRMe0M+9PGcjbantBapzFq3kcz3LzJXDcjIOw2K9ypDGLyg6aYqc02kmT4plqI7Uw+RU+Y
qKvyGvoOa/lOY/2yv6l/N3/9yXUMfQaNoWHRgti0GfEtMJBynJeSe5JYpoDoEzhHErTE8Funf6Gq
1XCwQjPVF8CQtNngDHjmCylG1GGkYXCGClG8QZyxm5YdgQldnKUhmTRCVyWoiglJyAhDl2nI54RC
8q1Zjc/OBLBSHNkRkfCIzYDtT8zzpnWkInBC5K1O9Zj4TIhqL4HKiIyBFfoUY2p6bksQABGPrUTz
F8U8Tzx9SjK8JW7Xs6Ymu8kqHkRlKQNJSpELTYtW75kHMtnehK/a+HlbtFlKU5zs2zhAScqXlXjE
T8Thz60Y6RMnGUhVahai6YUA9O925GPaU1oG7L1BERItx2RxWqcjI7y6cGqcqR6mqMTp2retL02L
jjpltC4LkZDYVgO4p9JXhKAES6EbjFwpWYvod2T1Waw6MOiinKXspjElVBZM7HeqGiYIA3ADsTmb
hOAsE2KE4xGt6nN/QOSw3o3DMCEcZOoxkZREn0zMeEsrh5aERG2zCbiUuwK3KdryrOMowlxyVyPM
3pxtzPDad6b1bu35eZ5QaEWYd61+UHxbJxu9CADQ9Fu7ZjxF4zyHaVd1czE3yQY2o5PtKEhmiNqs
3Lo4IFpHZktc7kYxxckKcbOq5LTpJAo6NjlW0gPqkGxQ/ES+JLAAkuxq6EmBiAaZKUbUQHxYVRcE
diYgjt6Rdt+IeILTdDH1IOC/s3GQE5GUjjpoPpWkavW6YTMvzZrRcsmp4S/CjcDaTXS6cB9yAiWl
kI1P0LVN7YOMp1ke5fFrdNRcuVFPqUoxkbk66IwdBhG1AGsnqWWm3cMyH82/MvGPYvI5CLe/ekMd
6MrkhdubSKJrgpgB/cm0MMWCBnGUXwzWbq1zIDCJMZHtwVq7A1hIF151wtaIB1HevLjdY76BWLdm
4JRjqlLTWpoELduJnclgE/MXWk1BHJE2boux2Gi8vmbJNiVDKPE3qWgl4SwkQyEo8MokSirVw4zh
EntI6Dy/JgXb3tS9mHaUbt0+bzMvFcOW6OxOUWqEHq61QoTsofUtJmaZSCcyiBtdNaOq6fayCErz
i77R3oeVdr7slxUI+nsWmUKbSnsy1W84HIISEhXJ0LUQ5QuxBFuA9n7Vrm4ugnTvC8u8GIo6cJk8
OE7k1wMNqoeo5LAIwgWj9ar4U8z5cMRHMoQCNdQGxapDRbGMjmha5cFsAAtd4vdl/wAq8qzfIuGR
etGUo81zRN9uERk7nYhDlIjX70q1K1y5ptwWqV3WN9Qp3LkJXJSxOShy1jlTQMSXqV510aZEu2AV
y3OJEjLhbAr4lqIYNgtUB5Uh7USyFrmbouN4LgI1DtXk81DXAUhfAdu1HzLcSTWF5kY3bwtXIUMZ
Z9ihdNbUPBtLq7zUBwCPGMaInlwXPsog45hOKxWNFToMmcppDhzCAd4SqCuLApwmykngBIDFHUGY
4KpoAhbgOKSEY0AxO9AxGsHMIER0sFSqLhiFKO0J7lsTuQLClVAmDCBBI3ITgQYkZK3MRIiI6TLa
epTFC0DXGSiwyw2kokBty0O21aYl2q6eKadNyfJUUpnNG8IuZkgEqZlEAxzAV6Mw3lx4Ccy9VhRc
xplwayAUWL7AiBSOJCEY45lAu9M0ZGWlxQBCAwj9ag2H29J2rzbmOQT4nIKRuT4pZZhC9O4DGBcA
ZqfkPC3A0lkjy3OgRuguJZSCua5A6okCO0lCUSN4KdnOa3t1KYo1XCBvJzWshjmiAVwUhnIqmOZV
SyAuS0QPrKNu3w+ZFxPNG3Ze5clicmQh5eq7PxvgFGU4Cc5AEk5KF3lxw3KSgdylC1DRIhpF8kLk
cJYhTjcjp1YS2LieQOYR/DQkBPbihchaiTANdbxd6Eo0BFAp2jhIEKdiVDN4HtCMJmoNN4TQ4IZl
OzyzJT9BJoylOUgA1FeeXmXJU07lGUvDWUo5pvLEJQHDONCGQD8IwR0Fjg5UhYgbl0DimVcmQTMl
eVEEv4ijduSeZGBwWucWtxwcIQt1O5G5dkI5jejCyHbEo2IDTpFXR5W3acyLTmar4Uh5jPpkOEo2
7kTbunI4HsKldsgGMqzt7d4RvcsajxQTRifPwESuKgyegQnCURIVbajC5E27g8WwqtJjwyRjHguj
LKSNrmAIyGGTogdErj4CilOWMi6dCAzKjagK/ajKRJfKKLggNUHb6Qzg9qeIMSwfav8AuIm7aGMx
j2r4ZqMYmh63HAA5SFCtXK3jKOduSFvnLJty94eEoStTEgcGPV03Y6thzWvkrhIH7qRohb5uBsz2
nwlaokSG0dT7EddsOcCMkJ8nMcOTmqEOchXaMQm1RO2BNQhK0TbuCrg0KEL2m5aAJ1gF6ZLQWMvd
mK/StVk+XIVFKOmLyAcktRNcjo35J4SEhu6NNwagn5a5K3PNzQr49sXInCUTh2rxiEs4yoU4w6Wn
GvvChT8tcM45wlX1IQ5q3K1Pb7JQlAiUTmE0gCNhRjKAGwgBwomFyMDDwyAYntRhcMAAMTGkk9z/
ALe6O7vC+HeF201c5OjanCUJCrtRtqu3LUhIxiSGO5eZbOonEZuuKJHoJ3gWbhbrnl7pa3dp2SyR
sE1iXty2qh0zjQg7VpuRfehq8IyUZTiCY4BaY0iMlG7E1diNy0ESjcfVE4xIOSjOAUZbcUAMvnJ/
lvNfynMf7ZX9S/m7/wDuS6vb14vgF2LBPgAtLUT+UCduK4bQG9kwDBcRbtT3Z6jsCaIQGG1eTZp7
0utxyAfAIxNxiMiFI2RERGcs0dFuMR7xohGQIuPWeqnco67k5THtE5oTvXBOQw1F/oQgI6icAQw+
leWLQfIguPoUvNJiTSIhRSuSuXJWo72CFqVmd2MaDZRGNz4UsI24xD95QmeZ8u3i2FF5dqyDCJ+J
f8RPrTws3Jz7DitUCBMHhskEsqeXbBGQqo27kdBHiuai5XFcmZDAkuom7OV3T4RLD1LSbcW2Mmtx
EewfItV2YiN6MbETM+8aBNr0DZGieZMjvL9GPUPUI1FtnSHAnH3Sm5yxO1LOVtPD+pXbB2Tivhf1
yPZIfkXwv6xYIydwuH+p8tL/ABMn/GcvJv8A7gVL/Lnf5kVSdg//AKkVTyD/AI4rwWj2Tin8m2f8
QT/h4lt4VeVHrCNy5yphbgHlKjMrm006gBLx2LVE0KIgKjElPIp4xaO0pitLOpW8wX6j3JCI3lkb
xuRMBsLuhGdi4BKsSA7+pTPLjypiQFu1KLkjeoz5mUJxH7mJ0iu1SN8ni/dxkdIUJyJuG34BLALU
YRMtremxwU4OQSCxC1Wbb3YGsjTcaqdnmJASg2BfEPRDREyegKlK0NMJyqHoHT3b8pF3MXJAGwOh
Ply0hIEk1JdR02gNYpIn7FC7erGogfrUJ4xfiG4oTtl4zDpwSHRhLEYFMekuGRELjR92VQmE46TT
UKkL4ky1Hop3pTEYgtGJxIR5e3AGMsDKp7gtMbcidsuEDtdEz5mMSfYh9pVt9MSQ854yJ7cURyls
yEA8pSwQu85deNWtRNVO9LhgAw1VPcjGzCQsyqBHGQUJXiTEfu44BaIx0xOGxkXcZ0RAlKOxfHLw
2nFPkE5Har1mJeRjqiN8arS1c1yvL112QRc2HKP0KGq1KzdJ0nTU9pUvJJF1nhI7Vcs84NF0jTB6
B+gCKJLEHJab1qMgdoRMRIHKqjrOmFuIGo7gjGyTZ5UUNz2p/o7AhbtR0jM5k7Semgboc4pmB7U0
osTsRNmTnYVpnEx+pMmmNcMgckbZlpjNpA59ihCpLVli7oCAIuYuKMrkbsnuBtL4spa8GqCmhF2F
AAjK5HTGUmESKsVGbGVuWWxPHDoY1C1WzpOzJNdDbxgqFEyOCYUjsQDPsQM6nIbEN3RqFGWkO3qC
EbY13pYlA81N54i2KlHmfI1eaD4qMV58oROqpi7sdroDUIRmHB3qMpE3rE/ad2UL3L3eDOOS8t64
sjqbcyYlt6B8UskdTDcja1MZbFXx7VK2AJWbw0kHAHap8teAJHhkF5vMWtcCXEsgjyU7sbc41iTQ
EHepcvZnG4LgaRFQyF62XlAGjKXky4xIhkxDSzBTjBUNOnBeXsrErTEvLBNI1QbxBFlrkGlgyIGA
UrssBgVpjEyGZyRPqCdDcqZ9Bt3aRkHB3o27Z1TNCV5gxJqFeMwHgHgd/UIjxT3ZISuAkE1WkCgU
pA1+lA+0RitoOKE4hiC61xo2Sc+LMIhacHQsXhqrU71KVsiMJVBVvnGe5dGoS3IT5e5oiTpntY7F
tBr3qPmBjFUDBEmNQjKQ4iUzs+C0SPEc0bbaog1TouvMmOEeEJz3BTuRLTbgC826DCEHd81KNi3q
hEsXVkxDSI4xnqzUjItMEaNrunnIntLqvRh0V6YxZyc0JSxK0ipyAWq7hlFMAwCaNSvLuy1zagyo
jftz0wAGqPYoW9BHLxo+agTbELYzzkF5UYCFGBGKPL3YeZoJAINUJSjpjDCKw4ShSgGCmBFpCgdA
TALIGZAhbDq9zGFshgNqMY+EEt0C9Cgm04neMVynMCouBpfX1NRwU7goPZCmJy1SuYMahG5OojhH
MoX+X+HOGzAgrQwD0JCAkanAKTHy7Y8IOe9G2DrunxEbUbgDE7Ay1yjx4knFapgaRktEAw3UWsxA
k1UADpgPqRjCgOPapzuXZGMi5GCaxBqNqKGuRByO9TF+AmIUA2lPDjh7hNQNxXmQ+Hezic9xWkw8
u4NtFpvEyhhqihKEzIHIrWWjKOEsCvKusDlIYFOaSFYyRt3eG8MJDNNd44ZSzC1RLhCxA0j4ujSF
50hQYKPkxLPWQUI3A50h3UgPCDT0rGoOSMrPwrvvAkOm5iHmWyWjMHDtWqB3EZjrabkRIbw618uf
InuwQF0C/bA8URVCJJtzzjIM3V03YCQ3rzORuaT7ksELfO2/KJprGCe3ITG49Ui5ASBxcITsXDbl
HwhqJrgldtxDkgAqIk9qR96gRLBz7UTX6EZ2p+fEexLFaeZgbJO3BGdo6ZZSidu5aoPKIGMDX1LR
cGs9oBXiAOcSa9PFFiMJDFPy90yj7UZfYtHMWpCXvDA709uQO569LXIiQ3rVyszals9lNzNvVHDz
IJ7cnOcTj0YP2ppxD5SAYqNyze0ziwAZgW2sp+fCMhHAaX1DtCJJPLykDGUT4ahlKyZsRWN2GDb0
8JxvRXxeXfeFxWjHsVLko9oVJuFSSxdPkoSiGjIVG/0DD/8AIsjHMgIThQgtcihIsQRiKpwXGzoy
KJmRFRvQ8dktL9FNvopOM29LT0b+lp8zb+rzX8pzH+2V/Uv5u/8A7kuqJbOtRefMNsVFqlRMMFgm
WNExc9FIltq4pVzRjaDA59HHMRVJazsjVAxs6nwD1QHkm1bOJHiZfDuztW9pNT3LzPOlKW2VUbkz
GczjKTI6IC4YhzpAoyMbdmUpCjDBG5anKMwaxAoBvXHOQbOrfQnvy0ygwiAKlu1G/emJR9kGWAVy
NiEbss5s7DYHUjK1Kd7NgpOTZiDw2gDVaYabYGBIYqQnCNycv3hK03iZk5AllpE5C37gNEIQiAAt
QhEHa3yV0ddwGQ9kVKMeXj5cfeOK1XJmR3nrMg1egdQlVThYLDq5r2vpVDL1lUlP1lfeTH+Iqt6f
65/Kvv5/ry/Kv/yJ/rH8qu8tK/KVoxrEl0IksCVWS4ZoyhxxGLdBtnuUhmUxwGJTAHSMF2rUcTgp
SuXBGUqVUYQ+LcnhGJVy1Hl5G9EUEahRMZz84yrb08ICjevTjdkzGJpGKPnRFyciTmwfYhKIMpCg
Mi7didg+35C5oETduimIFT9CnZ5SzrvQESRPPWHYNuUbk4m3Y0tKETp4juC03oTgxbievrQAAGnB
0I2Ym5EuZxjuDBSNq1OyGBAkMdqt2ublK9qwicCQomxaEdZOo7NytQiHEDKn6SEWY70bRJMrR+gq
vci1XTGVStJDnMp5gRG9PGMpHsojcPFGRYjMdyjK3CV0yB1WwKPkmtcvHl7Uq1xT8zeMznGPCF5k
LcZTG2v0lSm5nczERg+S0iItCWJIqAh+IJumHgJwCe2BoOY+1eVy4EruYjl3oT5+fDJpaXqNy/7W
D2WAPvISkCAaOFSXA3CgNTwRGWRZDQXdCEogsgfCWqyBAd1dtYgFwdxqr3L81F5SiDblmGxQmHuS
iXAlgFuT+G9GsLgxBXlc3DzIwLGWaBtXQ71gSxCc1HRR7l2RaFqNSSo8xzxeLvGwPDHt2oGNBkyx
67quK0XIuDtTx4oPjmFHmAdc5FtAOCa3Iw/NKh5twSvWxgRxOjeuBoyd+3IJ7YMbrv5gORyXmi6b
kBjAq3MBjIOe1QnciDOJ4VIiOqlApQjEwmK6UYypIYjprhsRlaOk4tkuI4LTEOU5rLanGKAAeTrz
blBkEbsxwDAbSmAAAwAVznZB7mFp9qPMc0dc5Fy+TrRdiNIIjCOTIRhARkBg2Kny/PWMATGYV2xb
vSmJB4RkPD2IxGIkWQctLNAxLhAANtRz3laiHWoUKZ3BRjJi4UrbP5Jx/NK42MDtQu3CZWTiYq1y
onGEbkeAk/WuCQkZCjF158SDPVrMW71KFuWmcC0hvVaFao4bFu2KmaMoYhaiPiYB8lq9apitJHeq
p1pHikhZtf4ijGUnRiDxYqtCqdImKEUdRu3rojAl96hZ5efBLxDKiNq8NUZCqu8uDSEqHd0cCIMa
HMowdp4qnejExfVQOtADtiiMIjNYg9iY93QBkKqMCWiMUZ2roMSXMArfIQj42jHuUOXEtYthgjyY
iYRtSeb5ldioahGtVpJoqChw7kCcQuGkhmE5r0apfdjDeidgopX4RJbAblKUtQtAHU+CMRbMogsS
ELsQwlVeZystBfijkgb9x9OAyTOSq4bUwNVVYo1ZkA1BgnkVpLELTb8O3JOazzKclapHRA4HMqU7
eNwGJkcXQuQJu3JUgBRS86092VAMmK869ASkTSOQQ5m2BE28RkxojK3B5AbVK6fGSaKUpRc7EH8X
uBcPCBgAjMhhtKlCIIvDCWSjYJJMvEdyDDTEfT0YqMiWnA8PfkvJIBlZlnsy6kpR8MfpULTtdnh2
KULh+FbpJsyjzPKlmDSgaghaZgRGwKRjWWTozuPN/ayHYtMRU5okB2LkpmAObKqLBt6NeLII6zWW
O5cIYtVNtwQsvxYstADtiUxCMdTPinjcMohHWOKOEswh541xyux8Ue1G3LihlMD615nLFtxwKFu6
Tbb2ciVoABimrOz9MU4O+MhiF5V7xZHKSM7U9BORwRlcrqL6gnQiNqjCIdhgEI+AZZIwlcJAGPyB
pAEHEFCVki1MbMD2sow5iIuwH7yLuhK3MF8nr1/iQr70aFauUuGccoTlgtHN2jbkMSDQ9iBhcjJ6
gA16hjOIkDtDrzOTueVIYR9koQ52DA+3EFh2p7UxMbusddsAnCQxC8zlboYDwyJL9y0c7b07JRGH
amBExswIWrlrsg2ECfoR/EWmtg6dROC0nSJ7qSBWuyRcjhpIqBuWiUjEg0jMUPYuIGIwMjg6eJBG
6vQ0gCN6E7Y8ue7A9qYNetfSNyecTbLs0gniXBwI6GNQVqiPLn70aLK9bGTnUEYzPlzGUs04qDn0
UXHAasRIULoXLM+MYiWCJv2iB/1bWHeF8HmBMbDQrjgJbwuK2yrBYFcMT2oglmDhCD8IwHoI3I5U
kNoUOesB7VystjnJCXLXNMsTB1pvh9iaFrUd6IjGNsbRijK9MzOTrmOXoPhkj1srlqQrCZDrTHD+
yHNfynMf7ZX9S/m7/wDuS6pic+rQIXbw0wFWKEIBohbSqlU6KqgTM5Wq9IA5RJXl2hqHuw3I242Z
ymMgpEiVkZRaqaV06XQ1HUTtRtixA6sJSDmqEr9qMtRd8MURZ5eUYM0XoC2aBhCMRLDMhBpGQOWA
WuYIJqTqoiYwMYvWQ2BfD8wk4gDE9qlqmbdXEGqU0dEY5ExYqWuEb0pHxko+dUnIEsiISlCJxiCt
NuLbScVqMQ+1vROcApWOSYGNDcNfUiZcxOu9k4vTftK+9MxslVNesiR2gsuOwQNxXEZWzvFFwX4F
97Lxx9YVCD1OOcY9pVbusjKNURZtEb5ImV0h/ZBYIn6T0t1mNZHLZ0R6h6QJAMuIRKpGPcV4QqRC
pAKkQqDpxWKx6JMWcMVEdDglNLsKJiGBQTtimTmg2J4hbkbZaMM5EOe5CAtintDFHyosTicz8jqQ
O1SibmswDzEBqZaeTstbAczI1HdQKNznZmxAFzbdtWwaRgjK/M3iTqMDSL9ia3bjHaQK+tXLmsGd
sxeOLGRYIznJ4xPBFmZC7dn4q6dyuRuRAvFjCctmaM5yAiAhzFqegWCSXxLlGMgJA5FHyQIi21Bg
o+cACKggMWQsxvm1C6K01ORhinhzESN8UY+ZbcbYkICNy0CcaFE+fAZBorXf506tgA+gIiwbtz84
lvqRuXWBHs4koyFuJLsCcVKdycbcYhg5qvL5Sybxzuz8PqQlzcwCKeXbFAhEWgGzlGvrVGidoK0z
uxEyH0mpPqVyMR+F5YnOkimjESk7ymzkoGTkLgPbFStNwyiZR7QmlFojLetRiRHIpmTQpJASDy97
BNKIO84pjEGO5WuYApcDS7RgrN/ARk0m2GiIj/6dMSnFQpRuDxYkYp9cpcuXaYxB3ry7XMG+IljC
YwXk8vYPmyLG4BqERtQmImV2VZTl4qrSaDagBUbetXpojH6VK2ajN0J6dTFhsqjLB8kLV0B2aEkI
kNEmu4rRM0yKcFwiLJAGQOCFjmvGHIbAgLQYAxORCu3rYeRPCNgUZA6ZyqgJ4nA7epwjhmcUNuaA
AclReXFI0iFWOqYzTnhtBCIDAUC0xVuA2h1OxKQEozcA5heZKQhOLaJbUbXMxAuAcMzgVOPMQGkY
FqJxAdyN2zeazIvK2R9S1EJhGm1Fk0u4p8QtQxOITuQdinqJAarYq7ESkbhGmMSGQjEkg1btUoXq
iXsqNxyLb4jIK1bF0kXKCUz7SEnDK5fOkSuy1Nn60LIlouw9lNLH61qjRVxVOitU9vEZKJ8uoFUw
jVBsAKhapUfALhDDMlcFTtWqJDDFkNdDtWKDScHokN1E1viEKM6tyPFddyHp2KM5DSZB2QvvqjzH
0EdJlsRzJxKcnHAJivC5KZmGxYMn6JSPcpmB4skbl6gZgHxVrmrYe1ZrLvBCM7p0RFSSr16A4Zy4
ewBkSSyDY7UAz7VQYoxjXeq1CdE7FsgPpTmgCN8QJgKBXLV0fCEXdsF5MSdOG5A+VGU7nFIkbVC/
CDQlLSYjImq024ExC13SwyCogMijbFCMCmFBmU8qlATDAozFAagJhU5ALVdoMopgGATQ4pbAo2rz
zJLMMFARPlm1swZfheUtSnatlpHMq1YuWtEY1kZBpIWxaAAphVXIyANsHDBGMYmNrEj3mUTEECVC
NiMjUihZGMDojsjimAJJWoCoyWm7KVWDBQt2xxnwgYujPmYB7eBIqoXLVz4IpKIVMFVGEgD2oxgN
MLo0kbwi3QQNlTuURbjpMiw3hThImMIhjI4vuWoHzbN2TSehBKlaiNIOJKEdTyOSk4oaFabQYDJM
S0RQtmtManN1WirVkXIiAvhwM5vRapVHu7FWNVIzpprBkbppJaIAl8ShaADjatVykhiiLYDthuWD
OmZGUPhzOYwPaEYmWmtB7JXllvNAoJD7UGB0bDUFf9xbMJkUGIK8/ly2qpgcCtMqSGWYUeXlJ4As
ZDFTsS+JaBYOtfLnUM45hG5MYKEYw1OautWgCTOtMQw+RMQ42FG5YkbMzWmDry+ag4FNcQajavhz
Etw6/HES7Q682zM2Zku8dqPmR/EQGBiKsmJNuecZhk4qNo6dMg4OIK8zlrkrVweEDBaectmUP+pE
J7UnbEZ9YxnESBoXC1WLhtyj4f71puQ823HEtUd68u5wmXszDOhc5WWkgM2RBTSlLV7Ik+lAc1b0
xFRMVC1crc1RkeOOKAAnC4MCADGQ3oSuA3In2ojA704LEYxND08cXbA5rVy9yURV4E0dCN+0JA+3
EqkwJChicQelrkQdhGK1cvd1RJrCX2Ly+YibcveahWqBEhu6SDUHEI8xyYeOM7YxHYtOohsiqkFV
AKrEFaRCLJ8OpETLQJGo7kDGtuQeMuqeR56L8pepq9wmjoTDy5S5W1ejgxWudwzOTlcNVQprcJSf
PJGUZjzbgaW4FEDMuTv6lPlzfN/NfynMf7ZX9S/m7/8AuS6oJDOnFXTCJPco27VsmU8HoFZnflHT
dOmWmumTOyBMdchmVWgGSYFli6ZYLBUDnYEDdOiOwrRykIkZyJRv85d0vUxElqhIRGZ1LRYhrkcT
FFoGI2ZpjEgoSdmQlIGRGAZAkmBGAZQmbgu3AwEGdgU4twg4715Xkxic5ugLhlItWtFp1y0e66EI
RAiE+kPtZN6e5KOJBAKJOJ61U4LKlw+sp4X5DvKB87WBlKqOq5oH5oXFfmR2p5SJ7SqLFOg3Vw6M
EHC+HEmlSmUR1CD6cAN3o1BZTvjTG3DGRLP2IDYOqTsTLRMVykvejtCYCq0kgkYpoj5Ebl6QhAZl
S0y13R4bTMZKFrlrMTO7lEEmA35Zpr97TOcnnc1ElvdAC0yiZmTayT4mQhagIRGUQye7MReoGbLy
+UBnPTqjIxLE7ArkJaIWpiOmQcaW8TK75w1+YRIgyJAYKNuxysTLUYyuyxFclCtcFpOQDkbUYiUp
ADUASTmrtqRl5Ug7VbB1XeVdmKvKi5W4MJQaXazqF2OMJAocxekdEgNERjJxkhOdiULfvHMJ7EJ3
LpwcMEzeVZNdikealK5KOQwUTGkHq1GG9SMZi5N+G3GpPYuGPkWzUSNCvOvTN6UspZHsWmBFqG6i
EYXI72xRu8xe8uIzkKK3L+lxe0X1XzwuVHmYTF28KnXg6a7a8ufvDBGFwxEwxBNBJFyA+bo+USTA
OZjB9incMHuNwHYtU+KWNd6OsgnfgmOntTxody0Sq2Ek0hXanhjmFcccdppAJyrN+IqYse2NFUOq
U6DO9ICOzEncAhzN6yOXsAuICk575LRy9uNuO4dDFMPD16im1OnRY0QlblXCQyZW7bagA5IzKnKJ
06ATXNkDgUJnExfvCY1C+HL/AAlarp0Q2BC5CR8yOEigDaaJp5mSNwS1k1dS1DXKWG5C3L72Ga8q
7iMD0mmFUGWHEncg5MtV2pyigYgACjBEs7ITfTIh5bgrdm1eFy6SAwTco8b8Q5ILKP4m7rjE0Bk6
twm0gDoOYXlxpHcgDV1qgHTux2LcnZk8aFF8N60yj6k7dyBcB8ihdlaGpWzP7vVVlcMQ0QtBaW0L
XaJEXemStzvcxKcYyAlrPhdCcZAxNQRgpcyAASBEE5soWrhELgFN6Yl9hWqNCmwKZ6onPJAS8WfQ
xDo3DWLUClOQYRwCaeGQCMo5YqMwXMn1HJfCqXURIEA4rTaciKeVD0HySxktczIQjUkqLDigAJKN
+BcWHkY7in6CNq4nMtgQlLEKqYYpkH6SBiaIOHOPrWrdgpRDC4JEyGe5X5Xm0mEg20kU6KhGJLMm
BcnBOnIVEXKYeAYlbAFr08D0U7MogCIxyRsW9PEGIFKlRldhrnMOXyeqt+cCbZ4YkVUbcY6bEC4f
ElNAAPkgAfChkiHeRDALzZ+I16HKaQBA2rTbFNqc8UjiSnK1kGNs01bVOehpYEnFfibTzkSdAyBK
keYtvL2Ww70LohEXJkmRG1efECNy2QYlapQaW16I3rsyHLsM1ARLBlqmQIDbmpzNyUwS5iMFqEdF
v6SuCIG/NSkcRXtRvCgiclatV8wYTG0BTtXLeqLmJkFIkjy5VEUQEXPYqcMShKPiBd05zFU+auTh
HVN9MlGM46jEcJw0rRzXCCfEhZtEmMiCZgYMXU9FyQjQg4FHS87h9oquaOggHAPvWqZEpSrqyRD1
CcZ4BcR0g4J8e1RgIiMgXJGaMokaQ9EHAfBMWA2ZoGZJlKrIG2GbJG5KJlcPsrHTqFQjN+I9FFRa
SHG9NHwjB8lpuEkjCqqSWwdABwECZMVXHanNd6j5ZYuhdjJp+1HIq3C1ZELw8RnVMwicCR8lrVeb
YmbchURYM6083E6TjMBgETbkJAeg+JDi94UKMuTvaon2J1WjnbUrZymBQp7UxLdn0sQ4OIRuWZSs
3dsTRaeZh5tsfvI4p7cwT7poeu87dciMk/LXXt46ZF/rXl85bMGLasnWvlbgMc4vRGMhKzIe3A8P
ayDiPM2xiX4kNJFmY9gkgFDXbjIEUuRKiLdyNyLeE+N9i0zBtSzEhR+1UqOiqE9OmYBDjeh5N03L
ecZYjsXl8xalCe1qEbVqtyEgdnQ04iQ3rzOWuG2c4+yhDmLVCaTjgvhzBOYevT5toi3dz2FEeVqG
RjULisyHcuKMo9oPSyfSW2pjQnB6JivwnMFm+6mfqRjszQnHApp0CecghHl7ep6AmgU+S/qcoXuW
I4LZLyj2KUuTJ0ZQlioxuwMIyIckZITmDfnsNAtFuItww0xDfK6+g29Lehp8yc1/Kcx/tlf1LSCf
+7v/AO5JeFhvTykAuKTlUi5Q4dMY0CEbo1DegYW4h9ytzZuIepSuCptmNwdgLH6ChqmK70DGoy6K
qi4qLiNM1psxe42J2oyuSR0yruUpTxd3KIEjpGT0WoRDv3pyOHYESQxyqs3yWsPpzJRJAlI5kJxE
A7W+TTAGogUCIOIPXpgq9V+nBYLBMychYJ2WCYRqgZBkRaAMm70QLZd3JQjcxHS2oR7U5IkDmPkI
HvSwR6s57AnTqocbCmgNEzsUpTLk4fIPLN2InscK4LFvzBa8UnYdytXbdu5b5QvqEfFI/kWr+oXp
ipNq0a6Q+Mt6lzVz41+VNUgGAGwJ4xEScSAtUyIx2lRtwmJzJIf2YkbSrUeXtRFqeoSaRGFKnJQu
87OWoAibTJ1bG3J4B5M2qVS2xdyqrx5rmPJsg6mjWW1TjYkZWQxhKQYntQvT4IyDA+82bLVKWqzM
ERIxpVkbcLeiVs8UjiXKnL3YlRgA+uYJ9am1ZWxGQH6KouUtC3L/ALSJFyeIkSzfQFCxd5WF6+Sx
AjpiI5KNwWYwMg5i2DprkxE7CjLk7RmBQEu0uzcv+4vNE+OECz7kJW4AkYONR7kH0hvC4wTx4zuw
U+XvREQaxnsZStclCUrpDaz4Qdql+Ou+fcd4W5E6YoW4gQhbFNOSgSHJDl8kYz4rZ25Lyb1sMKjs
XhLDdRfDhTZgg0Gd2dB5HTuCZnlvRjKFcl8KJO0HJTndiwiRpCMm125ZjJVUgWJIbtdSgaMSrvLv
S2RKP+L/ANunyOWj53Me6PDH9IoX+bl53MZe7HdEegp1MXCO5U6DI12Ba7YqXDDJaJtAszZl04FD
gFEHEj6+gdoRjsAWARhMPE4hSOkzsmtMYrXYt0fGWKlpAjKQ4XQ5bmOLU9ckbV3D2T0EBCJkxbFG
Uzqb6Vqw2AJ3qiJniOSFGipA2xKzLhL4oXLFuXnk+E4RVy/Z4ZXOAHdmp2pXIkZ6sQUJyL11E7VT
EoE4bEwpt6KVCbBOQmFAtWIC4RiEYj+9eRzT3LTsJ5xQnZkJEcUSFOBGkzDHaCEBENKWZTGr5KQs
ni9qIRE+YmdPityNAAo3LMhKEg4IVvSAfKBBk9Kow5ngJPDJaX1RyKeOO1MaFMT06jkpylscLTDC
RqEZg1OKpMAkVUoQHFDEpjRcBJIWwpjQ9AuRDmKaDiJwACNm/S5PiCvW7cdU5wIA7UbUw0oliOzo
ZP0AA0yKFyZ4livEFWSABdqqNsYCpTZdEZ2LhtTzIKA5nmDcEcAcEwmFSQ9aMoHhkfqQljIZ9OKO
UBiVsATYWo4leVKIMNinasR0awxIxUZczIm7KrjJQs85PQW4ZHCQCt2OWc27fEZ4OWVfChMFjiyL
ScHFS0moFEJXMyEIjBYrTGp2Barpp7qpQLRajqluUbdyPmESAIyQsNpaobJHluWjphGpJxKt2JQ0
RkRqJxfchaFsYMTmVcs2WlZ8Qid60zAjEVYIPVvUhYGMCCUJRrEUQFyAoM1wxAByQ1GgyVBjgtOB
wRBLg+ypXojRKJwOFdiJGdVUrhoNqc1O3oc4IzAOkYyOCu3pSJMKIWpxIDuB75RN+BtieEmovJhx
klzLILTE6iKl6oYAYUHS067kIVECKAbE8RXMlExGqa0XYVFYnNCVx4xydA2y1wHIo8dTitWoHtRM
p4l1xlyAwO5AwxGC1NxbVUqp6cVi6r9SMo1jtWoqrSHqKd2IRrgmRvQAkYeyc1atyi5MhqtyWqEf
IuNQxX4fm4+baekxigbNwaj7BLS+TGJqDQhG5y05QuDAGobYm5uEhDK42fctVou2Iz9BpmBIbCtd
icrM8aYL4sfxFvIxxZMSbc8xKicVG3o3IzgTauH2orTej59qOEgKsuCTSzgaHr8cBLtC18vcNku9
EYcxb8+2PaAqyexd8i9joNK7ChHm7euJwuwGBWrlbxI9q3Jf9xa8ubfeRwZaROPM2moG4uwojl5y
s3QawuAsNyA5q2QD7cQ4T25avr6WkBIb1rs/ClsGBQEtN2G0AuFpJ0XPclQpxUHBMajYVriPLmQz
xWlxdtAULV7FpJMJ+7IMU/q6eKIl2hcVmB7l9xFfcRVIm3+iUIfeaJPE0BbYng1mVvETpRQfypSi
xBocFPmYRiZWg5hDHSM15BHxoYxOKM7kAwxdHRbEbcTQDNActCULeAIDD1oXOcuvPFo8RKbl7MQf
fNSnlU/LN3oG9A/Vr8yc1/Kcx/tlc9pi5PM3nYfnyXw7FyXcn8nQNsiye/ft2xsFV8fmzI7I0T8r
qcDElRMqAqMBEylg+ShouRMpB2GS/DX7sOMCMmNWbBDmrUSPLAJIwbMqJjhpDFMZgd61ag210Y2u
IRzyREoEtsK4IGEMyENMptmUxuFkRqGkblXAU7UOEY4BUimiC5QnelpGzNCmojMpgGGwfKDE4FSk
ItA4Nh1qddgg8aLwrwlAEcWaqFgsFkuOcY96NwPcA91D8Ny4jA4yOQU/xF8W6cOks3cEJPPmNRYO
XPqUr9uBiMdOBR9+OIKY9MR8hsjaj29WU9pTAJ5+pCMA5OCrWZxPpowuy+JLwwAeRRjyQNy9mCDw
9qPL2pg25RGu9pYR2gK3buyj5duRkZh9c3QPlClGyogIhgMAE5oNqEZXYynIsIRNX3ry+U5cnTIC
cgQS35qE7125ZaRMTIu0Tu2qJmTfMTqBmBic1wxEewN0jo5i1ZgZiWGyoR5a+GuR8QBferflEARi
AY5hlbhdOq5KXDEfapGzc8w3eKRyBfJXiMg3rVuORNSpQkXEgQe9TtnGBIV/l7raZgSAO5NZtxlc
aun8q1Wx5Fk4V4u1SN2Ruzm2uROJFUI6YxgeGL0R0jSQRHVHaURroQwJxCOiQnDaShZttK5KhEKs
rdzm7/COI2hvqxRFq3GD0JARuEVyWkhwckSRSWDLzCNMaM+blEyNQKPsU4+XGTUbNCTCJYgblpEn
Nskv2q5KyRMvqjD61pm9u57QKJM22UdEagXzVZMMkYipKqGkVTNCXs3A/eFflbkwjEP60ZXZARji
StNseTY9q43FIblpt2jEZyxJ3krFjvXCQezqknJUq+xbFUqh6QbMxHcVK1ctazFnlHYUYExjbZxt
VVOJgCCXcbF5luJpiDVCd3hgNqEY0gKBDT39Fu9GrhpdLFW7lulsyGuOSBFQVEsDKIYy2KJjEybE
jJRMjwHAqiImeDJRhalSNX3rjFNoTwIO5BCWLZFPICOBAG5E2IiLeKbfWoWrswIxkIgCjo3pWtNy
PtSwULkYSgMzkUCtK3hb0yr0URdPEaZbUSS4OYWnTh7SpLzLdyko7F+JdpAPGccVESDyhhMY+pDX
cDD3gjeDMXoFKVvhmRVs1K3PmrkIxqLTtEqM7MhJnEu0IWLlBGWp1cvSOqzmfdREDqgnHcUxTHoI
zK8gH4k6HcF4m05heWzb9qBA1McFrjHRt3o17lKMfEcChqcuoNjn0SjuojExGqGbKBuUAo52laiQ
IgOSrt234JyLdGK4XJ3LgtEr7sDY6YzEQnnePcqzkVVz2la4ht6lcOdB0zMw8RkvAqRbvVJSHeuG
6QuG4/auKOrsTTtkBECkRUrYAmFLQxK0QDBmTQNRtQjarIEEnsUZi5GJYaoksQVbs2DrFl9Uxg5y
TqlAmGORQcblM2xx4hGXMFojAIxiaDBe7HaqYnEquOQXmziYwNANqumcBExwO1XObs29V3xUwfav
MuDzHFBsKlf5gCc4gNHIOpSERGcA8SKMQq29VwDI4qVyXDLOPYicURbiYRHtFMC5OJREq7AnlQlM
KgJhURxQndLRzRNh5AZqRuCpwBxVTpGxNAMPeKefEdvRWi8uyNUjmo2ZcdwsSMghCUCIEB22o8vG
GkzLF9m1W4yAjctjhBwJUxdbRpLkpnpuUoWxgap0ZSoAtFkd6/7melw+9HSNZFIk7E9w6IbAqA6t
q1yiHCqVQphgtyxTnoqUfKiZMvJpGWajZcyMiAW3qPlgyJxJUY2ojW3FIhVw2gICMySaMha5gDTI
UkfqRhKIL1dNKJ8vKYxHamug6D4bsftWqzcE4nAp7lsmO0VCBty0TbHLvQu85Ei5EuJwOzYoStPf
EsGDSDbQpi6AXyzip/i4zEWJtGBx2IQF+egeEPVkb3N3pGyAweTEFXuVFzzRaPDLNvkpjIPE4go3
OUnK3cOT0I2IjnISZ2jMV9a1Wy7UIzHofiQD+8KFGXJ3TKOUJnJaectG1Ie0MCntTEuzHp1N5c/e
jRVH4iyNniC0iQhcGMT6DXOJEszGiEeXl51kYRlU9i+LCfLX8zkgbkRftHCcNnYtViWiZzjQjuUj
L48WYZSRiJSjEUELhp2FECBtSj4pxkGXDpvW8jmAgJHy5H2ZUW0dIlIcQwkKFauWuyEakwJzTc1b
YuwlHAp7cgdoz6NU48XvChT8vcMoMWhI4FeXftyhciHkcmGa1QIkNo6H6lVxRB7QpStnQSXpkdyF
zVqhEUYeIe6UP6n/AEe4LPNeK9y5pXEkL8PcoRSQAYoXeasa7+WrD1LTBoQyjGg6X+Qv8ir0t838
1/Kcx/tlXoxswExclqkQHJ1FyhG2YuaARQ1iXFUAbEGgYxOJJUYxuGMm4i6Guc7h+hebDhlEfUrl
qEAL4DEnajcuy0ACu1RnGPmwgXOo0LI8ty48rlgAJAZspQ5rmZQ0uBABsAtAiGfxkppyc5gYJhqi
Ny+FalI7Smm1uGwLTEJyEwIATm5TYnLyIwVILhiB8slbIDkUJCJlHVD3gm9FGMQ5JZQnejquGrHB
NGAHcsB0nzbgi2KAtSN2RoAMERasgRHt4rVf5kWqknSRh2BQldhcu6qQ0+0pGHLysOOAMCX/ADnU
Zc3zJBFRbiKd6826PNmdtB6lwwjHsCY1BX4m3wyljHJaojtHQAoj5AFZjg0QW7ke3qwE5CJlVcJJ
3gLhiQPekqVlt9G8iIjaaKd43BKMMREuX2K2Pwxl5z+XEF5d6mSLgvTbyhE6Yw7VG7f5icuaatwH
DcEY2ouZl5zlWRO9MAw3dBleuRiBvV0ctaM5Wm1athzAVwA3LUCQbLx0g9qnPnZi7K6xmBFqjYV8
K3GB2gV6w7U2xX7cZyEItpbeFcu3iTciJSOrEl0TGRiWNYllclM+y4kS5fFQM/FVuwFEPWch9bqM
jlgnJU5wDA1LoPnR9i02wO049G5AXA7YIaYimCkPHcyhGpRF+f4ez7scZIQtgOPaapTuqlY0WK0T
AICDkyEagEutJteZbkBqOYUbto+ROYOmJFC21EXYERkKEVHcj5YrIMZFajLTLJfFFXYTRqRlRcM2
7VkU5AO9CcaS7cFK1aD3QGM8gofirrxi+napz5a0ZwlFiTSL7Shc5mfmTBcR9gdyoQdya7HTvyR8
o4J7cvUU12Oob09ggMPCcUDO0QBSUk8atVfWiNRbIBO/RVU6Kp4ljtV2UGJm1CHUozucJrvTvKJy
QiRriS2FVLQdUXdnRFwGMhgE5L70BHPNGBk4BYHeUAPF1HsXpCMcYLVEVJab4ujAxxo6mbgOi2DL
UMXQtWYyMpeEEYrRegYS3pxh0PHhO5NIao7QqGqrRlKwGMZ4nNCJJEQQXGKjasXTclDi4zVRERkg
bshHY6EwQQcCET0ggtLJaZh94Tgghb1vQRMeC4fUUBC3qr4hgpG7PXCQ8AwRMbbPiSmlAGK8ywaC
soISwdVlpuDCQRA5iduEi+keGSMbdwSu2y0w9XU+VIEvMDMrnMECUAHkwwCP4c6o5jo3bU6otQxd
AzLyKlORDAtFbiiIcURiiWYDFlrHcEKtpFE8senFhLo8s3ZGHuunArGqJlJop5R1dq4IAd3Q5oOl
mpt6CAhDSKb14R60QAA+bqpDmpVCOnBMejaFM4BkwpbGJQjEMB0aLdTmVtJxKfBEDIsE2aaJYrQ+
GaIJcEvXo02xXatU+IpzQBaLEae8vLMeKBzzUbMwJPEEjeyFmwPKtGo04lRt33hBqlqyUIW4ABgX
2qN6yREXBxwOBZGGgQEhXeERmF5lvvG1CccTkgLhocANieJeRGBRlck5xZSm7NlsCEYTa7E4N4kx
AgDmU9yTtgBgizR7F8MED3ihK4dct+ConNE8cPeNAvPlPW4OmIwdTMjg40oRizQY6hXFRlzUDGyG
oBjvKFzTrlMODuVvyg3mBxtBH/uowvzM5TpGJKFuFLhAIUpu8jitMBqknunsimtRYbWRN48W9PiV
UrFcLlVLBcVU2CcmuxaYh5HAFCE6SlkELlt5yk3chC60b83Y9qJuPOdzCI3I81AfEnUQO9Slfj8Z
uBgpXPLN2JGJwUpXLZIB4ovUIi2BplvWuBGuNQy8m/ajqAxNHXk34O2BBWmRMQdqMuWkJx9q2c0b
nKy0zHitHBeTfj5ZwL4FGNi21sZjNSjdsm5ZFJAjF9ijzvK/9xYxlZNJ21E2ZDzRjp4ZjtQIMr8L
ngep7FDmOfsGVs4EVY7wp8V2xqFQIkBaeWnITi5JHtAIW77Rn7wNCnx3/JTGYEonIrXysjbnmAaF
CHN2ySfDOJem9a7ZcehacRIbw6M+XkbNzaME92P4iyPaHiWkny5moEk4qN3Q8oiM8pRoUZQIv2vd
zCMJHy7gxEsFt67XYCfahLk7ptEew/CtPMWuLEXrRau0p5SFwCkgWEgvZkSKgtqQ8iWkjASDg9qM
hEl/aGAPcv8AvIxBFHBq21A8nfExlCUnCI5i35be0KhPbkJdnS0g42FaY/DOLxpVcE/Oi4ocQE16
MrUsHIotUJCQ3JpgHKuxGfLyMCcIudIK08xbeNeOO5fDkHx0nH0DyDnavNtgRk2G9RgYRkDhKvqT
TkIS90pwXBzHyenon9DT5c/Wf0PNfynMfsFcyDLSI3bgkBiSJlPbgZSGBKdgGDB9irMncE8INvKA
uXC2yIRjZJ0EYGqlG3akZyPiOC/7qZJOMQgIHQ2JRhA6mzWHWxVT8xG3cDxOKMrUjbJyxC4L0SN6
pcgviXo9gVbgTRugHay+HcjL6FSAPYU3kH1oeYI245uUJy+JdGZwWxNO9CPaVI8sBdEcS4AXwOWN
sHGcjluQM+bnYgaEEgyJ3BSszneuyljEisnUZclaAmf3ZhXvdCfMSgS1LbNEdqlO6BcnIu3sjsCE
oQAIwPWtC4HiJVRaLW5h4tgtQ8JQQ+QQG0hRAygOtaYF2FckDLiP0KmHoXOG1abl0DexZXDyZjCM
PBrFZ9itX74gBGosVD7yrl7mYxlK4XjbHhivMt2wJigkasN3SZ3CIxGZUoQErl0ewIn19ina5aYh
ZYGEwC8tsXUp8xpAvRGqJd4FsQgYW3mAAZEuS230ITnvVy9CyHgGlI11HJSuBoG82phQRAVQTufa
rloB4RmIudmSifDERcHtqrNq2dRcmSd27Ft7UDkKFA7FGQlkmcrxFlxzI2IaXjbOe5GUX1bSvEsV
t6KhYFVWKcFAycsG9aEZN5cQwCcgwP5qE7d52xjJSvahKINIuvH3BajXa5UjIgbZE4KUbIlIx8Us
ghGDyJFatEAb18cvM1LIXIzHkYmJxQt6iAMhgnhI6hg60Sid0ooW+aiZW5ZjFC5ywibMxw1+vemu
R0viGTUI2FSly0tF2NTA4FPeskzDxk3hkEZWA2kDVGWZKcntGapJjvTgv2L7FgqdOKeEdRRhdjof
CQwQFziEag7QiYcI2EVCN2fDLKuKMTUSNHyTSweqEtQGoOIjFDHTEuEXo3UlM5BSnFpRkXMTg6Jm
I29NTEKcZR08vqMRtkyhzNseAHSDkVatWbQlOQMjM5AK5chwytltO1G3zETGQoU4LrBPHhltC9+O
1VLHYUA+KjODExyXnWg22BwKFy5BmYEbEBaI0xVFjVaTIatjppVda4EmOcU8ZEblxDUFWDFa3BGz
ooqqiLVICnanbBjKgnsCINREsyEREatu5eXPEYSXnSnODnGBb1q9y0pvcABgZHEKdssdYIbtUuYg
0jEEmADUR8qMoF2Y7lpNDmE8ajMKhVU6pWLu29CQPEclqYiRyTt3Jo8MsEBIudqJJATDiOwL4cNE
dpXm3JaiMkJDMP0EEioUonAFUXEQCqyCbFMIqgAWLKsiqyKckrFYrFYlUkfWqSK8TqoBQ1RRqxO1
aoEEbNqEDEwXDIFeXaqcytdqT7YlaboMJb8EY2sZA1VTXGiYGrUK4sVWq0xruCeZpsTCi0QGqRwR
vy4pZRGQVw35aLZHC9KqdwadTUuFefbEjIe0cFc88GV+A9r7FMSAoCQdjICMRdq0QckOZvANkNiA
zRzOZTk0ARIHAcU44pkUR0nSNgxR80l1IRJGodxTwiNWZRc9ya2NMT7RWq4dctpTbFUrTYiZbzQK
PL3QZsWlsCjCJ8vSeGIGKjGBOsgiMRi6MocJONwnarcOdANmfhmPCZb0RJtDdzK5Z0ebbjIi2SWI
DrzrwAjbDxjkEbkRwxLRGwBC5GbkYvkjGIGOKnfu1uYRQu3OIyqy1RkImJfRtCAFIxqE0XJRcsE5
qdpWxEFeaYtb2o3QS+UcFO9zQ0wwi6lzwkZRxjHN0bnM2+IeAM6N65WJfTDJl5gkBmT7qjdkfOHh
JPslUi+9G4GBGIUISkISjQg0T2i4AYkZrXZOmWYyK03OCYxBXl24iUY+KX5EJB5R+kLTM8OUk1os
2YQlDxbULRt6J46zm2xG4R5sXrE+IKJiI6hWUTQupXLExbue7lLuRkX5bnRhIUEioy50C1dtF7V8
B4S3FGL/AA7J1EQGLIWuYjpkKPIUPavxNq3GMz7UcC6N2y0bmcT4SjAOSPFal/8ASuEtLOJxHybT
OIkNhQlyvwyPZcgEqNvm7bn/AKkSGZCduQkD6IxnAA5SFCtXK3dQH7uWaEebtG2/tAUHantTE+zo
a5AaveFCieXl51vKB2LTdibUxSQlgCnxBwPXYhxsKfT5c/ehRE2CLscjhII2+ZD6doanauCTH3JY
p5QAllII/h5DTKQkWDSHYtPMgyahiYs42ojlrhty3UDoC5Hzoj244phICWcTQ9TTOIkDtRPLTNuR
IONEfPti5AYSjimEtMswVtCMojRMgjVHeiDLzbQDilexaC8J7JBOKjb12UZSHFAuCtVi4Q5cwwHc
mvWzOL0mPtTE6SQ7EjBYuMvmenXI+YG63NfynMfsFc6YweJ5i6Qe2clWOmOZVZkyQlKTtkgGDKkQ
Oqww+cnNFx3AOyqEIQlORwLURjbhoi1JAP8AWj+L5sWxsByUZ3geYEaBg0SUBy3JxiDhJnRjdIg+
BBbSOwISu3J3bgq5K1Qg8/eNT6KvcoxIedvwkIm5bIiaHocYfILYO1S7KdWMdpAVqADARH1egqQO
1SMRK9o8RhUBActYuW7XtXSHLbgpGfN3IcucIyDEqA5ifnQt+GJA+lAi1EEUBYU6XNBtRM7glIMN
Md+1ShycIjMSdyR+atd6E7diQeIM20lsWXmXb07t2Qa4SaEbEIQiIxjgAPRhlUttUzfOqMqkAgVU
L1saojUCNyMpTOqPsAVdStyt6TcOqAJWm9OWhgBAHYrUIk0Dkneg6aI71KO5VXEcE5kv+3feWQld
jKVzMsU2mXqXhk3YV4ZDuKqW7QvGFSQVCOiqwVFQlUmQsQUxHqVQQuI45DFabJ0xrU/WtfMT80nC
O0rTKIs8vQiMSxPatNqIiFj0smaiBI4hgWQuGdHfSjqiJDetViWk+6VpuhxtXAaDJFgNSOqIJKpR
arM33ZrTfBjLDUniX2EIOGPQ2ATSNdijG2C4BdwwwRBiKVkXp3LVaPmW/pCjfjbJM8lDzYkSyjsC
MgOIDLFRD6QayJxVSZS2mikZFhHMKVowYSLiRGLdQWh2lapSYRqIjNRuQcSkKkFihy05aZWyanNE
ctcEr8pRgN2os6j54FybNKRCMbURAEksrvMAS0ym0A1GUb9uWi5CANwHDCq0yaTZhUNejBjtCeB1
jYtM3gdhTvRaoHHHetVmRgTi2C0jTJhiaIS5i+IEVoWAC8/lrspQiXlI4IaiSc2CYXACcjRGTA7w
iIEGOIBRjG2aYkoym4JwHU2dFcFK3cpGReJWIXiCMZMQUOYg+kF3iWKjYvTIhKLQlI+0mBBfBSvw
l8RzIRaiNo2jbnGRj3haLgaS1Q9SY0I6XZOqsuHxbAgLUNMfeknv3DI7BgmhAdqopQGLLSW71xXA
OxZzO9NCICrJVLqgKpArBlxSAXFcVS6wJXgWoQAVba+7XgXhZULJ9TBcMwVRinMVmCgnIbeEfLIr
j0NIOjKxJx7hWi4NE9hTumCJdgvrKpU7Ann8OH0lEW4ny8YyGCt2RJ7sRpJyJVuEHnM0NuOAXmXI
6rUamAPhVk2W06RQZHNWLnLFrwfU3u71KzeIhEjGIZ1UnYWxQuOTDIb1qMSNWC2xerIQg4GalauQ
eeIlvWp0TR1wpgXOwJxwROaeXHLaeiqEHEdRYErzIHzJnHcFIS4pyL9gRvCyBHURLaVbEoC3pPwx
nXatLk3D7e9Xv6bzExDmbMz4i2oGoZQ5eExOerUWLsFchrkQI8IfonbJaVwMO1GEeGRLEIwhQnEo
GZYPUJjIaMNPYqFNH1p5l+ilTsXlRIEjtQsxBmMzkvLszEdHjAUeV0arEWBkha5cGEI4QFCVa/FQ
4HApl2qg1EDA4LVGI1YS2hlK3QRiBUbVL2rcvEPyLyLETESYzkdyNq0R5YYPmog4txFCAwHQ5oF5
Nrwg1nmnIcLUDRG3CWmR9SEb9Bl2rXoItDMLyBx28qcQ715dwarJwmMY9qF2EtF7GN2H2pucjqgc
LscO9Au49mccQpcrzo83l5Bo3Wdu1Sv8vON4S2e6pSsjyr/tQNBJeXF3GNqX/wBJTRpMYwOITSpI
YSGITcw4bwX4/wD1JuZ8J8NyOBQILg4EfJtM4iQ2FeZyp8qfuuRF0LXN2wQP3sTQhCVuYIlk9fRa
ZgSByK18uTamKhsF8SPn2xiRiyYk25YNKlU+SPmwEnzzRPKzM4+zGRwGxaL9uVqcaSJwdCUS4OB9
BpuxfYcwvM5a5J2Ziary7sDctjHVi3atIlomfZlRNMCQ3o+RI2ia0wdHzdXMQcVjWmaAnA25amBk
G4tiezc1wx0y+xCF+BtybFqJ4ESG0dFOggxAMsSKFE2LxkGpCWDrTzFogM5mBRDRJzi2aeceL3hQ
onl7hMWpCRwK8u/aMZxDyIwbagYkF/QMQ60yiBsIxT8vMOB4TgUPxEDWpMQadq+HJzsw+ZKdFfRs
t/yZvRc1/Kcx+wVzH+rP9o/PDkgDejquAtiI1+pfBtyuVYZKIsRiIHxFmYdpR/E32GwywRtxlK+1
ZRiUPwfKucHNcFHUdEfaBp9SM79yUifZBp9KAEAQMHqmAYenNuYd1I3dRfwgUCIyy+QQzaqmd7dW
xAB3kEBsDdX4lyMe0gI2+XIv3AHIBoO9GPLcroGBlie5RnflcsQA4gZO57EwlMxxMXYE70IxAERg
Oo924I5MiOWtEl6GWLZkBNGdwxlWLBgNxQuc5cBkfHEB3owQFq0BpwJqfTHsV4O3Aa9yhbtmU7hk
4G1kbJOiAfUc3IUdTSLhyRkxxXLzsEfDLyKYllOZwiw9SYAyOwBfDsmI2yomu3RAbArlq1LXGBYS
27VOEbRu35eFsAFq5iGoHCANB2qPk2xFywpgqXSDLZQJhdlJvpXDIwAyCrdftAUIX4vzLESaOfcr
cTXVKsSGopTMADGLuC1QrcJ2ybc5NQ1Cldt3JgxD+pRtEyiZlokiiM3jMAOV5cQJSJYAIeZblEZ0
VJKkgmidctgTuIQ2ZrVIORUyK0WI1zlggZSeeLoAScb1UU3LiBC8TJndYrem6GQC0yDx3rzOXPbF
CReM8wjpg+wp5WyybwnemmNW/NPZlqj7pTXYmJ2qkgnBD5oRLSnKgZGJuVAIpi+Se9hOLxGYIQib
YIkWKJBbSWbJlCEpZUUsHFHG1G9MnRI8NaoQMaHGSNklycCogs8QwPSZGgFSpTOGXYjIu/0MmiWj
GiAA7UNcRqBBB3hNOBlMDLNTnIGItwAhA7TiuIUG1XeXszoeCcx9IUL+vXbGMTtUDCJF2blo7Ebk
IG5CNCWqEx4TsPQ0g6ezKnulabsTFs8lSSxTEhGN1zEYAUBWm1aAl7xQM9JMso4p7Fwj8yWC0X4+
XcA7ijZi7jAkUPUw71WvRXBNMDSMCcURGRIydcQB3usSOxShIvECj5oX4xBarKzG+TGwRpk7kAnA
oStzEhLwsVK6SRdkdQOTqdqdsAQlpBC03RQ4Fao0O1NLocllosxM5bk96eiPujFcMQ/vGp6ANqqW
Xic7Aj5cWfMrik62qkVxERXFN1SLpowAQAo6bNEE4dUBPkeoBvRhuRGxUkVUumnAEbUNHDtXARII
4gqvEN6xaWwr6lUVGBzWmZ1QOBQJVVwDTD3inA1XNpWk4jEIwmX1BtIxQvzPlxlWFKstNxgLkWhM
5lSM20EF+xSly8pREi2kLVdOqZxdSncDAfSifYuBwEIW7bjHiTX4aYbYoxECz0O1PI4InV8ICgRq
y02QZy2p7s9IPshcIc7SqpsTsClfmNFsB3zVyd2DmLaSc0JayXIeIGCjaszMpnxNgyYiUbMq41Kk
TAa7WXbmjdAEZ2+KJ7CvNvwlqwERmVd5uVtjdLsMmDD6EZnGRRidlFIRFASvOhIa41ovNunjYUFM
OjhK2BOT0GQqBiclO5GdY4Q2qV7mYMDSMWqpcxYBN25hE+ypQEDMnGexGdY5EZqVq+ASBqt71G/O
IjpLOBWqnouCUpROkDF15ZAEmbXmp6bhgZirKtx/pKOqTkKVS4GCErJIOYOCecnkcegklgMUbNo8
AxIzVUURbwKEr1MwDmjOHAR4d6lyHPnRcB+HKXhkO1TNvwk0bBGMg8TiCvN5V52vatH7EdNfehLE
I3eTnolibZ8JXkc1DyrhoRLAo3eUJnaNZWsfUjo4LwxykCtHNx1wHhvR8Ue1CcjqgRwcxA1H6SA5
garZ8N6NfWsp25DuRNseby58Vo4jsRucpLVbHisnEItwyGMTj8naYEhsNV5nLkWp/Q+1C3zMIyiB
94DiEJW5giWAevoz5lsOcxQonlbpkH4RI5bCtHMWTCUfEQadoQlCcSJYVqmuwEu1GXKTLZQlKgQt
c1blbuDGXsnehO2RKJwI9A04iQ3oys8Ej9HYuEyuWxRpVcLTdjK1LAk4AoThLVE4EJrkd7ihdGXL
XdVX0zJqNiMecti2MCcQvN5O8Ymj6TRuxEXbfmQJ4ZRNe9UmIyFDE0IPUINQciniNEmaJGWaeLXo
DEE17kDdHlOdNTnsKfhmMNq12uEs2nJ9q03ICUQH1B6tkqTAlnE0IPoWNQdqwMDiNJZfDn5jHCR9
nYiL1vy9OJf6VwTidlfSP6BvQD5a3ybmv5TmP2CuY/1Z/tH50YkP2oxi9yQqYwqQjGzZnOYo2xR8
kSjI4gxYDvQu8xzJAFTHEI+bdcnF6BSHKQhCMRSch4uxNphZt+8XBIUTekLYjjoJeXeVwW65knFa
YRERu+SEQ8cahSjftmLeEpj6eUjlEqR2yPVPP3YsMLb/AF9LXbsYHYSha5WUbtw5mkR3qUbNqEYu
R5kaqH42AiIl5T1Eykhptu20oQgBGIwA6mq5IRG0oG2DdJLBsHXlWpiJd2tn2dnah5sPLsly05PJ
zmUJyBu3AG1SK0xAiBkKenlK7djEQDkOH9S0cpZJlIPGZIYfpJucuSECGuWxLxf3Kdq3EgyHw57A
MlK4IyibZrqDOO9POLy2hAxiSRtR8uLRzKFzmbpuE10jBOIRhEe0U9gi7dkWERlvUrhuyEjWhKML
IBOM5yNA+ZUo3b0JTmQSRk2SBleAALls1EWISnYt5AeIqUrZn58jS2I8I3IQ4hfJHCBgEQZXQ/5o
qn1XP1FJgJSEyxMWICvXfDdth4SGLurVy8NZmKvuoUef5aEYXLcoggYESLKMrlIXBUDNDnbYJhbk
AYmraizhHyyIxmG1nerN29IC2JcMxmdhVx6ylE6Mz2q35QlcBPhkCAd6nfvaXgCRGNNROChZnAgX
DjGrIzHMOfcOKjy4PEcIYOyPmWTGOZCxqqSHTWIXCTHsXDN+1Vi/YVxiUTvCYSCx6MURblpkcCtZ
Gu5AV3lcdvTEYHB+hpxD7V8OWoe4cVIChtgesoxEdMHLSITyg4OEghCMSZSLAKo0b86LVGsjVcZr
EUCjlVCOZVEC+GCjCUiYeyHooyFCCRJaYAmLNqGRUY4sMenSD46dyERiUY5NVACsBUnat3Q6jcFs
mWchRSjbItljSQx715RYXIyOreTmjcuEGRIjCO2RwULvM2xK4Y55OpRswaMi5CuGFoCGpgAo34mh
AMonJ00wx2iqcEFMQ4Wq2TCW5VGuIzCYljsKxWKxRM4vLJlEEvsOYUps8T9S1AghaoRc7NylGcWn
HJMqJ5GpwGaeVDlFHUOLapERBEcSMVpEtUjVCRLEYKNyMhEtxAZlaSHAWqIGsYKxK7H4MC022HNR
lauCWvwjNE3LYMpVMs1c5aQcRwO5Ex4rapVaIVfBar8ifzQtMIiIRJTpnc7AmgG3p5SJVASqjSN6
e5J00I96eOGaqeo+zo1jv6j+7VdiIzVemUzlRPtWoYS6aK3FsjqTA4LTciCntyY7CjqDb0zuNhVK
HYVEnAFNbjw+8hK5xy3pgGAyTyLbk1oaI4GRRfjl7yhCUhC5ANKJpgoWbNwEWXlOQO3AKVrzZHTQ
Al6Jrgozat6IcyJqAEJXTpgMIoONRGDo6WcCgVyFyDZOqYojU52BfDiYjaVqvSMzsyXCABuTkoyj
F4jE5KYN1hDERUYWuGySGL4oylESDAaTmpRNvUZViBgO1G5cIEpVm2QR/D0lEOXzUZ2yNLepGfLS
MYxiBcI8JIRhKQNvYM1a5d6B5S+xQjEtqo60DCNEZmTk+yiPCDsU4AvE5lMK7OivrTqtELVqOozL
AnBD+nXh5sLjapDEE1YIW+VjK1ajUCoMioT5mLgU7N60MdchWS8oSEZW6SBoaK5CyBctEB/0s2Xn
k6JDwtkhGdwzA2rziHjhRF4EdhTQgT24JjTcEYs2fanAqBUKnQScAjZsnh9qW1OnKYYIXLgpknI0
Q9kkfUo2jE3bUKfnD8q0mpH60SgLr3eVOEs4oTtS1RPR53Ly8u8K0wPavJ5geXfFK4S7EYXYg7Dm
E9s+fYPsnGK87lp+VzMcY4F94Xkc7HRcyJHDJG7yhESam2fDJGOgQPtWJ+GX6KNzkZASHj5eRp3L
RIeXdGMJfYvO5c+Xe+iXahavjyOZjgcBJC1zEeHAXRn2rVEiQ2j5O0gCNhqvNsEW5/R9CFu9ATgB
W4HqhKEgdWT19HxxEu0Ojcsny5H1PuQhMRvWwKTYv2FDXLyrmcJUK44xuROeK18nMxH/AEyaOhZ5
y0YzbxjAoTtyEonZ6BiHGwoyEdJOOxE2py0CsSCUYXrTkUMsEJQmAcDEliCuOIkDtClLlrkrUiXo
j+Kh50IlnAq21ByIXMsvpT2bvmW/dlj61pvQNuYFScE8SJDd1CJxEnxcJ+XuG2XBXxbfmQfxR2Jt
QEvdkvMAaZDagyGmfm2s3DyWi4DbnskMVqFQc/Q8QB7QjKHBKWxMJi7bG7iWm7GUDvCeJBG75a/z
fzX8pzH+2VzH+rP9o/OGqchEb00rjHeCpCzpjGOBm7nsCrGNuO2oJUTNrUR4jGRJKYyEZGh4uIry
7ViWo4UxUiITtv4BEfWShLm+akNkYqM7xN0w8ILMmEIgDCg+U6o+OIoiCGmMVSqrErD0dIlGQLOG
KB2npAiCScAMVG/zo8u0Ki2fEe1C3K5G3GNBHYo2uUkJyljcI4ApW7M7Ytg1uAZbkLXNaJQBeVw1
mUIxsxLZlaYREYjACnUclhtK06xOeUYn7V8OAtQzJLzbcgT5kxiJSwdCfMXXLMYRiNKEbVuMQNgD
+nMpFoipKkLeq7d9iEQeJNbmLFmceKRg0ht0uoz5hr9yOEpD6TtTW4CA2AMnUbgtiek4nJ1zcbxl
5RI0SFKbIhSF3itANpmKuVdlZBFqLCEYDcha5iXxIgiQzdCNm288NRODoQ5i6ZRkX8sSwbai4jGB
8IJcj1Ii0IRiAxYVKkb3MeXtjE1K0cpy8uZmcDJ2Xmc6RYt4+WBVaeXgBEbnJ71K1ydkedjdus4t
v9qMiBqFTcNZE7UbmEcID7U5kSMxgjds+GXiB3IC5LRF3EQKFC3KInbGGmjIW5w+CC5jmSNqHLtp
jB9LVxqjy0HFskSnL9GoULMrgNyHBpBqSSo8ty9prcZapzJxMaUVqd6UrtyUB5jmhIyZcmbZ8uYm
5MRhAYqF66BKEw41epA/02IjKR8szI4TI7OxGV64JTn+8Z/Urd+80oyOm1MBmkRmrlmxEylMM4yQ
F5xECsJ5ncp3bUALuEe1GzdJhRxKNQpXjcEoDFwtMI6jsGK+JEw7QqSComWFdq4JkbjVVGobk0gY
71SQPUqo3YkgigIyTzB13C5lLEqMLkHEMCiOVND7B+xRuxgRO3ISjRxRDmNGmTNKIHrTwLUZAk+q
q1zxHhCMityYBytbV9iH2riPFKpQEQwH09RhhGi86eY4Y7lK6QxnQIEhhl07E12YnbwJ2ISMQa8J
3IG1fMISk4iKVQldmQYESBMnDg71E8zMQm1auhyvLn/t7Mdc5YGR2LitgtUFlPluVk1QJT2NkieY
lrsaabyoxtkiUg7DJebCBuWx7SY8J39FY12hPam+4prsSN6oVQqPMEC43sk4FSuSxkXZahwnaCos
NccxLYpytzFsS2Kl9DX8S3mU0X0g8R2q5djLiFYgZhaqCQGBRuReIPig9CiLTRIxdESIJRN2kWoi
Ix01o6MhQxquLHIqBkHjEuChMAmbeHep3pUMjgmNV5lnHOKqGkMQmzxTyLBNbGoqppsCaIJKefCN
64jqKaEQFU9BKIK3HqGW1OtJqERll0OjM4lOm2okYHpEc8+/ofMdDoEigqg2ThH19DhaZVGboiHD
IZJpBiM0bdyhyKFi74fZmtRIAWmwP8RWq6TOW9GES+3cnGKJ1U2JocMjQgIzt8Utm1W4Chl4myIX
4ifFORYBPItuVC7LxMmtRMic092egHIJxFztNehnfcEY2WiBiSVGzEeZItxHBGMo6YaBqMRmrgLx
ji5xkhK3GRhLwPRmzQnzJ80xGGQUTaiOIOSo3IhxMccQpC2CLhBZ8iibUwQPFVR/DR1P45KU5S4T
XTkFO7LMsOwKMiNIjkiBgnJXCqmmzpFmJEZEYlVjqgMJHAujOcaywZeAxlA4lXvMA/EQaUScQdq8
69ECdkgS2vgpShMHVHhHajC9GMjEcMs1KcneeJwXDWW1achiUFIXKE+Ab0A7Om0O30ombRB+hU7k
JD2hUdDnAVKNmweHCUtvRqJwWmOCFy6KZDagbbAPQFRtXmJjGqp615kCbd0YSGfavI52IAlQT9k/
kXmctN7M/CDh2FeXMeXfGMD9nRpnSXsyGIK8rmR5lk+C6Mu1CcS8TgQvMiTbu5TjSu9C1/UIarfs
3o5Kh87lcpCpiN6q0xlIYheZbJnCNRIeIDeha5poXfYuilUBe+NYyuxqQN6cHUBhIYheVzQ8ywaC
5sG9C7yUtduWMMR3LR4boxgflDSAI2FG5ZItzO6iFu/EThEfeB6qMoTFfZevpHnHiGEhQoy5O6ZR
OMJLRzlqUDlMChRAa42MSKheZykzbOcHYOha5y0XyuRwK1WpagMd3oGOCcR0El5NmhKMQYuTqGzY
VKEZCcSXMZAvBabrQOIn7JR0SEhm1UweBfVExyK125SuseKNCNO5CPM2iNR0gkUT8td0l8AU12Gu
OUopoyaXumh6vHECW0UK1crMMBW2cCgOZhpJAJlHAIEESORzWrl7shHEwJzQhftFyWExgUTEvkRm
D6JpAHtWqEjbLuwXhF2A2Yprnw5HIp4kS7PmFvl9fQ81/Kcx+wVzH+rP9o/NnFOI7SEYWz5shiIs
jb5flZzmMTkFTXC6djCIUZ3+ZnK4KtkELl+6JTjhqI+pCMYG5M4MKLTatGAyIDo+fdZ8K1A7Ao+a
TdMdtE8IAHaan5ceY5alwYjaiJRlCYxomuRExsOK+JYkP0WVY3R3AprYuCO2UUfxF2dsDBoom1zk
tYFBKKIBcZHqMtTO1UZCEiB6lOVyYtiAcvinvXhbAPCDmnN6m5kPPvk/V9CFr+n2og53pig9akLd
2EbDkawGcbkLXMmErYLmRDykhCNmOkblphERGwBuq87kRueqn5TCQHAZFxInYrlqUJXb5i2gDhjv
Wm4RatbG4yhPTrmAwlKtFSg9PK7cmBGOLFyp2eTsHzQH1kjTF0Tzl6YtyAe0JZ9yeAeQDapVLDLq
VK0SqCmLRGWSF7lZmVuBjGUI5oWuW1cuavwuB2naom/IXbsg8xiYk7VKcImNkeHTkiJHFMsTGJyd
l5l8jSasFE8vARGcsz3p9XDiSUbHKy028J8wfqivKtBgXc4yJ2lRth9A4i+aoDTLJHJg8iclonqD
bqKMou0gDHsZCMSTq9nfkpR1aJhnBNESHnM+CNursFKV28bFv2otUnYE82lcmNUiRhsCgKVxiNil
xHSMIdqld5gm9eZ9OwFWjzMjHlrT6bMQzurdnR5Vm2PhxjtOZVvl53BGdsMXKt2bBfRLXrahIBAA
KtHGWniI97NcrJuMTJLY6WQhcgJ0qSrVq1EC1fiSRnGqEbktEZVMWdETLXJB7bYEKMbIli8pRCuD
mQZQFInAuoTsmR1u0ccFKVoatFJA0IWm/Aw3qkvWqV6HAY7VwyfcVxRPaFjXesXTPRPIvknATxR8
uTABy6M5OXpQfWq29dwl3wZcEBGOWlcUSUxpuKbJACICfqynngO1B66jVANwxYKMYjTCIw6HOCMb
EQIj25L4/MU2RQt27ZNsGpOaGqRZuFsAo6R4IsQcJITlxRyi9IrjiHyKHMRtjUzFswj+FsnzJBgS
aDerQl96H8zbqdAy8c5CEI7SVr5m2JzIqSjasRAhUiO9eZOy1uc2AAwBKF2N3hGIkmIfeKrhKYh0
8eA7QnidcfpTXAYneqGiclC5b4otplFcYbNY1Ut4RnAAXY470GOGIRvANbAod6OiXERSq8nR8QU1
BQkPF7TryZNq2rWMQVKRLCWQTHAYIxlgvhy1RPspwWOxUVcdiE5gQAWm0O9PIuqRYbU92Wo7Fptx
ACcHu6pO3obNMcekdDqmPQ21CIxwVehxiOgPgK9Jic1KOw9D5mqBUZDMdL5lUzqtN0OFrt8UE06x
+pCRJIyGS8vL2StMAR+cnBLkcRKIJJOAG9NecNUArhiJA4lapnsGxXJX+OBkSA+LrTBoxGAREHkd
yr8OJ9a1TJmd6aIYbkXK08vDUcyaBCxdczJAMQpCy0JmLmZ2srgjN5e1sAQhal8OAFYhySo278fL
tEASpUqM5DVI1GwBCFyQELg4SclKcpBmoNqEbchpOAK1T+ITgdi8s2yGzG9G5OREswVGQoJxcjer
kx4iGHetQqRtTypIrh9aqX6HlRTNpmgHO1Su8zbOk0iCKko3eREjdu5n2RsQjzF7ixjE1Yo8lzRF
u5bwkPaG1ERGuLCoG5Q5m3KVu3PEjJR5qd03oBhKORjtQlGsSHCrVY9yZCNupFHXlx4rgx2Bapyc
joaMpHYmk9MSUJzPCMN7KoIfBVR5ewaYTkFROUww2IXLgpkELogNI9k0KA0kNTTkjGUzp2dJjMCU
TkURbHm2D4rRxH6KAMjEgtC7hKB92SFj+oDhNIXxge1CUTqicCEYzAlE5Feby73LLvK09QNy1WzU
YxOIRjIaonEFG7yktL+K0fCUTbe1dFJ2pVjLsWnwXR4oFarfw7uIIw9SNnnBqsnCTL8R/T7mmRqY
g8J7l5HOQ8u5gQfDLsRucpxW/atH/wClahwXhi1JRK08yNcD4bg+1aol4nP5QxDjYV5lprc22U+h
CF8RuWwPvA7oShMOfZJqPSabgEonIozsSlbuZEGiecTdtAcdQT2hCFyBAlSOsUK18qfKnnEEgFae
atmUCeGUS7dqOg1HiicR6EyMdM5AxMhsKl5Z8603DE4oG5LyLjMAWY9qe5EXIOxMBgNtE0bgfYaJ
pwzem3ajOwSZgkhzTS2C03OK05i5DkEeyUYXIm1MYSIZu9auXmLsG8Mq+orTzANqW3JPAuN3UaQE
hsKeHwzuwQDi9B2w4gEY3omJBbiFKIXLJESMWND2oOBcBLPVwN64iIS2EpwXG0eiMZxEgcUBZmbb
YNge1GV15ydmiMtqasSKFwnFR8s39anpn+Tc1/KX/wBgrmP9Wf7R+aPiSAJwGaI1kyHsgF0Z2AB7
sCCZFAzkLcDjHBCV6QERlB69pK1GAiMycVpYwBwaLOiLEJQjlKUSXRFyflQehwLIRv3DMDMBj608
YB9pqVSg3fMTzgCdrL4tgS3ihTiVy1uiXXw+bujtAT2+ek2eqKcf1KA7Qvg87ZuDvVJ2Zd6+7sn/
ABgKvKwl+jIFV5Gbbqp5cndj3FOLd6H+E/kTSu3QM+FXWczJrKWKt2xKVu2fFICgRjyxvX7kwwuE
mMQrcedmLkbdRAD6ygI2YgDCiaIAAyHVeZYZI+TGU5DxFqRRgb2i2TwiMantWvy4xyMp5jcjO/8A
EkaiLnSOwJrcREbvT6pkRjtK+CfOuO2mIJbfJXLeqNm2CGuAEGe4Kdy8BclNuH2Q2wFabcREbh1g
LsmkaiOJZSuWYNbgAYykPESQFc8vjMzAkmgiKlgpc5zMxCVusACBpo3eVPl7BhYiwJuanLE5tmVO
9y9k3b8sZS4bYb9Lamv3BC0cYQoPWpaZDy4mkk9sC5LbknMsMAKBC3I8cKI+bIC3kT9QUfOBs8rj
ownMZPsCEIhoxoAKBGcQNUdqEwzgMaugJCopIFTJOkTAcuxBC1TJfbqdWiZUZgDUsBuQu8rZE9NI
yYtJ6fQpT5if4gTOsyg79i1WrRjPOhdGAGkyIIfAAZlXLkPANMbe9hUo3+YlpAH0Ly+Ti0Yh9Zoz
oi5HzZTxkalAzB7FSmrwviwRN2RA2gbVC2LhMbfhBCPk3zESLkIzncMpHMlCM7euYDPgjfvRBlQR
HugZBCesRpWJoQrN20xt2+DVt1GpQERw5I2LXhuRiDEYaz/ch5spawGBBoFf5SI44EGUve1Cn1qE
425C3AGoxJO5ShzA1l+E5stHLiUos8myQuQ4gduKacSCMSKqkgnGCqE0JEblt3ricHemdYqhqgMs
1sfEq15NsgRDXC1HXauKII2prLRJwXl8xHTPI5FUWKr0MELQPhr3obkZY0qUZGgeg3Jo1RtA8WBZ
eWHEB4QFwxbfmtUi8TQOXTSlTFBj4cGVe/tTjFPM8O1CcCWyZHRcMQcwrd69cM9MhKEjhRarlwQm
BWJUrcH8jl4PF/akTimIUrIkY27c6kZkK5cPxISDAHJXBy9gxjAsZDao8wzxkHY4rTc4ZZqhdcUQ
U9uRifoVRrG0JpPE71QusUy1AsD4gpa48JNCmb4MaVGKuQiNIxHYtVuoBQAhwNihcY4I6iyiTIm3
L6AtT0WmyGjnM4LUeOZxkV5lk6ZjIIeYRDaVwcR94riLqgaO0p5cUkw4QnUiiCjKOHUA3dOoYjoA
2oDpbJahgUNgqm6SDmiMjUIyOJ6X2IT2pskAMAnQOzoAyFSqYBRO0dAIqDktUPFnFaJeE/QtQxFQ
h9PRRg9VxDVJcAEQmi8juXuA5lPdkZn6E0YgKpCazAzO4UQ5e5LTMliBiFI8txXmfVJXAZarkqEO
7IC3aBnBibhQ5eFv7xoymdpQiQTMishReVOrh4EqWttIBdSHMWJGMH0zjsC8wDRCAaEe/FA3S4rU
7EJQI3bEIy71QiUGxQJpAYR2rcFa5cbDKffQKLF3XZ0kirYoxvy0yuFoSOAQHKzMhjKYUL3M2yww
LM29kAA8qHVvRhclpnChCuXy4BAFs7GzXkXAIXRgcpDaEYXA8SjYunVy8vDI5OvIJe3Pisn/AOno
ABZEGmNRuWoVzUrkZUmXIOIRuiQlEY7kDPA4BCAx2BGcyYjJaSOAVC3BGxYNPamEwzWqS3ZBCdwd
gVuUbfC4K4gBKIDFUAB2t1iRwTPtDPtX4T+pQErEqQu4gLzOXPn8oamGJA3LVaNc4nEdHncsfKvD
FsJdq8nmR5d8Urgezo03Yg7DmELtomcY7DxryOb4Zignl3rTOInCQ7Ub/wDTpOBU2jsXk81EW+YF
DCVC+5E/fWdj8QXn8tIQvjZj2ELyubAhc34SRucuWOOjIrTcHl3PdOfZ8pqHGwrzLbWrozAoowva
b1oYXKv2ISjMRlgYksX9K8rYfbgtXL3M3GonDYUbfNWQw8TF33h0J8pMW5mrgs+4o/iIGdr2ZAuR
2pouJY6Tj6Cia5AE4uMVr5WbwGMGy7EbfMxlanKsiAwdPZuebbA4aOewrTfjK1cGIIWoCMwcSjK2
0iSWEicDkpH7kxoQS8e0JuYhG9bylCq1cteMJu5iT9Ca7b1xylH7U0ZgSziaHqtOIkFI2SdUhRzg
mu2zcts5mMR2qIk0JUlF6GqJsSEgcYkfUtN/gPvMQE8C43ehp0ESiK4om3OQpSJNHWmdvV+cFjpq
xB2/Iqdb6vmzmv5W/wDsFcx/qz/aPzG5LbytRnERGbozhblchHGQwon5bl5gH2zgo+XcuQ9+UqDu
C1+fMzwJNUbhAMzjOeKaJE57I4o2rNicrm9VE7dw+ECkQhPm+ZnOYwANAhO692UfCZ1ZNEADd81M
cELmvTEezktMB39BhJw+BBZeXGdzS/CQShO5fnCOwmqbXKW8oiM2ORIBQhKNuccyYBefOMbN2Q4Y
wDOrcr0yYRAe3kUIQGmMaADqvIgDaVKMJC5OIcxBCB8sWbWwy4yqRneuSqJmXDEZIG/c0sXMYHHt
TW4Da5qfkEYzJM5eGEayLblEchYuaCSJXjHDsCjc57mpvEvC3FmHbvRFsVNZSOJ6BBxqOAevU1SI
A2lRjZHmyls8I7Sh5sxYBBBtRJ1P3KOm22oSE71xzKIwWrm+YiTEM0j9ilyf9GsPEBvPlSA3qF3n
b73I46M32upz5eEI6A3GNROlw5deRZ1TkQwEHH1KI5yRDANDciLY0yhUAZrTLA0TITjXdtUeZvHX
LGER4YrTOTSGO9fDuDVsKqCtMi0StcSxGYUZSveYAXEBuQlGHlwq5JYlDzpSneZpyJy2BStxOgx8
DYMnidEtscEBMibZlazASkc3UgADOA8I2nBQuc7cAgeI2gal8AowtxiLZ8R3ZIRiQTmckdYBiMcw
h5cjCQ8IxCOt9xFQsMcDkgJM6qNJQBk0d6+1CTODV1wF9xQi/Dhgo32eTu5xT1f3WX4uc2jcLyhl
QNFVwXNyDjl4zeI2uTgrty2NNyMTJ9rB1G5IizAggDN8KIm1cIAO2ilLmNMrkfCAGJQHmTtS901j
615tsC9bGMo4pjwnYVwkF0Aa704p2KhftXEO8LFihVaDLh2dDOmOCELoqMJLSS5GB29QyJwClcll
X1qJasvtUeXjU4yKDYBSuRfXuKjdsw1RuR4pSyVu4GJOLKbxDjHRijIkke7LJACT0qq4blrI1Nkj
oDHMIRuReIqtMQGGAVAiREyYeFaDZMBEVJ2qMojRLSwl724qVnlIRjelHxvh2KFm9S/F/MfEknFC
2JA37xELUcS5OK+M5uHxSFKo8lZl8UwaHZtULl6euDvcCj5WoXJV0hGdqJnGNC603AYy3qhBdcUQ
U9uRjuyVY6gMwmk8TsKoQnJQiME8S02Z0wFHUdPCAKq3AVtxFTvXEHVRXAAIeZwWhhDMoRiGA2dB
EOKSeR7lwhhtWqQ1yCYcI2IRiDKRwAxT3bcoA5kU6CUUxquCJIT6WQLBkAndYhYhUOKdwWWCZlWJ
VQQiCpEp+kMgRihEU6m8IzOJ6CpDZ0PnLogej9GqjIUNV5lvxjxBaJ4jBGOWKMXYBNbBkqyEB9Ke
cjNcIAVSAvhQMhtZRsSnoJLFkZ8qfNuRLSJyUtVZE03IzhZJmCDMnB1+HsCRuTYasGdahd0TkOIB
HlJ/fY8WJUpkN5Y1DtC4jKN0YwZ0b+o2bQYWwPEwQtSuylEYg5qcYH4khpA2OjfvFoZPmmEhG2KA
bULcI1Bog/iKjcJ8uEPF3oCGDYnHovG84ESw7BghIeEV6ZeRVkbU7QnAl7gOxRkIGJnSDDhCtiRM
5QqXwKfUNIRiwMX4SdiMiXkak9DYTFYyzBXk8wNN32ZZTRtzDg/Qjyt6WBezc90oOGlGkxvVSAtV
ouFGU3E5CoCiTLiljFStToJIRDg5UohMRrgStoT4BGzYlTCUgnTlN6ghcuhzkFC9O38IJg42Nkya
ciR6AwuREoHEFEwe9yZxhjKA3Ic3/TZiFzFhgdxCHLf1CPk3ve9kp7chMbQXWmYY5SGIQhzT3LGE
boqR2oTgdUTgR0CTCMxhJloujz+WOYFQhcsyEhmMwtUhoujw3I0KFvnh5tnCN6I+tC/y0xCeInHA
9qFj+oQ0n2bowQEz5vLnwzzAQIY7JBNc47QwkBVPA6h8pY1QnECFwZgUPahE6b1vKRdxuT6hCQoY
yKcYek4oiXaEZ2WtyOWSFu5GPMWj7RenapN/23MDNyEDIG9ZHikCJHtCAiTEnASo/oeOIJylmjc5
W6Wx0FC3ztkxlH28kbvI3jHVUxdwU3MW5TtgeMYoQLOcBMY9ilLlz5ciXAyBR/E2gG/eQoSv+3vC
5EmsZYhaeZtm3cw1gUO91qsXfNt7JY9iMb0JWpCpfBPAiQOY6vFEOKAjEL4V3VHZLFeXzVoh6O1F
q5W8RMBmfZkV8cSkAKkBcLgmoBGXpGlFxi29SNuRFOGJwBQ8yL0rIJwfXT5Th8nfq09JzX8pf/YK
5j/Vn+0fl7ksyY3A6ELJE5HN6DtXlQjC5VizsO9GV2MZuGA1EAKU7/E+EATpCpBo+6TRaTMAD2Y1
+pEWYSuEU2IGwTGUvZEcO0lCXMXmOYBda5Ayltw+pfDgI7xj85uQCdp6jXB3hRBkZCPhBNB1WnMa
sgMURYhoYtquURJueeCWMY/UEQbcLAPhOMgtUh5k85STRAA3enechEbyynCw16dsPICQGOQ2p52/
I5eQYR1tIPmWQne1Xr4xuSkT3DchGI0xGACMpkRiMSaBHmb12MbI9p3B7Fcs/wBOM58wYkRLNp/O
JK803pSvwNJEuD/cgJNDmYjjh9oQNyYeRaMXDkpuWiLYciVy5IAU2KEb9+fM3okkQtVEfUtPL8rc
tWhLhwi8d5OZWsi3YALiVyWsjepW7FyXNy1aWEtEX7Bkoz54QuGMtQEXruL5BEgRs2xizRC8j+n2
zzN44EDhBX4j+pXPL1/uoY4rRy8GOczWR71LzLcTGEtJIFdK1CtHrmCtYDRkhSsc+j8PI1j4Uy4g
KZ5oyEiAMXKMLFqXMXCOFhQ96kL1w2rZY6QGJUntmc5tqkWy2KIgGk3EMgvNIYA0AxIZfdH9Jqo+
XIwOzD6Ebl0hssiVptwMIRrLadiiLdoXbsqynIqYuQFuUMap5lgfCBiQq3AxyOKkCTpliQomJNUY
gGQiOI7FSgOzBcMwN5URPiADEnF0NR8TkBtiOoaR7LJqSjsXhY+pHyy2htUTiHzTiWu0fDLJcYbe
MELh8BwOahqnIxuO0dw2rXb4oToWU7MQYiYMZSOw4qdnlyBy9mmo4BDlTDTsb2lGzCQNwh5aqCKr
aJG2NfqV61y9oStEg3ZzLaTXBfiIT8wSm8xkASpXLZNuUQ5bCgQsAEzlSIlmtN62YNiniX6MFwkh
bUxoqHp7FimWKEQccQnuBwKtvUZTpCNQjc97Ds6BoDzlQBCwZGegOxPDHuUjbmX91mCjehdlZve0
Ac0DG4ZXI0L01I+XIOx4Sc0DOLH60SeEAVQlEiUDVMQmNN6MLfFIepPoiRmxTFoSKcHWRsoVGWpr
kKVzCFyJLmJeuBQu3yfxFqbgyfJazMbxmuZuXI6XAjaB90KU5kCMQSSdiHM3YCZmHg/ulSNmAgJG
oG1SvaT5ZlSlGdfiDMiQYUzJRhbBmRkRVabsTApwQuKIK4CYHcuEiYTXIkJhJYrHo0WxqmcAhcu8
Vw+odGqdEYw4YpohyUJ3i591aYcMUHzWqOCvXJAG5bYRHartu6AYmJqclo2ksdwTEu68KNMfRv0V
WCwZYsqVXEFvQuW4NA+Eyo68rmIaJGo2EbuoY5IRGXQTmiFxYRTDAdEdxVEPeliozGJTjvUb0PCc
VGVvNRuSi8pAEunZgjJ6lNatmW/BPckLcTkMVC5Y+I5acjkrdqYcyJ1HcibVssJVJyX4XljI3Lkh
xdqOm9pnMcQxUzfiZStniBzK/F24aTYaRiMwCvOjeiItUGhClzHLzNuMWEJkEeHNTtS5jVKUaMGB
Wi4wu+3M5rRr1y2RRhylvSD7WJQu87IkGuk4lCMI6bcQwT3JasgoDLGgR8wuDEEROSnbi3khg20K
uCMYy0ylSJTzIMgAHGbLeaepPsRhCYicA+ZUGJAJeiBuS1ksZumAAjHDciJAagSHfYjKMSQTjkiZ
y7IrVhGjDoYKRujhjhMYgry4y0jOeZUvMmbmrAE49i1jDCfYiTUEOJK2LY4jRimkSb0cF8U1ehRi
YECOMkNcg+QzZSMR8PJ1U0GaNiwWjhKQ6AE30IXLgqfCFb12SYCtRRkJwAjkY5LhiIk7B6M3+TOi
5jK37M1KGjy+egKQlQuoz5k3+V5mYJt01Qm2SlbMYzliCaAKdu7ACcaTgahG7yZMreMrJw7lwlp5
wNCOhjUbFLmeSuGEzU2z4SvJ5keReGUqCXYtMgJROL1C8/8Ap0qe3YJ4T2I8vzEPLvChtz27kZcv
8WyfFZli25a+UlpkPvLEse5GPhmKStyxWvlpGMs45FeXdHl3RQg4H5VVAmIjMYSA+tExMb1snAvw
oxkRCcdpoexPEgjaPStciHykMVK5y113xhIUKA5i2bcsRcgCNJTQmeZhntbcmBMC7NINXZ1n6dNy
IkF5nKzMSS7HJG3zlvSRQkChG1PbIJFRKBYhPZueax8MjiFpvW5QmA8qUCNy0dJmPFFlom161EPE
kVLZISszNi49YScBab1uN621JDZsT2LhsTJfTLIr40ROLsZQGW1NGTHYadVpASG9GcJG3M5x3pgP
OgwfM70YyjKzLGO5PA+aM649i4om3XTXbsVK+jqmZhuopaZGciaAmjIwlBmDmWATPXZ8h3qvRuW7
qU6KfMfNfynMfsFcx/qz/aPywisyMREOjCMZmexlGdm9PzZH7sQZgyBv3dOyIH1oRvSGmOAiAH7U
IxgBEBsE0pRhuQIjKYOBAogLVswhJqs5AWqV2ULQykce4IylOUiaFqBNbtgb2r8/arsxEb1o5e3K
5JnB9lAXrptvjbjUy7FE8rB9WMroPDvqgOYueYxchmQFuAi2wfIPjXoxOx03IWgeJmuAgt7ylLn7
0bmtiREYNkEPKswjpwLV6TKREYjEmgUrDDmpSiXtQq6MOZ5ec7UifIs3CQAScgo81zMbYgRWNyRA
D7QF+Lsm3RoXI2okCIGBKFwXjA4AxoVDyI3Ob5gReUrhlo1bEZczpEJt8GDtHHagbdqFsRDCTAfS
ja5OH4m9EgGMTSqM/wCpSAgSCLccQ2SeEYWYAVlQU7V5P9PtnmLpoCPChd/qd4wgaizHYtNi2I7T
me9A7EZzOkCpdSkQJTzcOhGAMOGtGB2MuIeVbDmEfaKMZUyIT5FRnGjYqM5SZw7IQsQZ3rJ6DahL
mpa9oBIB7VosREI7ujy7PiHilsTyk7+InNMHfYKqNyEyGLyfYrdrkxrlXVKIzyXnc7cNyYA0wyBQ
kSbYlEOAKAoMOEUBBaihK7FpeGOk/Wg/FIjDFkTAHUgHkWyZCTaYZDNHSWB8Q2qox2J4EwKpMHtW
uR1TZgdiJBcomYcFEaRIfShetPShicxmF5kTq5aRYwOMCdil5c2icBkvMvE3SBwwjg6PMX5+VabT
Zt+0Sp27ktUHpEVL7l5diJhDYPtKly9iRhzFwvO4Bmhcnq5rnpfdxlgDtRPNRBv3STOWJqiLcpW5
DYVcsagZmWonAmiuzlWRDRAxJNAhIkiFwZULYKxdmdUCTG2+UmU4Wx8WTCJAdt6navAzg1LgyKjM
3HtSLB1rMeH3hULVEvu6lCsHTSBCpJY9BOSlI5lQJxlgh0GYxALIz24ppAvlKOPqWuBFz3ozoUbs
qPUblOBt+CoO1aqRgKaXUoSwkF96Y6cCcG2LTZxzkU87hL5BNgmEjvTgOd6eHCRsR86InE4UUpQt
m3ADxPmviy1S9ZXh0RejhiUIwuabgw1Bj61+FvXZRJ9nIocvfNbQ0iQwLKHK24nyYPKUzR5DAJmo
uY5fSPJsSbXjxKV8EESDEszK5etH4QPwwNiN2+TG5CPGRtXlWXkTgCEBfgYrhkFg6fSx3L4cyNxV
RqC8uAaRTnimcZHoYVnsWqZc7Fqlww2rTbDnaqlUQADFNMugeRiTdOIyI3qRvWjAENLRXFRlcFWJ
Ho8KOz+jum7WIESrRtN5ekaW2MuX0h7okcMdLKvTqKpmh0dqYYnp7CnOAx6Idq1j1IAVMqshbAeU
ZYKIjDTEBnO5fHu9wVIajtKIjGmwUQ1zEaYE1UbNkyAfiIGKMhCXl4k4lS5i5B5CTVVi9AcGtjFa
xMN2q5fsx12iwI2tmjy/LWPLEvHKVXGxcMAXy0p7tiLb4rTOyHwGlGMgZTn4YjILzuckwx0DFA2r
Yi2YqUTKgGAzUTGDW38RVeIKNy6/wzQbUbjcRzRaixfcg1AC6iS5rUpo4LyzJhmQo8wbgmSXAChO
4dZjj2KXlE3LjNpAzXlG5IQm/C+S1TOojatIxyAWq5V8kw6MW3lG1cJALFwiLI+JHw71bjdiYxty
fCrZomA1RuBwRgjauGka29+5a7geQwJwCdn3qcIlpM4RiTql9CiZeMCpKc4Zo2LBaOEpIqqf1BC5
cFT4QhO/bIiA4JwWuDU+pcZoMh6Vy8LsfDcjQgq1yHOyj5EJcHMN+0VHm+SvQuxp5gLMXXnc5bBt
8wA2nADchOBcZjMLzbJ8q/GsZDA9q8jnB5dz2ZnCS7cOgRuggjwyGIXl3Qb3KjwzZyAhOzNyfZOK
1GOm6MLgoULPMxN2z7M82Qu2jpux9oUI7U98NIeG9H7UIXcT4ZjCSaYfZLNcYN20aODULVEv9fyv
RKIBxEgKgqVy1IXYnG2zd4Xl3mtzZwcvpTxIkNor6UghwckDGUoMXDbUZS1zqCBEMD/etJlO4wBE
JR+gqLS0kjAjBPt9A0xqRlyczA4iP5y089A/pALVHTcGB2oS5eZth3MHp3IjmYGduumcalt69y4R
2EhPauylb901K0c1blC62LZbUJcpc1QagkVp5qyYSPtxwT8tcF0Y6ZGpGxRN+Plaszg+xPAiQ3dV
ph9+aErJMTqBkHxGYR820Llo5jHvRiDplMaoajlkynG5ADQHJBxG0LhkCdj+lEjGo2L4cnrgdiaV
QADqGCBiXf0z5+ir8x81/Kcx+wVzH+rP9o/J6yA7Sg8hKRwiDWqANk1wD1QeEoWtkfEVqM52rXuu
5KIhOYBxrivhxYnEnFPOQi/rRBmTIey1VI2OBsHiSSiLk/Li9DhTsTXbpmcSWr60GhqMQwMqpgG+
fnJYDElARjK6ZFo6Q4J7U5kOWcsInFlKNqEuYmaGU6R7k16Xlw9yGKEm1TAYSlUj5AZTkIxGJKlb
sxnevDCEYln7UGunl7MhxkwaQ/RUDdkb8rZJiZgYnaqADs6TcuzEIjMlG1yFqd25UamoED/U+Y0Q
FRZthv1lwaI6ccDMlHyOXjrt+C7NiA+YRPPXTc8w/DtAuxGKNjlrDctdcXZSpKWxu9T5a9Frlstt
piCjyV0tbveAnKQ/KpWrY869APKAIDPgO1SPPGPL8vMcFsS4ojb2lPDRbiBWZIf1o2P6dbPMXjQF
uFC9/VLxjA1FmK02LYjvzPf1CIOWLyAzCFrlbZuXwQJBuGI/OKjzH9SvGd8VDHTCO4KcLUxOMCwm
Krz4ihxTHuQeu1C1dwFYjJ0+CD+LcuEetGQoQM04q9TvQjGj4lSt2CYxiWp4pMhG6TC3QUoTvWi3
GUTnKjqRtgao4SnU9wWmVwgICU5S3LybwIg7xlmCtQOqMw2oIQtxJA2oSIby8R2qnUGljHN+l3cF
OCzlg21cdQU0g4OIKAZmwZATvaZSDiOJK8q2BAGlay/uQndfScTjIrRbGkbGWmMdd6Za3DaVO9eI
lfmWlLIN7MVxFkJQLTGG9cYMSPaCeU9X6SjauHTpDBsGVm3Ylpt25uZnaWCEREMMHqjytiPw7kBK
UR71T9SEgRLQOG2rkAdeqZ1xPskUZEcvD4cPERtKhzMR5lqYcHNimdpbD1XaqeMjFO7xNCUBnJRs
xwzO5ADw2ww7emtXyUp2KwNW2IGY0244yK1RL6RxRGYTRnwmsRuX/aSAJ8ZzXGAbmZHRC37DO20o
SKxTYpgeJatTOmEgTgwQuczJ9QeIiULNrhiE/l6pH2jVcPEHYITutM5dqjO9pjppARK0nHEb0JwD
SFQUbcSIEhtYxCnYncEpzmZ6jm6FgETndlGLDIE1KjGA4QAAhy8gDAx1XB30WuxDy7gDCQQNokwt
hmiHqEDfPGfWvg3a+6VxQ1DaE0wYneqFCccJY9Hl2qnMraStVyp2LTgBkmiCU9wtuVI12rZ0XJU8
wljtZXRdbRpLuuHBqN6B5Fk1uOpZDctGkCeL5LSZAnMLip9S3eg5iALaoRQ8++Y2cLcRiyF+Vw3Y
zOkmWITSD708D2BNIMhEKMtoT5hVT+rqEDFwy0jE49Ee1GJzUSMNiNyPhmAe9ByzbVWTnYKozs2j
p96SadzSThEUUZztapk4mqtW7sPLiCwJwfJEybS1XwZT0AG3InhWqQAYvEbCq96qAjQdwQEYuTgF
OwbYEjSq1jLJkZwtjzDjMo+aCJAswzUZWokAhpRltWkARrUrRO4dIqFpBcb1RytgKcl+hiKJ2ri6
aNSaLzBInmDXQckI3CZSlU7lLlTB5R9rcQ6lchMgZgoyhXJlWgO1YPL3ijHZmqFCrBfELNgiIUiP
aTGdMtq1kgXIVi+e4qUrI1RxlbOR3IUY7RkVpuy4R4RtRjpBJwK1mjZLX7OQKc0GaNjly0R4pdGo
4lfYhcuBzlFWpXLRNoVqKKLRDHGK0wGkbvTytXRqhIMQUOUhq03DwB+EjJDlubHmiOR8Q/RXm2Jk
xPf3SCEZ8E9mR7FouxcZHMJpfG5XL3ooTtSEh0GMwDE0IK87k5lo10DLsX/cgkChk1Qd4TSAuWyv
xHJGUovxRfJeVzcdExQk+Eo3OVLOH0Gse5aL4Pl7DUjsWqJeJXmWTolsyWi8PLlkThRPl8rMbkQX
zaqFzlrg4R4CKSQhzMRbMqOHod6aEhLsPpnk4LMSC1E9icwDSQBei0XBLyY+CRi5718Q+XcFDE07
1qBcHAp+vpnESicitfKzNq5sejLTzUDdt4C4BXvXw5AvjE4+pajHRMYTjQoStT84A1iTktPMWzCQ
oSRghc5O6RHExBovL5m0ZRMmjMMQdjoysyMJMQQKEHsRjMC/bAoGqdy+FOfLXQAGNBTIoG7HzoN4
o4phICXumh6tU04vRgcwoi0RqjiZVcIyuQMJS4SYj6UIiUZW8iRUNkUBI6ZZApxUH0jGo3qnDsZY
6y9H2Ix0ESAc7GVC/o9noOz0FPQ19G/oea/lOY/2yuY/1Z/tH5HxzEW2lARuCUjQAFRvCEDGZAjF
3lXcECYRtROO0oRkBDbJySgNGoj2iapxHi2mpTyLDenmZNtai08tCdfb0uFEctckx8UpDSAvMu8x
KdzsojdmPMuH2pKgA7B8/uaBNrE5mgjEuVKNm1G3EfvJyp3Jrsp3pgVhAnSTvQFgDl7TVcV7kPNJ
vEF+LatMAIjYPkHxLg1ZRFSe5aOQsSJdtcgtXPcxKQJfQMty4BxHGRrKm/qHVN5CukYqVvk7Wiga
Z371K9/U7pnHxGDtEI2f6fYNyYwjbjTvkpedd8iwWyaXYpXIRHM350NHw+paWhy9mMsA7zC4x5sy
XMpVbcEwDAYAKHMWiPxMPvQPcydagWIwIX/8S5e3D8VACJMzql5hIi9Vd5jmufuRvW6zjhEAmmKk
Yc3cvwtlpA0QjYtiG05+vqmcyIxGJKMeXezy4obxHFL9EIxsxatTmScSSpeZLSY8Q20V6XLS823I
CUQxZ8wCjK4QAfZ2IxOANFqHehOJYhRmMfaG9OaBfDG6q4ySDkQtdwgWwGIXlcrHywKznI5KUzPG
pYYdipcJ2AhDz+F34hgpaJaxkXVQ5CAFAmiNT5Zoxl4pnVp2AIUd1JsZCq01DDNU60Yzq+O5edbl
rj4gtPMwHlEFjHatRLGWEc0IxAswlhM1m3YtNmDEeO9MvI9680jVPeUZXieI8LigU4ShqMMKYnJk
Z3LRnzFygZtNuOQCiICAjE0l+VQnHSBBgYx9qiMtIoGMR7K0zDfUnoqMVqBZqlajcjqwDiqjdvTB
MnBmcarX5kSMmK5q5fFLtwyDYHVWiu83CDStRMpDKQ2KN23aLaGEAN2C0XAZQm5llpWrXqtb1ix6
pgc15chh4UIRjxGPi3lOMXqd/SaiO8q4eanE2zSDFGESxehWiTGYLUxKIJMZxdhkQiLUdJwJzUiD
KcpYuEJWpeWTi+KEbkwbscJoQv3HbYnlIyGxcMAExgFpEW7FrBcHEFC3FzE+EbEbeZxktUrprgEP
KOqYUW8ftdqGosBi6jCILAYhNbuOB7y456iMQKBMIBaIxEQcSBUIW4xFyBpGZFQp3uamZm8AJH3U
ZWpeZcI4YjarZiBxDUTvNUDADUz6UJyi026GlEFPF49iERLVGJcleXbNMDJaY95TCs9qlmtU67k0
Q3VN+1Py44A7ULMSDy9yhnGhWmZeUYgP1MUWlpiAjG7Kg8KMRIyc0ZeHvK2D1Kk2K8dR7SJMtRO1
actieBpnFEk6WRwaNCdqcdW/ckHAgCrcJTEJxDEE5IctZkJ1eRGTdNQ6MrZY4sUYzDMm2lNkoxBw
IQPSXT9EEZHEiiAQ24LXckWYcL0ULViAOotKRyTAOtMoj80qUZgX4GrZjsUpXfDIuAcR3ryjcJAo
4K+HGUyaVwRlJhM4BfeB092R8yOQzWapJpRqhduScj6kWqTitOSxHeqF9wTxh3lPOTdifE71SnRV
aYDUdyjYkRCUi21lGNsG4CHMjgtVyyfMjSERh2qV3Q1w0Y5BRjGIlq25LzAeMl5lcQ4d6a3EDo4U
QzHchIO+8radiMp4bEYQDHIjBTuk8bNEnaU8iZErzoRLHFeZaGmftw2o5GOO0JpnijltTYAYMhqP
hGJ2I2LB4fakM+jUckwwyQncDyPhCEuZtkRbhcURlAhwz0ozOnmXbD5DGU4gyj4SclWkhhJGNxoS
OE24Zdq+JFq5HDeEPNOuBwmMu1OKgo3+TOiftQ9kryro8q+MYmj9nTrtNbu7Rge1C1WzdzD8E96F
u+PLuZ+6V5luIEyKjKS0xDiPitSNf8JRhJhMYx9oFG5aJuWcTEL4ZaecDitNwOPpVHu2OziitVuT
7RmPljSiC9C4Wrlbmhi7EIx5qAYFni9RtQMJxL4B6+maQcHELWxExgXR8n4lskkxNW7F5fMDyp7T
4VqgRIbR6DTIahmCvMsk2rgrwlgVp5m35trKccggLdwas4mhRjciJAp+WvSgQKROCMectawCzgZb
Vrs3PKuO5Dsa7UNUPNtj2hUppgOQHEqGqJ5ebA4RkSQOxEczYDxL+ZDHtQPK3RKJPFCWI7FpvRMZ
bWoU8S4w6uDoyiTCRqSN6hKI1iAFcS4QkCYCUi7rjJI94BOD3Z+lrUGiExRvZyKeAEgcti03OE57
E8SD2Lch0OtZB07clpMjTNcEg+w9WnTX5NTrV9FzX8pzH7BXMf6s/wBo+nacmOxECZJGQCMrbWwM
NQqUeMQg9CzFGNyYkZeKRqU4gJSNTIocIphuTzIiN6PxBIjKNUZWbepqV29iJAMCcAAwQPMXy+wV
QF2UrsY4CWCEYREQMgP7AvcmI9qlDlreqQDiUsCn52+In/pQq6B5OyAZGspxqPWv+5umb4xAYDsT
W4Ab8/kGqchEbSiOXieYmCxEdqI//HsSGPhk5UDekb84VBltWmIERsHS9yQiN5ZR8gC8ZEiOkjEK
3KtqBMxcqwb2WQuczcjcnbcCAL/QpW+R5eNqIp5ksO4I3v6jeMo0+HqOj+9Ss/0/l5G4DxHSwANX
X/dXTGIIMYxAHrREBU+KRxPQ03lMgkW41kQNy+DZly/JyBGuUtMj+ciLsPOuyDSuTJJqrnLSrAF7
ctsTgrXLc1clDlrkuLSc8kdHMz8uWMRiQvK5eGkZnMnf1RbiDdvy8NqNT37EL3PF2rCyPBHt2lCE
i8yOGEcWCH4KzK1YkPvZMCTudHXzErl6MWuP4ajwxUrFgi3IDS0AygebtStkvHizbNG5Dxxx7FpK
ZeXI8E1olgcDsREqg+0jZ5QSnOR0gg03qFzmpeYR7Dkh9qNy08deMRUU3IkQMjsBWq4DCblo7lG3
44zpKJ+tSFuWm1EkGZ2IxENQzlJBoRO8BcAERmQqGu0oEkEjAnALU+onPocYIMHdAxqPaRIOkxzK
jAEgSxywUDE6g2K4xX3hip2JXDcI8ERXuotcjCxFy4FTVTsWY+ZM0lI1bsKMiSSKVTREQYexgStM
joEcA7epCFx5xwqPqR5jmIvOfgiD4QiYBrh2mq8Z0u4zK82RmZxkDKQz3K5K2REXZO00J3zFz7tQ
jKMST7IGajNhF8iVp1ADYKJwx71x2yw3LSCQWdRs8xEzBHDtUoAabci0wTxFirdu0GtxiBHsXL2u
Vi4nae8BhicU2kG3GpAxUo3+IE/DIxiF+HM9WwFMKS2HqRkMgUBM1kOEIR2JlitMqgrVMksXjVcI
AGe1SecYzZoiWJfN15c6yxD4LWTUnBatTHtQEMNqeUzE7AgNRcZoB8EWKAlU9DZozn4jhuCEbY4c
5IyPFEs4KBtCuDhR18N2IqDmvNuUGQUp+yaALTqIiMQMyhpi+11rAbUKAFHditMZN2BTuGJIhgMy
nnHRuKNuEXhEcJlsXmSuyjeOzBcMhdG/FNftmO9eJjvTQq+JUrcT2namFBmV5dkOcytVzDYmiG68
bZIjKFCCo2jIajIGmQCkQcQG6DGGWJRNZLABPIvuWCZapdwW7YgEQ1WRWmXcUxWaeNNycPHswVJO
uId4qnHRdjMUMBRGUXOo0ByZahQ9FOlpBwtVk4V0lMfEndRO0dG5FsBTpgMmcpsgoEYZoE9oRuEk
k5ZKNuQBP5oYgIW4PA5TXlzm9yhEvsUokao4ymo2eWlrlIkEoeYNU8WyVWG5NKSlFwbeIknzT6gG
RlqAJpRcMZSKcQ07yU87jDYE8nkd5VAB0VWLnYFrlHTE4GVEeaEnhjpAU7vMARHsGSlzl46oGkIj
N1LmY2iOZPhjj3o3uak4uVMJKVsQFu9HEblDy5abhlRti1XZGRRp0s9M0S9TtVASuFwE71WoS0yG
OxSBlrJPhCEcIjCOQQmWJ2leXKuoZJ9LyiKj347e1C9aPCcCPtWg8JNbcsn2I6uGUfECjZsFoCkp
bVxLUtMcEJzDzPhih51kmAGYoo04ZZbGWmAYHH5JomHBXl3gbvLnCWcUb/L/ABrJqY7s0bnLnUAe
OyTxRTxoc45hPLhuDwzGIQtc5xW3aN0DLehKBEonAjo0XYvsOYQjzEfP5bAT9qK8y1Lz+XzD8QCb
CQqMpBRE409i/DEfpIR5tp2j4b0cO9fieTn5d01EomhQ5fn4m3dBYXPZkqYLzLB8q7jTA9qFrmhp
nlLIp8vllQD2o3LJFuUsaU7kI3RG5bA8dfUhITEZZwkWIKfqN6I+ZAVzT8pcNMnYsm5iHGKExT2z
hiMx196YhwcVrgPLuZSjSqHmfGsZkeILTCTT900Kr6kZgeXMhiY/kQNufnWnqCHICMOYgbM8pgMt
XKzjets5iTVAXo+WTg5o+xGUaE5wKIBF6IwEtidpcvMHiBLAoAgXYnCQP0FNqEZYaTj1mmAUYxxd
6qW2MaSfFkJSJLAagcX2hAsYg4Pn6ZpAEZoeUdG7JSN06m8LB1pPDLYmEg6sxmNUJXI6x+a9VH+m
8pHTKJBnIBgIimlVck5rVy0iJD2SvL5mJbIrVAuF2fNfNfyt/wDYK5j/AFJ/tH0bSLy2CpWlpatj
IgarUNrVRa9KNvIkIjzNRliZBytWkSkcyF4R6k5LDejruxBG9aeXYtXVKgRjZEdHvsib8xKR25Iy
m85HE5JoQA7v7BPOQiN5ZGFj4kh6kDclGzA4Q9qSNu3auX56uKUhwxRErhs2PciACfUhJjOQoJTq
R8hMrt2NMgXK08nYeMg4m7qNznrpMwXaBRNq2NRqZGpfqEzl4fEBVu1G3ysfiO0aagd5U5/1C7Ac
uDqJl7W4blKHKWJX+YFTw8IdSjcumzYLNGMQJdilfnpuXSwJLEltylb5SzG3EU8yRo25G5zZN0lm
g50ii024iI3dGq9JiziIDk9yEeRt3LfLSH35Gkk7nQv8zKXMc17VyRP0BMKAYDo/E2w97l67zE4h
AihxCjrPxrXDcHZgeoZSIjEYkojlj5VnA8zKj/oBRtcuPOvXHMruOA1cUlbuTEbNsuJWiSCcnUTO
Pm3Y43JEkoABgMAEZ256TNnB2omXHdkXMj9i/DztAW4XGkcS2RTnAhEjwSqCtWzFAjEVQMgZXIio
2o+dI24HwxBqB/egLURFqPmnMtJwBRiIkgO8t6aRd8AixJbEKVA5o+zciBwjYE0qDZmVEwOnMDai
CAGy2rjrrfDLNBos9ABUpogsMiEBOLA4kIx1CiFqwOKVRI7EZxu0GNKIWueiJQNDcjQjuWrzMBqj
I7ChZsid04QiKR7yn5q6LcDjbt09ckbegU93EntRhbOi2KsSyE4SYYEb1E3QTF+JlOVuUYTOEXqj
K/eMTHAMK9ijzHMfEPs2jgAdu9BoEOKgFqrXV8GzXmxuGIyqHWmkhkSHXBB5xyiGWgR4BSmIVbso
7jVTN2WuHsrTcj5e/JBiGOCrUKN21w3I1GxSneFQGgAoW7sQYg+PNeXbm0cn+xTN2XHMUkc1OciH
0nSNpZTnfloFyZlAGtCo3LsxpuSDXTRjsXnvpNqI4hmifEBmgRh0CRDjBiiY8IAwVRRVcKhWKxTo
VrEuCoyIDxo4xTsJ5uC0vUgJQdnfaqTAHuyoU5mB3puXHDKkSR9KlG5cFyMTxtkmtW5T34BUsgd6
47RA2iqYSba9CjDSRGOBOafM4JyeEYlaQ+pC8DlV0BJgRTSDioSuDE4KZgRXiCMZ8BG0YokyaBNA
FqnceBwEtqEbVuOulRmnlbEAPZ2rwuScExJmPZGxRtzGiZFZGlU4LhVrCH0lcVsdy8mzRsVTwjEr
yrQYZlPjLb1qlliOiM7zmcg9KMoXI3CeXmfCcQVIe6wHQdBYSqVTvKxCrIKlTtWohY9BnLJF/AaA
LUMD0Ma71RZKsgto3KOk6QcUYRrEh+w9F6UQ5FsMEI2oGdweKAGBQF2Btn3SG67EVyKfxQ2hWydi
AGK0RNTii+Z6QExwWjZRaZFmFF4lqfiwdAksRsTk6mwdTtDh1BgQhpYSHtKs6os5XDbPaVUCK4rp
HYnkTLvVIBbOipWa84x0wGZUpibCOQClGcD5UH1SluRly8h+GgQ+lR5aYMYQNbnYhZ5X/wDGgw26
tyhYlAwpwQGEircjKMhAubZyCfS85Csl5ZmImIqDRarU6Qi2oUquKRlLaaqhc7lSJbaqRYb1rMmi
VKQLNR1x4nBOcQXRjboSXkUDPOoTGchA+yEdM5OMlpHFCYcHYjOeEU8anegYcM41iUZgcJpdt7Dt
CfxWZVBGMTuUuXuFpYRuCj9qMZBPIBC3b8P1oSlHVM4BPdtsfZ1BawA9AfU6BnlgPkxBDg4heZYG
q0fHb/Io8zykhavRxbPdJaLvwOZjh7s15d4aLuWw9iMJjVE4grzeUOu3hKydm5ODpuCkoGhHQQQ4
OIKN/kiz+O0cD2Jof9vzcfFHBeXzUNccpiobejd5dp25VMMQ3Yjd5KWiQrKxLAo2OagIXBjGX2FG
cJedyo9g+KI3LVaL7YnEJpxEhvRlD4ln3HqAnga5g4/LWNQhJhCYwIH1omEhegTUEFwjC40JRwOR
7HTwIkNo6zegOuIk+LhH8OYh83IKOoEwh44yLntigTEwiSwkcHQbPr9q1NonlONCiXPMQ2ZsmkDb
nslToMbkRIEMhLl7xgRQBShztoXBg4GI2gofhLohMYxnR9yfmICIBYkfWmeNx8sUfJkYE1bIFauY
t6zHCUB9KezdOn2oyFQStF2EhIByQKNtTxLg9YExBO1cEuF+8PsTRJmQX1SoCNikZWzDSHL7NqeJ
BfMdBiOKS4YxAXFGJHYm5uEonbCoQ0XzXAkITsX43CfZzXFHvHWNGO0YokEasRIYupczZEpxtzIB
iHLx3Im9W5IsXot/QYzD7815liTwxZCF0abmBfMp9vTgn6x9JX5JzX8rf/YK5j/Vn+0euzvL3RVa
PLkZbAEdYlZhkwqVIxuThacsTiQtQuSMjiStbapnM9GwLjmBuQjZiZE4E0CNqxGJahkMB3rXzEhP
810bt4CUjhEYAJo24gDcmAYbv7BOSwRGrXMezGpQNqPlQOMphmUo3bsuZlI8UYj6lptWo8vabhOE
kJ3pSvXPeJ+paYRERu+QarkhAbSWXlcv8W4c/ZQ16OXtP4avII3LkBcuHF8PUmhERGwBul5EAbSt
NsG5LaPD60YQaopGL071q5q8AZsBCJeRO9SHI8sLdWN6ZFR+apXeeuSuDHQZHRFGHKx8y57kQX7y
p25y8i0WaQHEdqMiBOcsSRsTRDDYOh7sxHtRhyYlobhuiLiUu/JC/wA3dncvkcbFot7vYhCI0xFA
Bl1DEhwaEKdlvhT47R3HLuUJk/CucNwbjmgRUGoPR5f3vMS8FmFZHt2K1ekJTtydoQHBAjI7S6t3
eau6JRBa1GI0gGnrUbVuA0xzIck7eoDsRkSwzUr09Ny5mGfBXLdmBhpjHGmrsClEDiHhRtkbijr8
WQUdQGg0ZRJ8JrRONQfBkJScxfh3IxA48huzVqdyJ0MQe9XJxk4mQQHzUY27ZBfHIIzNezapGZLn
wnYo6DKU3aQIp3IyM9EhU7AowtjVbHin+RC5ZkJa6ghccSDudDy7ZMhg4ojy0IiV+ceJvZCieccE
0ETiI4upW+VjO5bJaLCjKfmyFm3bLGRq53KEfNN+cQwEjw03LzotbEPDpYI2mBf22YqJvyFwyrQu
z7UTbAhpr5mGCEyTMbHUokE3JZnYvxUZkSyiaknYhf5h3wjA4AFSlqlwGrB1USJGMmoj5Rdss0NY
OkmmxAxJjPEUdA3iImOJwdcMRbkfaGBTOz+FsChaMtFzYcCvLmGjLCeSMAI3Mg6MOZjG2RgnFQtJ
ANo5oyHgJ4e9DypmUPZO8IiXjjR1xEltpULc5iE4BiCrXK2LghbtS13ZdmxCELhloHgOEmV8xlrl
4Z28ghahKkq6cguIsdicVzRlLE9GHRQrFbVULFODVVAO9VJLYOqgE7SmjLSN1EdI1E1Y5oDDcE4G
KxYblUqU4HiAaXYhpmGydfEqBvoom1HTONTJfEuSluFAqg9roASOmJWuxLXHMDFOeJ6EFPLF8DvX
l3oCIlWLp8SR4UJ27euDVqjhGbVCIhN6sTsQlckZGOCMIEmU6DcmGWKaPjlgmGJxKEI0iMTtVAnK
c0ZeJcJclGM5UCbUBEbMVQajtKMpgCIp3oKMZuJRDEtsVuxb4hGVe1Tltl0NuWk0Zb0ZSHEcO1VK
0nEURBQCFqOJx7E6Y4hMckNmaEIiuarhmhOGBThPnmEaVfHom/8A0xTvUbluI13STOQFXdG4QBcg
RpOdaLUcBisCO5OC4TE16MOhjmvh5Yhaj4sgjI5p9p6NUu4dMrmWSErYckVCpFMAB3qswFxXT3Li
nI96ch+1UiFgFuVTiqOewIQjF5HJDzWgZYDEo82bnCBqAiFOVyJaOBJxROgR5eBrsYKNrlrjC2OM
DBD+nxgbkAwnPYo8v/T3iPbmyFiUdD+K5gZImJ8uMq6TjJSNyOkWpGM4bSoXYcNm9wShLASyIULp
iJaC8WxWrXlhmtBAns2ozMQCdq2AYoE5bFGVstViFomRoAcox0PHCqwEY9qMBx7GwWu7JnwC0gEw
FHWqoMqsvMOApEFASIidgVRhhLBVL6TlsWkDSM2QGJGfQb8cAPiR2hGHLEizQtvXmxpdGI2oG5UD
CWY3FeTAcJ2ZoSnHVM4BRFy00Igu6jqDg4d1VpgGHynzuW4LntRykvKvDRejlhIHchb5oG7ZFIXA
HI70JEm5ZykMR2oSiXBwK8yJ8u8MJhCxzUdJwFwYFOKjb0ahwXR4ZjHvX4f+oRBB8N32SvO5KYi9
dHsnsQjcj5F8LyeciA1IXhhuUYSHn2PexLIX+UkIzAwiW+pC3zcdEspOnBcHMIThwTGYz7V5d+LN
QXMinBcbR8uMJxDHMCoXm8tdDgeAihCEeahoehIenantzEuw9evX+JES7UTaiDEisHZjtCAkJAmk
SJUdccDOTsTHajpxFJROI9ARONfeFCnsNcgPYkS603R5MsGlg6cFwcwq1CFyI0XGZwKepPbmL9t6
xkHIRjL4MzUTAoJHJHVKF2EQ7xxLIaZgS900Kk8WMqEiiIEtY9nUcBsUpGJtGGNadqI4bsAHEhj2
JzLS2IlQpgDI+pMYkDbintyEt2fSxDjYUzkUYbGRmAZTzD0ClaoDtCJJd+qJRkQRgQUNd0mIyK4p
aJHag1Rkel+gzlhGqF/k/BMmM4YxaRxZXLtyIBkTJmbEv1LoOGglEwLTFQtNzijsKPOcxwWQHiDm
ykOWJjCJIDdq4iJDemuQ07wuCYO5U6relf5FzP8AK3/2CuY/1Z/tHoqQEwlqlsCMI25GQoviPbhs
AUj5koW3LVqyeM5GW0ozZ5EuSelpSD5DNAaCXwQj5Z0FnI7Vqi8bdKSPrWu5X80YKkBRUDdn9hD5
lyIOx3KH4SyJA+1I4Innb7v7EKkepNytiUQzC7IOfpQnzd4ykK6Y0C+HAA5yz+Q+WZ6rmOiNStP9
PsEQBaVyVPUpT566ZxlhbJdu9NZtRjvavU45BzhEYlPYgYxznL7AtV2UpkDHCIQjEm6TTRbjwjtW
kzjytl6RhHjPaVK7cjEyPinIOSdyla5O0fMjiZBogFS/GXZGBbTbB+tHyxU4yOJ6XuSEBtkWU7PJ
NOUAGkXIkTkF5/NXiZ3A07cQ0W2IQgGjGgHXlK3F79rit/aEQQxwVmXNTM74MoCA8RANFblylk8v
y0vHOUgJyG5C7dlK7zRczuknPJC3biIwGAHXloBLeIDEhQhZgXJANKAHaUOZ52QFwAtMlmopW+Vq
AS1w7HXnhgJHiAyT+0OjypHihgmOWCGsOQtds/EAYDBR/EeAUbdtRjy8yY+5LBEXZRAFGdyUWIq1
E8jofAvQo3JSAhGpKnY5aBNygMyMneiJRMXlbzj+RH8N5l6T1BGCJNjQBXVdLK9/UOYmbVuBeOgV
kRsdQlbtyjbfju3C8iMFHkOSAhMxAnMCkI7e0oWI0gBlihatA3buwF2K08xLy/diQwWmMgDAORkt
Qhq96T4I2bZIjLxIR8nWRjIn6lKVyEYmVIRHiQv34G5N+CL0iFruyNfZNDFSjaHiOKnPSJxl4gvN
8swBzAwRlFrlkHaxHapCPE+SBucuJgZstMLhskmtuWHYmuPO3lIeKKiTHXGPgmce9MSyEs9qEp4j
ZmibUywwio+YXBFRmpCY+GWNMkBCuZKJlQMxUmwfFODVabobUG1KUbUtU5AgNk6u35XDKV8v3ITu
3Ph3ZAC5LEFSlCAnchFoS2lCxMaAQTqGSjclc1RkWFEblmGuIxWi7AxkMkGPrWIVOnDooSFSXr6K
xKzCxWKFVipSiSNXiCxWKe2QdoOactEe6sUzp4HSc9iE7ZAOMu1RFwtAmpGKAJbSKHMKMwXMAwqy
HDFs3WphqarI6aPXoM8hQIylQCqMsSTQIE+KVT0kAamxZBgJSlWuSpEBYgdyqqBUr2IDJ8EBtKhG
EQzBW+aEWnIVHZmpEe8W6PUhLdVajgEYjwxp0AnA0KExmjckjI54dAGUqFCYzRnJGRz6DaljkiEJ
nElke3ouiHiNunajbjb+HbLEzoNW5RjzIEbT8OnB1Jtj+pRERpOaqO8I6garzSX2hPqqqFae9+gT
jSQ9RRjIMRiFE7ZJzSKiB4QadMZDF696MJU2diZ04xWEj3LhtSPcvKjEeZsJQlf0w1YZo815oEAC
aDYja1SAAd0OWswM8Kl81E2IgXy1Hqjc50iL1BOKn/UZXXsxJIKGuJhCApI5r8JyoH4eAAlI5hQs
8nA24U8yQGKNg8NsuTI4sV5kp6AaBsSEYgOAHfarNAISlpIZSlJmiHB7FrHMQiY0nEliCFzN2xHz
eUumIEcHMQ2oK3y9q35NsS1Y1J3qOuTzG0uStU5EPkFqnb1VYkoC3ABsDmiZSNcQjFtTjMZoym8t
gWuDBstq8y6HLswQl72AVRwtRPMhjhsoiwDxyQahTRkQBkm1vKFO9G3IEOMQjsTRHf0GUi0RiUbd
o6bI/wCZCRHCVr9jMbFKVgt78Nu9Q5gtcgzn81C5ZYkUY5IXGAnQSp3oGZwwHysTjwXRhMI8tzUd
MsBLKS12ifKzG7engG2xfhPYqFpDGOYRhMOCjKB82ycY5ha7Z7Y5jo0XIiUTtWqy97ls4HxR7ETF
iSK+9EqTgX7BoYHEb1qsHzLOE7UjxRR5jk56LmJhkTsK8nnrYt3MpEMPWgLYErZqKuCFpJELg9kn
HsRjIODtWrlzrh7Vs/YmiWmMYGhHy8icRIHFwieXn5RJfvRjzEBOMcJDGSEozAJxiSxHpmmAQjLl
yYzOIJoURdtzg/70bd7IGyfNt+1Elz3LjjKAdnODrXI8KeB1FExA07Fp5iHeF5touDiMx0mN2IO/
NPyc9Uf+nNCHNx8mZ24LVAiUTgR0ETiJA7kZ8nKsTq0nH1qUbtuMLgixlgSVSWq3EtoOYTXh5R24
hARuQkTgEdBMCS9NqFhxKRGIFaoXbhM5nGMqAFsEYeUbUgH1xNO1a4HzbYqJxxHamvUPvD7U8S8T
gR0A3piAOCMbB4du1E5n0OmfFDYVqtS4s4nFPKgG1aROL7HRkcAMVCzZlKHNOBc1eHCv0qAugGJI
OqKF6BeGkP60PiAPtKYXAU4kFOL+KBiEXq+xRjzAeOwYr8Lyl427TafLK+IGfPpoSFSZI2GqAux7
wnhMVyVPmHmv5S/+wVzVqECTG9ci/ZIo63jHYAyMpzIi+C1OXRIiNRxPS85gIRgDMnA5KMIQB1B6
VQIkY4anKErkySGw2pzHURg9Uwp/YXVdmIjehDl4m7I0cYInm7sOWtPQA1ZG5Yt+ddiOGUgWPYoy
uzFiP/TiE+nXI1Jlt7Eww+QarsxCIzJZRjy8TelMsCBwqYuHyIS8BiWYdiE7gN26zGckIwAjEYAU
6XJYbUI23uTNGiKDtXxZQsWyWEQeKSItWvMnHxSMgSf0UJiExEmpmW0jchLmbspmJcAFgFpjKOp2
EIsZEoxsW42of9SRx7EZcx8WUqsSTEdi0wAiN3S5NFOHJwHMXIFpB8zsGalc5+cNN0B7Yc6AMghC
3AREQwYege5MRG8qXlgXTGJkS/D+i+1S5yVk2LfMSMrYA4d7Lmo3rUZ3ICEoGQdg5BTCgGA9EbVm
JuXZEiLChIFaqFy6R5lt3g7iT4epXeYN55EgmBYAB8B60bcsJKUJZFPkVG4MjVRnE0IdAjArYjZm
Cbewj7VrmabVrIYnAhODTejbukTBwgKl1LlrxMbVCYnFsgvMNsyuSDGZY03I3LYIsyrowq2SMHMd
oUoyGqEq9hQv8wTGxjC3hqCEIxEREMwogLZNy7cpah9p3I3L8+OXFOWcjuRtcnE2rL8Vw7FGcb5E
xUlkLcbpuSFbk5M0R+VSMjqiSwktELlJeII8PEQwJqvi8UpeGOZKNzmyZXpCkTQRG5G5bgL1uWD+
ILzbtvTNmrRThcGmMMJbUfNIIPhYI2hQnA71cjzTGzdIDxKlM3BcskPHapcLWxi6Bh4xV1ISgZEY
nFTFuJs3MYkZqEL1riwMl5cxpicJIxHxLW5fBiNI96hRJHl6amQq4CeZ4DRygY8MBRxmiLXDM+Le
gSH3OiI0OxOKnJRga0YsowuPGURsULdgtC1LV+kVA34ucNJ3Lz7UBGcuHY7oebIgxHCMgp8vbHGZ
PLep8zG40z4YjJC3cgJXYRqczJQt3LRELkmpkEeYhck4Yacao2YFpxDtLYgOYAaWFVqhbMgMTGqa
TxIxcKklQjq4KjqhK8ZXiWIKwC8KrErArNYlYp9S8SpJVkqyohGM6yoojchaicKyXmyHCMOlge1S
izQOaYR1PiU8O8KqbxSTSPchpqdiB6ADZN2ERSQ2KGh2jQxyAVdp6CpRziWTDxGgVelpYiiFqPae
oHxjRC1HKp6YyGWKjMYFCIyIR7egk4eX9qteU1AdXa60SIM5SGnbRS7CtRLALhl60xFVwlnx2J9I
fan0BRJJMZM23sTjoE4BrgHrUIYEY+tC3DAYlDc6dalN/ZIKjduw1SIxVLUVS3EdyeIjpwYDNG5e
OkGgR5yEyYGRIOxW+XMCIQJHmHNDk+UgLmoccsmWuxBualHADCSne5qJvEhwWwKN+LyIk8YA0irN
jmhpBI0AeElGMJaoCptiiF2cayGGQXlmA4Q8VGTMIkFlrMhpZeXfOgxwO0KE+WDWgGeQ8SBmRIjA
Dajy1uZJNJF6DofwxGMk8Y6jlJE3i8wHEVIxYXAHD5qIhifG6xWKx6WNQg5oMlECjYkZpoonNYqh
WuQqallrAarV6dUiwGaNi3S2DU7UyMTSIxQtRLh2CEtOvUGLV0oT5yzr/p97hmcRB815fJ3tFqbT
t3CaaTVR5C9HTzQkIHTWMjtf5bpmOw5gryeZ4rUqRuflRu2OO3KsoY94Qu2agYn2o9oWm6RGe3Ir
aChd5ci3cGQoD6l5N+JhcFDLInpN/lD5V7EgeGS8nmY+VfGRwl2IXIHyrsfaGB7U5AjIN8QO0u5e
TfYXMWf6kdHx7BxhIOR2LzLfE2Ns+OJTz47Qo/tDtWq3ISG5agTbuDCcaFCHOB44C6BTvQlEuDgR
8wa48E8CQKFcMhdtvmC4UozIhOOIyO8JwXG0elMZBwaEIGzclalEvHYpfjbcZMWBFH3oWwTBjRzR
kTCQkAHLFV6NVs0zBwKAfRdzic+zqabsBMb0Jcnc0j/pywXl83A25+8PCVqgdQOY6PxdkcBLncUb
jAE4th0AxLEZqML7icQ2ravOiKag3YFEsCJRDo6C74CWRVzXbAIDvlTMKVyxIW7gyjgT2Ly7w0nM
SwIT2w90+zkN613ZO2AyHRv6KJunDqCUJGJGYQhO4SAn1F1Iay2ADrzboEpywiVEXLERHMjJCNoA
W5RFY7U+qTrVrkdyzPapgRrpLK7K48eXhw6tsgUZxn5lmReMhiNyjC5I27ZfjO0ZL8KCJRtltW1q
dGtnG0KnS8SxCAkdQ3oCfBLengdQ3fL+a/lOY/YK5kiIc3Zkn/Eeh1xTA715do65nYhbtwBkQ4Aq
gbh0jOOCiZnTEBiBmUGi7bVQf2GeRAG0rSCZzyEQhK0fIiT7Qy7VLzbk+avEMYRwQjysRy1o+ImL
EdibmLkr9XIlg6EYREYjAD5D5Zlque5GpWn+n2TGOppTnu3o3OcumQkAPLBf6UI2bcYNm1eo92Yi
E3J2n2znQIS53mdU3pCBoDvU7XI2rflxqZVee51K5zggBMUhU6Vq0hx7cqlabAN2WDRBaPaj510W
LWQiOIoyFsSuHGZFUwDDqNaty5iT6Row1bHRPPx8i0TqiIyOoDYwXwLUYn3mr6/QPenp25spx5SE
hGI4bko0ke/JT8u7LmucnHSIWoPAd6hGc5W7EBU3Q1TjpiF5XMXrl26G03CfC3uxwRhZGq5J9d0j
iI2ehlGZMpQbWIh9OrB0ZXns2jJ4gGujYRtWqzARODq4fMlAQcRiC2BIqjatkmRbiJLAjahavkSu
ADUYuz7l58BxQ8XYtJ6Pw0jXGKZAW2EvaJx7kIyjOMvzc1DlZgtLMZNtQE58IwkVH8JExhgbpGaE
zbFy/ibkq12hGZrKXiKAJoE5IYYnYhoDnAbSmKlyt2QM4AG1tO1DlbAHm+1LeVG5dnGNy6GBcYbk
/N3DdMWOl0I2bcbdsYf+gnu3Tp2RoFpgBKZyFStFw6YZWoB5HtK1zs+WIjhD1beuEeZMlo2zipnm
6XpeGYwjuCiLkYX7OU8JAICIoMulmboMJh4nEIW4UjHB0fLJjZI7qIcIjdEcMpBsV8QmMzQscE1q
XnW8nxCibrMcQcYpgdX6S1A6aVZAyIkN9ETb8JxQbhALtk6AEg8fZUpMBWlHTSo9HAWoVAxyWqPi
GAKNwz+JKpi9HQMA5lQ7FxBiFplHVHJRvANCzISMdoXnnmYCLOQTX1Lz3HkXA0PsWuUgI4kq5cAo
JMOxGcwBoDvsQEpfDNQ2aM4h/MoJHFQuCIMYCjnF1K0YtMyJkDVXL4jIRlLhOSY2wbkYY72Vu3Mc
FyVWyCN+yZGQIAj2qVpzb0By6jbuS1GQcMMkL1qAMDmjauRAnEsQ61mzIR2smq+xPJx2hUkF4gsQ
sQmzVVTodujBUCeQYpgKK3fh7JaQCFw4CLoP7Rc9iEI0A6HxOQXFhiSsOAYBMO1b1rJeRwQk+BQm
M0ZHIUTq3GXhMgCgABpFGUjaiIynHVIDagcj0HtVyfsuyjMYCnUBlQyqhdHYelhiUAcTj2oXRgce
pESxyU9WOtHt+zolAUe2pQtT8uzCnmbe5C7en50cDPYpdhUmyD9HEHAzzVC4TAJjUoGBcCrJwG94
Jxh0GVukhj2IKctgb1pzgE+WXcpwxiTghpsy0xDPlRa/KAiaO6eFuLdqkRpEY7AoWrhNyT8MTg6P
HRnMI0QuaeEkspQlANACQG19q8wRFsWyCW2OyAhAaCNi5rlgQNU9UH+kKE4gTuRmDADdintwl5pD
aCM0YXRqckwBxAXmOxyKaUiya3LSAHY5rSfhyGJOaELkvMYEMvLs/emr7FqxOaDDHJRgW1e13oDV
hggQDI4KgZVmQqknow6MlksuiixVCyxfoqnBWqRYZry7bxtj6U5RkcAtMajILQ2q9LZkhZvx03DS
EspA09anr+Ly86TtkOQ4qQrd61cNzlZkiySaw/NVu9APdEgYvtR5i9biNPissxbaCo3I+GYBHf8A
LDGYcHIrVH4nLnGOcV+J5OTTOIyluKkI/C5kYxyJXlc4DoHhm2XahKBeJzR1RGrKWYXxXu2TTUC5
BWuBcZjMdGm4KjwyGIXl8wPNsezdGIG9CQacCKZrzOXIjOPEItiV5PMRFMDgR615kJaJmuqOfavi
gROGpuE9q1cvMWbw9j2ZLyr48u5vwl2LTICQORWvlpHSaytGo7loYwuDGJ+YdMgAciBVeZZnEgBv
LqAUI8xEQc6Q2R3rhkJdhf0pjJiinBYoRnbiz1IoVI2zFhUAliFxBjiE4LEbMV+G5iVW4JH6uq04
iXatfLXTbcuY5IW+Ytyd2ExgUYFpQnQhGj2j4ZfZ06slESIOuIkCN6tm4DKBq4yC+HJzsVVO7LhG
kglR8skxiG1SoU+roqqFVFFQgHYnZBujB1gqLDqDNi7J5GqxohKMmIwKFi9U5S6DLJ2RhhqBD9qj
bhciGDyL1JzKtc9cvC5yxLmMTkzow5aemwJcJFCE8pajtKA3rypz4pbcE7MciFwtJeEqgZUCwXDI
hk10CQXE8ShpmFSQ9apVOaBMCH2fJGTdPNfyt/8AYK5m1aGuYuzBGxpFG1btiUhQsgboAh7oLIGY
0wHshcMA+1OIh9v9h3RM7gJGQqUBy0Ax9o5Iy53mtZP7uFUI8py4YCl2VShLm7pJxaKe3AajjI1P
yEzuyEIjElRhYtyuyueA5KYvnyBLwkFtI7EJSj5txqzkhGAEYjIU6XJYbSuF7snYCFap5EctazOb
IQ4uYmT4iCYumtE2oiXDTSGR/Ey82U/FQMhEREaUhEVQ/CW5aHaUyK9yEuZuzMgXiAcFpthnxOZ6
pgD5l1qQioyuTnytn2oijjch5USZe9IuX2+gLiVwROmRgHY7EWkeStuNEpEDhzdStcr5v9Qv3C05
SlwSI2jYtX9Tv6LP/wDr2iw7KLRy9sQH0+i1Xp6WGpsSw3KZ5OMrNiQAtXTQk5y7FqINy6WM5zLm
UtvSTMEGfi00dG3ZiICrNtUYkSvwiTGUWpFzipW5RpWMgVK3ljE7k6jciWIULgPiFRvTp8Ajy/Kx
ldvMQGwdedzz3AaiAwitEANHup4HSdhwRN2TAYCOJXFEEnwxxkV8KAjEYnCIH2oyty8y4RUyx7ke
biGhItKOcSo3rRacC4Klz9uMQSdOgGtArc+ckdNoNCL7F5l2LyGCa1MjYDULyRwyq7UdCRkYQ9yI
aRO8prVB9KFuBM78sIDLeVKd74l2545fYEWADoQnJ7IrFisAAqkBUmCdgTGYXjCYyCpIIkkEZqM9
QEoUidye3LTE7ChGR1HasFqNdyYFAvTMIiOCqX7UAGL5DFGYnprp0mq1RaEtu1W4Qlqn7TbVogeN
vEVrajs5wKeIebVG9RjIF5B+zolBn1BmVyODSLBWznGh7kKkjY6eADHGJRhKIjaliAalRtzOgQoH
VuNiem1AvKW1CUTRkAAPLEan85HUKNVXBKJ0RLMc15wgISth3AQMy9t3pmpXLfhmwLoXZkRLMAz0
UeXEQNOzN1K+YDilqNeJStQFTHTEDFW7lwSjF3lqqCoW7HDMmukVZXJc0NYBaIOKEeViIQEXpmVC
fMgm7KpYqVuEGta9I7HU78DKdyIoN6Fq7E24kPqOxW/JlK6Z7MFK9dmbYEtLHapWDKRlGhULvm6d
QdiNqrfZ9y+/x3L776F9/wBlEIXoebMjilLapctbpbAEuwIStlw2Kqqd5QEatiUwzxUW8Ui6jMbK
oDatIwhToMDl9SFsYs5R2MgI+ImjbU5jGcgKAmquXbwMdLiTqI3dBH5yYYlGB7EYnI9DnwxqhEZI
wljgjE5dHmSHCMO1aRgEYHFkYnEJ058MalRiMM1rFNUg/qUu37OjVkLdfWhGBGqMjqHep25EGU2E
RvdHsP1I7wjHbh3qlCaBCMjwk1WkGmSdF6b0/tRw3oPnkqdDDA4It7RQtwwzKGwCqETgSSUIFxA5
ZKdu3U4sVDTI+VcdxsKeJcEsVC+Y4YtsKMjcBk1IDEqVkwFyBJMQaEOvxMWc8MreWlHl4xEIHxNi
ULMbhMcAZZKXmS+IcJb0BdBncPhGLoSha0xkWDBCXMHjIw2KpYJmcKBtl/eCc0ZEQrcnhuRnMvKW
aYZp9PFt2KslUOtiqUzVOCFrCRwUoCR4cwFcnxNDDejdIOp2EXUJ6PiSxrQK2IQDnxSeihCEBoOM
icVoMBG0M3xorgNsxtxdq1KuXLglbtxw2oynMxDsBiUNMoyB7kCR6qpxmqKsiY7EyeVAKoW4eFCM
I6rsvoXmXANIHaohgDjE7JDAryLoEbkcNkhtUhEPA8UrYyPvRQ5wAzsCRGoUIbB1cly8otaiIyiP
Gd4ULMom3dtjSYnNvltQje5btlbyPYtUXhfj3EFfhufiHwjLKSFzliZ2/agS6pwyziUxDjMFeZyZ
0H2oPQoWrr274pKJoH6GkHByKN7kzqg7ytHZuTR4bgxgcVon3EYhebZJv2ieOJxCIwJpK3LELXyx
fB4EbNi/D83FjGgJ4SPWtUCeY5bZjIBarRdsQaEJyGnlIUIQjfGuGUxl2oSiXifmHiiC+LhGXLyE
HqHdwULd6AJAcyBYHsWuB7RmPRFeYXKJ6QoiQEwMBJF4+XPJvD2IStsXrwlyhy/MkmJpGRxB39Zp
BxvWuzIwkC+n2VLl+Zs64EHiUoGgBQFO9eUaNsqrd27gQ0TuZRs3IGRjmMGRFq1OEncGLYo+XbMh
kSKo2pyrmNieRQbLFU6jx4SNil55iZeHScwv+2AaWRNXRt3omBNQ+wqrMtqwdYIhujDqicC0hgU/
mlPK5L1rS7yw1Ljm42IW4zlpyi9EdfCJBwiNifGSJFCENXijT1Doqqh1UMdyofWiQAexMQ3Q4WJC
YSKiDDWBjEqV6UdAOEfk9VzX8pf/AGCuZuAcUrtwk7zIrUBU4n+xL3JiPajCxAmQwJBYqMuavizF
6W4ipUvw9k3r2DyDD1ISvyFqHuDD6E4gJzOMpJogAbB8hlGtycA84xqw3qJ5OxK3YfinIgSkPzUb
vO3JTifDZ1OB2oeTbEcnz6j3ZiI3puTt6vzpUC1/1DmBECukFu5fh/6VypncZ/MlgxzQnz/MyEcZ
W4mncoiIfTgZVREeOUQ5AITxh5NmQpxNLtWq6Tdu5zkSVpiGAyHVlqmJTiH0RqVLRL8PysgGLESq
hjduANrnU93XJkQBGpUIWLZuG44gXAwxdC5/UL/4eAJMiJYjYAER/SOXN28SNMpAmJL1KF3+qX/M
zNuFB2JrFuNvLhFfRa7stI/IjD+nQlK2AQb7U1bnXm89M3+YmOOvC3ushGIAAoAMB1HGSEjHVOVI
xU4i1GE6lX/xECJiT6xFokHBGQ8cKg7Vpl39HkyPDLDtWOueUQoyvnybAqIA1PanswEXxOZWOOSe
PAdy+8DLyeTl5l00eIWvm5HXLCH5ShKUhpMdMYRDALVcIHacKq9ADTy9yhMsGC225E6J5SaieYMr
cqSD/SnlpNoikgXK4Qd6BiWyIC1CWqUa9incZo5Devw9gC5zBx2R3lSMiZXZ+Oe1PB5TajoG5w3C
PUq3H7VSbOmNwuNijene1ai0YxO3agZzxwGZVH3leMoTjOhwXDc7iozlb1NickJWhKJbDJEkay9G
TiAAXFAttCxYnF04k4VF5RdzQstUJPcILOp6SJh6jeqnVDOKFyHatRJdmohbMj5YL6d64qjDsQJD
ADDpMxhc4grlgn84BNsQlpeUjwha+YnxDCAyQMWMPdCEBnkF4jEbAcUCKbCtMpEjYnZwcQjbjFoy
8S8qY0gYMo2rYOiJ1GW1kJagGFQoSdrUaVwO9aiRp2q9qZvY7FOcshTtULhiNUw5UZRg5k7gblG7
KIlKYck1xULVsaTdpEDJPKDzxMt6ly8qxthyNuxECAjRgQMFqu1twJjEDNk8ZabYlqIO1Az5WEhr
06jsfEoNOIAGDhDji3aF95H1hfeR9apci43rUbcpMPEMCr2iDXJDE5IWyXkMe1ajimHhGPatyA2o
gYRoFKBxGClM+yEScTXolP2WZG5lpZT7lblPwutT96v+URTFRG7oD+8jsFAmyNELowNCmGJW8/Wn
Oa3HFC7HvQiMShEYsnW4oXBgcUIjErfn2p9qjtEmUu0fV0aSaTtn61K4Lxt8vEtTEnYjzELkrmis
hLFtqPYfqUiMQHULoxaqLHhjQdEZ5ihTBWxtxW4F1TwrRGsduzoZC2Njv2qN3SBE1DnFTjdjpkKM
tVsAyGR2IgxGvLYqnRKHiIzQz05rV5jwJwZGZBbIFG9IVyCJAeSEjjLxbk8ayOLpvpWwjCS8y6dZ
GBQkA8tpyVCpXJFhHajIw4BtUrhkZaiSBkmhxXZeGOxGd0kk4lCMaoUrtVSy0RDy2KVaxyFUNJp7
T5K1KzNxLFhUq0YQlKMQHJovOjbeO8VOSmIQ1Sn7IGGwq5AQJMvETRuxSjEERJqTjRRsl2icMyoD
U4FNAKt6iKChBoGXm3bzSjg2ClKMvMuD2iWixV0ThKVCDu7EYHIsQaFGDKN2ReJ8QxoUD4ZN4gnA
8yO7FEGh2FHV3IQgKKNu2NV2WKEnZ6PHJ6IGUjqDDVto6eRcqvDIeGQxBXk3+G6PCcpjaFdFqLxu
hrtrI7xvUeb5Q+Zy8qT2xOyS/E8pwXf3tvMHaELcyPOAq2fy7zLJ0Xhnke1GxzMdN0bc94Wi9xWM
Iz2dq86xLTM1BGBQtczHy5YCRwKpXenmGnlMUNMEI8287WHmgYbFqgdUTmOgXLR8q/GonGj9q/D8
6NE/ZuezJbQULkD5V6OE4/ahy/PwYYC9HA71qcScMJxZ2KIA86wDQVdkL/Kz8u8MdJ+gryuZg0sp
jAhYOCjcsFjiYHBaZjy57Dn8sp1mkARvDoStAW5g1agl2sibkPNsfmlzFCL6JkOIyzHVbqStnHIq
UJBiCx6QPaGSoFUISiSCMwj5lsSkfawK12pSnpDCBD1CbmHszFJCWDoRlN3zjUKOi9EmWAfpM5mg
+lSuWzGxy+Gq4dLrT+Ms3LmwH7UYEBjhLF+woRuxEoz4a70LsI6TGWWDFT862JTicdye3bAJ71KM
ZDziGjEZIzkXkS5PQBLwyoU4qDgqr7OlwhXXEF2koG9YAEQAWNVpsnTqNAdihagCZSbSomZJuMNU
SFxPHtXjimjXsqiyoEZEYdYFVwWq7EPGg2lHSGHshGWSpRPmjG5JiTRPCQkN3WII4xgUYyyQhAUz
KAIBO9ViFwgD5TzX8pf/AGCuY/1J/tH+w7yIA3rVagb1WeODrVcHkWiMX0lR80z5kk8IydGMLYsW
yGiQWKE78jduDMlNbiIjd8h1XZaQaDeVK1y9m5cvs4izDtUTdlO1J+MuwbYAtQiTI+Ikk6u1CMQ0
RgB0uaBBnuElgIVqnB/DQzJxZDXd/E3SaQcGqMbFqFiyPaOY2BG9zYF2ZwjXSPWtIMLQ2UC/7aAu
B2cnHsT35APkH4exARgHHtHHrDXMG5KkYA1JUoW4xs2cNYP2rzbkRdvEMZEU9Sph1pXJ3InT7IId
1Oxy1sRLP5zvGL/ahe/qNwOA3lxJlrO9af6Ry3lxLjzSM9yjc/qF6V66ayj7PYhC3ERjEMAA3otU
yIxGZQhygNwktOcQ+j+9DmP6hcl5gJ8u3GmmJ27yELVqIjCOAHXiYFpwdnwLo3+ZkCAGjAVer1Vz
l4cuJPDhOXao3IHhkHXmx8E/rVMQuKshghcYSujEmvQDgRiE4zRnzEm2A4nsTRH4blPe9qS8vl7Q
Es7kqyO9SnclrcjSNiE4YAOUb8iZE+GJwBR5a3LTZi3nTGf5oXlxB1RHwzgxUrcw0oliF+EuXDEf
u/yIx8xt+BWmV4kDHajG2DKRxltR5blQHIaUxhDv2oG0DIn7yR8RO1GBcSFGKDOzVdBiSEcUTCJJ
GAK8AgMyVpi7Rq+0rVEMdpWicSBtFQozuWjOD5JrcpyicYkNpXBJ3wBojExcZppBmyQY12Jjwhdi
qAVqtkjc6aUixzT6y/YjCy+sisjiylbrSlUyeeTt0REamWSAdy7lbOm1eGMKKBOBOk96pmgx4gOE
KoYZleOu5FojTtzQfPp1zLBao2jIZF1NrsgZHhiDQBEiU/MHtEuGTwlhWTlNOMZPUEoQuwcYUNEL
kOA4ghNdmZjIZIRgxGwoykRKRDAHBaLgjJsNyF2cNWnwtknNuQnsZHmRHxYjcjCza0yIYyJwXk8w
CH4okB8ULRjqgK12rVdsxBkaMKpoWPhZTdEyjpYUarlCUgzsH7V5dDJnxVsSgGd+1MANIGGSvXRB
4mdAvNNNdWQizAYFOcNqBehUrmwfSiTiUNhoUIjAmvQ21RgPEalE50BUzvCFu3jiTsCl+GvmUgPu
8AVcvcxwyNNKD7Ain7eh0YnHBEyFIfWmGAQ6DCXYpTn7NAjsGHSYHFGcvZoEwwHR/iCl2j6uiBOV
uSNqREJCRIfMFXLcJic7gMQAXxTblOIxYshbBYkY7yjE4g16J2jgahVwimBpHBQubRVGJxxCLDFM
KHNSiMWV2F0mRjSO4BWZ5iABGwiigIEGUfEy0ONMvWgTIyL8MRmUJEaI57SqxJ3lOQCNi0sCpafH
7KJkGJLuvLqLhqChUbGWm5HHAhPNvLlgscckRMCKmICs3AlsWmYMoQFd5UsoSwjuQBxNOxTMpeYS
aFU+heZOmwIGGBoCrcSSAwMmqpXIwOkuIkoueI/WiSKDFswrF0/GhNjuqFETtxaPhogZCMRgDQKc
BOInTZVTh5g1RxRBm0o5MhchA6gXMVFhKvipgo+W9yPtkZIgRlpbFlIXfhseF8wpTFwBsQcVO5bA
HETA7lKUxouihiNqlCcdVo0O0IwBeVujHEjJaj3BVGmfvBASDxOEghorOWO5DmZUhbNN6jgZZ9iA
FBu6dM+6QxB3IQ5kvHCN3I9quS5cAm5EiVs+GVMQpHSbfMvwyy3gqVy5bao03YDwnfuQjcIF4Yx2
7x8uY0kPDIYhfhucAMTSNzKXatdr4nLnGGcexOeKP0hOAb/Kf80AvMsy1DMZhGEg8TiCvM5STxPj
tyr6kxIjcwMDQ9Gi7HVFPW9ymzGUQtdqWoZjMI27o1RK83lZCVkV0bt60XCLc9h8JR5jk5m3eFQB
4ZLyOfgLN3Di8MuxeZy58y3jofLctL6bgxiTVaZiuUhiEIzHm2cpDxBaoESG75iqqw0yGEo0IT2Z
edEHCR9lGN2Jtzj4nFAniXBwI655i0OJuKKMSGIyWuRYYIaZttTNXanCIPREPwTIEx9qIPtDHIqV
sZFlTEKlwyGyVULrNIUkBtVmBNDJ5DsRsCRFqPDGAoGCuXpvE6SYBGBOuGcJVCe1SYxtnHtiVcsX
ZSJAoDjTIqduZbVh2ryrJ1XjifdRnMmUjiT1PLuQM7Q9sDBa4ESiaghYLBEALDpdC5zxkbelonFi
+KE+XuiQwovM5yYJlQDMKcbEtVsHhO5W7w4hAuYnAhW+aiY+TdAmIxNK1ZObfCpQEGkdqY9Xy54b
U0alMCgbktRyCYCgyVFToeMjFk10ahtzTCWk7CqFxu6nFEErhDKiZ6/I69fmv5W/+wVzH+pP9o/2
Ec0G9aYyE5+6CjGxZFuIobkjRA8zd1TArbhIklAcjy4hF2Mp/WtfNXTJ/YBovhwAbPH5C5wWqczI
YPEOH2KXk2blqyR8OQFSd5Kjd5+9O7eFQNVIo+WKnGRqT1HuzEdxNUI8pZ1v7UqAIT/qPMRtxd2B
buRtf0qx51yPtEFkLnPzFmA8NqH2oyhbBnLxTIRjrBkMIjPsQFkCMZULxLxHegb0vNY6qjNcMRHs
HWMSddwB9EalR8mx5XLZ6yxIWu7EXbpqZSqB2OgIhgMAOrqnIRiMSSwUbVqYu3pnCJcDeWTHRy9k
SejvMD7FO7ea9eLCQAdkbXI2vw8Hxwp2oT5yZvXAXYmgdCFuIjEYAeiclgMSVosS866SxEAZaRtL
IXOavG3GJe1CAahzktFsMHcnMnafRMSHiKhRF74hjsDjsKsWLNryYtLiZosEeWsjWJGh3hG2QxjQ
p8ihE+GWKfFarkhGO0ryuRtm5M0Ezh3I81/Upm5dNYwyCgwDBwGyRMA5GKJlFzkE0YSqzg0wUbHL
HjNZz9yO/ehC1QxBIf2n2rVcNNgX4u1EiY+8DYjahKJYguCo3Lja4cM/sUSC0Q9StHLg/hsLl0Rx
OYijbiGgzuaF96mZPGI8AzJVeLtUYzAEpMA29GMRVRgYsKuRkyLOBk6eZfcqBuoxAPd01APajIOC
Mgo3PE7AvsWuEWi7BwqxG9a4ggHFqqBu01VO1nUpCUn9lghdsHTIOx3J7oe8fEQjIyZveWmETpGD
Lwk9yMpxZhRUkx2J37lVUVy0Q/CW7QnwMZfUrdwHxRFd6BMjIZnNGMmlGYcdgQ8uOsmjAsEdVqVo
jMmhRFwYZpwXqiFCVp9MTxAI2b1RLwvkVKVqLQEiNRLCmxf91dAiMhVPbuAWzlmhK7cMhHCIom8p
jvqtMaAYKqotWxEqoZEZppSJDvpTRiAN4QkaGIYb1GA1cZ8Q3oeVEyuYyMsG7U0xphsGYRjJhDKQ
KAm5IUZD4YiaEZhQMJHUKAI8QNMM1bhchq4qgKlNyi9dgQiKOHZDbGijbGMqnoGyNSpEezVFaj4Y
1RKIyJRO0oiZAEwwO9SuzkBEB1dMM5YdpQbBHsR7D0tkUSMSnQ6A2dCqZ9QNmiRnRPt6D+kFP9If
V0WwRjGS/FXZSEJl42wWHajzPLE6Y+OJrTb0d0gqeGKFweGY+nojLfVSmM0+3FSt5wqEGVcUCTTN
axgzqRiYiZxbFDTcMBIOG3oQlUxq/aViQUCZORg68T9BcOCicSTUp04zXEHWzepQkdTkkHNcEyAM
AcHXFOu5HWdROa0woFinBeWQTyjqkXcHJExDuU8g834YqQALZOoicZHyzUtQofiIiUyOBCRtx1DC
ism00Z3HEgKbGRF81OAClypk1iUnjLODqIu3pSAqKrimZiOZyT4lVx2ozhN32oPL1LREmMjjJEAk
6sSVogXfEqi1XJCIRjy4p7xRlMuStQ7xuQ5mxKmbZIQ5nhOGv8qAtS8yRwZTlKIndj4Y7ELxhKGo
0AFFAGb8vPiludEEvm/VMZAGJxBT2PiWs7RxH6JUuZsS037AMpWzQyA3ICAlHmhXUPDL82QUDqbm
BWI90+6dxTmkx4o/LjCYcFaZcfLnA5xX4jlCGIeUcpLDTIeKJXn8lLyrrvKPsy7kbN+PlXY4g4Hs
6NYOi6MJhCxzYAGEbowO8pwXBzCIOBXn8mTC57UBhLuWi98O8KaZUdMahSuWIgSdzH8iYcdtnlCX
ij2LTNifdNJxKEQPO5V6H2ohC5AgXd3iHatPMDXbGFwfatUCJRKNzlzpnjp9koQvcFzfgez5kPmQ
EnxOCB5W5Ly84PUdi0c5blAvwzahTwk7ZZ9Y3+XHH7UUYXYvF6xNKp7U9Mh7JxXlzckYST9Iv3pi
7cmOG3E4dq0RuSjAYRByWi7W7Eu5zCMo0By3okGuS+FKUQcWKGq7xM2qSlC6XMOItgaoQiGiKK5H
ZIj6U4oVG1fDvSNweIdu1C9GPDLw3YiiJuOZGrnpYVJX4nmjotRxiaYKVjkYCMPC7Yo3wWtkuYn8
iEb0HBxkFrtESHRgsOio6MU0qjoha1CGs6dUsA+1WrM+YjcnZgBERqKBkQQyoHT4Ii4CNkkSJYKn
S2aM5muQRlIEsC3arty5GjkxKYQMh2JhEuciFWKoE4VAuGUgyYl+1cQBCrbX3apAKh09ii8yK5oH
5TzX8pf/AGCuY/1J/tH+wT3JgbhUoQ5Wxr1YSJwR/H3owfIEuPUtHKWjOQFLswS6/wC4veVbekIB
itZjrmzapJhhs+QvdmItlmhb5GErxfiuCJMYhEXr8rXLbG0yl3KIOq4IViJlwD2KmHS5oNq0Amcz
hGIQ0EcvZaspUKEr9wcxdFQSaBC3/TrQtRdjJqN2oT/qN43pYmAwTW4RtQGwIxsRlOYx4cFIXLhh
aO4AoTPHMBgTl12JNyZoIxrXtUogHlrBwkKUQu3Pi3WA1SwpuTDDqveuRh2lStchHzJAcMyDpkdy
jf5/mIxiP3URwgHa68vl7cbkwCQQHc/pIxhHyrchjgBVar3xZFqHBwmiAAMh6NxIXJksIRL+tkRc
mLHLxLxMAXn+tkFIwA1zrKTAP6PmBy9+EOZtxcAkEg9incu3Zyu3TpvQk4GlQt8oPMvE6jEDACi/
Ff1o+XaLeXF6xrm20K7+CAFuEtILMV+JiN0lpzyQkMQtIMYCI8Zqe4IEkyGcpfkQMRxbU79iBgAB
mSUZQDPsXEDEoRj8S/OluP2lGFwk3ZMZzahKGiOonBR1DizCYgMcu1a7Y+BcLx3HYrtrmC1u4H7D
FC5feHLisLRNZdqFuzLy4DIbFGMpu2MiA64jVsd64pFRlGpGBKcsTtVGHTimr6lgfUsD6k5BPcsC
O5YssVitMiOxNEgAZBYpjmpAByTQ7FRNTtRpxyL6t2xYNV0TFzKgiE8qSVU7P2UVC42SxCfpuROZ
JHejaJrA07D0AvUmp3KrEO4OxPqdhtReLJgMVgy0yDg4rVZAIxbAhDl7sZWzHMih71SQlvdAiQFM
iE87kR3hNy0xO/gGDhG3eAYB6DqUW9AgpwdMs9hQMJhvaBQmIDXbGAzQ93YckRCUhAhnJp3KNnU+
qgKJFw95oFO3KeoSFJDJaZzM9hOxRlLBUmHIcB1CESwuScjcg9HCDl96kfUpQOdVLYKAdBmfFPBG
BNJIxQ96ePQO1DeShahQCspbApQs35eYzh8FIXC90yaXr6D2FE7j06zgFIRq1ExQ6DckpDYUQcuk
3JKTZUCIPQf0gp/pD6ui3LZCShblIQvW3iYEsp2RLVO6NIA2II7tXR+dDDoAyxKceyQ/QAcJ0KbZ
VMKjwrYcwhE0org9mQdQslzKIalVI4CPCAdyyK8IWCpKQ71S5IKl0lUuBeMLxBYheIKswq3E5uFV
kSjiUJM7FTkMMYhGAD3JGm1ebdPhPENiaEA+1lIaojTiFq5eGmcSdMio3Lt0iEMBGlVqmdR2lGRy
wWpnnAVG0IWbsjK3L7uZy3KUxWU+EBDyydS+IADmsaZsix3KLljHMJgVsAzRha45j1LVckTsGXTR
NEPH2huUrV0MSXA2ozt3dHmR4I4rWbmskjWDmHqvKNuJgQwojC1SIr3oylj1yJhiQ2oUKtxsPdhf
fSM3fBGzzGq3zIINsHAtktYk3M28YH2gtUaSFJRzB+XEGoOIWuzxWT4rZy7ELtmWmY2favKu8Nwf
SjG4GllIYgoW7w82xlMYgLXalqjuRjMCUTSqM7HxeX9q3mOxarZqPFHMHoBk4mMJBeVzAe2C0bjZ
bSngRIbQXQkOGY9oIRvfBvDw3o4ELyuciJQDAXQKSG1S5j+nzAkakCvqQ5fngIzwJy715/JzpnB+
Eo27sfKujEHA9iaY7DmqvdsDD3gFqtycbM/mQwmHicl5lm5KFwBg5cEDJDzo64uxaNQNtE0bgJ2Z
9UuBG5lIISt4irrRcDXBjvRY9USiWIwKaVJihH2qqErmEgxZQkJCdq6BctzjUESXm2TokNmaGqzE
yGJfFTuENrLt29AlsLo8tDTOxckZaZh2J2Lha1dOMD4SfzTkjEghsijGxAzIqWyXm8/F5isYb1oh
HTbGEI/ahO7xc0agbCmuTOn3RgowJbUWdCMD271GceJxUBMQx2FUWDrBYLBYJuhwSFxF+nSDRV6d
6F+duN0MRolUEFG7at+Q9TEYKF7Vx3DQbkLccRxSQJjHXGnqVIgdFYhYepUWSoHG5VDLCqomZb0J
M0RmgBgPkbDrc1/K3/2Cr/8AqT/aPz+8yy0WYynLA0wWqdw2rOerhPqCa3q5m4TXEgLRYmLNhqyE
WPchC9M3gC5cYnetMIiIGQ+Q6pkRAzNESLgu3PZtwqSVAcnpjr8RMTwDtKlK/wA1O5K54ww+goW7
MBCI2dQm5OIbevL5WyZkhxMnhQuc7zBhFvuwaOpHlbWqUaCUgS53LXJ7eqgjMcIG1fEPml3L4OtD
VAfRAVZD8LalG17U5UJ7EJ81dnOYLxD0COgM+JzPW1SIAGJKMbcjeuighAPVPrNiyRUGOnHIIM9y
Qq8613dZp3QDsFXQHKAWwZEHXE0jViXU5c5fN2FxjMSAAcF6HYhDloOMAYilOxCUp6LZBcEaQH3I
aviTGZTCg9Gbky5wEQakq5agBZtwI46nVuClIgSuTbVJqU2ekjYsSeUn13IjVoC/G3pSv8vrmZah
U0Okkdql/U7VoQiW85iG2Rw3Yq3zUG1Wy5jkRgQhOV+AhcAlFyHB7FdvW7s7nMXGPADodGItiMJC
pkXNdyMJdxQIpHMoQhxticlGcPDIU6WMiy06iyEvEcIx2k4BXLt6I84tXIA+yENQBO9YgdixUhaL
3iODdvQ5e5c16i7yyK827XSXbb0YrxGqrIrHpw9Dh04LhdUkV4li6YrBVBWxYpweh+qOYiHYNIry
yWjMN39GmVJeyU04EHKQXl6DpapOS1RL/mlOQ0hiOh9i3lNKu5cUB3UXwrkoHtoiDdcyTRuAk7Vw
h7hDSO1OQwGJQlE0WDp4WxIDEoGMeOWRyXHcb82KAkTqGa4ontC+GSdxDFSgLbuW3o2+YYj2Y7EK
kNVlxcXaU0WAGSxClc1GNmBZ9pQ5kXDK3Gkgck7xMm4dqANKBXJjIMOg3DhgE4wkHQiMyyjAeyFG
W9W5jD2kwwFOiHaVFTjIiJmKFG5OQEYhySVcnb8Pm49EuwqfZ0CIzQgPEaLScJLUMD0AetC1GhOP
YhsNCtY6AMs0LccTj2IDKVCtQwPQf0gp9o+rosjbGQKnzU4v5bNHJypQMAJNwyGRRByoiP0gifUj
E4SUo7EbhoSFcgfaRGYK1DEVRvbYutXetW0AqQOAwUtEuNuIHBlC5GI1yfUc6FHmzZErgowo9FeN
62bNqAcEP6keWsSlGIc6zuX4S3c1ydnajoWJSibkiAIg5rRdiBI4B01y2YvtVQR2hVf1LFYrFYqp
TalimAJ3MiBEkjJkQI+HFDm+Y4QfCE9kNt3ry7ETaYl5O5KM5kykaklAAJ8E5oiSWG1ai0bYxJRt
QLxPFDtzCELx4RSH5ECIg5unYO+S14GSrn0HWa+6MUYxOiGwdNEIwBJKE7+Puq15VqUASBQeIZq1
OyTEyoXxEsnRt3jovWHBt7RSoQuZA1CcEM2CIJocvQjXESMaxOxC9CnMWQ8DtbIozld8vnIUA95s
ijet8HNWw121hq3oTj3jYfl/n8qdNzGUMpIwmNF2OWEgdy8rmfBhG7l3piBKJ70LvJyIrxQ3Ly7o
Nu6KF8D0edyx8q9uLCXahY5uJtXhTV7J7+gxkHiaELzOX44CkrZ+tPGkh4o5hGFyIkDtTEG9yuBB
qYr8R/T7gGZtnA7l5fNWxZ5kOA+B70IAAxFA+EhuXl3OG6MjQ9y4nvWRs8QC1WpahmFq5c+XcGzA
oW+YGmeUsj8zSMZaDLGj+pRiTG7HASLgtvRiCIzjjEn6uq+kE9i8yJ+HL6+tCMDWfDXBOYiIG9PL
DJQiZExiaA5IHaOrCWwA9EzzdeXA4ZHEEbCp3v6bc1hq6cQN4Q/H3zAHGZ+pGzyA13jSV1GUi5PT
G1cOqMmiHyQMcAAh5weOLZrXZJGyJQhdBi9AclQgrJYVTBCO5/QNEOSiCGboc4BQtjGUgB3q1Yjh
agB3hTu6JFxi2RTwkYlNcAmPpQEjoO9aokEdWsQqBuxMxRncbV7qeMajEFaYBh6KvyDmv5W/+wVz
H+pP9o/PjyLAZlaeKc8oxBqiYRnYfwuAA20lar/MzvX2rGJX/ZW/K1GplGrbSStfMXjOZDENRabU
RHac/kRMpxAjjUIy5aEZAFgJHil2AKF69chFqi0xIHapXBESuzLyk31KgbpclhtWmEvMng0at2qQ
JhZtD2qufWp3JgcxdiHIeg9agOUskW50aIY+tRHNTlE6tUquW2IRgKRqNRdfDgbtdNGxR1fCifC0
sO1AkGc/alIkkoABgMAOs85CA2ksvI5aUblzOVTEepC5zV4WLQLxtwFTvLoyAErhxmQH6uqZEYjM
o+SfNuD2Ygn1qUdZtcvKNZaWLnIKNzmLuvy6wM2DI2OWtkzFNRwweiE+YuShHUZVxAqwAUWeZi7G
VWf0huXTpiFPl+VtmEotrnI4A1ZXL14Cc7jcJ4oxbY+1aYgCIyFPR67sxCI2oRk/L8vcJGHxJAB6
KVr+mWpETGmNwjilIUc7go3ObkRJhqhA0JzqvJ8kaNjlSskPaPFakcxs7lO1eiBC4KS2EBTlzMvI
5aArcIqwzC/D8kZXrcQwnOhk2aFy8BwDAZo2oDRbwAz6PIlRhwjoxRRnMsAhzF3H91b90bTvXCKr
FkTKRbNGNhzL3sgpeZLVdkPEclxXBEDA5oRlc1EDHMpyT6l8OMpdgWmUTEnI0WuNomOL7l5VmIMz
gCVE3oNq8LVXmWTFhQvtUrFwkGJYyAo6PMC7qAA4QK1RtGUoSjWooo2wTcBGokZIXxeMTIOxGCjY
mZREywkzqE4XDc1FtIRuC4YEFiCFKxImWk6dQGaPMykNIDkNVGFoNOIcg0QtcxECRD9y8/QPKx1P
RYDuITyBD4LE+pUkFrEg6xHVxWJXiPrXiK8ZX3hXM2pS1Ex1RG8KE8DGQJ9ajIYEAreuLiIwdSNx
rUwWxXiZsexGcixwAFaIVxyWKY4hEhMaomEHgMSo6IPMhy+SAlPS+QWk3JElMZkxOIKjAnw4ZFFw
abVwYoGZOnNk1okwHtGhTusViqyCeLyO4LhgR2qshHsqnuXCd2C8II3o8tcIg51ROVUeXhcBlMig
2I3a+XGNDvUYlXB0QiMfEVG4MY4o3JeGA+lE7ejeBRV6IDcoDcvPveEFohGXL3ZWxDxRehCt245y
clFlM7lcbERW9eZLBE+yKBPmvzgG70QUbktjoyPd0aT4hREFGcu1GR7k6/OFOgk4GYb1KfaPq6LJ
GUZK5bvAyMyxiMaYEIi3CQlIMJSDN0V2y+3ocYhW5R9qhQgM0DvRIwlX1oRGJotMcIs63KB7iiTu
Q5mJYAMRtUY8va87VKRMR7Kib9oxtWwSLcQ4qEIVt2BJpRarI2uVtAQdpXCGNU9i15l8+KeQdR5q
8TcvSOsQFQCo8/zV4gSaUbYVvm7l/wArlmBjDNWr0bwt8oAKmjq0eWnGHLiPFMsK7Vajypj5QAE7
lBVWo8vCPlsNd3erUOXhBmGu5vVqNmMRFhruZOrUbUYxtMDK4cDtUNOi3YDVPtbVCOqMLVNI2qUb
lwUcQiMMM1zBM5XLhBESBRT/ABEC8q6itN15Wo0i1SFPy5G1y5FI5po0GJkU0rkQTTFcBd81WgyT
TPDktIDCLKFu25tRDsNqp4hUdq1txCku1C1IyiDQHJCV6VMa5lUpEYBGdyQiBtRhy9B7+aJkXJxJ
VehgH7FrvcEN6AsW2B/eEUUocxASuONJxzVY6omo3LVZPl3Ijh35sfUo83y/w+esUuR2mOIKdtN2
NLkDiD6U83y8Qbo8Udu8KN63w81awOUmykvPs8F2FL9n7QtdsuMxs+X64HRdjhIfajy/NR0zwrhL
sVB5nLH1xQnbIlE5hGUeGeO4lC1fGqH0rVbkJLRdi+w5habmq9y49rExC1W5CQ6BO2dFyOBwB7WX
lczHTIU1+yUxqCvO5M6T7VvIo2b8dN0Yg0IO5ESH4jltntRQuQ44x/XggLvxrGVweIDeF53Jz0Tx
BGB3EIW+bGifsz9mS4wJROBRMSbtkeyTUBPDvBx+ZWNUQYiMspgMQUJWtNyA8UAfyrjibblq4PsT
ioyI6bVnIvI9YTiWlEuDvCiTEahQlESiO0BUVuW7qt7tEIgPEF5Hcochyh4B4m27F+OuzMb0hwje
cmXlf1e1+Gunw8zaw/xBG7yHPW+Zt5BwJLQRxDIIABoksSg8pPmotI8MhinNaYKfMyL5AJ8BsUbo
jqAPHA/WvxXIzLHxWti8k25QnHGYzKJkSYOwfFVDq3e5aTmTRIORRjGAnIUIiXK0Xrcrctkg3QLs
ZhjkjpBLZrSQQexapReHvDBHUNZI4dxTQxz7VVMEJy8Noa/Up3ZHU0iQEYG4Rbw0jYneqqqLgkQ2
Sa7EHeEA+mRyKcEHemAco6pAEZKINwRiSxOxEWj5pIodilOZxyRmaA+j3fIOZ/lb/wCwVf8A9Sf7
R+euOTnIBGNm1KIykzrVz3NaSDSMSieUhKU4ho3LgJJ7EJc3e0h30xWvQJT96Sp8ilG0PNlDxMQA
EPItC1az1SYy3LzeYjF28Ach9pWqFuMTtA6j3JNuzTctamY4G4Y0WvnuaYisbcW+pNyduJMsZTNV
5c4A6664mkQoy5gvIYiJNe11wCMAMgzo2rNoOQ4m7gdqEuYlgaiJPEnhbET1zO7MQiKlyox5CwLu
osCZfSwUp/1GcCJ4wFdI2BCNm1GIGYFerqvTEANq8rk4zlE/vRAkPuULnOcwYGJeNuIAHejqmNZ8
RNZFfh+Vg35+P0LzuZuGNsM0a1PYU9uA1HGRxPpDKRaIxJUuX/p1ud7mBQy0tGI2uVK7z5ncifu7
M5OI7SWWm3ERG70hjD4koh5mNRAb09wTu82Wnct0EYRBoBvRu8/qtwNbUdXHCJxFNq0WLYgBs6ZQ
A+PCtqWw7O9GN14zgWMcwQrn9MGqf9SnEW7cICswC7+oVUYStytyt04g2KLyE5ChIqFqA4JVCfPN
CcSxCDg6swuELVOTAIXbvhHggfrKL5p5FkzmR3IznMQt+6SyEfPjEYcAdW7li753mbDkpS5m2QQW
BehUp26WhKgIppU7fJxFydAZQyzRjzcpAzHBEh80eYNnh0gRD19SuclZgYRsDRO5PaclHnfLjc0y
0iQoxKs3+aItTMdQts7PtQ/pVqEY2/vJXG9nMq5ety13QDJpChOKF/8AqEjLzSTbgKaYgtVcpb/p
xIjzEjGRbwlxj60DceV1mNx1zcOauH8NYl5cYD23f7FLmf6e8LsDEMDUgllbhzs5Xb4HFMUYlfgb
Fwx5WI1mbYRb61cv2Jk3YgyGqrkVWv8AqF0xF0vCAFWG1WLHIgTPMu8yMgRipXpSjcu6axbZkEeU
5qxpsxGlhjRarb6ZF4A4gZOok6iW4QNm5QPlSuA0NsgnU6lzAezGhNshgBmpXOWMZXgAAYn2lK3z
R0ADhkDUleTAynbYEyFWJX4kyErYGoviyPlw1kYtktM3jLMELhkOtQs9D2FMaxMn7lpmGjGkexY9
Ak7bRtTYb0wxAZ8VGvGMGWlg61tE0YjJDTGLnIITuTA3DNASNAXbaUDCQERjvVcU/RxHDNeIetUk
/YuGMj3LhtnvWUfpXFdbcFxSlLvTiI71gyqV4u5fBtyn3KlsQ3lS5jmiLk3aMcgpXowEJwq4zRtw
fVBtSrjVl+a1Vpyf6EdgoFK2di05yNegDahZ2BHf0RG4KPYF5N46Q7xOSnG3ITnINTJWpANj0T7F
N9gCYYSQhHE06dB8MvrQOWaFqPf0jYaFCSFqPf06SaSTD2kIjKQU+0dFjeJK7zRhE3QdMTsU4XAG
YsdlOg/pFFFB97Ik9nRC5nGhWs4Rqp2zhIU7kRsopQzFQqnEBQstwzzV2B8UZv3HoldjCJkTUspX
pWxXGOTrzIw0uKgIkUDB81EiWqIDDV7Kt6Z+YI8IiaAKzauXHtw9gYBW7du4Tbt428HVu0Lgjbj7
Ks2YX/hw8URR1CEbuqEPFEUKt2S5iKRiD9at6zqEQwgDgoy5qTiIDAHBC7akTOjmWTbETqFaylmn
fgFaYumJjbA20U/KlqsxI0jJwtUy8ijy9oShaHinEYpnM4yqJkrSPFb4SN4USW0y+hRm+kR8Slct
lojapXTVqAbyjK4NpI2ImIa1cpu1I2blYnwleXzFfcmcEYWeOe3ILVckTsGXSwTs0feKELcfMuna
tN25onLwgYBCxI6jAUlvQlKIMhgen8RZOmd0f4TIZHtX47kxpvw+/s7Qhdt9ko5g7PSy5zlw9uX3
sNn5wVvnOT+9HiA9obwjzHLeIff2ftAQuWy8T8vadCMJDELyearbNI3cu9eby1YHxQy7QnFJZxOI
TSxGBXmxLPmBTvQt3h5dw4HKXYiCHByXn8pIvnbNQyMJNC7GhiaP2P0GNyILoEvcsZ1charZcZjA
oT8N2PhkF5POBvduDA9qFy2fLuYiUcD2r4weHtSGEu4rzeQmbd327JwK8jnLfl3dkhQ9hWrl5eZZ
H7smoG5NFxIYxlitVr4dwZjA9qFvmI6TlPIqlfmYi5AF881q5aZlBqQK081Hyp4Pk6eJBBzCsyzY
jrmGUq9BUTsYdWcUbVn7y54jsR/qHNfdwqH27UWLWo0hHooWQL4VUd5dOVpEhKT4KUBEifi1HAKd
qcBoFXB3YpoyBKoaGhCiBIwJIetEJkgxZywzXlxD0dEkVOalbvnVauQYP7Mnx9SuXuUlIxjIkTjl
VWrXOW43OIAz3KM7FzSLlRE4LTO4GJfFRhEaoxLkqUyRxVEQpRbRaz2sjDl4gHDUcUZk1NU1yIO/
NARwV/mc58EVoMRqJd8+pgmGKqCEBsqgASHxWqJftUrlyhJQlE8JK4pOQhRrYzQjHLqP8n5r+Vv/
ALBV/wD1J/tH53e5MR70BYsmerA/3Iy5u7G1A4B2I9SjbtSPMSHtycwHqUomcbNjIxixIUfNPm6M
HA+lNEAAYAfItMiTL3YhypR5K1OVzAyIYRUZXZ3bcf3jyYy7AFqjaD7SSX7UwoBgB01ojoHmyGIi
QpSI/DWiKHU0hvKEjr5q5KmslwSjbcWH8EIGv0J9MQCA92Tv3BC5dETIDE0CNrl9M5xDkRLAIExh
ZtPgXeQUrkwJzlkcB2JoRERsFOv8S4NWURUlGPIWTCrapDLa683nZGUiGMBIkJrNuMBuHVInceeU
Y1JX/ZwNiJLapxy2upT5q/K8Z+OPsrQ4GkUhHYjZ5WBjExcEA6qo3uauHiA4c6L4cRqzlmfSyED5
12PsQqz0qtfN3TbsFjC3EaT3rTbGPikak9vpDK7MQiKuSyibHDZuzlAAeM6cOx1GErcrdmP/AORK
Um8yTcLNkEJGIne9q7KpPW//AIly0XGF8D6JKHMWJGE40cbDQrTZkbpux1gk4OWqr/L3CNAmREbD
iVK2RVngUYyxFD0vItELzbtIDwQP1lNZgZnbkjO/cFmA9aHl6iHrcl9gWpyHoBuVHkCcA5VLekbZ
UULnMQF29eGqT1AD0AXJ+U8eWmJyuW3o8Rw/SpwuQGgxINNyjZiQZ6pSmcy5p9C5SV1jLl56z9GK
1mcdOOKvFhbF64bj7ztX4cAXIuJS2OMEBNxMBjEB8FPmywtiOkRNCylDlgTOYbUQwDoWb0CRbcRk
PWoXIggQPw4dlXKYWmu7zRTlKOsXDqkMC6FuMNFsFzWpQ12iZ5MaFS5qY1a6SiNmTKVrlwdUwxlK
jAqNq9EjQGiRVwo3ZwMbcHiHxrmgLcvMnKgiAp3TAT8oNCG2RoFGYJMBW9JmiMzpRvStkGfgj7sB
h61OeniiBoBL44q9buAPMaYnNyoxMRcuXBqnM5kqyYzNuxbgZXIxoSXorl/lfvIjwmrrTzl8DXWN
tnpvQtQ5fzJXAA8XqEeZIlYIDsRSiNzlRr5iAALUcqdony5wDtLAo8vfbWNhdUPd1BzMfFAgFGN0
MGoyxJWKxWJWaz9SoD6lSMk4gXVIFUiPWsgqzA7FW4VW5I96q57SvCFgAqUVU5kAixJ7AtFsPIoX
LsNEZUGZRuCREYljRG1GLwEtLkqMjCMptU4oEBgMgFxSA7SjERM7eLxDsvJjCQEsSaKUyXN2T9yJ
lQAEqXarlzBgw6BvQ2NTolcOEQjN6mqEwhFMMmCHYjevEi1gIjNG/YJBt1lB6EK1CAYAEsmUu5Sk
MRIIT3OiThgOlxiMECfEykZYv0sMVEHxMpasX6QRiFGeeau+6JRA+lXO0fV0WiMWl9SnaMJXJTk2
hsdi0ysmyJBiT0EbZFHahEYlQh7xZEZGvRO3tDhF/EXQlvqpDI19afICq1g1b6lrmHMKgqMoyEJk
cRzLql0ktgj5ZocQUbsI+eZUMNnqXm3bYjbIbyS4Ha6F78OfLuUptC8wwMYGlUGqMzsUZatQJyQj
AmcjiAELty4BHPaoDzxMzIBEQ+kHMoXIXRdM8IwqVanG5rBNQBgoG3dExLZl2oSfWMxFACMhA4yI
wQABlH2iMkZW7b2gKmVCVAyiBBuGAP1rTCgJoFqvT1bskxZjQur1u1cZi+nbEqWml2Jdj7Q2IG3i
3h2FTgXMiWUIXDpkcBtU8QAHCeEt0mRgS7570IXZNeiW7wvLmWhjEDPoqmCAhElCV3jn7oqpl/Lh
BuFsVE8vEQuQoTtUY3C88Se5MST1DEUkKxOwhRv2uG/GkhkSMRJHneWiWw5rl/8A6go3bR1Qlh6Q
xkHBoQhdtR1cvM8Qx0lDnOTJ0yrMDD1LzrXgl95b+0ITgXifl5hMPE5LO5y/0xX4jlpcZruPatEx
oujEHPsRBDjYjKA1Qx0bOxO5u2fdPiitcC4+lCXhux8MwvJ50BhSN4ZoEFwcCiDUHELXyxMT7UcX
C0TOm6KGJotFwaolExe9y2zGUQni04HEEIz5KXlzJEm7Ni/D/wBTGiYoLhoXQMX5jlvZasgFrsHR
dFXFCO1eXzUTKHs3Y19acgThLDNE2fiW3rEmvcnFJZxOPzMya5ES7UZ8rMgt4CaOFCN6212Mn1DB
jkmkG60Tv6DsUhsPVn2KECCYahq7HR5Gzw2rdC2bdS5LZFvWo7gEbcKyOO5HXEgjiExSqANcnRkD
jkrkY2+LSXkMhtREiJxiHBz7FwyaWzMLRck85YK55vtPxKdm3ISMSQ/YhMEuKgqcZTNqZGWBUyZn
zIyJhIZrlbZGqQo4FaBcVGxXBccbELlypyUbVCaP2KMYhoygCFRYoZnJcvy0cSBKXaVISDEUbsR8
sORkifLwWjmLcoz2p44KmK0yAnHYVWItzO1E2C08QjC7TSUxREShO4fhhaYBgjKZHYuGLxC1xxzH
Vr6aqcdXmv5S/wDsFX/9Sf7R+dHnIRG0omzA3Wo4KErsI2bJylJitc5C/dkaQ1PELy+XjbhbbxgG
iML09YkXlSp70IWoCMRsHyLVOQiN6MrcvMnlGNSoeTMgnxDS0YjvRuyvTNyQaRdnWi2GH19Q65xD
ZPVCPK2hNyzyLIDnLg1SNIQJfsUo8lyohpobsyG7k05XLuRtxJMQd6LkWLQPBGI4vpRuXJx1tWUi
NS/7O2JOWBk/rRlzl0S14xAoBuQhatgAZtXruSw2lRtRIuXZFgAaDtKnag3L2MPMD17FCd8+dKGF
GHftWmIYDIdTVckIjehDlib8yWJiCRHtUhcv+Ryxw0xaRCE7h82UQ2u41AjGEZXJAONI4fWha5cS
EJRctQB96je5qZmYgARJeqaEQPS67sgHpGL1JUrVpuX5cGtwFzIdqMxESuybVNtmwel0WgL94+yD
SP6SmSNdy+Im3GPhgIl15nNDXJg1t3jE7UIxDAZDrytXBqhMNIHYpWcbU+K1M7NncvIugC3faOvO
JDt60b1oiJuBx+kEDP7yIAkAvxEBQ+JAJ5FvrQlGDQHhMsBvWq9LzZDEZLyLMdVwUEYijoXedm0c
RbwZWrNzjtWg8hbD191aeWsmPEzbBvUYGZDYiIbHetU7bgZyqjYhGMoRpAYMhzEp6SPC2S8u/dMv
oR8qkjgQcVKMhxAOScSnuHjODVR0kvkCjGVcyUZEVKM5jtdVCwYZrXCZBT6nzBWp3P1rjHERhHJA
RoXPayaRomAovKEjKGyX2FPD4kTiDj3Ly7A+Nco2JivNvQMrknfvR5a1A+XE6rzfRFfd/QvNi9sx
HC2LqUOZuSMwODY6Fsh5jwgigClfuDXqpIYepeXbgYxxJOaBuS0yArFTmY8MYtacfSrougHUGHaV
C3atxESASWxKt3eSmLERHjbajzHMAyk+rWM2Xmcta+MCBWmCNwRJ00MStLtMYjoNt+EkFlQekJKc
kBMZBMJOdgXmGBEdpoE3Lw1NjVGzMNMULB2UuaMyWDxgN6Nq+JRERqd1C3ysmiYuX2ocvLTPmzEi
TZOhzZJu+W8pQGajODC0D7QqDsVm3bAny1+WmZAqCU0oEyOMnV7l7dwG3qeJlUsjO9eIjuR1Fzqx
JqyjG2AIgUZaoikIue1azHSIuFKI8IADow2hCGcq9Erh9kKMxj0CGc8UOxTtnEfUpTOWCfaR0CyT
plHbmrrzBuSiRGALkkq3IhiYdEu0LSM5BC2NiLYSr0ucI1QgvMjgce3p1kcMfrQiMELsRQ49IiM1
G33K9E4aot3qe8j6ujl/8f1K/fMQbgkIxOwMpwuDhMS5OVEdy/xokZozPctQwjh3KF0ZivQGwzQi
O/ohMbG9SncOdE20E+tWzaJEZHiATyc0zUNcgDIsBvTPRUqU9+IlNqNioxbRCOAR2RDkDFDS7HBR
1kaJVBRMQJ3AKAImZ0wJpAYJ3qEwHqQucydMchtTQMRuCLBgNqx0g0RuEtEZlCLHRtUbra4xNQdi
eIADOEYjwnYmMqbCaqN2FTDEbQUL1qWmYqwzCF+wC37wDNSuwrIYDeg8AJRwdMZ6XoQNieUgNr4l
GNjghtzRMi5OJXlT+9gOE7RsTHFCIGKEr9B7uZWuza0244nNA24iVwBzIrgholmAMSjLaX6/m/ur
tJ7pZFeZbOm6MDlIbCpXYAixI/Hs+4feG5CcDqjIOCPSG3cGqMgxClaug3OVu0gTk+RUeZ5cE8vM
cQifCoylW1cxf6whOJeMqg/L2NRmvM5YvD27Z+xaokwux/WiULXM0l7M8j0CcOCYzGB7U4+Fczyj
JaLvBIeooxuREonIozsPd5d3lbeoG5arZrnE4jodzC4zCYXl84HiC0boGO8pwRKJzC87laS9qGRC
0S4LscYGhRhegJbDmE0D53Kiuk+KIXncndNq/iYAtXeF5PPR0v7ZHCje5KVDXy34T2LRMeVdzhLP
sQkDonHAheXfiwDAXBgXTxII2j5nMZgSiaEFapA/h5eGQDtuKM7cgWyVeoCh0Sjgm6hBziVOVmWk
/YUZSLyNSepLZKQCBiK4BSuSxJdSpkB0uES1ThuUiYhyGJ3IyhIERDxOfYrkfM03JRIETkVKZhLU
ZeLJfEDlOxDbECcRVRaYHOW5yMQ1SFwRAiSXJLFO8f1gtAnAD9ILUTAk7ZBRN8wloAiGIQkJ2wDX
xBVu2/1ghe5u/E6aiES7om0TG3HwjcMEDdLnB81EiPEQ79HxIAnI5r4ZBGwqsaqqouCZpkjG/AEj
CQXCcckNQ1RBqEBbtgBEUi64i7op9p+U81/K3/2Cr/8AqT/aPzi5oN6LE3DHEQqvgWvLtk1lKhZe
bzN4z2WxPEqNvlrFsvvcjtR84wJlmxLdi0RiDtJFUwDDd8iM5yGkb1DyoQlr8MXeXey827cgZENp
IcR7ETGs5VMj1HJYbSjplrmPZCIeNuy1ZkMR60JxgeblAtFmqftUjfELFsikQeIdrIC1aNyX/Wm5
P0rXzV6gqIjhiFp5dpzOUantKMIGNqwRW4xfsDqM7/xTDwuPrTRAA3dfVdkIRGZKA5a3K8ZlokYK
UuZkIW5hoxLgxHYE0ICcs5yqSmFAMuoRO7GJGIJUo8nGoLREok6lG/zXMHWQxhEDSBsTDTCOJkWc
oixETLOJEhkBIHSQ5afCozvgSMQwgCdPamhERG70pncOmIzRsf021OcwfiTkNIiNz7VK/wA2DduS
LxhM6hFCMQIxGAFB6Q3uZmIQHrPYpw5SBs8uaa/bm+zYrcr8Y2xAvIjilcLM5dC1YgIxHr9FKDNd
hxWpZiX96lC4DGcCxBxBCh59zUbfww+01dawXs3Q064EBTEm0EVJyRt2I+ZMlg2CF3mj5lzKOQXG
RCOQCJ5aRs2ABqkaOiLTXL0vFclt3IWrczKc8QKABCQGvmZ0h2lCMPFjKWZkUNYEonFwqng3I3IC
mY3rRM9r5IajVaICo+hCRk7UqnnGB3olgAck0WiN1U8pAb5JmN3swXmsQCSAOxHXQJo02JyabEaD
cV+cNiYGuxaXYrQZknJlpuB5bULHKjXelQD3V5t0678qmWLJ4jVdnw247ZFaXecjquS2yOKEiT2d
DmIfb0Bwsk6BiWIwKGuRkAvJBEgMCckfNlxvmrfK+KESZT2FskYxti3pDgxpgpcxy5Pk1BG3sC8z
mJyjakag4uo2xc8yEhqB3ISIIjtZPCMj2BaWL7M1qlGQG0hfDiZbWWggiWw4o3ZWpCAxkU1iGtti
8q7ARnsJR5owAtAan3LybJAkz1GSFqRMiQ7xC/FG6Q8TIRULV2J8uRrJ8lbjyvtAmT1X/caTzBBL
ZsvxF6Uhy8ZapEimlRhygFyLg0phtQ82yTG/IR1AuxUr50gzOoRIXMclzkdEbJABFHCnc5Em3ehX
HEK3K9ETuGI1SNao3gRHzYM2QKlOcgxGG1StUtjUZDZVRs8vISIlqlIYBlpkHuChOTrzJSDk4qJk
TK2TgNqlOEuEHhiKrVbhRuHVRSHMBrkydVEYQwCk+ZQO1AnBqdEYDGVSpWzktOxHZGgQVKiRYqER
gZVQ/STDNRGkGRHESocwBjQxydECgEQAOjvClM5Fa1qGID9DDFV8RDrVmmxP2og4hCIxKbP7U6MT
izIxOI6PNlicOxasgjIZmKn2josD9JvUp29JmZy8LO9UYysmzCVCeg/pqniiStMTU0Cqp2tlQiEZ
nABRuDA06JDExLhRjHHEq2TixBUBeIEXDalesAhiHBGwUQnC9MXI1DlAG9KQG1MDVOA20oxiWAzU
gX4gQWUfMbVGofGq8izw2xgU8iXOJVB3ozvxcHwhCGgAYOpCVIDNQuQZgcEduAATmu11ocl8VpB3
KBxhUEISnLTkRky8uzSAxIzTppZ5IwPjBoDsQjOlm4anYVr5ccEvvI/aFr1ARbFGHLjUfeyC1XJG
R6QbeINFbuz+Hr8XajasQ1bZnJW/PkJ2o1iTmRko6QwIaQCJthifQmEg4IYo8vdPHHwS96KcUuAM
DkRsKNi+8bEzQH2D+ROKg4H0krV0PE4bivwfNx12p0iTgQVq+85O6XiYnwKIm8uXuYHYhKJeJqD8
weby50Xh6pdq8m/HTdGX2habhM7GAliY9qEol4nArTMU25owujXZykPFFPH43L+8KyitVuQkF53K
nyr4zGEu1eRzY8q8MCcJdnQYTAlE0IKM7B8yznbzHYngeIeKOYKdzC4MJxxQs84GOEbowPatoXnc
sfJvirjAo2P6jENEgam+ledydwXrMqytmrdi0z4LoyNJA7kITj5trASBqO1UaUTiFr5bijnbKMX0
3I0MDQ/M8rV2InCWIKN3lXnaxYYxXxYAjAhmRnAgDY9R3JpBtnQJDJAjBUxRyoiN/Ujvp60erEgO
TJ0Lc46W9RQhELSMc+sTtUpsHZijZuRF20KucQFrsEH804pmbciUWTOexT4pM5zQGo+teI03okko
VNViemERg6EdlOpVcUQiYEgrhaSqK5okDBVxbooE00BEcOZQjHLpp8jr081/K3/2Cr/+pP8AaPyu
nyQ6iZSGMYhyjHk7MjlrkKIXOa5g24DGL4lR8jlvMM8DI1PcnvwhGLUgJGnqRMoicziTh3JoREew
emJJYDElG1Em4Y+KQ8KazYOn3pFAXLMu5A6zbOyVFEi3OcZUjIDFarlki03BDU3fJSleAnKWEcYx
Xw4Rj2DqPMtuzTcvCZDsZ6aBf91zDOfh24sCe1ShydgAikr9yVO5CfNS82UaG3aJLn85fBtR5W2c
C3Env3RKZxMijDlY0HhJieLsUbnMXAJ+4BQIi3EOamTVPoCLkuIB9MalCH9OsuCWM5ZBSu85eJjI
N5b6vpTWbYhvAr1NUiANpohEXI3LkiwhEv62U7do27Vkj71i9cgoG/MXjbqKYnaVxyGoezGpXwQL
cMifEewLiZiK3JPR9yE5xjOcQwpRNECI2CnpXNBtUbdqQuXZnCPFpG0svM5y6YwiXsxgNPeVotim
ZOJO/wBIZSLRAclG3yo1XiHjI+FRvXZyMDXVLwxO4FExiJTOMj9g9IBOQi5o5ZR57lpjzZFr1sZj
KSdniTUZOy0WYGdzYKlRtc1KUbTtpjt3oQhAmzdqZM5iQjHlQZ3NuQUZczPzbodoPQPkm1aYnwxi
jevkjYMyhctHRpwCMuYIjdZrYyG1fSqqg1QOMU8I6dqc8MtoVLtNiMgMK1o7IvIiWIiUxrtC4XA3
FHTdmG30QMzqEsJYrYoCzWDONr5qMCCxrTcmfVLJapYbEfoXAA+xARIic08jXaqTMTtWmwfMvGgl
kEJ3QZXJVMwavsRmeACpU/6jfBEIBrMNkc5d6l5doN7JJqhIzI8wYDAIHzZE4mtVwTlKvtVCMb0S
QcNIzTnHZ0V9fRRSJx+zoE44jNaC0YnxHco6SNIFFM3Yg28A2ZGaaI1wbg2uMl5E+WMRbLCYo4Cl
yV3VPmLheMCHYKXOX4ytW9WuRFaIR5KBnESAkcMEbHNCQu3iNIIcKfOStTETLXTYFPl+VtngIEhK
mCBu2NX4giMTHAKXMyjF5AcGavclK35FuyRbkTXDJQ5nlYxuGchbJbbgo3eYl8YxDgYBcxyvMzJ5
O0GgPedXDyg8u9EPEgs6tR5gC7d0jXKVS5UObhS3oLwy1K5buAaJRIPqU+XtycmRkHxUYzAkRMSA
7Fr1jeM1cvGBNu4ctgRt2gQJYyKFsQFzKJWucSTm9AokkOSweoCd3BxZDyg4JcspU4sgdq+OBBth
coxgAYM7q1qiBAyGooacMmU52dgG5yhO54pAEppFwahGZwioXBgUAiBhGgQ2GhU7mZDBVQKndOEc
F2F1H9LojHmHjKNHxdQt8tSED3kozOJiOiI2lCOZJ6DA9yPumoWs+GP1rSMB0aTgV5gwl9aNyQpH
BaRgOhsivMHtYoRyzQhH/wBDolHZIKfaPt6OW7ZBXb8gJTgwG5St3ADGQLqQGAJA9al+mPsV0+yZ
Fu10QcI4dEQcDQ96IFNi0jGSnE4xLjv6CDgU2QYITiOJ0DbB1AurcziItMZgFeKuS1O21NEUGMjg
tJPDmVIiTWxRyUbfLcUs5ZBG5dmZE7Vg7oABCUuKX0LcvNrLJlb0cJcuFQsNi1ku2Do3DVeYDTIK
JttrzdOTwj3cEJXCWODokRBbMqY01I4aLVcLlQvW4uYeIbYoDCOITTGqfstgVIktGRfSMOpwimZQ
mHldixBydR5qU2FyPDEeyRtUuVYi7bI1j80EVRi5D4EYgo2OYL3R4Ze/H0YMS1yFYS37EQRpuRpO
Owowlw3B4J7Ef6fz506fu5nBu1aokEHMekNudJCsJZgqX9N54OC7E4EbQmuiVzlLvhliAoxuS18l
c8Fwey+DoSiXiagj5grwzHhkMV5HORxoJ+zJG5ZeVk1MMfUtVsuMxsTHBa+WPD7Vs4dyMuWJs8xH
xQOCFu+DC5tyK03BUYSzCEbvxuWyn7UQhO3ISicx0ebYPl3hmKA9qFnm4+Xdw1ezJGFwaolOHvcs
cRnFa7Z1D6kRcFSGcYoG2eEGhIoRsKibrWOYY6Zg0K0c2PMtYC6K03rz+VkATWmH0LRfj5csHehQ
n4ZjCcVp5rigCwuDZvQlA6onAj5moj5tqL+8KFGXK3GPuy/KmvWTcgMDGqEvLkJRPhkMQuD4Nw4w
lgU2APqTuEWziUR39SH6Q+tA9QBQtxo0XXHgDimGHVfqVDupXLB0lqRfNC3zIMJ4Eo3LXxIjFlvV
wxt6tMCSrpAwkaJ5CqmcmKaXrUYgvToauv6OiV0igoOpXqkHNGcC8FqmUAMkIwHEUJTl2hNEdDej
fqN16dHNfyt/9gq//qT/AGit/wA1/EkxyGJRhy0J3Jj82gQPnTtQxmWZatN29qoZE+JD4JtWW4Yg
se9G5zD3JGoiSTELggB8gMpFgKlHleUcQFJS2qqYBTuXbojciHjDaVEYF6y2IC9eN2+BTXgN0UDE
uDl1CSJSAodIdcD2bZFCSBLvWri5m54SdWPYE93y7NpvA5pvohDl7H4q6f3hqgeZuCzbBfRClNi0
2WuXdkRqk+9abUhbtEVnpqOx0PNmbrF6gVO9AAMBgOvqkREDMogT8y7lbjUkqI5fVZMjxPFhGPaU
Z3rk7t2XjkSwKELcRGIyHU1XJCI3lGPJQ86YLB6OdwUbnNShbt/9LFExtx1GspkBeXbHmTHswTGf
lxIpbiHmUJt5cSADOY41rI13AG1SHpwJVuTfRAYllGVqAs8sCXEzWZwyyREYgzkXlJtuQ3elMy9y
QJjERw1DIq1ZAmA5HMRgXjXANuXmcxIzlqlKMBSMRI4IRiGiMAPS8qbUpG1IGMIxf7x9yh/SbszH
zCRMzi5iIgyL+pTt2pxnysa2y7SLjBkL7GF2chajZ94yzT8xd0XcQRt7Fct6ZShEajEBqI6Zm1E4
jPvKMg8ogtrGDoXOaLzPhjiUJRL7AiIigx6BbvfEt/SEJ2pahszW5CQY7jgg9qA7Ah5YiO5VkyMi
HAw7k2mmaOggg0IdcVyMRjihZtVjEvKW/d0DyyRuyQjcFZUcY1VaKUBgM96IJ/RCbMYoPHVHIjFM
S2wZ+pC3E6IyxhHFt5Wu5JsnQkCX3Zr8NaHwoEHmCPogjbIo3hybYtduRtvURxC4Jgxj4QgL0TGI
PEWcMvMswBElgBsyWHqQMpUNBtQLhitoVaLUA+1YpgQ6PmXNQOQWmJ0wAoy0XYnhzTAabccNpUNH
hYKfMADzdGgSV8XAJCUCANpKswtgR0jiGbrkLV3TKUCTKWUSfC6lcukC2A5JwZXI8vpiJTMu11aj
MCZhMT7GWvWGxxV2MmgLkzMHIuhatETIIkTlRPJ9YxCNwms/ZWm3FonFaRASAwJyQnKYEzsyWiVz
hOLICBYiplmjEzLnNeMumx3ohqhEMw95FpNLZuRiBqDu6APDLYUA1ThEJydO5VGrvVIt3oQiTVGN
u5q0ijiqu/iQLkno+1NgyERkKps5n6FIZwU7hyDBPtQbF6LfGp6ANpXlikpYoxOBBUe/oYh0bmkG
SuHIMOiA3qHf0AhAxxxCAGJ+tE7eh802f2oRGOHen6THMfWjKXiOKJ9XRI74qfaOix2y+pSt24G4
bhw2hF7PlxI4iC5bol+kPsRbbRRujA0PQDsVu761oGEfrWk4SDFEbFKZwC3Gq0gOHRldLRGKnOMu
CMATvBUvJGm6A8TvCF25JnyRg1R6kJTzpGI2pydNvKIVQ6AAonYHtTaQBuTP6+gDbtREBq/OKjCE
XJrKWxAQJfMoQB7ihAAFGMxpIDghGAqSGQJPhomah6Sbsv8ADmUbVsmNty21keXuHGsCdqIkKhUx
WmAc7EJX+KWUArfmQNqzLMIfDE7dHJGOboRshrNw6ZgYRJwKj/UOV++teMD2oqN62aSxGw5hcJa5
GsJbCuIabkDpnHYfRi9apdj/AMw2FOKSHijmCpR8yFrmLdbcp0fcrY5e6bspff2J5HbFAm9GMzjA
4grVbkJDd6PVEAcxCtuaP9O/qAaYcB8jky/Cc0PN5SQo+QGYKjCchPkbp+Fdd9L5FOC4OBHzBouR
cLTcefLnCece1DmOUIBOMRhJaSDC6PFA9GqJ0XBhILyebjxezPJPL4lrbmE8SJROIRvcowzlb2oR
INu4Q+mX2dGm7F9hzCAnqu8v72JiFqtyEolefyh0yxlbOElokNF2PigVpkHicQUdA1WzUxIqD+av
hxjes4TtHxD1rzeSnokPHZJdGxzMfLuYNIY9iey1yznA4jeE22hhLFa+UkwztnDuXly4LgxjKnq+
aeKIl2hCNy2C2BFCEb/LTMmxtyqjJpQ2oXCDOHrogSDFxgeoDsKt8yKxkfr6kRvRaojEAdqjHD0d
FxjUuAvbJYA1cbE04nlrxz9klc1KTGItECQVyUBQyKeQqMV5URUmvREbB0MM+gEF9VfWOpXofqMa
g4ha4xeB2JoRJXmS8Sr6CvRTHr19FzX8rf8A2Cr/APqT/aPzS85CI3prcvNn7saoG1LywcXjQDvW
u/euXZHEgU7kBy1mdu0cZNxSRlzE5GJwg/1oERdsHq3yInmJ6WwjmVLl+TgbcJUlM4kLVcJc5ZrU
xlLZkEZChPTG5dkRGBcAZoX4GcSA3FmUSSwGa8u3blKRwJIAURduMXrGM2opQs2IxETW5KTgqM+Y
nEywFuBdzvRjytiFqP8A1Shc566JSGIcs6lDkLbacJaS0uxRu81eMWwtxFO9E24jWcZnH0BE7kYk
ZEhXI8rpOgUlJzqOyLKF3nLgsxBeNqI/aRvka70qGZGHYOqPNmNRpGANSp2uTs6GoLhqPWo3OfuG
dyOUSW708Yxt78ypW+XtSlKOM5BohGNy5cuyyswFPWuKXk2jkBx9joGI1SFNUqn05ndkIRFSSvJ5
CEzbwleiM9zoXudlLmeZOMpmg3BCMQwGAHpDrcmIBMYhyAVLl4CUZyI8uNuVRHZLeVbvc1M2rceP
QPESa1dabcWfE5nt9MTIiVz2bQ8UjkGUL/Nk8vCydVmEaS1EVNfUnhAajjcNZknaVK5MgRxJKsR/
pRN3mIOZSiHjE5Ovxf8AUrhv8zJpGGEIkYBtykTERMgQNI4juCnd/qN/yInihYBctk6BmBbtRrC3
h3o3LR49hRjWMhkhbkGl9acZ9Gu1IxKFu/8ADuZHIp/pTHoxxWlSDtqBD7FoYvk2acwJXFEhU9SE
ptAbDivMkdch4dg3pu90fLObkoyArgy1XGAGZUvI4QaeYaDtCcyMp+1cKj5Fxm2KIuEa8cN6hZid
V24WjXDetDirkyxJkcyjO5IEblpwydOZ6kYTAIkGKJ5S8Q/snBOeIbmKBMjxeySxC8YIOZKMZz1k
1O7pcrBO1VQstqIyWHchbBIiRWuC0Qk0oycHNQFyUpMGlqw7kYWT5cmxgWdargOrHUc+9C3O/OVs
YQMiyAJMrWY2IXLcnfHaiI4lGMpERGK4ZNtKxfetWkSbArRIMfoTA0zWoEg7XRaZkRgCETdtiOwh
Yd/TgFxBGAk0opzPDAISLmQzTBq5laHcgOW6HkWRjOgkGjI7VckJB9Jaqt24SIM5OW2JtqEMXCEB
hAMp2jhIOhbGJLno1HCNVOMs1KOwoE4RqUdgoEdwKfcehpRJ3gKMbWLsRuUzv6IDtUBuPTVMcMuo
ScCgcuklEZIAZ9M+2KuDs+3o5c7ZEH1K9KhuRiNO50dVQcXVzR4BI6ex1I7x9iIyGClbxIFExyx6
HOJcgIk4vVCQyqhIYSAK05zUJ54HuUtrhTt5l2Peh+JNJREJEV3go3LUvMkRwgDNPdm4xQ0S0ROa
8qMtUInxLSgCRtTxY7+l1wnuTSqMkIuzJtTS3qldqJJ0tmgZScA0fNMLbtR0CKbQtRxzCojKchEb
0Ycv+ufsWqcjI7T0CQoRVC7Dxikx9qE73DHYonlY+WY+2c1cHNW6xHiOfYpW5h4g0QhAMAjCQeMq
EI8rcqYjgPvQ/uXmD/8AC5g8QyhI5oEVBwKHMx8JaN0bve7lTPP0fm2jpuj1S3FW7V74NzWIyJxi
+ztUDy3MxndI+7DexjgrXPXoSMDwyiKmFwbQpR5cG2NHEJDTqNGYek/E2OHmbYoR7Q2I8lzRbmIg
gahgjyXORFzlp0Ixb84KNucvM5G79zex0PlJAguDgfmAiQcHEFeZYGu0fHb2diF2zLTdj7QxG4ry
b8TG4MJZHo0ziJBf9S0aMcl5/Ky0zxMcitF8eXc34FaocF0VjMUXkc4CH8Fwih9SBBcHAog1BxC8
/lJV9q0cCEbcuC9HGJ+xCQ4LowmPtXk82GI8Nz2SE4wRuWjpuY7iUDcHkcyKiQoJLyuctagfBehV
ghJxzHLYCQ8QXmWTouxxahB3ry+ajTAXRge1aga5TjitN8m5bFBMCrb1qtnUPmkgihxRoDE0IKa3
LSNijGUjLUdMStE4kFOLc/Uvu5+pfcy9S5jl+Yl5fNWXlCEsSAKMiNm3ooHQvGBFuOZGKM51kS4H
oqdBvO5qG2IwlQZFO4k2AKF0DCTziPd2hc0Ld0C5KBAhLHBcBBL8Q3qQ04g1CjdcESPerVmAeU5A
AIwkGIo3Z0PkFbtZGpQhAUCr6JjULhAHy3mj/wDtb/7BV/8A1J/tH5mc0CYzEpe6C5Qjy8IsczkN
6NzneZiZYNiAtPKQI23dDv2Im9elC17IAAJUfMlK6I4CRp6kww+RmNyWqY9mKN6TAYRj+VFlUv1X
QjrOgYReit3DEmcsIjEkqRvCFsEYk1CFqzb/ABFz3jUE9qe/KPK2R7MdiJttfvnIDUVosmNm0RWe
mo7EBfuG8xeoxO9CMQIgYAegFmJFy6fZBDDtKna5W3G1EExleJp/hUJc0YnRiIuTI7yUNMIhsGA6
jmgGJTwhO8CdIMBQnYjrtHl7Eg0BqaQ3lCV4G/cHtTJK0EiLezEIm1b8m2P3k8wpMJ8xzGPFSEU/
MXJRtsPhROa+HFic8/T6pFgMSV+H5GJv8wfUEL/P3iHDGzGoQhbiIxGQ9IDcLPgMyhcty8nlj+8k
dMidwK/7KPl2X4+ameKbbNy1afN5g1lenUk7vTEyIAGJK/D8hAXpgDVPGAfevxF2Ru8xUmZwBOLd
EhKWu97FqOJJWvmLkuX5IHhiA0pIWrEGGZzJ2lG1y48y77R9mPaV5/My87mZUjHMnZEZBHmeajIz
d7doimkuQAhdv2xO62BwiNiPlu5oIbzsRnzUdVydZE0bYAjctccN2IWmfFHfiE4Y7s08QqrRM67e
w4jsQlal2xOI6OFau6qItSEpZAppx0EZqIYnVidiIEgW71buxDQHFIZAok/Smty4hkVIXQwyZSkZ
PAojlom7I4ywgEJczIzljpFIheTAAMGJ2KPL6SGDyGbmoXnczE6yeC2aU3qdwwEBHCIzR5i8Gu3N
vsxyiFijmFQNuRALOjKJckIPjmnGGSdOQwTg9Lah2LJbAtLmmYXBLUNhQNBWrJ2dsEZtpmKDe6BA
aQqmuim1EwKFq+AQQQiYRMreRCMSGQnaNMxkUIxlpue1EqUokknJHXwgDSAEYyuyrgjGMiXxKDSY
FaIVIDg7V8QaSi5oMUR4RvDIF6FO6xTQcjaUJSkWOGlC6QTJDSCHLYOuM13LikAE1sGROxMToG7F
PLiO0ozkdNuJYNi6N2MyYxoQShqMTNqA4rioMiicoh1KW0qIGdEDkR0GRxnh2IN2ITGElrOM0VM7
ApHYFbtHCcgChbEBpFMELlqIj5g1NvUz+d0QG4q2N3QAM0FrGx+kRCAHiOC1Zt9KbZ0MjI7HWoep
V6J9sVc7uiwR7xb1IjloynOZo2zeiblvREjiMalug932KXahvoVJsDX1qMBmowHZ3KQyxHf0QHul
ihD3QpRzjVGUNzqVyZfS5PcrGofDnICQPavKNuOhmZhguZsWXMYETtjYCAW+lAkEA4J4xc5BGMo6
XwOKYuJBGOgy0n60ZGE4gYkL4V1i1dWQQOkXHDBvrXEdBAq+1VzDuE8SmOHRpMmiUIAu2aqVRPI4
Ixtcc/oCJuSJGQy6WjVRE+CMsCV5VsEznQS3q5Z5p48zbLGJowyKEJhrcAR24LTbmYyHhKNu7S8M
fzt46RKBa7brA/Z3owmKSDSHuyU+QvwldtWZmHnRrpA2p4kStzFDkQUbUqysnT3ZekNq9H9GQoQV
LmCZQ5i1cJh+dFRuyY8rqEpwwFBsUeZtjRO2z6aUC0zDTGD+16T8ZynDdjWYGYQhMx/F2xRwxPrU
7V8auWkWuWzjA7Qo270vM5G591e918ihKJeJqCPmHzuW4bmccpI27g0Xo4xOIO5C1e4onwz/ACp8
jh0G7YOmWJjkV5d6OiYy/Iq/Esj9YBMDqj9IKp8blTUv4ooXLZeJ+jo1eC6MJhC1ztQfDdAp3ppA
ThJBnu8vmHeUVrgXH0habgfYdi8u7HzbBoDmF5vJS4CK2n4SU7S5XmR4mpGS/D85A6jhJnjIbULv
Jy1QONs4Ebkbc+CeBhLPsWvl5GMs45MhbuvC5vFD813bguCPNcvI3IQNHAFGXwbJuyh7IDsgBy4t
j85gvicxbtg48YX/AHH9RgOwkrVLnzOTMQIuFKXMylcteyIUquHlpXCPekvhchbDYE1WkAQiMIgU
9KJeK1I8cVE8va0k4yTuy03OKKu3CCLokRH1IxEs0agutNxi2Ct8xQ6HIB3hkbjgaxqpvWKp3o8x
MUwCp0N8z81/KX/2Cr/+pP8AaPzJ8SYCEeWsmZOBK1c1cjCPukt9CIsiNy7iZywCIJjZtbQKlRF6
XmCOAYB+1CMQABgB6X4lyMe0hfeGZ2RDprVmUt5LL4lho7jVB5SjI5M7IEXhXIpxdg3ao2rJEpHG
eMYqUY3jGxncEWP+FCEbkrly4XOr0IjBtWSt2rTXeZlWROEQtXP8z22hKilDkLcRpwJB4juUbvNX
QDlbAoO1EwiNcqmRFfQfFuCJyi9T3I2OVsTndbMUTc1LyzIuZCWA2ABD4euQxnOpK0xAjEYAU6j3
JMdgqfUhHkbc7hdpT00iEZczfuxsEVtksSe5PCLCNQZFwFptxldmaR0ij9qBv3fw0CfDEhyNiNvl
bEpyBrdumiM+ZkZjK2/CE0IiI+QM/mXTSNuFT9CkeZPlcrQwEXBOeC+HHiNDM1kW3+k1TIAGZRtc
jEzMfvLhDRgO9C3akef/AKjMkW4g8EAdrLz/AOsSF2Q+7sDwQQhACMY0AFB6aU7kqxDiA8R3AL4r
8vy0SJRA8U9xfYhasx0wH09q1XpiEdpRsf0sOSW8wimnDU5Qv8xM8xej4TOoitUyIxCzs8v/AM8v
yIWOXjr5idIW41LnOS/Hf1E+ZzBrGHsw6JSmdIFSUOf5yLm3W1A+yNp3lSBttOOB9qiF6VqUYVem
W1GURon9BTkGJyIwWm8O9PEu6JAWu3IxkMwhb5nhllLIoGJcHAjpdnAGOaECPhnbuUDbjpqxb7Uz
4YBAxoDT1LVKRG92opC1E3pENjwjtKM+YInbAcW40A/KgRHSCMBReXy05TuGhzAVsQHm3IkGcdsy
aBfiedkDPUZadpWrhIGO5edID8PAkW4n25e8tmaDk6gnyzW0LGg2pggHxVFUsiBN2xAXBEpmATgh
lqkInvTCERvWkmoyyWOGK0xckJjE9q+GWO9RuFtAGeC0XIAyHtA1VJFhiChGJyQeTHJYrzrQ444g
ZhMQyE7ZaYzCFnmeG5gJZFULg4FOPyrVIAtkFGWUg4BRGzMICMnOxCMizYhSDPRmXlGJiBgc1pch
8XLoliBm6lExBtZHYgMhgqyATWwZHdgsdA+lPMmZ3pgwXEa7AhFtOrAlCN6Tai8ZHAuvw9qYMpEE
9gQlIHy4DHIoROZUz71B0Su7KBCYxGKEdpQtjCIZOo7QowGEei52KfcozjSUS4XxoSEgKkYKJt+A
UAGQR2Oa9EexQH5vQZnAYKmAoEbUsRh2Ij1dBuSR2CgRtnA4dqfI9Go4BeXE0GK8s4S+tPkei52x
+tXO7osA+/8AYr8ixuRAA3BHV4SKvsU9Ph1Fux13BFPsUbgxjQo3TlQIy2FW7ozoeicsshvUicXK
McpBlOJxopQOEgR60I6qxLg7Frt8qbjBtYerZqd2+D5kzxEhGMqWYeKX5EIxt6mzKHwo03Kdqggx
jCXuzIcfSrov2wRL4cgcpxRgLUWIqGUjbJhmBk6EuUnrcV3LTzVhhDdmgeWuNOWIOQTcxHREBhMZ
lAUk4fetUUx6S8gZYsiAdMPdHRgqYpoimZQePm3dgwCNvmPhygxgN6hcNuOnEfmlW/6jysXlapeA
9qH9yjdtl4TDjoBidNyNYyRjIabkKTj9vT5w+6uUuDIHKS5jneVAny96YjcEg41s6jHmL5hy5BML
ZPDqPsq5GJB1wjItuLemF+ydMfbCcYjxBCYJjB6EeyVpnS4MRtG0ek/F8phjOLP3r8RZ0+fENOGU
h2LQYa+UuUuRONuSFq5LzeSl93PFnQlEvE1B+YdY4Lo8Mwvw/ORAn7MjhJahx2DiMwhOBcHoBNJD
CQWjmBwmkZ5d683lZaJ7sJLyeYjouYF/CV5/JnTL2oeyUbUx5d4YxOfZ0aJgSjsKM7J8yzLxQl7P
YnBD4SgcQvO5QsT47ZwktJ4bo8UDimNRsRucvxD2oIxuRGoUIzCIL8xYHhjjKIzX/bTlK2B91LEH
YmPDejSuIWm98SyMJgcTb1qiRL84GqJk92yMAKkBa7ZcfSPmlpxEhvRly48snExouK4T3qpPTX5C
xwXhCIMR2q5G4SIyidLVDrSOXMj71arhsEdycWyycxbZVE3CPWuKQCecjJskIxDAYel3/Lea/lL/
AOwVf/1J/tH5h1TIiN60QiZE4FqIgnyrdWIojCcjzExUhwAjDl7MbURTV+RDz9LDFnJPeUBCEQBu
9JqvTjAbyjG0DeluoE1q3GG81K47pbZGgWqRPaVtVFVMtpVQhHy4zgMiEBN7Z7KLyrMBcOPmnLs9
AAA5OChzHMTP4g+Czs3yU4wEIQzvMSTuCibxFzRUUxO9UAHZ1zO5IRiMyhbhGU5nCjBSYGxCVIEF
u8uoy5yZvzjXZXtTWoCL4tj1HNAM0TO9EyGEIlyT3Ifg/hklmlA0G0ujev8ANXJ3JBpMwDbAmJFu
EfWtPKWjInCcvCgefv6pH91A0K08ny4sWwazliexG5fe9cJfiJIHYE0QANg+QRhKtyfhgMSmsQ8j
loSaRkazIyopXWErsqk5Ds9L5VgG9zB8MIj6Sjd/qd86Hezy8KSJ3qMbMRyf9OkXkfbmEBy9oRLM
Z+0e/wBNVDl+Ut+feLYeEPtIX4rmbhvX3JD+GJOxOSwUbMJCc5YmNRHtX4j+oT0W50NkZxGAQt2Y
iEIheVy0fNnnL2I9pXm8xLzbu/wx7AvwfIjzealQkVEF598+bzcqymat2dBJLAZoXP8AxoHgHvn3
uxTtPp1Ch2FRu3pAscAca5qdu0GkQwVy5JrcXOmBxUrHMQBIyOB7EZ2KjHTmqOAMQtMqTTxVUA+q
3nArgk0s4nHootOnDLYpzifLlEA0wK1NHh2o24Q16fd8L9qB5mWqP/SjSPehGFqOjYKFaZCUJbCF
5XLRe4QxPu9iMLUQJyxnIVC8wjV5ZdzXVM/kQ8y228KXLW5abcGN+YyceELT7MKBkwMTVhtVIk7V
Q9oWnCJVaEpsexUmBpHhUtJpkjVo4EoxiRO8+aa2NMh4ijGbuUBKUomOyq06abSsSsSvFjii0QR9
K+xAgsR9K0zLxGAXDEA7WRLAkhk+a1SL0oEzhVkEZQYTzbNGWmm1ahlmhZ5jit4CWxCdsuDmnlEF
9ylqiNPssiYiuFcVrMjGUMBvTyg5wdGOk1xWkzYMyeUtRVZOdgqvhQLbTRcdzTuinPEdpVKJ5SAU
pW4ahGpO5eVqIzYCjJxJrUZNtohehECERp1SGLKMbgEzAMBkFZIAi5OCFq3MSkCNQRBFasVCRwVV
GOcqlEbRRSnL2PrRlt6C+GSL9E+5T7lG3LwipHYjaNqOghmZXYW4gDEdhQO1+gDYFA5GIQAzQhHx
EN0RlkMUJxrn3IR9aEImpp3dAkKEISzZ0yJzb6UZHEoSGIQmMeicsiYhXNtPt6LP6anK0DLzDRq9
yJuWjbtkVYVbobaAj0StnMUWmPROGJjUdEI7ce9SHeO9RlvUzHd9a1RDkD6kJyADSaQ3KItgCDBg
NjLzxc0cuQNcAMZZoW4xZ8hiSuAi0N2KAN+RODBPAkTHECNoQuW5mPntIt78f71G6Lhc0IODhGE7
YkxqQgDLRM0EZImUYzDM9CjLlpmzI96I5m1qtR8MmfvUb0C2v/0y0EabmTYSRBDSGSeSMbXCNuac
kknMqvRptxJQnzFfzUNIFmBwUo3gDckX1ITm4lGjjtUZRDWZgRmMgcijGVRIMRuR5OZ/7e8dViRy
Pu9I5i2PiQxA9qOYQnGsZBx0GEg8ZBiFb5MzBFy4Jw1DhPsgkrmDzFpudsypKIZw+IV3mvNIv2TC
MYk+ME1DemINQcQtUC0fY3/mlFxunE5IWzJpD7i5/wDSUYz4bsaSj9vo2NQaFfi+V4rUqziaaV+L
sReMh8eztBzR/fcjfOGdsoRuHzOVueCWxCcDqiagj5h0XA4yOYXlcxx2MI3Nm4o3+ULxlWUBgexP
Gkh4oHEdGmQcbEZQeVvExxITs+/MJi921tGIQuRIF0VjIYjtXk84GB8N3JOC4yI6POttG43Z9S8q
9ExnE6TLIla4nRdGEh9q8nmY6ZZT9k9AmOGYwIXl3gxFBPIomJMJEuDHbtR/EPGTv5sQw7U1+QnF
21xFG3rzuSuCMhXSKxl2rRzAFu5vwPYtfLy0T3YFC1zAMbmRyK3fL9/o9nyeodUA67fND9HNfyt/
9gq//qT/AGj8hr6XVIsFp1GcshEKRtarZ9kN9pWvmrs7sxiI1Zf9pZk+BmRggZGUI+08qn1LwB9p
TAMNg9LOVuRjLaFGd2cp9pdYLBOalVwVFXoc9SqbajKDts6whEOZYIczzMYyvGsQa6USIiUiXMjU
9cykRGIxJTSmScgAa9in5Nu5ag3wyI1kd5KE/wCoXpTl7gOC1RjqngJS4j1dUyIjaVK3yzXbscQS
wUZXDHl7OOjb2o37kYzus2ogADsWkSE5nCEKn6ERbAsQ984siJz/ABN5nIJovBHl7Pu5n1LVp1z9
6VUw+QfGmI7UY8qJxskcFyI8R7Tko3ebe/zGcpE03BCMAIxGAHpDqk8/ZgKknYtV+7+G5QgGYZpE
7Ahy39KtSncmDruHx6RQEHYhf/qQN6+7xEi+kb0IxDRFABh6Y3b0hCI259ilC2/K8pE+L2rn9yFq
0KDE5knMqd29MDSH0vU9yjZ5MRhyz6vPjm2RRnOIuXpVlIij7loHHcyhGpRN+Wi1lbjn+kVpiBCA
7keR/pnZdv5Abk0eK7Lx3DiT0mzAty8T8WQ9o+6EIxDAUAHSxFdq0RJDASGTu7qN24ZQhE5Yk5oy
hPVAUkDjFESABymE4DxykFou1jtzWqBBfEKi1RJjIYEIW+ZyoJ/lQncuDTLBqoSgXBzC8qEPOuDZ
gO0oHmpERlXRDw9hRMYgCLBgEZRYAetAzuAUoowsgmcQxn2ryrHxeZl4p4iLqVy9Ez5q9w24kZnN
CEYTBjjIDElfhYwa9IUlsG1eTKMgM55yO0oyaRlKpdRmJaZDJFpM+xCWB2rVHDAtgtINc9yMTUxx
CMrkGkcxmibcHBzKIERGJxCMwJGZwfJPPHYgSA4TdFSAm1P2VVAfUuCBKpbDbyqgBZIgSFNyPxDR
VuSK8Uj3rM96wWCIIocUdNYHBHhqjCB1T1eE7EDhLMLFYpjILheXYuCHeUxmIjcnnIy7SqADoaRA
QhbqTtooxkCdQdo7FCcHjeuMWxLKViYNyd46RqFAEZ2pASZiwaiENAIapbFSsgiOgmiMakDNQqwj
E6Qrt0kHWaNsTSFGOKmPcw7lGAzKYYCgQ9SkI4zLlOqK3EYk1QmOiXaETvUb0csRtCMpEiQHhZXJ
x8Ege5Q7Og7gEBsARnKjInIUHSbcsY/UpSP/AKCJyFB0m0cDUIyyxXlxNI49vSbRwNQtIzWkZEK5
3dFn/UC42kYRMojetyvRt+F3btFVH9EfWu4dGrIIDJuhj7VCvLOD/QhEYBQue8GPd0ASoZgAdycm
mPrVoCtu9NpA7HUbdqbCNBnRTvCp2bTgjKZ1SZtwXmXpNAfSpaYsRWO8Idrq7bdxGXmQOzVVThKk
RxxO44oiDsPa2p5hhiCgbciIipC08wKH2xkiZaZ2pBi7K5C1wxE5eWMmJotMhWTiJOUgnHDdAodq
lbmGkNqY9GmAcoTvcEVb5a1HSZljMj6UY1ndZ4y7EfNcRAzpVgtJlToMZBwQxCNi54oeE+9FG3hc
jxW5bJBGF2nMWToux7M+mVn2J8dv/wCodNqV2RjK07NsKj5UXiRpm/1oc1yPBG20pw2TB+pCUqXI
0mPt9MYSwOG4okfexxjlcj+VOKg4bYlaSW5qAeJ94Ig0uRpKOz0Zt3AJRkGIK1QjKVqeDVHYvxNo
auVu/e2/dJzZNP4nJXcCKmBKEH8zl51idoOxCcC4PzCYyAMTiCnscVmWMDl2Lz+XkI3RWmfavLuj
RdGRz7OkSsnRMbM15VwabgoRkUb1g6LmJGRXk83HRPJ8D2IyifN5eZcjDStdsuMxs6GnEHenmTOy
MTiyI8UfpCaT3eX24mKE7Z1ROxGMhjR80BIa7RzGITg6oSoQnsFtII0EUI2FR0EW5xHgPhIfBC1z
DRuYxr9SYfEtYu1R6kziR2ZhExJu2RhHMBPAnsOPyFvmLd0nr19JX5p5r+Uv/sFX/wDUn+0fkDej
c0C4rgfYC5Qly0RU0ep7ULvOcxESHsDBPbjqmf3mlwER5pt2drMVplcnMYkE4rTAMBkPTvOYiN5Z
TtC6JzIYCNV5AAxfUsUwKeRcqnWx6plHEdPCHG1CEISubQFC7K0PPA7QOs5LAZlNK6ApxsiVuAHD
LSSZHcgeevmEcrcQx71G5cJvTgGiZ4DsHWHmSaRpGIxPcpWuS5eRmPakKOoz5+ekipEZE/3LUIRi
cTOVZFCPLvMn2gCQFr5q8LNsGkQGftdSHJ2vOvHGTZlPzMhbgf3cU1uAf3jU/ITK5IRAqXLIfg4R
IMtI1O7e8pT5i8bnmVnBgAhGIaIDAekMrsxACtShHlo+Xak4lckC8QM+9SuWyOZv2iQDIh3liwQl
zT2uXMtUpkMWyEU1mPEQAZyrItv9ObViBv8AMf8ATjk+1R5rn5C7djWNseCJ7Nyt8pYtA0eRnwxI
IyKjb5awRdlwykTwxOFNqjd/qrSILuC7x3ryeWhqIwhbH2r4svJt+5DxEbymtxbacz2lG5ckIwiH
Mjgjy/Jva5KJa5e97sQtWYsBicydp6fwtgsf3twewNnahbgGjHDqjzICTYOEYxAi2AFF5lyXw5fe
R2sjG1Hy526AimCM9BNvAkiiMrXDLMZFZxIWi6Gl9C1QVU/iam4LTrIt+1EFnWm3EW5x8UE4iCUW
lwSrJ8ytIbuR5e2QSKSmcIqhMYkUPtTO7cvOuAaz4Qct5XmgHy7YMbR2l+KSMYxMjKkBtkpT5hjd
nW7PYPdHYvMu1sviTUh18K25G1PGA1Cqdx61pnMgZsjasjSB7WalJ9eDEqEZ2xquDxBS4WIwZRiQ
xJaiJMizpz4sHVSuCJKYAR3rime6irXtTgDpYkLxBeIJyWfctRB07WopSjEmMcSMERbBmRkKoxDu
KEDFM7HMFUkOhiHBT2/UtUaa8158JsYtEjIhOZMuKRPUcllWT9ilehB7ccZK4NZjoDkYIeaB5QJM
juC/FW7ZNoyBGTgIXeZeJwjAZBXD4iQBF8go3SBwmvYpCxLXckGAZGzC45NAQKjvQnK9IOXLlCJD
kBiTmpRPhjHLJ0ZWpGQ1VMlKIAAEcVO2faCnM+w470605goS7ugbBVSOQoE2YomUNpLp9pKlK6Hh
b9nepWpW4hxQgK5CMQDAkHuUTu6JdwU7R7lpGMuoNPf2I6e/s6kdOLqRHiARJxOPSJDJRuDFXYjw
xMWVzu6LR/PCM7UZTMzwNVapcvoceLFkZSLmVSVE/m/agdsQegyOIH1qNwYh3TJ9mC87bFGR2qQz
gXUYjElRjH2MfUtIxZnU7d0km2XgNlVZuYmUA/bmrNvW0nJMdyeMTKYQekRgFUnUNi1ai2xQlPCX
wz34LdbLH9GSoMFu2dDykAjCyCxxrRAzLjYEJtpuZtt2qM4iuEtxWofeRGO1Sek3wQo0dpQjZt+Z
PORwCI5uPFEgRjlUsozNsO3CWqECKAYdUTgWuwrE/YhMUOEhsKj/AFKwMOG/EZx2qNyBeMw4PRC9
nakD3Gh6hhLCQYowAeURX86GKHNWZkxuMRDIh6hRuQwOWw7PTAxLXIVhJG7CNQW5i1v94KN2yWuC
sJfYV5tvg5m3ScdrYrZIUlHYfRm3PA4HMKULsjOzOjM4ITSOvkb9A/sk7UYXHuf0+4eCQqbRP2J4
nVE1YeGcdo3rXbLjMZj5iN/lqT9qORRjP4d+HcQdy8nmQBlG5kelpDiykMQhbvDXbPhuDLtQ1cQy
IVficv6yAjzHIS0y9q3gCexeXdHl3hjE59nQYyqDiF5nLEmWBia0RhcYTFCDQH1rzuV7ZW9qbwzG
MUxqje5WksTbehTEG3cGMJfYtFwPvzTwhG5AYgUkN4QjMCVvBnqEbvLy0zBctt3rTzTDZMCi12Ja
J5SjgULd8HUcJCoZAioOfy2vzXgq/MbdTmf5W/8AsFX/APUn+0flLzkxyGaELdkyMsKoz5qMRsBN
B3IzjEXpipJKe21sYAANRRF8iWnYPtQEIiIGz0zyIA2lHzLwJHsxqU3LWjL86dE0Zi3E5RT3Lkpn
Nyeg9FOmuHRQKirQKlSq9Uz5azKbCrDFa+cErOyEgxTXZmQ92NAtHL2xAbRj1HOG1HXegGxqpjlz
GMY+GUgSZHcEDzFwWYnCMRU9qhcvnzDb8IYCIVA3VclhtKMZTMpjCMQST2KUbUbnLueCjU2klC5z
Nyd+6K6iVpJEAMQKlEctbnGPszlGh9aF3neYOs4xDAdgQtf0+y4940C1c5cMn9gYIRtQEQNnyE3b
0hGIzKPLclYl50g4nKkYg5qF3nWlKFSASdR/OdcERHsDekMpERAxJovL/p8Bek+nWfCN68znp/ib
8eMxGAdfh+XhS7SAiawAxJ0qN/nWndFRbHhHbtTAMBgPTG5cOmIxJWjl4+VYj47ksZA+6yu37ERK
7ajqM51Mq1cqP4GwBQGU7n2BG7zPKyvRlSEoBhFthUIX7IhGfhm7jsohDnrk73K4aSS0e5Rucrp8
oimn7UZzIjGIck4AIjlrVy+1HiKFA82Dy/JxwtDGXao2rMRCEaADp/D2PvpYyygNpQhDtlI4yO0+
gnAFjKJAPaoyuNG2C06u4d6KVq3APpoGo6uGFrTaiaaixfcp279k2zGg1Yoyt8UfpRhVsGKD8WwK
MDgKkBMULlstIZpxS4PFFAzDzkWjEZoRDWvMpGI8RdefzstUzWNrfv2o83zAofu4bAhYgwneLDdH
2pIEnRbthhvZHnLkNJwsxOUTinIYHGJQjhEBgvLkGhF3IzRkA1ELmoxAwATiTgoiMcGZs0+mrYJ5
ARIpHNAT8WdVwCo2JhQLiJKoqlkzud1VoAOrYaKIuQ06w8TuUOYhLUJ0ERipXhr8wUjA0cqcb9qW
tuBicUfxkTG1EEkkI3LF6RhnEhk9ssI72UZ83AmOMGNWQ5W3AytQPFKVC+xXeXiTbjPxE19SuTth
zcGmMhkvMjaM7hBaO85oXb0JRJk5GRXmObbnws1E3Lg6I49qjG9UyDmmCDFn2pgHkKxUYs0jWXb0
YsyxdtlV5dkapHIlfhpgxkCAWD1Ktxty16o6pPtVm1btvdAe4Y4etHk5PEEkzlL6AEIiTwuEamoW
GSkBbjFhiomBGgBuxSBBIBoQvNjw5DsRnzEvh4FGNktaOBUbcxpkKEgUQMbgGxGcrU5Ae0BQq5Mc
M7kX7AvLNTiT2qZljRA7aIgYyLnonPICnaiTiOiUjjKg6DA4SwW41QiMHI9Sj3omQe3OkkZxlqk3
DHer5NYkkv2qLbOiXaFcuyzNE4xFR1DclifqUonsUo+rpNyQoMEYI7JVHSIjEoW82V6MsjH61PsH
Rb/1I/arkyAZwiDHc5TGoOKvQt+ATLdjqH6P2q3IYgB1XAKNoHCpU7JzDhEHLobNugwOEgyr7Dk9
ylqwIb1ISBcEYK9IWhEgvrzNVEclPy7ZDucHWrniZSnhPEHsXCHQ0xlErVOgGDpgGCkBiKjtCjc9
m5FpL4kgJQ4Zdya2NRXi0jYFiS6qqpyXBFe1AP8ADuYduSeRZQ5iEWgPG9HU5GegRA0gb1K3OQmZ
OSUJTi5GBW4Ydfz7YeJpdj/9QTHihMesFXOS5l5cv4rUhUxEvsQnA6oSDghXAfdKgTnEH6OoLkKX
bdY79yAjW3OsCf3dzYjOXgkWuxHsy296BGBw9N51rxjxD3gvMt1sS8Q90ocxY8ftN7QQ5nlzxCsx
t2ha4HtGz0ZtzA2glT5XmuKxKm7uX4a80+XuD4csmOS8ueqfKTLwmMbUk9s8bO2VyO0LVHH2o5g/
MWscF0eGYX4fnQBI0EsihGQ18ucJD2UJRLxOBHQxDjYUZ2RqjnBUocDA4rXZOieYyK0X4m1fjhMF
qoWubrAlo3cu8p4kEHMdAL6ZjAryb0X04TBxHehdtnRcAeMhQHtXk83HRcFBP2ZdAk5hcj4ZjFCz
zIP5twYHtT4goSYRmMxmNhQlGB0nwmBo+9eRzECJ5lnBWvl5aoGsoEP6lokwmMYSoQjK2dVsezsC
o4JyP9r+a/lL/wCwVf8A9Sf7R+TPMsFotxlcnkAETLVafACgCE70pXrnbRR8i0AJDHNlruzIA9l3
XDAOcSVu9I5IHaVx3oD/ABBFp+bIZRRjy8BbG01KPm3pF8nYdbU1Uyqe5YKip0VKp14mVQDUIRgB
E7AtY8UKjpclhtRJuRYY1Up8vGOmJYajWR3AKMpmFi3j5dXl2qN7mdMzGkYRDR79qGmEQ2DAdbjk
I9pZT/DGB0YykaE7gEDzN0WbWUICp7XXmXD5k4htUgAAFpgDclgNIoT2qp/D2zsxZGVyRuTxIJcn
uQHKWvLt4GcqHuQlzMjemK1wWmIEQMh8he7MRG9C1/T4yuQf4l6MXA7Ebn9RuTuxBe3akcO1k1qI
jtOfpZQsx8+7FnhE4PReZ/VLgjAnULUCXYZKJ5aPkcqaCfhkexEGJhCPiukuZLTaiAT4pZn0/lxB
uXTQQjWuToXedL//AGXeG5AyaEAwAwrkFL+n8laNwyLXIyDO2zcje/qUdU5eGy/DEb0LduIjCIYR
FAFKzfgJwlkU9Z8sfDPZukhKJMuXJ47eRG0Kc+UvDzL5jGFv231DUG7Fb5DleTNrmoHSbjcLbS+a
Bm2pqtg/SLVsar8/DHZvKLnVclWczmfRsQ6MLfDoaRbNwUOYMzbtRoTiZFTFyYNsYSOKN2MRbgx0
nauIdkhghC9UYOtUCH3JiKITtliM1aIGmcQw2OhzPOScxFC7uvPvBoDwRTnwjHsUuauyYzpAe7Af
lXnY8taLWwfbOckNXiAXwYjVtOC+LIEEovIA5heVFzRzIYUQBOHRUhMKlYsnJdO4TvgpXoRe3HGS
mBcMJRD6WxQjzFkm2cyS6F0PC0ZPp3L8RKQ8ylOxPeIaI0xj2KInbEpDbkiIxDnLYgAwmR3srkSQ
bjcIzVmum5cGuZ+pfEhiQZU3qN0TiICOD/YpEkQLksdilCTiAwm1Co27fgBcyVWBZgVGMiCXcBAT
iDsojKEBGYwI2rzNQkWrFlb8qMrQgGBahKgDLzQRXco3NNLnhapX4yEiTIsIGivz5iGmY4bZFXKu
SugzGlowIqZFR5m5bkJVMdjnBefcm94nUaUdarsfMuSPFI1RtWyARUxQsymHbiYuyEbDli+peSIC
MTQyGa0vvK0awCtOuqNu4HgUdEi29cUzpOACgCDJq1OxaDEaGwajK7OQeDmMQNgQuxDasAjE0i9F
EDF1E93RGGc6lSt5SqFp2lRtjCIr0CQyKNwVLOFAnNyodildujVGJYR3qUDbEZAPEjapQgAGLH1o
dB/SWgYnFaCjsNR0COWJQtxQORohdjlj0CIzQiMWZCS1DEV6fNPYEGwGKlMYy0hTfd0W/wBOKN2I
JEqADMbE/wCHNrUPGQU5xOJUP0T9aNqXuAjvUpS70ZnEqMssD3rcaoDJSsvl9KIzCB2K5Me1VAbV
oFcVesGUjAh2VoDGDxKt2pgSuawYjNmqtYGkbE8akI7FxEAb0RE6juwXlSpbkSw2OpQkTUpmVVUs
mthztTQBbctUixXmQJNsNqI9kqNu5alK5FtcjkNqhehc+Fpwyd1ctSI0NX7EYvh6FjgmP/48zQ+4
fyKXNwtxMrPwpW5+0HxCN63J7cZGRsHAA5BSnDVGZGmUSMCaYqMdgA6pysXqS/NlkU10agOG5slA
4S7l+Hul4SrYntGz0+sB7M6XI7N6Ax5edYy910L1utmf3kch+cvxFisDWcBmNoQnAuD6OQ0jzG4Z
L/8Ah/NDhJ4JnGJUuT5oPlCRwnFaZkz5aR+HPO2Vqg0b4q3s3IokUlGko5g/MWi4HGRzC8u/8Tlz
QTzHavO5U67JrKGPqWu2e0Zjp8yB03BgQhDmBonkcimmOw5ry7kfOsSodoQucsfN5f2rb8QWqBqP
FE4jo03IiQ2FGdsyuWQX0YtHNNiN4YhAXPi8v73tRQnAiUTmFpkHB2oygTdse5iYjctVsuMwcQmI
cbFrtkxOYBRt3pT0+xTUCtUJabreIUK08zxQdhMDLetVqQEspRQjeBIOEhX1oSiXBwPzq/R2fKKe
kr8i5n+Vv/sFX/8AUn+0fkbyIHatOsSkcACoxtXICU6sMhvXnc3zQMtgK8y1bBnPCRx7UPOuCMfd
iUIwgbssWkaIW70PLycITEwInBynBcdbyoTjK4fZBqmkKp5yERtJZF7nmSGUaojl7QjsMqriumI2
Rovvpetcd6chvJVST1nKaIJOwVT6NEdsqJrl0mW7BQsWQZTNZSKaIVVxHtXCHTdZh1IgYEoGRFQp
SldhpxZxihcsQgYktEE8R7lCctEYivlOa9qN2/GJllGI4QtUbcQdrdfRWdz3I1KNvlOWIILSuTNA
om/FgKycuZFA6IwAwpVG1ytmUrg9qVIhaubvSjbatsSotFkCUxhGPEUQALFkikvaWqY825nKVU2A
+Qmd2QhEZkqUf6fbEyC0TJ+I7l+I52+TdmGMI4AbELdqIjEZD0pNyTyHsRrKu5SNz/teXk2ku0mf
7UOW/p1vzLn7y8BqbeShGc5c1zsqaYeCG5C/zsvMkPDbHgj3JgGAwA9MZzIjEYko2+QBjbqJXyGr
uWrxXTWdyWJO1Shae7fbhAHD3lRjy8rkrcjxGfhtZON6AhxTYCVyXiLdSVu7EShKhBUuZ5N58tjO
GcP7lb5qEBPyi+mVRsXmWQIXf3lvCQPSABruzpbgMSUblw679zxy+wbvSvdgJHB9yM7hjatxoB9g
X47np+Xy0W8uzH2oiryO9a+VjolbeJiMDpQlO2fLnhqFEZ2K7YJg42goi6GK1AcJwW9W/Nkbp9zK
IQNmWHs4EKPLRPjrcOyI/KvItHTZhS5IZ/mhCGkCMQwAwZan0nNUmuIpjDU/tdiaERHayqarhpvV
S64iy0Ww8jtorYuBvMBIEK0CtXeTEp3Jl5vkEeVvWxGZLyODAVV2xefQ7mD4uEJ27YGTKNy6BFiA
D2o3DciTlEFyStURI3APA2aJEHmVI6gXLgFG95xjM49iMzOU5D2ivL8Vy9SL4srFq6NdwkSlujHJ
axFhg2CaFsBByANy4eIjJGNyOllwmuxa4xMpDYh8SQmMAclCUpiUIFzEBnULYjKMplpEigWmTSEq
BSu2+EgOQcCytzmdAA4YgZLTfGuMKQjgO1Hy4gA5YqBkAJGgVycAJ3AHjHaUTon+IAbQ1NXarkTp
mJkyjq9l9inzUr8hdniYkjuWvXLWcZPVAGesD3k16DHaKhcF3iIwNGQkwMsXXEAUwBIzTwA3qgUS
BxPki9sxJDaipwfSRLHao2xlQKAPidGZ9kOnzBQimGEaBRl615hwARltPQI7cUbW5mUYbAodilC5
93Kr7CpG1PXcIOkb1My9/Hv6X/OUidqBWsYxr0GZ8Rr3Iy29BgdjKUDkjdl3Jsh0GBRGRqEIjNCI
2N0SBxiR9am2wdEP9SKlK4BLyo6gDtdaJRBiaMrtqPhjIgdih2H61a04aA/YoxHtGvTC5nGhUrhw
Qub3KfKVfX0R2kMVGW9CXa6lNo6mxzU52Lnl2xIgyGZXn3ZeYCW17E8iKIiJc7Aj5Y0hPMkusKKE
hmWXmGhDEdoXAHKaAZ9i1XKDemjE3JZqMoWxpkWkBiECI6iR7W1XuUkAOVvPMAfUFK3dtiXMcqTC
bis7RwVyMKW5nXDcCnie9E+iMZB4mhCvcp/U+W8wSkZWbjO8TgO5QEbMBKcPrCn/AE7mbeq3M0G3
YXUATqIiA/d1ZW5+GSlYb49gERJ9qOCPJXCY3rVbROIbLuRt3KX7VJjbv9MQQ4OIRsXK8vPwk5bl
+Gu8VqdIS7cl5M/uj4JH6l5ttzbPjh9oQnGoPo5TjS5GuFSy/A8yTG7D7q4+exS5XmY/EjSUT7Q2
hHWZHlSfhXAa2zsQlAiPMxi74C4EQRpuR8UT8xGMg4OIKM7HHa9q3s7EL3KS0Tj4oflC0SBjdj4o
np0zDjI5hNN52RgWqFqiXBRu2TpmcR7JWqH/AG/MnHISXlcz8O8KVoJbx0ifLS8u5GoHsleTzQEL
jVcNEo3uUNMZW8j2JvDcHigcejzuXkbdwYgYHuXlXHhdGRDP0DViMDmpeRcIhLiLh+JG3zRAL6TE
hl5nJzFMYYxIXk8wBC5slQHsRnY4of8ATejbkQHjIYxOPpn+aafMDegcel5n+Vv/ALBV/wD1J/tH
0rykB2riuxpkF8OJmVSMYx2IPbfay+DZA3kumM9L0YJ5TNy8cWOCeLg9qeciShqJLUCx6HCD3JMM
KoAXNURlJaeZjpltCo5Cpc0nejKN2MmyBClbjLyoOwiKOF5pvTFzHhKMYXCaM5qU925KT7T6CiYA
ncFwWpNtIYK3bvkQNws+LKxzU4HmZGUdWs8JfcF5lmzCAuV4R0G5ekIRGZQjYgTp/eHP01Qa4KmV
VGMpCccBGRIA9SlelbjcvSq+Q7E8LcYns67yIAGZRjCXm3B7MVDybs7erxtHTGI7SjKVyc5yDSkS
zry4ByK6Ysgbdvy7ebliUZ81LWTXiJYdiFnlI+ZI46A4HaVq5m55Nt6Qhie1GVuA1nGRx+RGd+4I
DeUbHJWpGTOJkbVCf9QnrkKmIJr2oRtQEYjBh6UzuyEQNq08nHyrRxuyx7lrn/3HNS/xSdefz90c
tZjWFmJ4pdq8rlbf4PkXrM+OYQjZgAc5msj3+n0yOq6fBajWRKFzniYWsYWBT9ZDzJRtxyG0qVvk
5+TCokJBn2DV2Ao2Ll1+WoZXGfwnwxPco2bMWjH1k9YxkHBoQVLnP6dHgNblkZb4qPMWZGFyGX2F
aS1vmY+O39sVqlU4RiMSdiN+9W9P1RGwem13KyPggMZHco85z7EitqxlB9u9TtwLSI4VrvMLQqYu
+oupQsRBMW4dw2I35QAGIi/EyNu3DVcFDPKPaUT4o+8Fpx3KOtgZBwE4oVqhIxkNilC4Wu3iAbpy
CEYVjtxJ39QvVaY+oLYE5PeVptRMzuwUeXlM2y+lojPtRFoCVuMhEas159yQjNhwjAMrYvR8ycQz
nJSs2mDFzHtQtGY1NVqo3rcXgwBBz3qMoS8sRqBHagb12UwMjgtJDyVIDe6bywAsGVZMpGRYRzKP
M8xWIDQfJXL9W8NvsCkCWYOGQN6RMcEJ25ljUMnhMxlm2a4ogAZoTBYLy5gmO5EgmLGgzZOQ7UJT
RPrULkbhOjCJdiEbMbYtiQaUs2UbV0HVbDAjNSuAAwOETkhPVpIwATzkCe1EDiIRlKyDI4krVaj5
Z+hECGoZEJ5RbsTNXoYY7lw3C2w1QjfgCNoWky0y30Wq1pIOLVWAdSnJjICiuGY4REkoao6tcmQl
gQgZbFvmfoUrZ7VOZyDBaj2oEYpji3SbsuwLVtTbggNwXnXvAaRijetAjRUx3KMLYYGQ6X/OKPb0
GByo25GHsivchCPTuNFGUc6FCI7OkFa44ivcjdl2BEDAdFwfo/Wp9nRH9OKlckDKEqNkQhK3y8hK
QxlgFKci8pFye1W+w/Wo3NsW9RUiPFGo7umdo9oQgPap0QnmKHuUQpwGMGKAGaMQhGGowlHJ2wVz
lpkRuQlq0nFpK9EyHm3AI245u+K45EjoquIphVNCLBGNza8ZFStyBlOBFBsKjKFkNIsWxChKQ1yk
KvtRlaAjakXPYjpNDknkVG7HG0dXaM1a/qNvwho3WzhLNC272roe3L7PS2DIsLcnkcyGUuTmSQDw
k5KFyYa9b8Exihy8iTHCMjlLZ1het+O3XtGxR/qXKODGtwDdtCh/UOW+8iOKO0DGJQuRzxGw7PTG
EsCjy1+kwOCXZmEeV5ilyNYy97ejYu+IYE+0Ebka2ieKOxCUS4Po/P5cNMVIjQ0zQtGennbPgn74
UrV0cQpcgdu1eI+V+7nnF8kJROjmoChwEwEYSGi7HxRPzH51g6LoxAoJdq0S+DzUcDg68jmOG4PD
LKQ6SDUHELzOWk3vWzgexaWMbg8UCtMw+w5hETBuWh4ZDxRQjcPm2RhczA3oTgdUTgR0aLg7DmEL
dwedYJ4ZjGPahesy0XBhOO3ehY5oaZ5TyPRxUllIYhaOZ4rfs3B9qEolwcCOjVIcRDOz0Wm1CMhC
okaEjYoG6IxnIPFiCVpufFsDwyA4gN612y08pxoQhDmY6oktG4K+tCUS4Ofzm3pG+a+a/lb/AOwV
f/1J/tH0FSy47kR3qkjI7k1uA/xFEQkI/op5TJ7SsWVS/Up6PFYp4hztNVrmXKx6X6ohZgZyOQQl
zEhaByxKGoG6d+C+FZjHeypRW7mw4rXGsoQEh2xWmR4oBwVo5dpzzJyRlfm+yOQTRoPRunADBBsV
F2aIYBOhKJwUdRqzItga9V7kxEb8VH8JaFwzLRJP2IXOZkN0TUA9iMpadZLmcmTctDzy7Bjmn5gx
txOEYkuEJXNIIxlM1KMeShrIHDIjhPYvM5y85P7uI4QtNqAiM2+RPemI7s0If0+2TA43EZ/1CfnE
l9Jq3etNqIiBs9KZSLAYko2ORtG5cP7w+APmhzH9QmJ3CwEX4X70bXK2xY5e2Wlcl7W6LI8vyNr8
Vzkq3J4xh3o81/UD51+RcR9mO4IABgMB6YykRGIxJoFKz/TmlOONwh4u7fQvOvz8y6HJuzahOLKd
jkjCV4MImR4ZE+72Mpc3/U7gFqMpHTIPqPhBG7YoC3D8Pyds8IGM9pQhACMRQAegqpc1yIEbwrK3
gJ/3oXIE2r9o9hG5R/qF1tYeMbQqINRzvPpvKgDc5mY+HajUk5OvxPPtc5k+EGogNg6XWqZYKcpT
lY5WJ0i2CxlFzVG7elKGsPGEcs3KlacEDKVHCldsxfTgMiUZ3QRIquK4s08Y6YjBGpn+acEwOmWc
T0b9ixYIkrgrtJoAjKZ86XsxwirNi3d8nzZaQI0ERmfUhKEGuCvmZvtTsOCoO8VQk0jcI8LZ9qnW
Okkmvsujc8x5SxMSozmXJPEEQAwZqoyMnGwJzQLTZaU8zkFKMg0hVwgYlmNTuWqPEFqnTSHJQt2+
Hl41J2o6ak8EANpUbcSRGIw3oxk8JbWdeXK/Bji5YoRgY3AKDSXRoQRi6oHHqTFydjp4PEFa9OoZ
7UYmJAOSDHQNpR0SEthRMzq2ISmWbEFHQPWtRZpYOU7htyMgOwLCu1SEnGkVKGYR1LV4DuXwTr3F
eEBGM4rBlQVRsl4yJ4ZZKUpzE2wGaEuVtSMziGohZu2fKhOkjtCt3jTTKgVMAHUWxwUYD2Ag+dEI
DMueiAyxKMFKJ20QiMSo2xjn0N2LuQsTkIThg+YVyImJXJhgAXVuX531dBT9vSDtRnm2KJ6gJRBw
HRXoMCmFHw6bn+H61PsHQP0o/WvjREoWxqYry5QjoIZmV2zHwwNOzFW+9QAxkPtRgcJKUDlh2dGo
YRxQBwanRO1sqFKZwCIJpNwt4KO0GvqUnIEmo6PMQvG3btwiJGBYmVVLmrF2V2VsPKE603J5Gipi
mtxVXb1BcTzPuxUbluxwks+au2tRlcEfMt+qoXL81ai1u58K9LsaqLVByTyywHUY4FXeUuVAp/hl
grv9OvH43LH4cvzcYlNKlyFJjf6UX4UMKTbY4qvzo4/lQnhbuUmdkspIwn44U7Rt6xgf/wAa+4bK
Mm+1aT/+JfwOUZI3x9xM/EAyO1CQqDUH0wlGlyFYle7dhgcwV5c+HmbWB2oxmGnGkolahWzI1Huo
SjUHA+j/ABPLOMy3skIX7TDm7Y+JDKYRBALhpxORUSSTZNYTHs7ioyjLRzUA4I9pG1dGi9GhG3eP
mOtJjwzGIXk86HifBeCEL512j4Lo+1PEuDgel48FwYTCFnmA0sp5Fdq1W+GRxjlJPZcEFpW5F4kb
kYAgXBjH8nQxqNhRu8oTEGsreRK8m8NN0eKEgxB3Kr3eX9ZitcC4WmQcHIoz5c6rWMrZy7E8KHOJ
xHRWqMoAAnuY7mRt3LUpQHDqBeveje5aRhN3Mcn2FeVzMdMt/hK18ueE+w9O5GLGMgWIO35kf0JH
Vb5jp8k5r+Vv/sFX/wDUn+0eo9y4BuREXmRsTW7YG8qkhHsR1XJHvWPrqqn5ZUsNpUh5xAiKsFat
ynLROTElaoWdU6SNyReS08vbEN+fUeRAG9ThblrmBQBXrF65GOmMgxNcEI2pnzDSmxEkvI1J9DVO
aBAksCqh4jJVYDYmjRP00UYQLITmfiM/STLVNvcDprdqULOZwke8qV2/OTy9nU4A7UBDS+AjEh0Y
WrURDOZNELl8Cnsxdid7puCByjFnQ/CQEYksZSxAWrmpm/LEg4OhGAEQMAPkUp3LkQI4hw6j+BhA
GZZpOTEe8VK5zV+V25c+8AoOwIQtREIjAD03lw+LdOEYrzeduG1aGNqJYEb0eW/p9usKSIDADtUu
b/qUzMU8u3Iux3Dao24RlyfIx8WUpjctFiDP4pGsj2n05nckH9mGZKE+ZAs8vEnTbFTPtUbVsRBI
OiDgYVqhbhZ4CBqiS0IxOb5krzr8BK7NhCA4mb3X3r8Vzwa1ja5cmg2OEIxAERQAeiJv3Y22DkSI
BbsXncmH5iZ8uzCA4psWMpK7Gds2eXjHTdEvakMCPS/h+VHm81LIVEN8keYvnzObuVnM5bo9Ty7Y
8y7lEZdpXm8wfMuYgezHsCFyduJmKuRsQ2Ngp8tycBKX764PZapEVGzyloyiwDD6yjZ5i20xgjci
XgM8057AgCRrGITsgQSCMCh5kyYjJAJsZHABeZzDiAroCjZt0tkiEYjAlRhdDahwtWgVq7y/BeMh
ouSxDbl5cYRgSGM81pMiwyQBqy0mRiMe9CIkDIrjAkDghGI0vmUxl6gmnOTbEfKm52FOC0hRNOAk
mlARgMdyjGDi1I8Iznv7FpZpZqMH4LI1H9IriDsrmlxJqEBSmxkZU71Hm9Zt2f2uhmCbSFgsKJzl
tTFpLXENVi1ETYPE1FCV8mU5B22KIg4iBVlG3MGWnDJnTAkHb0YqpcnILgjp3lcUydy3o6jnRVkE
RcYsuG4uGQknEhqbBGEsQWUGbzJEmZ3ouHkZDSocroDYko6KFkDLKpRltKiBtUZ5YdBuZmgQPrQu
bcUZnCKJ2YdHeOgX+Yc66iO5C/ZkYwBacVatwwB6CdxR3A9O4VRg9cwiOkR9ajA50C1jDPqGZzQk
Mum5/h+tT7B0H9KP1ozuVtmIBG5CdoSlIigIZSuzrKZcq2e37FbP5v2oHYoXRmGPQZnGVVG6Mix6
Ig4GikMDLBRlsIKcYGvrUhGjtXuVmdsGQNC3aja5uQtfiIvAyLB4lXdNyNy7OJjCES7khskBXSh5
hc+6KlebCwSHxIqrdy8TIyDmOABR8i0RbkQCWo1HRADjYrV8+EcExul/euc/pxLCXxbPeozzZpdo
oetC9keCfYcCrX9Rtj7s6LwGcCo83a4okDWBnE59yBFQaj0hBqDQhEDCFQPetk/YmxhMKn3/AC9C
PfghcjgeqbcsDgdhV3kuYpdhQnP82QU+Q5ppXrHCX9uGUl+FuF7Mq2ZH9n0/4m0Hbxx2hRv2JNcF
Yy+wrzYhr9vxw2p2oaSCbxWZmn5pTioPoiDUGhCHMcqCCC4Ir3EL8Xy44hTmLOY3riAnbmKgqN2B
kbTtA46dyiX8vmI1BzXkXaXY03S3/MZjMOCjje5c4wxIXm2D5vLnxQzitcC46dMwJBOHuWtmYQlE
ujGQxzzQuW/EKiQp61G1zLCUhwyGHenBcHAjoJB0TPtBeXzQBi7a2b1rz+Vk/vQxB7Fo8NwYwOPR
5lo+XdGYz7V5XMDRPKWUuniiCtdm4RVzV3jsZaLwYgsNQRlaIuWTVjiOxHSQLnqlEoC484mjipdP
H5kb5yf5DzX8rf8A2Cr/APqT/aPRphxzR49A2BEykZHa62LH5c8qBU6Llx6tQK3IsDGTuEK6joL9
yHR8W7GKI5eQB9nUMVrnfqBQCNCdynK9NiXLMxPargi4GogrVi/R29SnQ2aL1lkEeF9mxDUE8i5T
RHQ56uueEaurl2XBy8AwJLVXlWJxlIeKR8MVIG/osPQgMSowvXjMRLgTIAdG3y8RcIGPs/QhK+Y2
Yf8ATDl+1GWkGRxkQtMOOWGmFUQwsWTgX4kJzHm3R7cqphh8i8tjO6ziA2JuWt+VAFpmRxbIMvxH
MRiZswgKxG+q4QB2D03EXkz6RiniBY5Y04vEUdDTu+uRKPmyMLDOwDElHlP6dbN/mCWMgHAO8qPM
8/dN+8KiB8ETuHyD8P8A0+35940M34IdpX4vnpRnfzkcHOx1+F5S0aeImmos7BlG7z2oBzI2iXck
0HYvw3LR1cxINbtQD1ydDmv6i9zmDWMZYQHZ6Tz+T+NcMPiWxXSI5oc9elqv3YAaCG0PU9/pfwPI
ESvyOmdzEWxmVIiRuXbhe5cliemNrlA96dZ3AHFuO/tQsczDyx7XMRqw7ELnLSE4TrrxJ7ekf07+
nHVfnS5cGEI9qjCzL4jfElL2iR/epV13JEmUu3IbkL0bmi2fFKRpEBXLXLyfk7ctIutSTYlCMMRh
LanYjeMFpuh961wLqLYLy7Q1TOJ2LWTrunE7FK5LsA2yNAFy+mQ12HlOe2ci/wBCN+5c826cDsG5
HZZi3+KX93TtCL47lqka71pjEb5FBi7YlapGvujFDUS5D4OjGxBifakq1TkUGa86+8bAqBnNefIM
IhrcdgClN2Ea96EplpzqTnVM77yoWIUJOqTbEIiQMJtF9hKt2YYQDKgoqll4gmMu9Y96oeHJAZlG
xoeAqW2oBiIDELTH4YwBUIxk4jiekgcR2BVOmOwdDykAqV7aKMjFoyzClC3QRq6EyJlswKMh5QJm
zyc0VbcaDajLycMNJVNUTvUbl4VxJCMuSgZwOIPhdC3zsdGk0hkvOg7yGBT9yMhnTolcOQYJ8wtO
1Qt7BXoIzATe1LoA3oD85FQtmQjO2GIJUrQkJzm3CDkrVyIoxPRI7iifzT0mZxxWo4SQkOk3JURk
MI4LaWY9qI6AFojiaBGEsQiOi53fWp9g6JDfE/SrXLTi8ZOZHcAhajZiIgNvUrUS8G1R7CodpVvs
I+nolH2ohwow2mqEI7FO1tFEXxCiBiSFAZdEJ7m9SlCQqAPqQ1ER2OrNqNzSLOo3D+aWWvlyZ3Ih
2lV1C9fJBljbFGQly9s6SwdnGKwcHEIZAYDplbOEgy5T+o525eTf7DSquW/Zn8SHfSXWlA5j6Vou
B9QMJjeKK5/T71Z2Dwv7UDgjy8jTxWzu2d3pROAe5brHeMwhbf4V3itHYc4qPMWw84UkNsShcH/4
971RkVTqjmrY47dJAe1FQ5/lfv7Vae1DOJQnA0lUHOMlKxepetUP5w2+n1D7i4aj3SUL9k/EH/MF
+Itihpdt7N6fGMghA8VqWB91OPRGMg4KN+1hgxFJR2Er8RarYn97AexJMWlCVQUbkZHRqeEhvyKY
tC/DMYuvw98NdGEspD5k83lzplnD2ZIytE27uNyycJdio4kMYnEdQ3bJ0yxMcitMx5dweyfs6JSg
0ZmuFHWm5F7Zxjv3LVbNRjHMdBjIOCtVoa4DGL1ZCcQbd6NXwK8nmR+hcGBG9OKg4FaZh9m0LiBu
WMjnEISgXB6ScJYiQ2hSnL4kBKsQKscwyF21IRuSGIoQd60c1EAOwmMChcsybcDQrRcBhPB8iq/2
l5r+Vv8A7BV//Un+0VIgsWRnIuTVV+XiMQ5OAXmXjxHCK2DZ1Igg6HxUoYkSL9il5Bi7nxUAU7V/
mIgjCNsfagZgXNODhC3zduFvTUSAzXl8kPNl7xDRCHm3C3uigRkPaAl60xy6tVwqlE5KqqKnUr0u
aBaYoW4Ye0clCyWFjGW2S02xANkKkqIsRAjLFweEb0fPua9VSG+pDwwjHMsEYctE3Z5e6hLmrnlQ
/wCnA5L4cA+cjU/IjKRYDElaAZTmfCIxPF2I0uWZPwjwxEdpQlcBuXW4pklyhGIEYjAD02q5IRC0
cnExtYG6aepG/wA1LzbuJnP7FKzaiLdtvvMfUjf5m4BMhnOJbYEfIJ5bk8NZ8Ugo2rQwFZZyO0+n
Ny6aZAYncAi4ly3KZElpyRs8tESugOIjMnMlarkpxsRkJCE6RbcyeI1TLkzNcdiHK8h8TmJUJFRH
tR5i9LzeanWUzk+z0kuW5MvcFJXGeMXKYEymfFM4n0jnDapcp/TiBbjS9zGQfKKFq0KjxT9qRzJP
SbXLTB564RGEcdO2RUxzsweVu3DK7M+O4x8L7KI3LAM+Tmcc49qBJM+WkeO3s3hR5jl5iduQxGW4
rmxd5r8NyfKlo24lp3Gz+hRvcrAxF0OZT8Z7eiV25IQhEPIlRjYBtcmZMBXXcA8UmGQXkiBjykK1
oTXBW5cvZEYhwSELl21IW7mGoMCjPlx2x/ItEnBwIKjZtjAcc9gWiArnLMlagdLVJUbFk/DtGstp
29yEbZ4LeG85laozMTuKkX16i51L40DA7RVcF0PsNCnBcdXehCWWBzCLTMwPZRneGmORRIpy4NBg
ZIWRLVagf/QWm2GIFAo2JGg4p92AQyKehjmTkpXJUldkBD9EIXK3NApsdMYMAvLbSTgTguHidOaH
JDzGEtyaJaSiIyBkaUUQPHuQDkzOJRqZNi6YRp2LUAz5KuOxULRTmgQar5nBaWLHAjBGBBMTV4iq
cgajhrxUOWvSBmSRGIyAU5GI88zOt8WGCuRlKMQxBwR1HhlmEQ5IbFkIi5UlmKJ4Zj1qVy3wyIpE
YKFqIYRAHerUYjjiHKnCUQBawIQETSpKi+PRCGZqVKG1VwhUp+gxQGQ6AN6HaejzbkzbEqxbFCUr
plYngVC2MIxPRPsUz+b0BxQIW4gtmypEpphjgiGKAILZrTbDk0XhK0mJ0yWqIxxWCMzQn6kTkKBA
5YFC5Hv6Lnd9an2dEu5/WoTmOCIY7wcULguhtmaleZgaRG4KHafqVvv6GOElORw9nvRQ2GikBgaj
vWs4QqifcLp0YZguO9SkMWChK0DIiTUWqcSIThpkdj7UNRZ8FpAYde/yxwvx1w/TiuX5iX3nLnRd
2t4T15w9m4Nce0UKs/1K2K2jpvAZwKjctF5Bp25ITGeI2H0ptCkLp1W5e7cCadLkOG5HeFLlblYT
rBHlbp44h4S96PV7UbZPwp1huKJFOV5gudkZqN+wWvW6xPvDYhcFDhKOw+mMZBwV+HuF4E/Ckdmx
efaFfbhlIIShWzM/qlGJqCELU62z4Z7N3ozbngVI6NVqR0zGUgdiGk6uVu+E+4TkUYTAlGQWqEDO
JrCQOG4oa4m3dAcPiDtCHL3/AB+zPI/MjnhmMJDFCNwmExSN2OB3FeXcpc25Hs6gEqSGEhivL5nw
Ckbv5U4Lg4ELTMPsOYUr3LEkYnbvTXRomKVwPRVa7MjbuDMbEbHN0lkZBnRMCbvL+7iYha7cnCY1
BxCFzlyzYwyZGOFyOMTQpuky5eXl3H1A/YgOdtNEnyzPJ0LnLESg1YnNG3dAExl9oRMnuWtuYWqB
cbNnzS3yKnUbqP16fIK+i5n+Vv8A7BV//Un+0VG13pvkohbiZSOACEzB3yFSmuQMe0ehNz2slqmX
6rGR0RyRt3LZuQuNUHAqUozlCBPDAFgB3IylIkZLFYpkCKqJGIixCLJnZOZdw6HJ6MGTdGPX3Jym
hQLVMsmHDajWVPEdia5IWrQ8MQKsNq8xhK4aOye5OMdz1QHJWnctqnRt61c7cNwnGAwC02oCI3fI
viTjAnaULNggk43GJiPUiOZu6LZPCIhie1CcpG7ciGjKWQ3enMpFgMSV5PKWzcuEUl7IXn87N5AV
iDwhfhuStGdxqUaERtdedzR1UpAlwDtX4X+nwF6+ch4Y9pC/Ff1SQv3vZh7EEBEMBgB6fyOVib3M
GgAHCN5K/Ff1K+bt2pt25GkeyK/D8jHTCUSGLg7qhG5zs/OnIRGg1bTvWUYx7gFLk/6aRoJMbnMZ
ADFlphxXJVnM4k+jMpFgKklGxyUtNiP3t8Z7ooW7YYDE5k7T6Q3LkhGEamRQtcvqt/04Frl7A3Gx
EdyFqxAQgMh0yu3S0Yq5/UeaP4a3ICNmMRxEDCUla5S14LUREHM7yjbuREoSoQVLmeVibnKl9URj
D+5Tjy1wwtXaTjjTbF81/wDxOUuCUuCwC5Aj752koRiGADABG7ekIQGZQ5vm/hf02HFC1ncAwMu1
efKOgyDQh7kBgFXBTlKImIjVpO6qFu/MaMYxiGArgpC4QQDSW5RtWADcznko22aXtSOZRk4ERiUe
XsYGjheSCPOuB5nYNnQw6nw7kgNjprkRcG3AoeZEwJ7wnt3Il8nTrZsWPejNgBmUZyDWY1jA+1vX
lgaTLZsRjKj4URvSJMWqjemK3CSx2ZLhYDsXlxkAZ0NMsyhqlwwDR7AgJSLZKlRkTRAhnR9oZkZI
8R8vHUSmD3JjM4BPMsMWiETCJMhmQjcGW1aWqizVxKy3lNGg2p5HvK0xI7TRG3okdVIyGC0cw83P
CDRnVsmOqUduDp+G2D3IhzNvdqFb0Gs3P6ITW5ESzIVauaunJZ8lU44KUicc00JkPiQUIX3NsU15
oaYyNo4zaimY8UDQFSNs+MuXQEthVwDI/Uoxyeq3CgQ30U5DxTPSbkkJiqCG4rzoR4A9StNwMVCW
uMQIhwTghatyE56gaZLXkAzdBicDiiwxoVSK3rBMm6tE6wTZKsQy8LLRkqFlKA8UiG7lIywI6J93
1qFvAmplsCFuDiQDat6nZnjAs+5RP5yh39AIxT7Qy3oFQu9xKNzAlSgcJAhNsLItgyMJYkEepAzI
iCcSrkInU8XRiaHEHYU0qThSY39eN4Y2pAn9E0kua5KRAhzAFy2O3FkIy8VsmEu2PWheGNuQfsNC
pWph4TDHsKu/068ficsfhn3rZwT/ALu9Q7pf3+lMMJYxOwhDmgGlHg5iHZmhK2eOLShJRuwOm/bw
O/MLUKSFJx2EdXScqgqdi6GnGh2g5FS5K+fi2yRF8SF+Kth7Um86A/aQnEvGQcHd6bTLHI7Cjau0
uQp+kEblsPbl95D7QhakXhKtuX2IwkKFeTc8B8E/s9GbcqHGJGIIU7NwGViZ4onH9IKNmctVqdbV
z7CjC4HiVGQlMh2jIGjbFplS4MQaEFCzzBd/BP8AL8yGMw4KJg87eIbxR7ELd2RL+GR+3qGMg8Tk
VqtHVZzgat2J4GuYzHQ8OGYwXlczEygPa3ITtnVE4EdGmY7DsXlkC9YNdWcQvxHKSaWccivLuDy7
oxic+zo1gmFwYSCFvmhpJLRn7JVMOkwuREonEFPy8jMiT6ZGhh7qe5E25RNXpXtRE+O2A7jEDYUL
vJ3QJjEA45sQvLugxlVi1CNq3fI3+Za9VkfkVfkXM/yt/wDYK56xy9ozuWb12D5PGcgtV+OkxOkj
f8kiOdcw90KN21ZHl6cI7kBZtRh2CvQ122JPtCMrR8uW7BE2viDJkfMtSDblUEdRlGfvEfV1hEYl
QkMQURmUE4VehwjIypHABM9FUOd/RUrHrV6rlNGsk2JTzrLYhCAaI8RyAQtWxhjLaUxkJTyjGpRh
ajGzZ/6hqULl74sxgSmAYbB8hclhtK8yUhLZEGpKFqzag0g+pydI3qQ5mcZ6y8pAcXYHwQhCIEY0
FPkHlW3u3jhCNVr5q5KNk/uhRaLQBngIRrIp705W+WatsDSSdieR0PTaSiIPY5Tb7UlosxAOcsz6
fXemIRGZXwJnl+WqJTIYkbiULP8AToC7cJMfMkcSKkoX+bANvirPGpqQyFuzABsZZk9qlduyEYRD
klG1bj5PJWy05GhmSo2bEBGI9Z7fRm5ckIQjUkowtPZ/pwLSnhK7uG5RtWoiEIhgB6QzuyEYitTs
UvOBs8jE0izSn2qNu2NMIhgB0m5cLRjmhdvRMeWtl4QPtFCIDAUA6TGQeJoQVLnuQiTYd7loexvG
5C7Yk8D95aykPyoc35gAwMD4tXusjzH9RiY8vBvI5c0D5ykhajTlrNCBhKQwiNw6ahwcVO7yvLiV
6bxBNRF82VqXMvKNyRGiGOkZqPN2b0rPLyAMARxb1HlzYN0RiD5m16LzuZibHLyiCYg1JOESFGUq
3RxSfAPgFK5GWqRLl14SRtFVUVG1cQWwqiosOhwWK4bhbYaoC7bEhmRiuImBGRCF2Uxo9i2D9JRk
cBkjdu54DcmjQhCw5L8UzsC0MQwxRiK2zhIKcLfgjwavrZDTQZBRnPiEcQn002KmGJCAOxu10I26
NkgbdWLkJjR8k2D4kLREanxJ6GFSnkVpgNRRtwfzMg1FG5zLAZZHShKEagM5UJW2N3BhsXl0iSPF
mh5szJsycEbcSS2L4LmLwLRg1uBQi5MjR0CS4GK3ZJpF22rhq1FSTblZtEgQMnmNrLSBTBslf8sB
oVbevMmNNSKKRfhAopx2hTmfZoOgdqB9fQBtUbQ7SmbCidmC1SlVW42sIhu9Q1UL4q3FzpMSSMkw
6KdFfkjJjgcQvMsTMJAM42Ly+cBPuziMSp3ohoyYB9wZAQLmJdAGhBII6MMFOAxA6ZW8SKhRgEDs
UmwNR3qVw9iDVJcr4YJIILKHmR0vBpLFRvDwy4bn2HrmBwkCD3rkp8qH5iEZDcTaP5FGRGid+JM7
Z9m5bOmQ60oH2gyiTiKHtFFY/qkIaxE+Xdb3ZYIXbReMx3xktM/vIcMvy+lF791d4boy3FHlp+GX
FZltGYQkA1q7SW6S/EW/BKl2P2pwXBwPVjzUKswuRGcdqtc/YPFAgybOKrUs0gc8l5Ej/wBvcPwp
H2SfZ9OJwpchWO/cjlKNJx2FEj7mZct7B2heXIvKOB2hGEsCvJumnsS2+j4aXY+E7dyPL3RwHEHG
EtqFm8QZM8Jj2gtFwOMRuKNyMiY6gQTgQdqIkxIoRmChavnVCX3c/sPzKZ2uGZyyK8nmRIRBZzVu
xCUS8Tgep5liRt3foPavKvAwuDbhLs6DG5ESG9CdqR8sZBaZHRcHiieitUbvKUOJtvQrRMG3fj3E
HtQt8yDKHs3RX1oSiXBwKMJh4lcHHZzBxC1QL7RmOo96WiALkildqNyxzZlf1mPlA4xykFrBMJg4
iiFnnuKJoLmBD0qvMtz8y0axatFwv2H5Kf7Gcz/K3/2CucuC6Im5fuykDtMyUYW4mRJcybNHXEgZ
H5GJRoRgURcvahlFGJLseq0ogjeFx2Yuc2R8p7ckfKkJhcVknsqmuxlDtChbBe4G+rqgI7kTvVei
vRToMZYFExw6mC2J+h+q5oE2aaOC1S4Y5krTZDyzmUAKmS8jl4G7zB8bDNCV8i1D3AnhAGRxkan5
EZSIAGJK1WbZuQJYSdnO4ITnCIstS0ZfSWR8yIuTkS74DcE1uIiNw+QEzk8soCpTP5HLEYjxFG4a
yiGM5Yo2rQELRFbgLlG9JpXGbWQAwCNnkiLt85jijHtX4jn5+deyj7Ee5MKD0/lWWuX5O0cg21lL
mP6kTcmACeXthxAPwujajalG2C+mJ06IDAHeUJXY67g8L+yNiYUAwC1z7ojEq3e5kG3y4kZC370W
otMAw9Gb1+WmIw2k7Ahe5p7XIRL27OBnvkhCAEYxoAMB6Q3ruAoI5k7Fb57mxoshja5fHA4y7U3T
K5MtGIclScShylqQBBoJtVCMQwGA6pBDg4hDnOULRuSaVgYuc4hWeakNei4JGBwLK2OTk/mxEjMV
EIn7UIQ8I6jGu5fEgJHAFCADRiGCFy7EExqCcmqo8xzEXq3LWvemcJMom9bFy9eOq4SHajsrlwE2
7QoIx2r8R5mqAxiRnkFC7PlzLzHmS1eJEaTbmnhIS3LwltoTS9RTEKqp1HiSDuXmX5mZl93EoRuW
wQMwnkTA7CEbmsGdyrPgMgmGajCFbs+GI+1ADwW/plmekgRqqZ4rRGOGJTRDSPtHJYymdpwQEQBt
TTLy2dGmHeiZGq4AJV8INVG6bcrUCWJ3bVq8ciG1ELS7lqAIxB0xP1LRIvPbuQ0HU+SadJSxCMhQ
nhiAvMJAjdJnvQjfZ8YyOSMpXdT4BULbHQIxNXQeTRliN6LVrRG5ZcXY1g2LqB5nlgHFblfpCuyu
eKdSRvQtW8B9qmDiWXcjEZlz0GZwiHROwqgJTyLblrkNUt6OkMNnT8GRDnDJaJggZlRicY2+rXq1
LLhr0y1SIAHChqx6KrH0D9ED+cFchHGBYhajgpNSpPeqF1KRxGKLmk8URk/QSNlU2zohMdhQ2yVu
juCEDMgR2lWhGYPmBotvCxUoSwkGWmXjtnTLu60p46QT6kbkJmJ1mcBHGJI0kIXzK5C9qM9dRU4o
fi4+GQhK6NssHHWuw5q4RcJ1QtgOSCr3LeTJpx4SSMRUK1HkrJtQnFuYtk8OoZxUL8LUtBBjeGbZ
EL4Fzj9yVD6QwlUSDFSsSLX7B1W5bRkjCY0ywmNkgpWLo+Ja4ZbxkV5Evu5Vty+sdUg1BoQjylyt
qTmy+zOK0wwDyt/nRNTFNjCdQcwUeVvfe2/DL3o7fT/ibIcj7yO0ISFYyFQgYloPwy2HYU+Eh4gj
CXcdhXkXzxDwS94ejN22PiCpHvIWLgA0+Cb1iUbdzhvQxG3eFK3cDxkEZwDgh4nItin0/pROIWm6
TKyfDLFtyBFQcPmTTdi+yWYRJe5YJyqwQuWi42ZjqaZ90hiFpvPO1lMYjtWqJeJz6DOyfLuEMSAj
Dmw8YEDUzFtq1QkJA5jokAdEpe0MXXl84xjhrw9a12T5lk10Yt2J4FyMRmOg3bLRnmMitMxonmOj
SxlLYFLlrkdECKom2aA0IQ5e/AGOvV5g8QdS54zMrWng0+9k6Nm6TK1I0HurXy822EZFRjdJL4Sb
PYnGHz6/oX9HX03M/wArf/YKvXL0iYXZSuxfDikSQiHjqGQqV5dqGmG3P5GCA68ycTGDURltPoGJ
BJwAKMbduIHvEupRvRe4xOoBBwBSqcVHUJRWmWBTdRgqoMaoxlQkPFEHLowWHQxNFTofpc4qmCaI
f6kNQ8y5syTGJYZAUQiIEk5LzOYpOYpEZBfDiz4nM/IpCIN6UPEIVbvQHK8tK1ZfinItIjcjc5oy
0nC0ZkjvT2rQjs+Q6rshEb0YcpAwt/8AVKN+607mMpywCNnlYGV1sTSIG1ebzHEWpAlw+1PIiIyi
GDoiQFnlgaHEyWixARepOZPpzO4RGIxJUo2Ws8uPHflmPzVOzyGo3JYXox1SnLduR5jmb02IiQcJ
TIq57HWmAbacz29B9u6aRgNu9S5jmpEighDLAavpQiKAUA9HquHjPghnIoc3/UR8OJezZwHaUAAw
FAPSCEnnfkPh2o1kTkvxn9TOu4a27J8MO5MOkzmWiMSUbUPh8tblU5lnQhEMB1q8d6VLdoYklT/q
X9VuR1CLCJ8FsK5f/o41QtxMr5j4X/NVw346bU5vaB8TMx7usHTqUH08vbrentb2UeduBuXs8PLw
yp7S03I6gtNuIiHdgocvcjqB45g7I1H0om0Gq1Mgylf5m08YBg4pIuzp+XttczAUbmuMZkPKByXl
3bYnPAaauuKErR3hE25ONiPCabE0vUU8hXo1mkI1Kp4I0iOl4kgjYg1wyAylVPciCWPFsRt6tJBx
K4LgPYemhVVgqUVS42InPMogYJo1ltyCjHlzG9rpq3nJebzMAZkUDuHQtyIgAOGO5GEBpg7OMSg5
YnFeVpkd6eAc5qseLcqRA7UYg8NsN/iKhaakYgAqA1Ak4x2LVOQ/RCEDByaPsQYcAwZOWiIprYck
omeMYvEHapGWABJU5xAOqRYHY6jckGMg7ImRcE06AdyoHWjAyx7EzuTi6ZqdUBMOi4cWiOtToMpY
BY6Ymg2larlTsK4AuKTDcgQTUYqVudZAtEpzIg4p4y1RWm5FlqtFtyaY7046zbeiP6QR5jl5adXi
CfRKUNNSBR0xDEYg9FahagNMghKOWKYhlK4f/TKM8pDolDMVCjEZAOhIVk5ZTjAPJqDsK5WWjijI
EPsBQJ6Dsuxf/FH+7rXYW/GYlvyKNjl+W0XyQJAx8M7cnl9CjC3IWZRBDgULqz/SrliNq0RHVzA9
vSXf19blr39Qt67c4SgCztIEEfWr121ajOzr0wfcA6py8X2ox8iLEMpchdseTz4lptTHtOeEuq4+
kjzNus7fiG2Oajzlutq4ALo+oqHN266WFwDOBQY0NYlGEw1yFJDbv6rClyNYHeEfZv2j6pD8qEma
3I6bg92aFy3S9brA7dy1M040nHYfT6x9xcPEPdO1MeKMlGQJMDQHaNhQnDAppYisTsK8q9SY8J2j
0Z5i3F/fiPrQA4b1v7uT4tkXTkaZjxROSNu4HBw3KJtuBWuUhsTiuU4HEFcZMuWOBxMUJRLg4H5k
IkHicQV+I5CTD27RwPYtD6b4HFA0L9QghwcQjd5ascZWnp3J40lnE4joMLgcFGfLgyq/dsZaWMJj
GJ6DCYeJoXQNuOuz7UQajevO5Ym3dFSMPWF5fMcF3aaA9DGkspDFCF2OqJoJj7VeuTLaXaW4YKc8
QTTp5qyZfFgPB+bi6ZCTk2/ai6GuGWsSxi21RB5kR8yInolv2FMCDvFR8ojHByA6PKTY2gdOrOqc
Yekfop1d/pW+ZOZ/lb/7BXMcrE+Xb5W7dshsTpkYv9CeRJJ2/IwIhyhzHMEiDOIrRCkRgOsZXZiI
G0o6S42ksFITuOTgImgQjCYiTjKVStdzm3OQBACMJThMmtTiVcjbINt+BsGXlTzoCtsTgekooMgd
nTSq4YFPMiI7aoG1Cd2W6NFGdy2YTiNiJ2riqV4R3BMYA9oWq3Td0bx00TkrTEVT3TqllAfahGPD
HKMVw2wfenP7EblzjlLEeyhc8sCQwZP8gMCTO4A5hAOUY8hy1yUsDckGEVGV+/dtW244vUncy4Ym
W3UXc70woMh8h1SLAZlG1yUPMk7GRwRu85cN2RroPhC02yJSFBCNfoCJvzIsEUiBpJKaAbaTivw/
KWzeun2vZj2lHmebAndIpF3iPWmAYDIenECdV2WEI1Peje52YJiNUbAPCO3aokEWbBi3B4QDuWsR
ErrMZsBQbOmVnleO+IklsI0o6jzHOHXzBOqtWPpI2YkS5i4WhbFe8oc7/UZa70qxttSHZ6Ucvy/x
ObuUhAVY7ZKXOc7IXucn7WUBsj1DKRaIxJQsWnFoVPZtkhathgMe3rQtWoedzN3wWx9ZR5vnB5nN
zOpzUQGQC5axCEZ2SDPQa6pigcbkOcu3TMXyTOwA0QQfaQEQwGAHX/D2S0z45+6PyqH9J5WkBXmJ
DKOxRtWw0IBgOhzgp8wf3haP6McPWq4KlE+alGy5kcWxZSv3gwA0wcVqhG1CJmMXzQN8kTFZSBzQ
5bk/jzmHAOQ2ncvN522JSn7QwG4LVF4PhsWqMgyjasxLGs5BVCqFXFOE+SMsLl2g3DoeJIO5UuEj
Yar41sEbQgJEwO8UXBMEHBj1GjgjGGLVOxGNstElnzKs3oCMrsnlORqRJStWSTdFHyC8y7IzntKb
ofNEAs+Kd67VK4fZD96iTjOeo9y0wmbZ2xzRkZuM5HFRaOreVS2A25AROIdPEajJYNIYgLl4ck5v
k6iRs3ry78NGsNItig9GlkhAYAN6lGL1JdSkA5AoomdGxXCO/wBC0+GQzKhbtHVKRDtsV6W4DrsK
lAAYFyoy16TEYLiuGSaA7yqLTpds08qFadNWbUnAK03Q29PCXcnjHD1FQy14jf1h0D9IfWrXLk0k
Q43BCMABEUAVq9ANK4CJAbs+p2piHCNuBZaJjih9S7FuOKO6ijE4CX1pzQV9SsSExIOBT1J4jhQk
M1aue7MA9kqdYti1O1Xed5hoGd3VTaS7qNyNRIAhWr0iRoOmnrUZYEcMu0dXlOaBErVk8cD63V2M
Q1m/LU2Q1Z9NvnOXYRhEai9XiSVEy8cKS9IxUuUn91ceVp9+MVLlLtQz2yfajsUuXmeHG0d2xDmL
fjh4h70VG5HCQ6v4qHhkwux+1eeA9m9S6Bvwkvw1wuRW3L3or8XaDj99AZjahcgXjIOD6YxkHicQ
vw862z93I/UjCYcFaLhJtSNDkhKJcHAqtJDwyGRXk3qXRhvHomyK82D+Wa/olCYJF2IqPfihcgcc
RsKNq4HBwOwqMbQYAvq9mYKyOUolGQeXKn2c4oTgXicD8y+ba+HfGEh9q/D82NN0eGXsyHVFy3wX
RhIUftXlcxHTMYSGB6TK1wXcQRg6FvnRj4ZhaoESicwmKFyxIwmKHeF5PMsJxZiaP2JiPMsVIOYz
ZaoEEbkxDurkuXwkGlBSjeIMyKAdIuRwNJx2xOIVu/bL2b8dUTsfLoja5yRny8uExZzGJCMeVvSm
P3T4AbFplKQsyNKUfcUJXJA7S2CeJcbQnkQBtK0+bF9jpxUZEfI7VuIcA6pdgVwwDaSH7Qrc9or8
8N0N6Lmf5W/+wV/Uv5u//uS+QNGNNqcl5bAtIigZzY7AvPveGOAOaEYDTEYAdRzQBa7jl6RbMox5
f4NvJsU9ycpnaS6qT0YrFUNVG85lI+LcFTuXlzxAoVK3LGJ6lAtIFShrIiPWUNFq5eO4MFqvCzys
dtyQdNzHOy5iQxhZFE3J/wBPEpDCd4utNucbEco2ogfSpWecuGYmMZFSgC8XeJ3egCZa70tMRtWn
lxpjnLMoAAykULt4A3JYDZ8haU9UvchxS+hGPJctcuTw1EMAVGU712wD947DuAWoSnKR8RMvF2oR
gNMRkPkTmg2oWrR8y4fdqB2shc5uemALxtxpRUAiDhtJUrcB5VsYzd33IzxuEMZMy135NsGJKEnl
y3K7MJTC02ottOZ9OZTIjEYkoWOUpqceafpZC3y3LmUr5mDcmCxbYQo3+drdY8ALiu1CFuIjEYAU
HQ5LDMoWORacpkx1btoU70+K7cZyasw9JOzyQF3mRpDCrayyHN82BPmzEAyxZvSjleRiJ35UMsoD
avMl8TmZ1uXTi+7qGUiwGJQt2w8D4Y+82ZTCsi2qW1ut+E5UeZzc6ADCPavOuyN3mZeKZyfILzJD
VclS3bGMpKX9S/qHxObvCkZVFuPugLTaiIRJdhSp64tWq3ZYfmjaUTDjv3C0AcZzOaM7teYvcV2R
xc5dIsxLSu0/w+0hEUADAdbl7XKSNq1ccm6KyJHsr8LzFuM5Va6KEUZ2U+YtXvN5m4ALr4hi9AvL
uh4qEJW46TSIIfBE2o+TysTU71EwkbsZZNghcu8uDEh3GK8q2DAxDzKJgRIKTRJEaEhRtycAmr7E
YtwwpHsHRQ9QGJIZOJkjYahNcgD2LURpickIwrOVAEzapT8RVwGkoz0gIRjc8uDeEHE7Vi/Ur0xs
j2qnuTn2I/TJOERMCv1JgGZaZYZo6Q5OG5DVJm2IykaAVV7mWHmykw2iKuynhGJNVbjpB1SZOKEh
A3DQJojv9FG1bDykaJjd42wyVzlpxa5DNXz+c3TVbTkEImWl8AE0y4lgU1vvKdynTAJ7nFM+yuHg
huVLh1LRci8/ZIzQMjoGQCeM67FpvR0yykE4LxOBCYGiEzOowXEHBzCcmioQel+gbHH1o3y7ODA5
AhRM4SEyHYYLzZDTGNIR2Dr1GKeNDsUpTDMFqGEg6feFOEXciQ72QlK2dEJPXcj5JcxbUNjoBS3E
H6R17kMSxMe0Ly2bRgNxJU4DEinbkgJFo3gSB+fGh6wujGFJfonFaTWUKHeMj0mWFomo/Nl+Qqnp
HjS5A6oHeFG7b4b1uo3SGIUb0A122XbYRiEJ7cRvX/2Lv/LLq6SHBxClyt6sTW2TmNi8iRa7aOqx
LaBknIaYpcidq0n/APGunhPuSOXp9Eu0HYUbF2lyGH5w2o259x2FHleZJ0k8EsuhxS5HwyXl3KXR
iNvojGQeJoQmgWifBPYdi4jxsPMhkQcwhOBeMsCjbn3HMFM3FjE+8AjKI3Tgckblp5WDWUB7O8IT
gXifmXTMPsOYWnmOKw7RnsG9CcJCUTgR1NMh3pjx2znmEJRLjo03IiQ3o3LB1WRjb3J40nnDMdAE
xhgV5dyPmWSSYyGIByK8/lCAZVbI7lpuDy7mwojapEWxrESfMer4ptnTcszj5lzl5mQ2xiaghNFn
KMTQjArTI6SMCvJukzsxqNx2qE+RuSlqiPMiaNJcvyt8ytSoLzj2Ygl0OT5Oeq2+iJyJdnQlzMpE
kVEShPlrhu2faty2LVbLSHigcR0v1KlvRFqrWCLnPXh6n+wLzZsbhJJPaoCWPzC3zHzP8rf/AGCv
6l/N3/8Acl01oNqoXVPQ0TkJgGC4g60Wwwzk2CBI1y2lNEMNg6XnKgzC0i5+qHKIszlGD0fFA37h
m2D+gNqQ1RIb1oEYj6lFgwGSjKJ0zAqm1R7XQeRluiHXw+WnLfLhCfmuYscqBi8gZDuT3eauc3MZ
Ww0fWiOQ/p8d0rp1FMLw5eHu2wI0Wq/dldl+cSVwBlUrWTpjtKE4lztQumJaOaI6j9ADUWCYNO77
oy7VquHsGQQjEOTQAIXbw1XjUD3fTPIgDaaInXrMQ5EKlSPKWKCg1guTuQN29OzA+MO3cAEbhlKU
iGNWBWi2BGIyHyRjxTZ9IWqUBZsHCMjxHfRHQBHOU8yvK5OJnIHjkzCIRucz8QnCMi4Co0YjuCNn
+nDzr2Bl7MULnPS8+8+pj4Y9iYenecgbhpG2PEShC/OVuzAkzjAHZQVzKt3jcuQtuTpmGaO7tQjb
FRTUcekzmRGMcSVPlbAMbMA0pirkjaoxgOIBjL0bmgX4TkgZzNJSGDI3JnXzE6yme30hkSwFSVLl
eQJABa7dNABuWi3WR8UzjI9QklgMSmgD5Y8ERQ3C7epGUq3ZeI7Nw6w5PkCJ35PrnlAb0TI67063
LhxJRuXDX2YjElHn+dAMz91bxERt9ALdsarsvDH7SpXLknl4rkyjz90fBtnTy8T+11J8x7Pgtdgx
Peete5qENZtRMtLtgrcOdkbkjCflGeFvMkBG/wD06s9LStyzbYgYarPMR8WRfehy3OyFu/7MsIyX
LSvw8/lhBxbBzfFXeZhA2eSoLNshid64g+4oXbQJjEFwK1yopXubtg3+YOqeoYDYoWuQtiAn95IY
RG1CNq4Cw8JzUhZtEwsgiZAzKIIMJbDiuAiSPCabFxD1riHQ46HCByZG/cxPhRk+FfUpafbJknBK
fXIHcV95Id6pdl6197L1p/NkvvZL70q5fvS1GIaPapET8VSWXiB7lXSe5eGJ7l4IrwRX3cVoNuNc
aoy5W35nmRBnbqR9Cny9+15EQOIB3PrVuQfRAP39G7p4iy8Sph1omdHBA7U64W8zSIk7wr0hTj6H
KAiXJUpE1dhuVC5GaBkcMOnYMytFkapZyWqdZdIJFRUdLSDo27g1WzgdieNYHA9DYxOIRLuAWARj
gBmEIkuDhLqOMiPrVmMIahKYc7kDhIYNgmPd6Fjhmhpy+pV2grVgXUWkKuKbVDmCI2ZadMogeMA0
KHL83y5FshxOJdxtCty5aYmLsgGzDFy49AYyLRkSAMmlxD6ei7GPityF632e0FG5HCQBHf1TE1BD
FES/dnTLfD2T0i4Q4FJdhRsTPHap2xyPpdf7q9Q7pIXoj4VwtcGw5FN+7v4bpf3o251BR5W795Cs
D70eqJQ+8t1ghfhS7bem8YhDm7Ib/rR7EYnihMYo8pe8cPu5e9H04nCl2FYn7E+E40nHMFaTSYrG
WYK/CcyWuDwk5jo1QOm6PDJaJ0ux8Q9EYTDgqUKC4zWp7RsXlz8BbXE00ncgRUHArRPH2ZZhREDp
mMWwnFPHHCUTkvOsObR+8t494QnbLj6vTU+TmExqicQV5nKB7GM7aeBaWcTiOqbtktLExyK0yGiY
xHSLliXk3RnGjo2eYJoHFwhn3LVEvE4EJiHCNzl5EQqTaxDoRmNNxnGUghC4Nds0jMYjtUeW5Q6i
Q8yPUyIkGIx6Kd6ny0jwczalAjfirlo+xIx9RZO7EZdAi9VrjJiC7KV0xE72gw1nYVI3dQkYvAxy
k6081ZlO0PDcHiYbVomJaZUrFG5yNx5CpjtG9aYD40fHBESkHFCBihAzkDk4ZAxLjan5du+i+PzQ
tn3YVK//ACLk9+ChZujWxbzDiyEolxKoPb16Ba+YOogcMBUup3hMyhInSDiBkqGuaD5Fv7Ccz/K3
/wBgr+pfzd//AHJekELY708zruHDYmaoTRi53KrRG9cUvUtFoNtPR8SYid6AhISJzRa6ZS92IRgZ
kW8ohVr6NpVCLBhsCqNQ2FB+ShckM5ErTynLWeX3iOo/StNzmZkH2YnSPoWq5Ik7ZFynmXKaER1f
LlHUmHDEYAK5y0qyiKIxORbqEJkXo1SUbXL0yM/yJyXJzKAAQ5u/Hjl4Afr9I9yQiN6lKJNzTiIh
PYtaHNIkElu1Cd+7KEQx0Pqc9mCJkNZOL0HqCAiBEDAAN8l1XZCIRjywnbtkcNxm+tCVz4t3GU5l
6oRi0pmjDAdrLXzE/LgDwRjR95WmAxqTmTvWq4a5RGJUjdh5HLeyPakhbsx0jM5n07ksBiSo8pyJ
iZ3CQb0jwxEQTIt2BRvWZQvEyMIzFe2RfYoy5iXn3ATIkgeI59QzuGgqwxLKB+65USJpiwoFotht
p2+jMpFojNHleSOmIxn35oRjxTzmcfSGdyQjEYko2bEjb5EUuTZte4IWrMRGI9ZO/qOaAYogOLAL
BsbkjkNyN+9W7KgGUI7B1p8j/Tw7cN2/lF9iEbUeMjjmcZHejcuFtg2lHm+bHAPurZ9B5Voarpyy
jvKMidVyXikUP6bYPCOLmJDIbFG1ANCAYDs6Rah95dOmO4ZnuCjCNIxDDrfgYn4NDzBGYfwd6jct
WYwnCOmMgKgHoJkNF72boxWm7F4msbscFHmOetHmoCOh5cRgNzqNzlJA2yKRGXd0QsDwx45/YENN
bhpCKaReZrI70b8ZVPDEOx1Givay8p6Se1kOZuVIxjkVKFq2IXAWBGZQmLgE2cxI+hTtC1qlHxME
xgbZ30XDIFeEkDYmk4O9TiS2iJkN7K3b9mIeSAyGCLUlOj7s070ZPRUI3rJUr0VxWNNiJGOSFvM1
PagDWianTw1KwqiWwQmRirc4xBuXIiU5nFRjbA8wHjkNmxP0VWuUuFk1e1VbvQjGsTkMlODvGOHQ
xd9ypTcUTsThOCxGBRjC4SGovxHMUbAKZ2zPRqzBCOiLSOaOgEg5rVMF9q82ZYPQbUJWpBjkV5ce
K4foXxC25NEN6BjguDigcimnHTqwVR35LhcOhK5UHFNDF3DIGI1RP0FSEu3s6DHOn1r8MRWMXfen
dAbOvj1MOi5HTIgSFe9QMhpJAorstAlzFqBlaObirKM7cPLnZpciMCJAjV6CF2Ic+EnZUMoTzIr2
q1eGETpn2SVzlX8B1Q/RPWjdPhPBPsOBRtTLzt07RkeiUDhIMULntWDoufnRehQIqDUH0hgc8NxU
rF7xw4ZjaMip8ldPHGtqW2OXqRjL7y3wzQnAtdhWMvsWoUkKSGw9Xzx91cpcGw7Vr/8AG5iktkZE
Y968s/cXC8ZbCckDA6bsKwlvREhpuwpOO/b6ccxaFR95HaEJxLgrVE6b0KwkF5F7hvwxBz6PMtcN
0YHbuWk0uR8UfRaJUIrGWwqemLcxAVGUxtUbFwM4oXdj0aJ45SzCYRMZxNDjGcVqiRqFJRzBXncv
4T95bx9S1wPaMwfmYXeWkbc44gZo2bwMLsSzkUPVclpjCQXl8xQYRuDA9qcFxt6GmAdh2IXLRE7U
yAYEMI71omYxuDGLqi1xkYXBgQhZvjHCWRR5jlw0iCTB6HNGzdt+VdFBJmL9q47xbcFO1q+HE0lm
y5a6JGkxjvouY3zJ9dU4QkFUKlDtU5iQ4anaylK2GgxjM7sVx/Et4PmBvRlZEZMdbNuUr1yzExII
MoirirFW7nIyMLtyIcRzBqylK7w34ubmrF80Hg9wGjxb6U9w6j7uQUtJ02wGiBtRkS7npha1CN23
EAiRZ2zTicWG8IgXYU3hcVwHsqvET2BcMZFfDtjvRaWgHKK1SJJOZR2LBwCp28gXHyivzZzP8rf/
AGCv6l/N3/8Acl6SUyOLLo1l3TQiBv6OKQHamgxJzyWmExEblqlImjLikfTiWORQMSzhNKa1Fyd6
aAWPoYxJ4Z0K8yPguBx29QFatqMNWkbkYbC3QJ3B8KFShGNIxoB6FpzGrYKn6EJWLZumVIttUTAG
L+IEaYjvXn378jPICoHrRIhqkamUqk/J9UiIgZlGHIRjMu2qWCNzm7pvTlWUWaIQB/wxGKe1b8uw
CxMyxPcEQOKRLmRTmgCPLf0+Pm3TQz9mPavP5yfn8wczhHs+Qa70qmkYCspHYFG3dtSs8pE8UQWc
ZOV5l6zGySXOk1bYE1uAiNw6lWMz4YupXuaDWsIxO/YyEIREYxDAD0ZndlpiPpUYQjKNjGtBTay0
WwA5eR2k+klcmWEckDee3yo9mP7wYoQtxEYigA6jnBSAOnloUnLOZ90LzboAH7q21Ij8vVMpFogO
SjynKEwsR++vbd0ULVoUzJxJ2lG5cLAVX4i+DHloHghlJnxQAoBl1DKVAA5Xlm6bUnIGsMCx2oTt
yE4HCQLheXy9T7VzKKYVJrKRxJR0cXMT4bUBi5zWq5xcxd4rsji5y6kuYPhHBa7B4j39YCA1Xrjx
tx3rSTqnIvOW09SVnmICcJYgqd7kRrsmpt5x7Fr5eRtzBrHI7iEbV8izzEQ5iaAtsUuZuY3S4Gbe
yEb13xywHujZ0Rtitjlay2Gauy2zPR5pjxO7ZOmNFKVqI1kVIzKjbv2zpBeUm+hE2bYjexiQjc5i
YtS904K7GcdflxAoHFVO/ZeEgDTa61aXlOpIxZcThs2R8kEjCI3bUYAVjisD61gfWnYssJVwWElX
UvaUYXHZ3LosC3sjcEzSWEgqaljJYlf3JiHUtUfCKAbVb5W4WI9rPTkFv2qqxRNuLxGMlragy3o8
ITGLpoRAJzTFy+JXlgvGQcFCEGL5ISlSTtRaT92KmW0LtqFXJNLDYj2Fdsifp6B2oymaRqy+GNEc
kQSJBDzNjAIyjJgq+I4FP1HKcYCicdVs8kKa4oXZxGkUI3Ih2jiOwphIg70wJAOBGClImpPRLu+t
C9CRhcGYURASmBSUgEYzBEhiCnKdaRU7OiqjGBrIsSjbGMcSmd+3obpkSWEmbY6hKNQYiqMTnQ96
ibcybV+kn2E0+n0Eo4FnHaKqdrIgTh9UvpUoHMU7clY5oUMfh3fo60oSwkGUbp8Vo+Xe3xyKcVBw
6IzP3d4aJ9uSly88YVhvj6WPNQy4bo2x29yjetH4kOKB2jYoc1bpGTRvDYgQaHArzoDgl96PtQIL
g4HqGEw8ZUKnyF8vKI4Dtjk3YpcnzHjt0B2xBoUeXueO3gfeCHNWRxx+8iPaio3IF4y9PrjWxPxD
3SnBcHBC/YOm/DA7RsT4XY0nHMHo820dN0Z7Qtk4+KJ2+i1Qpdj4T9ilzMAY3rcWuQG0ZhCxekNb
PE7tnQYSocYyGIKM4n/uBUg0jMIThSXtQOIK8/l6S9uGRTxpIUlE4g/MxETomdmaHL8zqMRhIoSi
XBz6mmQeJyKeHHazjmFqgXCe44tRxIUIg/CIwyYbUeYnHTbjc0ExLEgbFqgfO5Ymgd5RCE41BQ1A
Fqh1wjVamcNjog0kCxbEEKR5x524j4Zajq48GMnlAjZkoSsgSOoN2qWsNKUQSN56CJCoNCmRwIKM
RQNUKUpn4cjxBC5ZmJA5ZoSsS0TJpH2SVf5e3CM+ZNom7LIONJ+lC3yMdVy3HXUswH/utRmIc3A6
ZRyJGS03OWj5oGmUjRyiRR8k3UcFitMiSE4OCx6gEIE72ouOUY/SiITEpbDgj8IkP7NQpi5ExcZ7
ektQAOULMyzlhPJOKg4fIm+St6TH0nM/yt/9gr+pfzd//cl6KqwUQPDsWqOOYVSybE7EYW4AD3in
5u6Axc7V5XJW3amso3LstUj6XDqNtVcYpwViq+iEhiC6jdFblsfUiGbqbwotSqlJsUDIabecj9iF
qyGAxOZ6zyIiNpotMSZ1ZwKP2ryeWtPmbg4gApeZIiBHtFq9gRecpSl4qs6AtQEWzz+UPkhbswle
uSpHSOF+1CfPFg76Ik4bE0RG1Adylb5QmRApMBw683m5m9fOLmg3AJgGAyC1XTXKIqSgboPL8piI
g8UhvWixAQG7E+nJkWAxJXkcpITu4G4Kxj+VHmPOuzuE8JnFgBtUYxeemryL129U2rfFeOyoj2oc
xzbyNTokXFcE0QwGA9HqkXkaCIqT3IzlIxtihGAHY6EYBhnv9JKc+KQDiAxNWQ5jm6MeGAwI3oAU
AwHVMYy08vD7y5t3BC7OIjagfg2+z2u/qmdyQjCNSSpQsS8vkIlpTzubWUbVqIjGOxSuXJCMI1JK
jd1GHKWy4jg5/vQjENEYDqxs+Ubt29GRi2EQKOVHnI3dPMz4/I2g10xXlxlPlxFhK0SQ4CjapZvC
nl5HsVi3akIXOauC2LhwgDjJG35x5vnIxEzcPFEPvw6gs2y1y7QHYMyhCNIxDDqyuXDphEOSpc/z
AbU4sQPswfHv68rtn4XMYuMJHeiOciYSHgkMJNsKjb5mQ/EWABCG0D2uidz2zSA2yKBn97c45neV
L9I9ZlK1blouEgg9i54878S7I6SMaMo8sYExuyMbcfqdTlK6bkPKMwBtGK8ydww5WQM5EBmjl61Y
nAtenWROQKvymTwAAkdilb8wmAHC2KMuXnI3MhLBA8xckJmpAwChZtXSYHEnFTlC7IyAoKYqUuYm
bTFgAMUIQu6y7SfJRtQmCJgyMmwiFdgQIwsDSJkUJK1aozkPZZQmJRAkHIZSsaA0fbIotcjAtVlG
5CMBGWAKNmdoEgPqAojeuQgNXExxZaxEA4MFXoEY+IrS77d6YeKVGRiC0PaK0uX95Au8Tgel4mow
Ws1lvTkFtiESS2xR3AJ0SSIkbVdjCtoRNd6gc69Ed5RfCNSE3TpGWKcJ03S2Z6DE5dRyn9S0nuTH
Aok12JmxXlSD5iSmPzh0S7vrVq04BmRH1qMLYAjEMrfMRAFwSESdoKI3IaS8JY7lIxLzqD3KQuFj
ki1SKBcY4hgQgdWmbM+1Vm43JokgqQkahnKcF+kcvzB+GaQn7vahKJcGoIVuewmPrw+lRkcWaXaK
egE8rcv+SdE6v8rLwXRrj34oRn47fDLu6wkR8O6NFztyKly8y87WG+OXRK3gTWJ2EYKHMxHxLdJj
sxQlGoNR6QxkHBoUeVmfDxWztijIj/t79JjKMtqPLzPFDwHbE4IxlUHEL8PLwSraJ+rqi7a++tVj
vGYUOctUuQFRnvCjeslrkajLuKEhiKTGwrUP/wAW6eL8yScYH0xjIODQheRcrbl93L7Oj8ZywacR
xw94IXYHHEbD0C9ZpOOI94LVGhHiicj6IXrNLkMRlIZuvxfLh9PihnGQWiXDdjlt6BrAJHhJyK1W
gIXo+IezcitUC0hScMwUeY5Y6borKOUgqUnGkoHEH5m0zpsOaYnVbPqWqB7QcR1RLlzpmS2n2Spj
mLcoQlQmFWpipmE9N+kIviYkO/euXsWi1uEI6x/9xuIrXA1QAueXqlxv4a5rTKQjMEjdLeOjXCRi
/iGRR5SRAkS4J3qzzFuL+WBG4NsWqVOFomJfVbIohzErpPMAMSc9yIIUolVzUmJ2tihM5iiZCdqR
jIbFEc5DAj4kfyL+o82CbluUjEA5gnUjKxI2zoOBypRXBIGJ1En1oWr1JikZnPcUYyDdHCH2piGT
dNCvj2Bcm9CTRS8m1pBwjGqeQ0R34qsdctpTRDDqYKhfao24eKRYd6lblASvSi3aSE2nTIVDKOou
Y0+b93yDmv5W/wDsFf1L+bv/AO5L0jhCEOIyLISunQ7EklAFrkhm6MOXgBsKM7szIn0bBCdwm3b3
iq+HdI7Vxx82ecpIk29JOYR8qZByRMY647QmnExI2hESkIAVco6rgjWm9EYnEFN6SfKzPDMUU45P
qHZ1QfDbhWUkGhrkM5JgGGzpqQO0oCcwZGgjGpK8uNmRmcB+VlqnA2reUY0J7Sv+7vSNfCC9O1CI
i8RgCXCaIAGwfKqzErmVuNST3IPM2uXIcgBidyAtio9o1KIhHzJxxiCtfMtGEwPh4sELdsCMY4AI
ykWAxJR5fkQZ3D+8AeI70OY5mRvXzi54R2D5Abl6YgBg5xUrfKiI5cSAAjKs+1f9yIynJjMADEVZ
0wDAYAdX8PyhEpyOkyFSNrBG7ercliNo3+kIi0rvuuzdqFy5JrddUohidwKEYhh6SUbfFdo0RsJZ
0OZ5s+ZdHherD+7rGzblosw+9u/YFEiOnlbZ4In2iMyqdSV27IRhEOSVGV+Jtf0+BeMcDc7ULdqI
hCNAAjOZaMQ5KNuMpQ5WBGoDCQ3oW7URGEQwA6pkaAByVc5q1OPkE+XbjKjwBdwrQvyN7mYRAlcl
Wu5HWNF72boxCa5E6X+HejhTeuT/AKf/AFC4IWbc+K/7TYOe5Nyh8zWxN0lzLpMjQCpKlzMh4qWx
siPy9b8JCnK8uRK/L3pZQQAoBQDr3/It6uYtT8sCRAB/OZWLXNkECWmMY0DlHmp3PMuGLRHuvipX
rx0wird+7bNvlLR1QjLGR29EhskfQG5IfCveI7JBRvaYmUfDJcNRbgRM5PL2VY5GNJ3yDcAyhHJC
EfDEMOwK/qDiUmKJtxYnEpslVebFzLIZB1uWwKVwkic2FFzHOnwj4cP0YrUA3mHUe9CRkYkULKMI
+GIZMAxOJQeRiY7N6jbjUR2oWwMazO5Rs2JvKBOtvq6X9SkX4li0BjJATiQDhIquBwCqGO1G3dGq
2cCjKNwNsTwkKYuoEBwDxSyUpRGseyMlxQGnMKEoYTanRot0A8UtiMhLzQKkZq5oDUKt9nRAbXVz
sTdBOac4lMtJwKcdDkon1dAIQI6Ny0jE9AbJAjt6HVre6mN46JbGVq/UwiKEZHaoecJCeBIDgqMY
jTagXAzJXcjGBNdiMhR0TmelhUp7s2J9kLXZk5GIOKOQLAjsQlDA4hCXsnPpHL3z8I0hI+yptiBq
i26qlEeG6Bch3+L0ELmUuCXfgUBLxQ4ZdoVrmGfRJpfolCT/AA7wY/pDDrShtFO1Q5geO1w3RtCE
hUGoPRK2RwXRqHbmpctLDxWzu2elF2397arHeMwgT4Zio2FASre5dm/PghOOElpNCKxlsKMbga7b
pIfb1TI/dXsRkJJsLF002CTL8RbHBKl6I/aTFpW5hfhLpcY2pnMbPTmEu0HYUbF2l2OB94beg83Y
jqtS++tj9oKNy2XjIOD0edZoR4o+8tUe8bPRfibIofvYbQo85yTmJNQPZTg8Y8Uc+hjSYrGWYRlC
OnmImo9m5HatUS0h4o5grzrJ0XhnlLcVpI03Y+KJ+ZtMg4QuWpNEYHdvWiVJj1HqCQxBdE3qi6NI
OTFS8yr0BG7BGObUQGJCIlgVHlrheQ+7JwO5G1cD6pHQD7pwquHEUlHMEIyjSRDOp8teLXohgfeD
IxNCNqlB3fILiDnFk8I6T0OhsZR1QDsheu2Rcb2Dg6vSjy0LciCYmOSucq5F2dzVhTSzK/fliIEB
GZrv6PwvOHS/3d73TsKNokTzjKNRIbUIXYGE5mkkYcxSOmhjmdquStXhG7brG3OglHtRDcQoQi4b
pFy1LTIJpgT7Qvi22/RWm0Tq2MtInHVseqo0jsUoiEdIJDdi8yJDe4RRXOau8MLkia0FS9Fo5aTX
YkOcu5ar1wuMFxF5DNOcz6BvQn5u5n+Vv/sFf1L+bv8A+5L0vmgCmZTCZP0BPORPpBGETKRwAUeY
5wCIFYwz700QwGzrHzbYJOaMuXlpPu5IzMdUY1cbkH8UfqTjA19JC5EsxVvm4VcDUQjHoAAQlOPl
2s5S+xCzZiwGJzJQ1yYnAZlSu6tMY46qH1KQsiMYR9qbuewI+TCFt6eZKnqCFu5ICTvK65Mu5AmH
mTHtSLlPCAiTnn8sBuFicBmUYWLflWQeKcjj2MjdMRK8cZN9S+LMR3IeUZ2ovQswbanEQZ5zNST0
PcPEfDAYlCd2Rscv/wBMYyCFu0BGI9OSaAYlTt8pGV68MwCYx70eY52M4iY4NRw3sowswAIDGWZ6
plMiMRiSjZ5MPDCTUl/7ISkAbzVkBt9JKFkgzHikcAjO8GgTnUzQADAYD0h5flCJXMJHFuxCd74l
73pYgZDrGxaLQH3tzIDYhathuVh4j75G/tQAoB1DevyaIwAxJ2AL8TzYlb5aJ+DZw1AZyQjEMBQA
IykWiKkleTYLWYs5+1CEO85k9Yf0vl5EAcXNTHsx93vUYQDRiGAGwdMrN+AnbkGIK/E8m93l66oe
1Hs2oQEjcsYStSy2shPl5gn2oHxDu6By0fCOK6Rsyj3phgOr+F5c/wDd3mEAMgTWSFqNTjORxlI4
k9fzbgM5yOm3bGMirv8AVeaEuWtsdIDapyUOauQkLZ4oSjiNjtghDmXv2cyfEFHmeTvRMrdyEvLO
JY4EL8XzwFmyYgW7UaA7+i6Nk/r9AYyDg4grSDIR90Gi0wAjZt1LZyXMc4awh8O13Y9EztkevOUa
6eCA2zlRWuXHjmwPaalCIwFOobpDtgN61SiDDOI2K7zELkY3pnSATUPkpElyTU9MRi1ZLRaDRNZF
AnwDwj7VpKAGA6GIcIkijIuaZBaCKZLTEMFpNXQgKgSA6LkR49VexEywaq5oWy4jKQ+lQGyI6LY3
FTO1gnCdaRl00xQOfRpGePU0HLDoJT9Jiem13q4N46J9ijqDhgg50tUALFxl0AwqQTRajL/DkgSG
94LSeA5HaEAZO+xNbGqZwK1XC8iqUO1ARpI4/lUXLiOCb2ZZbCtPT+EvF5ANCRzGxRlny9wwl+iT
6CcMyHHaKqMv+tFyPz40KlD3gQv/ALtt+3VFRubRUb+sQfu74Y/pKViXsVhviejXHx2zqioczaHH
BpR+0KMxgQ/pTD91eLx3S2IXYD4kPpGxaX4LvFDccx0DmbYecaSj7wQnHAjqGEw4Knyt7xxZjuyK
ly90cdvhl+cMHXkSranW3LZtC04TjWEthRt3KXrdJj7fTicC12FYn7EXpcjScdhTZHELXEGXKXTx
D3CUJRLg1B6POtf4ojNCce8bPRG5GOrl7ha5HMPmvxfLPO1Opb7VG5GhI4o5g9DPpmPDIYhGdBzI
D6cpiK1Ckh4oZgoXLR0Xo4EZ7ivLucN6OI29nWp8wMcFrtYZjMdiEbp1DDVs7U4qNvSbN4aoGgBy
Qu2dV2JrIAPpCEhigSGfJbELkTxRLjuVu7IOJRB3go2jGQ8ueuQdjKG5EFyNemA9pjg61QNR6wtb
vzFotIZmO1YMiAK7USKHanl3dEAMZFNkOgge1RE40UmLA4jqCBjrPsk4x7FOFqvlRM5PsCnK5bkb
QJHmD2SELcoBsY3RmFi4KYiq4VgsOii12pmEhmF5spcZxIon8yT7XUpSkdeJUr0oiXl1Y0dlc5eD
CxN2h7r7EZSrv6I2wHfHsUbcchXt+UV+Z+Z/lb/7BX9S/m7/APuS9Kz02ekZC5ePlWvpK+DbGv3j
U+iMJBwUdP3U69jotlh2ekpivwd6FGYEoWOWA1EsSSwA2of/AMQ5uLaRJhTtxWixZ16aeYapuTtC
INIE+I9gChLVO1L25yL+oLzfxE5XfelVGcybsznJOYRfs+XGVyQjEZlSt8oTQUuCLgnchd5yZv3s
eI0CyjCPqQt8tCJ1U1E4b2CErs/MILsRn0GUiIxGJKNj+mx8yYLSuHwhedzR8/mDVzUDs+QSncnF
4h9DhypwsEWbJoAAdUh2oeZOVyUqyBwdMMOqblwsAvLtjTaiQ43bU0azPimcfRvkjG0WhhK4M90U
J3o6Y4xt7d8k2XozKRaMQ5O5HleUiBAUuTljxbG2LTHilnM49Y8vy5Ib724MIjcvwtjhsw+8uZyK
EIhojDqG5cIMvYhnIqHP/wBRI4a2eXyi+Z6HNAM0OW5b7rCR2u4r2LRbABJeR3nrNbrfucNqO2RT
3OLmLp13p5mR/J1WKle5JrPM5j2Z9qEparV6BqDhIfao81arcPCbeevYuKtyR1TltJ6s708IAltr
ZKX9Q5mLXroAhH3YDD0EbfL3ZTjp1ESNLYOLK1yNv/8AF5UDzCPakMlokAYmjHBG5yh8i77vsleX
zFsxrSXsnvVuxz8eGIERdju2hC5ZmJxOYKvjaQfo9CIQ8c6DdvUzDxkaY75Sords+JtUjvNeh9pP
19anjlSI3oWbZ1cvyknuS964oj2bUX7z1TAY4haYxJJDsrNoxIeWotgEH6CUQKR+tNtqexMMAnR6
dIxKBT9D+pD9Po12paTnvRtQjSQYkUV25d8dzEKI3DogNykfzk63p+ppyKJTnqAhAhaRl1AdicJz
lioS/OICu93RMbirHLtWYx7FRCJxQQH5xXYg6aQoMFSowrtRYN0xILEUJ3LhrtBzTx8EslGUaxI9
XSJwpONYneuYjOOk3IgtvHCgJeOBMJdo9BcjHGB82H6J8SEhUEOp22aNziHaMVOyT4uOHfj1iB4h
xRO8KHNQHHb8Q2j2ghIVEg46LnLHCPFD9Eo2T4J8Vs78x6UwwOMTsOS46XIHTMbwnj4ZkGJ92T/a
gT4xSY3jo/8AsXT+rLqjmLX3tti3vRzCt87y/iiOIbRmD2LhNJVEthRs3PvbdDvG1DmbH3tvEe9H
YhchgcRsPp/xNoPMeOO0ITjgUYTDxkGIQ5e48+Wun4cz7O49JvWg8fbh+RCcag+hMZBwcQjZu15W
ZaL5PvXnAmfK3NlQELluWqMqg9ABpOJeMthTxHl8zbxHs3IpxScaThmCtUTpux8Ml5N7hvD/AJuz
5n8y0wnmDgVpbDxQ2dieBfplZmBxOCdxUrUSZW5nVEs1Co1eLd6xQhCspFh3qVmcxKYL6QX0goX7
bi7bFWzCFwFpD6whO5IguHkNihcOoiTeXcjUF8ijds0uR+8t59qJBA3ZogVKeVFtKiBiE+ZVUTm9
FI7B1GRmA5wCumFzXzt+DSJxDh9KNu3PhnVu1G1eDwOIzidoQkOK1KsZZEFCTUKgLZOkkam2LgkD
HfinMHG0VWw7OiqbocIWWAGZGJToxHeqGqN6Q4jh2f2A5n+Vv/sFf1L+bv8A+5LqUQFq2ZE7kI3I
sTkuI6QgZvIqsQIhEW4CcvoTs24elFuzEk5nIIXL/wAW7sOAQjEMBgB6F5EAbSmjMXLmUI1QFprc
DiSMPWrk792UyxJ3kBaZAgEUejjofrtEEnYKoeXYkAfalwj6Vq5m6I/mxqVO0LJldwFyRaI3oG9z
MTcyhAurcrEjAXAAJAsrWrmJc9zd3xWg8tIyQnK35AO3Z2KMAAZAVln8wkcrEXpgsz5o3OemHmPu
44AbEIWwIwGQRtWXu3z4YRCjPmSREVMHoe5NbiI9g6CZcU2cQGJQnzXwOVxjaHiI3oW7MRGI2enM
5loxqSjY5WBncnSM8g687nANU6mpJ9SjC3ERjEMAOs8y8j4YjFGUy1sGvutuQjAdpzPozKZaIxJR
gHFo0jEUlMv9SjcvAah4IDCPpDO5IRiA5J3LRb1R5eP3kTQEHBC3bGGMsyd/W8nlyNXtSxYFfheX
cD95c/vQhHAfT1IQETd5i6Wt2o4ko89/URr5mReEDhbGQA6HOCPLcseHAnAyOYCqxuS8Um60rky0
YhyVLnr44YExsRyb3vQHkeZnEX7fFK5nbAqw3lT5/mXj5oHk2zlH3iNrdYaZH8JyzGQynMl/s9BK
9cLRgHKNwnVznPHgGcY5IWwXOMpbSek278BOByIUrnInVHHyjl2ICyZWiPFal4T3KZjwXRGOuB27
vQGUsAjemOKXhGwKzygrCz8S4N+XQTuUOoblwiMRiSrkBy4NmEm1O0iOxX+ctF+YINuzY9oGVAr9
y+DA2g0omjyNXKldP7yRI7Bh1eIqd0muAG4Izlk5CiDsT5KIHt4hVT5nHojEYu6BHRVHYKDoY4hM
FGIxKHaT0azXYq4hMM5AfSgNw6IjZFdskTkgcurqOARCbqAbUBsR316rHKi0etQP5x+pXe7okdxV
q5CWm5ABihCINyQxIGSa9Exlv6B+mfrT9BkU+aEuoUQMJVHaqepDaMem3rBMJkiTbCrhsvatzNQM
e1RtTkZwmWrl17d44Vtz/RkpWZeK1Ju2JqFC+P3cg/YSxUOYt1Nsu+2JQIwNR1pWz4LtQN+aly8s
BxW/0Tl0Q5mONstLfE4oSgeKPHAhCYzxG/0seZHgnw3fskjA+GWa8ueB4Znv4ZdBhLAo8vcPHDwn
bHq6D9xf8OyMtnevK/dXaxOw7FG9Zpdh/wAw2ITj3jYV50PuJ0ux2HagQXBwPpxOI+BPxj3TtTio
OBRtzwOG5fg+Yw/d3DmNnSb1kPbJ+JD7QhOBeJ9DK3LCQR5HmwDbkPhyyZC1c4uUuFok+yhKJeJD
gjo924KxkNq12xo5u2eKOEbgRYabkC04HEFD2ZxrGWxGxzA03RgcpDd8zuOGYwkmrC5kfZkFplS4
MY9IF0cUfDLYjZM+F+EjYuIundiMCpXW8yM6Tic96ETI2pnKeHrXncsQJbvDJESoRQjeoW5EGukx
JBL7lHmOVuyt3RQkGhAOBWnm4nCl0DE7CyJtjGodF8Fgp7IVKYBkXUYg4onaepvNFGMC2nHtU5SL
k/ahWifPIqVm/DzLMhTbE7VIknyYn1prcQN/S04h9oxXkmVJB4kp9QZU6DoDtmtMsdgqUfMOm57J
/KiLkSY5SFQhFqPUqNuIYRDfNbfJeZ/lb/7BX9S/m7/+5Lo0WoknbkgOfuCFqNTEGsq4IXLAiBp4
okuKZqQsaQ2xSnLawfp0iR07B6URgCZHABC5zXw7fu5lC3YgIjbmfQvcmI7k3KWtZJxK181cA/Mj
gEfLgNRxk1ehpAEbChzMGjK3lhToboouGJKGm1Ku5Q5XmbkbOrxTJwCsPcN657WmWrVTMBf9pydd
pAH1rTYsCOxgZJoxuRB2DQFq5i7G2+OqZkVGV/mjIirQDBE25tK0HjqzZedpBuyoZEV+YdIOu4cI
xq5Q84ytQfiALONlE1q2IkZ5p7khqNIxzJRi45flh7UayktQ4psxmcW6DO5IRiMSV+H/AKbqPvXg
KD1rzuZkeY5g4znVuz5ATO4Dc9mAqStNmVz8PPFxpio6YarkfblUv1zCHFdbAZdq86/4R7RxkhCA
aIwHozOZYDahGQcn7uyM98l5tw67pzOEdw9IZzPYNqE7h0cuS0oCgMRv3rTAMOt5FltRpOT+EL8L
y33kqzmPZ3lCEe85k9T8Ny3xecnSEBXS+cl+L5u4b/OSFZHCD5R6RZsubeoRmR7W0BCch8Q1bKL5
Drx5S2xsRL3ZvmEIxoAGHXlduHTCAeRUbtvkzcieYNyTu84E01PkEHDFsB1Ycpyv3153Pux2/So2
bQYAV3nb6CHLyIFiA83mD+aDQd6l/Ub8dIlw8vD3YDrTPMWxqjEmMxQim1WLcL3lkyaZwp7p7fQN
+6ga7ypTlSMQSe5X+fn4uYnwfoDDokdxUBu6Z3p+G2DItiwVs8nalHlbMxKRkPGdi/F2Xsaovgzy
7FHzIGGggwuRwcZq+OcGu5zBBN3DdVW5WiJQMQxHVJyQNuRjIluFTsTnqtxDIOpacI4oSxAoE5wj
9fSTkKBNs6DtPSNhoVqRlkHbuT7AT0ET8O1GYkGAQlL/AKgA7H6W2AKIGJkUAiEQcR0hky7Vq9fU
1nuVE4y6jDEpkJDEKP6X2K53dEn2H6lAGjxDK3CA9kEnaSHVyUhx2xqid4QXZcP1pziCy7EIDKp6
DHZ0hPkmTjP60xxH1Jx0R0nSYSX4iEhIDxDsUb02YViOvKGLinarV00F0eXP9KOClCWEgy8qfitv
an3KVmXitFn2jLrao+O2dUe5Q5m3WUKgbswhOOEg4RjKoIYqVifjslv8OSMT4L1Y7pelMJB4kMUe
XueO34T70ckbkB8SA9Y2LQ7yhQk5joE4UuQ8J+xahQikhsPUlblmKHYcipcvdPxrdCc6YSRtT+9t
0I3LzYD4U/vBsO1MaxkPrX4a5W1Otqe/3fTmMg8TQhCxcL25/dy2bj0aDSYrCWwo8rzQEbsKAv4h
0+ZbD2JHjh7u8ISiXicD6EwNJezIYgqX9P55jcAoSGfeF+C5kk8vI/BuHAbk/RQ6bkaxnvXmwOjm
7Y4o4C4AjIAwnGkoHF00qSHhkMQULHMkCXsTyl8zOtMu4oRuEv7F3fsKa9n4Z5FBsOgSFMjRzgrc
xKJsXW+IcIk5SWi5xRYEXI1iX39INqZMM4GoKlciPLjLGIQkDUF1HmbBeQpdtP7WfrREYmnijIYI
EGUJRDAxKFyHM6RpdroLSOxxgrlnmD5FyAkK1Gob1ekeJj9DpxTcmfBEDCIZRG2vSwVfZ+tEnNB8
yqVCoXGwrSIuZ0HeohuKXFLv6rX4CTYHMIz5c64bMwhbujQZbUBLwYmQ2IgvCALUxKeEaj2jj0GM
wJRORR8uAi+LfNVPktejmf5W/wDsFf1MgiIHNX6n/UkvifEbILVCAtxHcpXblwWLIcCT8RO4LmOX
ldlehbmYCYeqnO9Ch8Lozk0AvhPM7AnbTHIIm5J5tRStnDL0TIS0+XbzlJDTHXczmfQOSw2lUJuS
GUaoaIeVbO2hZGd4m5I1YmiaEREDZ0tOQ1ZRGKNvlrIjHO5I0Q/GSFzMgbUb/LggZxGC45t2Bcc5
HuXBZneO+iEeU5OMDkdJmUABOMTl4AtfOX7dp85HVJCU78roHuhguDlRI+9MumtW4WxuAVZHp446
ghGIYDIfL3kamgG1Stx027OcxU9iEidcxQSOS1XJaQvL5KEjAeO6aDuRvX3u3iXeRcDsTCg2dDEi
d4+G2MSVG5zp8nl8Rajie1C3ZiIRGz05uTLAL8LylkTlcoJROruoocx/U2MxhaiKMNqEIARiKADr
m3YLnOYGrHYyjf5l9ojI1PamFAMAPRvKsj4YjEp5NLmBWMMYQc5705Oq4fFM+kNDKeAAwfeV+I5t
gPCICoI21QjEMBQAdYcty41XZeI5RG9RsWBr5i5nm+ZKqdVyXjmcT1ByXIR8zmZYz9iAFCSUZy+J
zNwvcunEk7OnyLRYEtOYx2sELpxI4Rsf7euLVqt65QAYgbVpA45nVM419B+DjXlbTG+R7RxEU8Yg
FmcDLqm9OuUQMSdilzl77/mKkH2Yku3oJ3Z0jAGR7kQXFqUvMvfojww+hCMQ0QGA60eUsyMbl86Q
RiArfOXb0r04nURLOW3rizb8UvEdgQjHAIcvb+85iQgOzNQtRwgAOifYyiNgHSYyDxNCCrfLWIRh
btnUREAVTBSheti7FvCQ6u2rdiVq1GQAgas+AV7+ncy48ritxliBs6sOX/DfiLZi85A1B2bFIiwb
egOYmtFOXKx0kyGvtTxqdinECj1RhbwOJQAxz6DLPDobIrctIwHSGxWnNlJ8gUT+b0GUA0DhIoHm
AQDmDQqxaiGBmKdnTLcygTk7Ls6NQzx6dRwGCboIKIOXQyYdFUdh6deWS7ExQB9+nqVzu6CMyCy5
a4X8oNqZRhfJJgGEhVxkvIsAi2fETmgifz/tUoZElE5IyOfQD3Lt6G2IHZQ9B3obXUWzHRM3CIgk
Y0U+VMgTd8I7QowJfTQegvWo+Kl232iqjcHtBypD2b41D9IUULuEJcE/s68+XPhPHb7DiFLl24Tx
Wz9Y6IXvZnwXPsKMRSQrE7whL2hSQ2EeljzFvx28RtjmFGcaxkFHmID4VwtMDJCUS8SHB6PxEPu5
MLse/wASEhUGoPUjzUPFCkx7wUebsiorIDOOaEhWFwfWvw06wNbcvsRtyocYyzEhmpWL1L9uh/OG
305hLuOwo8vd+9hgfeHR5kA16FYnavKu8N6FCNvQxqE8BqsyNR7qcFwcD6GM40vW6wkpcvzEdN6G
LjMZhfgucIcfdXCfEOlxw3Y1hMYgoSg9rmrVTHK5HaE4GmY8UTitMx2HMIWOYwNLdzb2/M7EOEHj
rs57RvWqB12fWyeBcKETgSpR5qPm8rM8VvMHaFqsxje/pFwtKJANyD0zWrkARYkAQ+3qaAWJw7VL
y2IwlCSv/i7AuSl8SMrXENJCmbMRGDlg2W9GMrcJxOIkFzPNX7Mbd2ERoMKcVVK7aZi4IKOqOOaJ
IqKqMTTXIBzvKiDhgDtbpMjlXvQeuoOmURlEBRlBEiktyE7lY2696ph14mdJxpGSGsmTZZJohgP7
C81/K3/2CueuSlphc5m9JtxnIp7ds3ZjMB2THggctyHlXJGRxqwUubvXBG5aibkmZi1SCtPKwxwo
tV+emJyWojUdpVAB0Rm1NvofhwIh75oELl34t0bcE0QwGAHXe5IBaeVtmWWorVzF1o+6FwRBPvGp
6XJYbStMAbkzgI4IuPJtnDIoGQN2ecpVTAMN3QYzDgozgTbepAwT3nmdi4OVjI7ZVTWrcLY/NAVZ
Ep7kdTbVpgBEDIfMmq5LSE3LRmbYxmAz9jozv/EmfeLsmiABsClCwI3LoyJQv85dMXwswoAEIQGm
IyHQZTkIxGZR5b+mQcYTvSoB2Lzr58/mTUzlUDs9OSSwGJRhAmV3cMFGVmVyHLHxmZp3IRtDVP2r
hqT13NAMUbVocA7zN9ijO8OIeGIwHpPLgNd44RH1lHSTcunx3jhAe7FaYCprKWZPpDaskyvNTQNT
HJ1HmOdeV6pMSXi+VEwoOt+Hs1ukYj2d5QjAeZzFz1yO07kZ3DqvTrKX2Dpc0C/C/wBPlx4TugOI
vvWi2CZHxTlWRJqfp6Ry9rM/EnlEflUZ3Rg5jE7zid/XN2ZYDAbSjzt+pl4AcB2egjZsDVzN94wA
yGckLUKk1nI4ylmeqSSwGJX4iR/7HlyRajlOYpq9Db/p1l5Xb1ZAUoMu9RjOt2fFcO85d3Wcq5zc
vDHgtdmZ62q7MQi7apFgtVki4ZeHSQQnJecqykjKRaIqSV58QTy/LhoSOEpHMdMYjGUgOpKZ9kOp
35AiWDnpHO2viRuGOq1m8TiFDm/J8kRi1Q0pE7epMwDyAOkb09wAyq9M1Iyg8LniIyZeZyw0xlN5
dyiIFjJHR4RQnamzNFKPQIDKp6Adi1jY6J6dWQRU5ZGKmdgVcM1bjDw6QyuynTTUHeuWY0Mn6bna
oba9JBTHEIAYoDqah39Go4ldnQ61DEdDBADpgfzvsVzsH1rsRpXJQEg4bBGFsCIoUx9aqjlx/aqY
CTqnf1BLMYrUt7ow29J01Iq25RidrL6FwOSCCQNi5S4YFgRq7Gbop17d8UES0julRXuXyB1w7JKN
6Pisy1d2aIykHid+SiTSUeGXaOtG/Hx2i/8AhzCjet1lDjhv3KNyOEgpWz7Q+lDV44cM+0ISwtXa
HdL0xsH7u5xWzvzClbmHEh9KlyF0/Et1g+cT0GMg4NCEbEzwGto/Z1GOC8uZ+FcfR27PpRsH7i9W
2fdlsRjhIVjLYQvLnS7DxBC/bpetV/SGYKFyOeI2H04lAtcj4SmNLkaTjv6PxVgNcj4gMwhqpcGI
6GOBxQGNmRx90pwXBwPoRzFil6H0jOiIkGvR8JwlGS/Ac8NN2I4Jn2h0iQOm7EcMgjOA0czCsoCm
ts02F0eKJRt3A4P0IWb9bZpC59hTj5mbJGdnA1lDLuRucrLTcB4oH6mTTBhdgX3FTjMmhKeJeJ8U
TgQtfLlj7VmX2Lh4Lgxgc+xMQxHQJChFQjcO3iZHkzYhfsSkZibtKo+lcweVtWuS5u0BMG5IAXXx
ijbv8rHVEmMmOYxUhy40aiTKJNRpRh52iQJeJCpdjILIrwgLkTbxlZiZDNyMVn0aTgKyKJyFAgBl
UqRjUbEZRpHMLcKoSZpSqf7H8z/K3/2Cr9u6TCzG9MSYVLSIXkcvGIMothgmidcxRgmtxMYnDJeT
zN6crZLmDllwQA6pMiBLLrjy4ERzkaBCd/4twVbILTACIGQ6zyIA3rTA+ZPZFAWyLds4lPdkbhOL
4JoRERu6XuSEQtPLWzN/ayWrmrjA+yFwRrtOPzc5oNqI5SMbsgWqcTuZedzd3jmK24igGxCEBpiM
AE05V2CqIkJWrZ8NWohG3AOMZYknt6SINcuj2AarzOdeFkYW8FotREY7vT6rsgCaRBLOVGzZlARk
eGNrimQo3/6nIyOIt4frIQtxEYRoAOuZSLALyoCQgfDEUMu1a5DVdOZy3D0hs8sRK57UsYw7UY2y
Rb/eXj4pHZFCEAwHo3NAMSrfJckYk3am6+pgNwRu3zrv3CJTGEQRgw64s2BqvTw2R3lOSZ3Z0fOU
lLmL/Feng/sx2DpcqXL2Dpte1dFaDJC1aDAYnadvSbVkjW3HLKAWuQItisXoZHMnruaAYlC2KWrZ
BHZU6u9CEQ0YhgOvK9cLQiHKl/Ur9J3aWoH2IZevrR/pdh9dwg3Zj2YYqFi0GhAMPQTvXC0YAyJO
5S/q/MA6RIizE4U2dnXjZt/e3jpj2ZlRtxwiG6ryLAYkqPI3Lo1TOtolzw4DvQvwncjauRrpJOkH
aMlGzz41ww80Y96/EcrIXLZnDWImpi9QpzhY8jlYsLQIYnptQ2PI9QWwWOJ7Ao6vFKp7+mAEuG2f
oGPVnPYCULhDSlkpCMh5hiWCjZnWdZSZW4iiACEdibaq5VRPQ21adyIOXSBmU4Ut7K4ewKqEJR8y
IwrUL8LbtmEXqNq5aE6yAJKrj0XDvKtjd0shMZ4rWcsOqy0nLFMOoxTZHBajngqdFFHdL7Fc7B9f
QVahLGZaPb0F9zdEj+cFKRzKIOCIOI6THahEZ9AK7egkFicChpxCBJ4nYharcdUmHD3q1K7EQJDE
KWiYM4cQAOxRmMJAEd/XlbPtD6Vy/Mmkrfw7v0CqMThIN3FStS8VmRiex6IwPgvjVH9IdZiKGhU7
B9isf0SpWf3dzjt9uY6PzOYH/OFKBxOG4pp/eQ4ZDePS6YlpxIlCWwhCRpMcMxskFDm7A+PZLt70
cwoXo4SDtsOzo0mkhWJ2FGM6XbZaY+3qGPtCsTsKnaucN62WO0SGBRt3KXrR0zB+tC/aHxI+LeGQ
nEuChdth7FylyPunagRgcPT/AImz4h44+8EJwLg9H4qy7PxAZJweLMdBjIODiFolWxI8MvdfJOKj
L0P4m1/jjuXm2i16HhOB7CjyvNR03bdNW3pBgdN2JeMvsRu2nt83a8URQTG1MQY3o0lEowuBwVou
PKxI8M/d3FOKg4H5n12SI3AX3HtRt3hpmzEYODsR4iTKsH+paZgg704LFCN4OcpjEJ2F6GUhiFwk
xmPZkqqcPfiQO1Rl7p0oGJETiCresvMxeR2kqdsmUbIjIykMizIWb9qMtVwDzdxOJWizZhcgQ8S6
Ojl4eYMYEmvYh5/LaJA5VCtWrcYTuya3bhUK7Z5jlzbu2qaa4jJFrZBFGdGNtoROLJwQVduTj4Qa
qgTyoEIkPGPEexCMQwAYJz60bhnEyyi7oWblsR1UEo/M1PlL+l5n+Vv/ALBXN27FttF+7EzO0TIR
lcLBapcUimAYDqmV6YDZZow5UaIe8cU92Zl0N0jRAiPvGgQnf+Lc2ZIRhERiMh1nuSA3ZoR5e1q1
YE4InmZiuQyTW4B9ufTVMZapbAtNiIt285lPfmbpd64JoRERu+bnWiL3Lp8MIjEqJ5gytQd5RBZx
sQjZtiLZ5ozmdMRiSha5eMRGXtkuQNtEDcmbhBcPt6TO5IRiMSUeX5OJhDA3kbspG7dPty9OZzLR
CFvl4zkSWlJmDbkJ2tekl5XJk6Q+wIaRrusxuEV9BqmanCOZKaMsA8YtSPag5M55yP2eklC2RC2P
HeJp3IaR5fLYkM0pnahGIYDL0ZnMtEVJR5bkISFqctIvE6YtHxE5om3CIuSbVIbuuLUBquyw3Pmp
XJl5yrI7TsC8+8K+xHYOkzmWiMSiLZlDlI01Cmv3kIWoiERkOnybVbshU5RG0ogv5MTxSONyX5PQ
GxBz75H0RWAE5Vk31egFiJP4WwQbmWo7EAAwGA6pm/xJcNuO2RwRu8wdXM3TqnLMbB6G1/ReWkw1
A35jKj/QFb5e0GhbDDrz5kjgt8Fr7T1r3L2jpnOJAKlLnQ0eULkEu8sqoxkAYmhBRvci1q7nD2Sp
cvcErcXwrpJ2hWuUu/CvQiBWgknFRt6LksotEdQAx8UsfzYph0SnmBTtU7khxDh78+qbMZaRLxHc
o2xhAM64JA5Fip7IxZD80LsRKBROcqdJkcB0CQwPRuFU2Q6G2kKZ2lAZppu+5O1VbHuwJ6Z9pUB+
aOoxzTDramqny6oTDofoH6Y+pT7B9fR20Vqdvx2y49aPnQaQzT4Don2hMM06EhnTpMtiByy6YnZR
GSB2FagWOSidpqpXYYxjRQMjpdwW3IEEg7V5cvFbw7Dh6C/yx8N6PmQ7c1HV44cMu0KMvY5iLH9K
OC1xHxLREo9yjcGEg/WhzEfYLT3xKEoeKHHAqM8CcRsKIj448UO0KNwZiu4qN393PhubjkfTC8KW
rp03NxyPR5Jpy/M1tn3Z7OkczbDzjSUfeCE44HqDm4B2aN0bY7VDnLDHDWPeifyITj4ZBwtUa2Lp
4h7pdGJqCKoWZl4S+7lj3fIPMjWxPxx907UCKg4FGMg4OIWu2fhyNNg3FOMRiNnQYyDg4heXc+5l
4JbNxVKj0PmW3Nmfjiasdy/E2KX4h4kU1NkvIvPG7Gld23pMoHTdA4ZfYhdtva5u344jCQzZGJcX
YUkCGRhMPE4grSQZ8ucD7qEgXBwPzO58TMJKVuZEbkS9uQOKNnmY6hE4+0O9CVqWuJ9Y7eh4kg7l
pvjUPfHiC4oxv2/eFJBRNonSCC5xG1XI4gHVTt6LZGMBFu5XLcbcXuvqkMcGVi1clpjcmASclKzE
vG24BGGKEoliMCvMAaY8Q271bvWSbZtyEozwYxLqN8WIeebei5IFtUtqncIkADUBNO5XZKhTC6Aj
y9uYafikqXI+tcUwWyC1WJG2dxXlkiR9+Qco67smOQoFxSJ7V+JmOGOHzTT0TdavoX6H9BzO38Lf
/YK5mTVleuSJ7ZFMOo85CI3o8euQyijDlxoic80ZXZmRO3qMyGmBjE+0aITvDzbm/BaYhhsHVJkQ
ANq0wBuSyAVR5UDhkVqu/FntlVNGIA3dLyIA3oRswN2ZwbBfFItwOQxVBqlnKVVT5vD+KXhi7OpQ
aNixE1kKmW5eYeK4zaiMBuTmgGa8rlwbt00OkOIrzebMpRytk0dNCIiN3Sbdsi5eOERg+9ef/UJ6
beItRNFptxEBuHpxGJErsvDF1G1emxHht2eIvvUb3PnTAVhZjT9ZC3biIxGAHoNMWldOEfyp5Fy7
yl7I3RWiA7Tt9GZSLRGJWpzCxhslI7kLl+IhaHgsjdnJMKD0ZuXDhgMyjG3Mw5eNIyjhhxOhbsxb
acz1/Ktl70vCEbl0vcPimcexDmLoaAPw4HHtPTK5cIEYh1ruva5aB4IChkc3QhANEYAdItWhqvSw
GzeVK2CfLB+Lcw1yziNyEYhojADrgR+8nSA+1C9PijF9JPtSJrL0EeUsSbmL1BmQNqjDGTDVI5nP
qmci0YhyexS52/FrNk6bEciR7XoZ3BW6eG2N5zU/6hzFb/NFwTiIu/09cWbZa7fOiHfiVC1HCIZ+
tO4cIh1K/P7y+TORz3dJtczbE4nbiEbvJk3bQrp9qKjy/MvcsO0hLxQQ5mzITtyDghB/FLiPf0sD
xSpHvUpAuIcI+3pFt/CNR+xRGZqe09U2PaAdTkA7AkBSbVCdyVGxxWqReTASJVyRyDInPDobNNnH
oogMz0UxFejVmU/QN8k+2RQK1AoSlg4HrWoe4qp9ikdpKiPzR1HQPV3Dobq1X1dT/GPqU/0ft6R2
kfSoyvQE7kw5fJ1+K5cadJGuIwY9E+4odg+roIz6d5+1PnGvSYbnCEdv2dET3KEhtCjCWE3ie9Qt
hwJHHtUxKXgkYnsCtSiGhMeWe3EegtcyP3Uml+jKhUoexfGuP6QxWqPjtkTj3KMxhMOrnK+7xQ/R
PWMTUEMQjZmXnaLf4ckY+xdqN0ui5Z9i5xw+0KVuWEgjan95a4ZdmR9LK3OsZYqVi795ao/vRyKl
ZlSWMJZxkMCjbu0v2eG4NpGfSf8ApXS/6MuoQQ4OKPKXC9uVbJOz3UbUj8K4Xt7jsUoHCQb1r8PM
8cfCdoRicfZOwo2rtLsfpHpzGQcGhXkXC9qX3ctm49BhKoK04vUH3hsQlE9o2dBhOsSvIvF7Z+7m
fqPoWNQcU0nNieBFWKHO8qOIVnGOY2qMLxrLwSOe49IlA6b0axl9hU5WR5fNxYzj7zZhaJPG7EVB
RjIODiE8XlYkajOKEolwc/mfTIOF+JsR4D4gMk8SxCJkNMjmMHTnDb0ULJrkX3ihRhENqxlm2xAb
UegEUL0RJLlgOiNoUBNexRtxAAiAOn4tsaj7YoUTytwTHuyoU163KO9qdYTkQLcT60IQDRjRHVIU
yVy9EjVEcIOajavWxpkWcZKnz1X03M/yt/8AYKvxMw4uzBH+Ip9QbatBuDUtUpO+ACIsQ4siUTcu
Fjlkq9RkGiRazkV5khrlvTAMN3V45B9gRjy9tvzitXM3H/NC4I1259R7kgNy08vbLH2itXNXDJ/Z
GCaEQPnB7kxHtRjYMwMpRDOe0rVzHxrpL6pF23JohgMgi5efuDFC5c+DZBpDM9qaAqamWZ6dUy27
Mo2uUhotChnJ6oTLzuYky2+n8y9Jh6yjb5IGNmI+JdmdCkOUGs4SvT4ojsKNzSJ35VncO3d6E27N
Z5yyijOVIHxSzl2IQgNMRkPRmdyQjEITuiWg/d2WrI5Ohf5gcQ8Fv2Y+kMm1zDcI34Oo3rrRtwwl
Grk9uxCFuIjEbOuIw4rkqRj9qN26XuGspZDsQuS//Hj4Y+8dpVOjVcck+GIqSo81zI02xW3aONMN
XUELfFenSEB9ZRtRJlOVb97NvdCEIBgOuZyoIhytcsZYD3If3oQiGA68rkqsKDaVPnuZiPNmeBqs
O3rR/p/Ly4BXmJjAB8FG3ANGAAA7PQSuTLRiHJ7F+GJl5QaRbAARBAP6yEIhoxDADYOvc5qVbNr4
dntHiPXtcnEtK4Xl2BCIoBTqyvT02eaP3cnEdR3q7yvmA8nZIMxEuDLYOoTjGyH/AMRwUQzE1Pae
h0NUX8yTv+bHqknAK5fJoBQ9qLSBZSlcgJVLOpyweRbuUiDiShHvPRq2Igog5IBNs6GWjJ/oWkYD
ptjeVHeT0cJIoV8Y/DiX7VMD2YgdB7Cu0obgOllTE4IHPApuoSUdWEk46nYojPPsWodQ/pBT7Pt6
bbe1Iue9QnGYEtIEoksxCly0JCU7jO1WAL9BfYPqQ7AnCbaiMjUID1pkYnAox2U6AybICnQRsqhL
ISCgdhf6FGds6jE1A3IX4j74CRHcpCPiFYneKqFz3gH7c+vK2cJghQuj7zlpV7BQ/QgRWMg47Crn
LSL+TLh/RNQrXNRp5Zae+MlTDrQvjwngudhwKIiWkKxO8ISzwl2oXYeO0dQ7M0JxwkAR3qPMx8Ph
ujccD3KnpY8zAPK3SQGcc0JRLgihUebgKYXQMxtQnEvGQcHoMDgVLl7njt+E+9HLqMKTjWB3oiY0
3I47pIxnS7bpIfatVul2NYlOaTjSYzBQu26XbeG8bFqZjhIbD6cwlngdhX4e8fiR8MveHRol3HYU
QRh4o+8PeCE4F4noMJhwV5F40P3cznu9CYywKPL3K2/YmdhRvwBly8y9yIxgT7UVGFwuJfd3Mj06
4HTej4ZDPcULtsm1zNvxxj7TbF5ciRdj4gaOmIcFG7bGqyTxQGI7EJwLxPzOYyDxIYgo37Ifl5HL
2TsPRtBxBVKHYq9MBvCkd/REbSFLt6JXiMBTraZgSGwh1MW7YhdAeJFKoxkGILEdQStzMSKhimlc
MskZCbEjSexcczLtQuEcEc96br0+Uv8AIa+jf0nM/wArf/YK/qEYEjTzV4Y7LkloEyI7lr1F0Nci
W6wjagSM5ZIT5j4k9mS0wiIgbOoTMgAbURZibkguI+VDYtUxrltKaIYdNaBacZZAIgR8uGRT3Sbk
sapohhsHzgZSLAYlRhZta9ZYEn6UfxEhLUXIA+h0IxoBQBa7shEb0YcrExH/AFCFrufFu5yl0uV5
Vp7l380OAvO5yTRl4YZgIQgGiKBvTVX4blhGd9nLmgX4nn7h8yVLdiFHWvm4+TYOFkYy3yQt2oiE
BgB6ByWAzRjZLRFJT27ghO7w28RAYy3yTAMBgPRuayPhjmSgbrTvz8FkVERtkvNvHXflichuHpDa
svK4A8pRD6Bkhf5p9JIk5NZtg42IRgAIjADrgAarkvDFG5cINwjilsREacvA4+8UAAwGA6ABx3p0
t2hWUivxPOcVx/hQPsR7OoBEarsqQjvRgZGXMXPvJ5QGLBCEcB6BhW1bNfz55BGUvHOsvyddzQDF
NAn8PaPYCQWffghEBgMB1Rbt15i9w2xiX2oa6351uzOJPoZ2Y1ERh70jQN2I3rhe5dOp+0Dr+XbD
3bx8uA7cSoWY+yKnac+s5wCvc2fCOGHd1uTsWpkXJuJDZEe0rtrzPMNyWrUzUHTKZwiHUIy8V0+Z
Mbhh06HaVwsFOQrGPBHu6pbGVAtMXjG4TItuojctyIIUOWlb4rg8QRfeUJDORR6Hzl0HejM93TsR
nmyc59NsbirfYhbl4IjVJeX5UWZnzUrUfCeKPYr52MOiZ3FR7R1NRROQoFpOEk46X2IWx2lOMUDn
gU2fSScsUZHNaDiMOxN0y/SCl+j06TiCWO91KRmSwoHQJ2dB7Ah2D6ukSzCM9vQEJD2ugyQkOzob
I0Uu11E5Fh61cjIaYk0JzVmF8gSjERfLcmV2wcAdcP0ZeguWiOC9Fx6mKnYONmTD9HJW7vs3eCfb
7KlblhIMjan95ZOmXZketKEsJBlol95bOiXdgVst3/omPypjgaFXOWP7svD9EowlUSDFS5efjtGh
2xOB9KxwR5eR4ZcVo/YpQlUSDEKXJ3fAa2ZfZ0idul2FYnbuQkMcJDYep+JHhLC4PodR523UUFxs
4HNCUS4NQV+IthwfHH7UJRwIcL8RHwGlyI+tCUS4OB9PqjS7CsCiDS5Ck45v0Upcj4ZI5RH3kNh2
hCUS4OB6DCfcdi/D3zxexLaPQmBzwOxHlr+Ps6sJBaZl+Sulwc7ckLN0uD93cykO3pE4HTdj4TtU
rll7fNW/vIDCW8LRcIF6OIwTHBG/y7mBL3LWXaFrtlxmMx6Wvyo27kRKEqEFSv8AKAztYmOcUx6G
yWzoc+yCUTt6InYXRO9CO1RBDGXF6A3LtoaziRRedy7ytHEZxTEN1QBEiGckLcBhj2/Lq/NfM/yt
/wDYK/qX83f/ANyXXaIJO5C7zMDG0KsQzoRtxEQNnS5LDaV4tR2Cq+FbMRtK135mR2BNCIG/Pqcc
hHtKIs8RGbOnvT0DYECRqIzOPzjqkQAMSaBNYkLsvzONv1UTfnPyfcPC57F8OIitUiwGJKFvlrZu
TOEm4UJ84RKQL6QtMQIgYAdOq7MRfAEgP60YWIGFgU8wBxLvQJiJXcZSO307XrnGPYjWXqU7pjLl
+Rj4JSOjVvXk8jbNy6T8XmJD6jJeZdPnXzUznVtw9DqnIRG0lloAMbWIiA0petRuXQHFYx2dvpPL
tRN2+cIDLeUdOm7zR4ZSd42+5GXiuS8UzifRucBiULFkjTMVnE6pdgARvcxR2aAo7BuJADDLrtGJ
ndlSEBWp2o373FeliTXTuCNqH3I8c9u4IQiGjGgHQeX5cedzZwtxrp/SZefzM/O5kvxZR1YiPUbx
XZeC2MSUaiXNzxbCEU2MzWUsyfQeRaLXJjiPuRzkUJRHwbZOh/aOZ9AOWsl5SkBIipDqNsDixkd5
Jl9vVlcn4Yhy2NEf6lzMdJwsROIjWv0+h0xLTP8AyhvErUdMtENvhMc5IRGADDrz5rGzZ4LWwy9o
9cgeOfDEdqhDMhz2nquVf50jhj8K0dwxI6kOWia3C8v0Qp3WoOGPYOmU2cWgw/SKjHA4ntPVER7I
JPacFbiaSiFKU5cOQCPNiYMIDhinOcXQJyDoHaEBmUI7OiJGLshAZDqMq9MRsiodgWq5SExpJ2LX
50G26grY5aWqERoLfWuYOZkOi5+iVDtCJ6GWgYn6k6cGoQOeaboJOSMjn0aThJP0PkhbHf0A5ZoS
jh0yJ94KX6J6ZOWaUlqId8kWDEbOj/CFA7YjpMTmhEdi7Og7R0CKMegEIxxo6B2MUNiFuV2IuaQR
F6uFG5EiTgO21Wr4w8E+w4fT6CF6ONqQJ/RJqoXgfh3Rpkd+IKlGPjHFE741ULo9oB+3NRu4W7w0
T/SHhPXjd/d3eCe6XslGGBxidhGCeXjjSY3hW+ZHscM/0SqZqPMRDmFJ74oSFQQ49K8PvIcUDvCE
sJCkxsIQuQpdtPKBGKc0uxpcjv6fOh91JvMA+tCUS8TUHpMZBwclLlbvgk/lvnHYjytw8JrZkcxs
TLy5fc3KwlsOxEEODiFol/8AjyPDI+yTl8g/E2R8QeIe8EJxzxGw9Hm2vvBiPeGxVLWnaUTjCW9O
C4OB6DE0PsyGIK8jmKT9iWUh6FxIxuRHDIKXL81DjAaQ271+D5gf9pI/CvH2TlVC1fLxP3d3KQ7e
nXDhuioO1G5a+HzcBxQFNW8IQvNC6KEGjnv6PP5bP7y37yeBqPFE4j5oMuTEbV0eIktFVEb0D7Vs
6m7QmNCvC/YmNNxU5HY3S/QBEOSWChDMRA9XoWkHByK8zl4CN6AoB7QGSMJgxkKEHoEYRMpHIIXO
apnoQhbiIxGz54on+Q8z/K3/ANgr+pfzd/8A3JdYBiI7UCLYNz3iHKp0Pck27FabECXzRF1gD61Q
Odpr1WdzsRFuGiO0oG8dch6k0QANg+ciRblc0+IxwHeULkowtwI4YzeZG9qBaYDGsizOegWoyErs
qMKt2rVzN0myPDADS60wjpHS5LAZleTyvHcNCcPVRHmedkZTl4YE6mHehGA0xGAHpjO5IQiKmUiy
Nj+kxiQ+nWQ8jvAyHavN5y8ea567jaBoTsMRVRnztwwsRrbsWx5frOKEIDTEZeh1SxPhGDp4kGfu
BuEbdRTz47mcjU9j+klY5Y8QD3LuAgO1Gzy7iApf5iVJzOyJxQt2g0R9Po9U8NgqVGFqBnrI0xfD
tA2oXrzTvti1IjYPQCMRruzLQgMSjdvHXfmXlI5DKIRs2ZGMI/eTFO4IRiGAwA6PwX9OaVzG7ePh
gPyrTHiuyrcunxSJ39QU1XJUhHaVqLXOdu9+iP2LbOVZS2n0GuVThGOZOxGBL3bjS5iWwZRQADAZ
ddwHkXYfap8zPGR4Bu29/WjajI/heWkTcAHDIje6EYhgMAPQGUsMO8qd2ZaOMxl+bHejzNxzcvZn
3QSzbOuLNr7/AJg+XbHbiVCzHCIbtOfXhbxt2anY460m8c+GPaVbtZgV7T1L3MkV+7tqMNgr29Bk
cgrdsGp+JPrDMTn9EegWrkxCcqgHNGMJiRJALF0RkwCG9RktWUeuZHABTBwlgu3pbYAFEbAOiWuJ
EZChwXmBzLaaq7P8/oudit9oVOgyPejL1dnTpOEvrWpOtAwjj29LoE44FMiUSc+kwNSPqTJvWpdo
R/RPSdGUyShxMQKxOS4S5I6I/ohW/wBEdLjAJ+ljgVoOALns6SN6AyT7iENhCu2ZXJaKtHLJDMq5
DCDO29SgcxTtUSfEOGQ3inXlA4SDHvUrMq3LLx744KMzQnEbxRXbHsvrh2HFTt5kPHtGCjM+LCQ2
SFD1pWzmKHehq8cOGfaFG7hbucM/0sijCVYyDHvRsz8dk6TvHslMc1PlpOw4rRPunLu9MLw+6utG
5uL0PRHm7b+VdLXIjw5KM4F4yDjoY1BXlSLWpl7W4v4eo8fvLfFA7xVu9CUaXI4bpBNKl2HDOO9G
EhQ/QUeWvH4kawkfaijCeBX4e4Q8fBLaPkHn2xwSPxIj60JCoPQb9oOf3kPeCB1auXn4dsDsPSYy
x9mQxBXkXz8T2Ze8PQiUKXI4EFj61Ll7waYpOJxB2hf/AMO5rjtS+6ufYhZvF7Z+7ufYekTAAuwH
BJAsbPOWsdkxmQvw98gXY0D0dugXbHDdGP5wTeG5Gko7/mciEzAnMEj6lotvfo8oxlpJAyqoyhYv
WbrtITDj1pow1Hci9sxllIhvyLxiV6UstnV8+Q4Y1HaVu9HK7aAjzAwOAluK0StmABqZUCBbXczk
fkVPm+vVp1uZ/lb/AOwV/Uv5u/8A7kupwh1UEQBqUIxFRmnkREDatNsGckf3UEPN45b00Qw3dR5k
Ab0TaGsj6U9w+XHYydtUtp+czKcgGyeqiLExGJ8REX0j9KSGoyuGNeIkh9rYdBlMmmQDlGbRs8qM
TcLOEbdiXnXiWeAxOwHYomTaj7ILkJqxO/o4i8soipTRPl2YmrExXC8pnGcqn05t2dV+9hGMAW1b
NSHNf1SZt23M5RlLCOUREYL8J/R+WFu1hLmJUi22mK8zSJX5eK431ei8uDyunAAUHaUQCJPSVwvI
R/RyTRcnORLn0bksBiTgpRtyFrlx47zYj804ICGq3yj8T+K42/FkIwDRGA9H70z4YDE9y0l53KiP
sxt7SQHPrTDiuHxTOfoGA1XZPohtK86+dV+VScovkF+H5Y8X7yYDiI7dq0wwzJqSnJYDElS5XkyY
cu7XeYAyzECvLsRYGspEvKR2k9TVLikaRgMZFefeHmc1cHDDKIOAUpSOq5PxSw9A5oBivxUqiJ02
IZE7U8q3J1md/XM5eGNSuInTiRsi+HemGA6o5SwNd64WIOAB2sQVG1GpxlLafQjy+KMZabccpyb7
HVrk7cg1uWq5PEmWMj3YIRjQAMB1582a27L27Ox/aPXlc2CnapXpsZTOI61u0K27HFLt6h0lpSpH
tKs8s7iA1z39vTC0MCdUuwKd0hnLR7B1ZSzaiA/6cWJ3yr0RlC3I24RbUyvXJAxE5sAX2qMXdyAo
x2BHcnzl0gZy6gj7UkJDJCQ6NwUh+cAh2InMYBMSVIZgKR2zl0XN6h2qVs7SQnyQtjPFU6XGSBzz
ROeAROZx6N3RpOEk60A0j9fUByNChIf+gnPrV7YDFvWjX2T0UU4nKRdSjYiIj2pYAI3ISF0RDyAx
bogfzftVqX5vSZHFOfFHHpcLXmyJx6IyGaJ2BHct4BXn6B5hj4s1C6HjLAthReXHw3Iu52jouWsr
g8yOx8Jegb2eYi7fnR/uVyzI0lxw78Va5jKJ0z7D0St4QvjXH9IY9fV7N+h3SClblhIN3rRP7y3w
y3tmo3/ZucE+3I9Auw+8tcQ3jMIXI4S9LK3OsZYo2ZvqtUBOccip2pe0KHYVLlb5ZydD5HZ39Jia
HI7CjbuUvW6SG0bep5w+5ulpgYCW1DnLfgNLsRmNqEo1iQ4KE7f3sKxP2JzSYpOOwpolrkawO9NM
GNyNJxO3aPTscCtEq2LnhPunf0m7AarM6Xbez84KNuUjKE6255dnSxpIeGWYK8i/S6MDlIeh/E8v
S7GpjlJMQ0x/yyR/p/OwaQpCWR7ELV4k2JFoXPd3FOKg59AlA6LsaxmFIBo85bpxDxAZhCxzDRui
gO3o8y1w3Rnt7QtE+G6PFHA/M4vWi0xSocEFGyY24RxqCa+tG3O5GEfZnCLj7CEx5mEn2xLrjuWp
j86JK4hZf9H+5VlC0cjbB+kIyjHzrfvQqfVitOiT7GqvIv25Q0eGRiwI+X1+a69VvQ06OZ/lb/7B
X9S/m7/+5LqebcDR2lNAAbgE1mLDahK/Mk7AuCLb+qTKWGLJuXtln8SEr0j+iE0YsPnPVck2wZnu
RhyluZiC05ngHZtRv3ANfsxcybe5x6NUyIxGZRt8lHUcDcOSlzfPXTOTeFy0js3o6pGFgeC0KRA3
p4kg7QnEy/ahG8NcTi9SojlYh7mBIMiO4I3ebuzJJcQcj1rTEMPTSvXS0Y5Zk7EeVsaLNmYec48c
oRO2WDoGdw3+Yj93bJ1SfdGKFznpztcuajlonS4/OZC3aiIwjQAeiMbJ1EeKYGoRTmGm37U5uTI7
ogsEIQDRFB6MzlSMQ5KNy6fK5eBpUHWShO9HRy8fu7OLjaUwDAYAejMLcddzDECMT+cSpeXMmQGm
V+A0jeAS5KFu1FhmcydpPoHIMpmkYjElG/eIlekG3RGwIcvZrdnifdG1aY1JLyO0lGczpiMSpQtn
yOSidMiRxzOdFG1bGmEQwHU1zxNIxzJ2L8VzPFfnS1b918AjdvcV2W2ukbB6H8NAtbjxX57BjpQv
4W4cNmO5sfQeXbLwhJi3tzyD7kAazNZS6srszhgMyTsUufvgm7eA06mOkbmwp6Hyo4y8Z92GZU+c
OQ0ctE/td6N8hpXHYnHTv6/l2/vbp0QHbiVG1HCIqdpz68bIDgVkPqUYijDqyuSIAiHcqfMzrO9I
l93UjD2LI1S7Tgrt/ORYdg6HOSu3wazOiH1KMNg6sLe0uewKdxvHI+oLSZAHY6iMlagAAKyLKA2n
BOBgpR2oR2dACi2EEJDo7EWwjTo0nKiZOaE/WjvmmT5ZoyPBk5U7sHFoRI3FDeSeiW8hR3OtQxBT
5spSlj1KIyOEsAoyyGI6oATA1RBxBr1dJxjQphiaBXtvD9a/wnpuTnQai6E4+KUiZfYq4Zq5o8Ou
Tdjq32H61b2gH6+hkLY7SjE4SH0o9Lbukt7NUZbU22iMTRnUeVnEm5IUIwULnvE070LopK1V9ozH
Rb5gY2pV/RlQp+uLsPHZkJDszVrm4ewQT+jIf3qUMRKPCd+SjI+KPDIb40Xmw+8snXHuxUbkcJgE
dYxBaWMTsIUZZ4S7Qo3hSEuG59hU7e0cJ35IGXjHDPtHRKwfu7nFa7faHpo8zDG34gM4koTjWMqg
ocxEVwmdmyXcjGdLtukx9IPSOYtB7sKEe8FG5HCQ9R2dJhLCSlyl+piKE+1FfhJnglWyfs6PxMA4
NLkd21CUag4IczZB1Q8cR7UULlsvE+nMJBwV+GuF5DwS2joY1ByRhN5crcNCMbcl5F4uf3dzKQ6a
UmPDLYUbN2l2P/MPQnm7ALH722MDvZRuWpNciXhOOIOxT5HmgTctlnPtDIrRMmXLE8MzjHtKcFwc
COgTiwvR8Mk8o+XzVurZTZGxfAhciWFfs6BOPDcjgVougRujLB+z5ucxD7W+St83MsPS/b6Lmf5W
/wDsFczfsxfVeuSk22UiVWJpuTMUDMNHFtq8ttMRQLiGo70wDdUOCTLBNBoWzngURcJmTtwTAMPn
QjloxnJ2Go4ndEVXm8zeInIVhbGkAbHxQhAaY7Ogwg05jEOzIXb8zCIrG2KI8tF5yj4tO1GdwkWw
fhwyA6gO9RvxDaQ3pjOchGIxkSwX4f8Ap+q7cPjuQi4gNvEwR5jnNU7cfBC9LWXzlL2UeX/pVqDO
0rmUd+kIXLvxr+JuEDHd6JzgMSjCExG17Vx3J3AISu6o2RUWy0dW8iKaIYDAD0eqblsgHJUb/MiW
k1t2BUk5OMFHmOZrOPgtu8Y92Ho3NBmVKHLSJjEcd6LaYn9I/Yo6SIcvEPK4K3Lkv0jgEIQDRGA9
BqlWRpCOcivPvh7pwHuoWrdb86RGzeUSeK5Ks5GpPepXr0tMY+s7ghd5uErPJwrCyaGZ2lCFuIjC
OERQdTXc7ABiTsCPOc2PifurXuvgO1fiL/jPgiKCI7/QiMBquzpbjv29yHJ2zWXHzNwfV3oABgMB
1/Kh4p0MvdjmUbhDQjS2PrPVMpUiA5KMpDTy1k0B0mWoesxTDAegM5FgEbEpOZNK+R7McoPvQtWy
1u28AB2cRQiKAYDrz5n91Ze3bG05nrknAVXmyGZl3ZdaHKxDm6RqG5RtxwiAB0mRwAdTvGly+Xrv
oFGGwdDR8U+Ed6s2QOGA1S7etemMLcdI7Snj4hGnaVqlI6jUl1IzAui2KA0KhzM46DIGgVqI7ego
9EpnABEnOq3inq6JSzOCxr0acpYoHLNQtjJDfPoJfRajjL8ilKzM3DF5GEs+xXNENGhwQdo7FA7R
0NtkHR3RK1EOI/WmzWsYS6gjtQA7AjE5oxOIx6hnLE4JslrGBoerqOMvqQubKFXv8P1odh+rpuxj
jrWgReOEhJ9JZG3GMbUZUJi7qqt9hVuJwkJfQUQjI5oy2qJ2F0JjCXQAhb2iqPQQc6IR2YqKk21W
r1pgI0k52FGzMuYSLHcVchA8UokD1K3c2ivaKFShLCQYoRn47Z0S7uuYnAhiO1XOWuVNsm2ew4I2
5UnZJgR2Gn0K5ZwjeHmQ2PhJVV3lD7B1W/0Jfk65jhC7xRH5wxRhLCQZStT+8tHSd4yKMf3d8ON0
xj6+h4feWzqgd4yUbgzxGw5j0tUeXl4JvK0frijE4EMVqjxSt+Ie/aP2xQlGsZBwekyAezePF+bL
b1Beh95aqN42ITt8MxWJzEhkuKlyHDOO9Mag4haJH4VzwfmnZ0CUQ9i4eL80px6dgWnGsJZghGFy
l6FJj7egxmHicQhy12TQlWzczB2I2Lv3sc/eG0dIMTpuxrCQRtXBpvQ8QOe8egbEI3oOeXmeOPun
avxHLH4jAiQzARsXXF6IaUZUJ3pjxWJHE4xdOKg4HoE4Ewuw8Mh9SIPBzUMQaGTLyLo0zjT+7oBw
kMJBeXdGm5lv+ed/p2/sBzP8rf8A2CuYiQJEXJ0/xFcNsRJOLIGcBIjcmhEBtnVqQEYwqQnmdOxA
y4yMCa/OWq5IQG2RZET5u04xAk5+hNaE75/NGkeuSEI2IWoE1nMksOyIQuyveZZl4IB4a+7Ymtcv
GEgKmLP0fFuCJ916nuQ8uVyzAnZpDduKaEAZ5zNZE9pRtWi/M3Q0R7o2ozkdUpFyd56oZRifFN29
KICPmXpB4wcCm0krzbxjZsQlwQA1ORnVSBlKdyVZEDVInuoELnNmVnlyXjaEiCY5ahFC3ZiIQjQA
BvRGUiwCGoShZJaECdJn2tko3LgYgcMBgPSCPivS8FqNZHuCFy+fN5iX3dkYWxtMQV53MHXflich
sAHozORaMQ5O5S1HRy0Q5lHilM7AyhO/Dy+XFYWHqd8wKIABgMAPQapVJ8McyV+J5mt0+GOUAgI8
V2VIQFSe5G7eOu/OsicBuARnekNbcFseKR3BQ57+oTOl9VnlmYRGWrqm5cLAZZk7AvxvOFgPureA
iMnG1DmblI/u4ZjefQmci0YhyvxdwPducNi2cgTkuKtyfFM7z1zOWAyGJWmZ451uZ6YioigBgKDq
/wD8PsSOuY49IemwnJRswFI47zt9DrxjAmNuPvz3+pMDqv3Szn3j+ReYRXCPZme/ri1b++vHRbHb
iVC1EeEV3nPri2CxmW7s1q97Ds6rnBSvEvbt+HqQsRxumv6IxVuyPDaGoj6ukF+GyH71cuu4kWj2
DqmRyDq3HO9PV3CqMJYEMgIyAG04qVuNwxkTWW4LyY1FkMSh+bHoH5xZHoEM5Y9EobahNtQgMBj0
gjELWNjrUcakqJ3kqqMI+KMjqRJwzXNiyAAJyNN6t9nQN8lJq8JQy2rUiB2hVx6dZxlgtwTbULgz
oekDLNMOwKiMTmEYnLpAyxPYmHcjE9hV+Jyb60Ow/V0UV0HKT/QpxlbEYRLajRcEtTZZ9Fo9qtEY
gy+tCYzWkYyp0mOcegyK15vRCQ9qvQGQQ3KYNGP1qQOIkVLzSwPrcLy4yaRwBzV6x/05uP0ZVHQR
7N4P/iHoIy9nmIsf0o4ISwjzAY/pxwVvmQK2ZOf0TQpxUHAq1zQ9g6bm+Evydd4feQOqPco3BmPp
UOZHhHDd/ROB7k8PHBpw7Qo3BhIfT0GBpa5isd06U7/TcFLsOK2d4yQuChwkNhGKF22HuW6jeMwo
2wXt3XlZOzbHpMJYSDFHlZ+KNYH3o9R3axd9QkhzUPCaXRu2oSjUGoKMJdx2FGxepdhhlqCMJB4y
DEL8Ndcx/dSybZ8gF60dNyGP5w2LVHsIO3oMJZ4EYjsX4e8dPMW62rmGoIwmNN6HiifrHSJ2zovQ
8MvsKMJjRehScT9Y9AYyDxNCFok/4WZ4ZZQJ2oc5ys9MgHIHFGTIgtrZpw/91QauXl3GCEhUHDoM
4xibgFDIP+QpyPK5mIcB6SZeRf8AGKdnb0AEsRgV5N0NIeGXvfNW/wCUt8709LzP8rf/AGCuYkzm
V2Z9cj006a1OxfCjnVCUy20LhA7fnEyuSEIjEyLBGMJnmJ7LeH6xojHlQOXhtABl6ytV+7K4TnIk
rFUQliy1zNWbsAQlbPaDgp3DwC3F5SwH1o84IwvRg5JgNUqdqMZQmI5S/uRNq+HAciVCFqsBxAaf
MOfZ1XKiGIswLzko24BoQDAej1XrkbcdsiB9alb5EzMBQXLcaSP6RoAvM5v41+VZzmXFO1Hl/wCm
RjKUaGZBlEeqiF3mZm/zBrInwA7o4ej1TNfZjmTuURcEpSP3doBox3yKNyVbksSS7dj+kFuAE70v
DB0wa7zc31zoBbepbOiJ8U5eKW30ZncLAd5UZ8w4BrDlyfFs1RC87mpcRDRswpbiOzP0JnOvuxzJ
2BfiOZ8Z8EMoD8q1EOTSMdpX4i6PiyGHujYhbtx83mZ+C0Mar8b/AFERuc1Lwxbhtj8vVNyZaIX4
zm+C3Gtu2cIj3ivOl/8Ajh/LgcJb2VPQ6Qf+3sF5ke1IZdgRvzAEIFrI+1/QeaOK3AmNmPv3DR+w
LiOq5IvOW3qmXtGkaiNe0o37pBu3CTTwh9j+hjy1otOdZyw0wGJdC83wbQMLIOB/P71ERqHMYbG9
qSERgAw7uvPmD93y/Ba2E5n0Em26I/ahEYDqkDxToE7MZl+pO+fDAaY/arl8/vJcPYKdErhwiHRm
z3Lp/aUYijCvV0DGZEQhEYWYMO09BJwRlEgitQr89smVw7KdEYjCKjLaEAi2EaDojLetZyCMjn0i
IqDitIyUtjEj1IHYD0G5YlpOYNQV5cIjTMMTChV27d8U8QVAbh0QG0qW6K0jLo0rUMJdAGWJTDsH
Q6I2hEbFvWqWJ+pOOjSc8EJgY49Os0J6G2q5LMgP61HsP1dN64Q+k4dyhcAbWNTdqc4Zr9I0Vo7y
re4yCL4A0R2RoOkDKVCmR2kMO/obOHQ+xahgC3Rc7qKErMzDVI6m9a1SLksQSo6fFqDN2q3I/voa
T+lGvQLsPFaIl3YH6EJxrGQcdh6+uPjtETj3IXLfiDXYHeKquFyNe8Ly5eOyTCXcafQpQlhIMUbU
/vLJ0S7sD15WWaE+K39oRjIPGQYqVi5Wdqg3xyKucv7J+Jb7DiOgwFJCsDskMEJGkhwzGyQ9N5gH
wr9J7p7eiVh9MLp12Je5cGI70dYa7A6bkd/SDA6bsKwl9iEmaYpOOyXSbcsD9BUuWvMZ26H86ORR
5WfglWyfs6Bet0vW6x37kJgMcJDYUYmkhWEswQjbuUvW6SG3f8g86Hg/eRH1oSiXicD0DKcawlvW
r7vnLP8AzD8hTtpuDxwzB6RdtnTehgRRxsKqDGcaSidvoDbmHjJfh7w+Efu5jADYV+J5YZuRHEut
Q7Jx2LXAGVg+KOcUJwLxOHQJQJhdjhIbEbMOHmYAmo0vvotF0679uly3IsX/ADTRATtmMDiWYj6U
ZcvcFwCun2gvJvA/mzy/t9X0HM/yt/8AYKv/AOpP9o9NVpjIErTEtDanJMjvTAN6Df8ANGvmLsLU
dsiAjGwJcxMbBpj6yjGwI8vA+6NUvXJauYuzuE+8SfQUU+VgQIXKSObKVu1dlCEqSiCwKqVQ9TBP
kgIxPkRPxLhwAQs2IiMRi2Z9GLdqxcvzkWiYhov+kV5nPRhDUXMRxTb3XNB3KMJDdC1AOfUtd+Js
cofDbk+o72DIWrMRGI2ej0jiunCAKJgdV7O42qMNwyTReUz4pkuSfSeRYiTM4zNIx71KHLnXel99
dq47CUIxHF7Usyd59GIgg3ZeCJz9SAnE3ubu4W4hrUGw1HcjeuNO/IASlVh2P6E3LhbYMydgX4nm
PF+7t5RH5UZ3C0QvxF8Vb4cCX0hfg+QPmc5KnDUQ3k4I378jf5yfjuyJLbov1TOZaIxX4nmBpsxr
bgcG94oHU3KQNAHechSpOSYYD0I5ez97cxPuxwMlHkrPDAVuyFKbO9ACgGHXHL25CJl95LOMM1rA
a1ClkfXLqmcyIxiHJK/FzjLyLJHlCRAGoVLRA+l/QyuyywG07ELMj8e8dV+Q9iGUQo2bVJT4Ibg2
KN5qHht/ogmvf1xat/fXzot9pxKhZFdIqdpz68pZ4Abymb7sMT+canrC2MAdI+1CIwAYdMpZmke0
oWx45075KMBhEN0W+XFRMvPsChCJ4bY1EfV1oDK2DIq7d9+X1U6JyOABKtW7U5QeRLAkYlFjQmrq
dzMmqfIVUpbSoy2KVw5CiJOfQAMVpPi0puk3D3J9qNz3YkI/oplxgl8WTAMMnRAzICFcgqK2O0q4
d3S6IzxCqtRxl9SfLpb1ITGEse1AZCpTDpdEYPgmzGKEcs0wzp03DmG+tQ7/AKuh1ftSzb6lo1Cd
oYA7ExAbcg4dtqt7pH6lEDHUQEI4N9aExhLHt6KoblGYzxQgMBXo0nCQ6JSUoZ4jokDgQCpW7sdU
QQW7QpWjbDaWG0LUz3LcjEk7QVG6MbUhLuwKB2oxOBoVKzLxWZGPdl12OBoVPl5Y2ZEB/dOCu8qc
InXb/Rl+RNhHmY/88P7uiFz93eGif6Q8J64uw+8tHUOzMKM44SDjvUeYiHMKTG2J/IoczbqbR1Uz
gcUJRqJBwd3QJYW79Jbp5ev00rcsJI2bn3tqh3jIowwkKwOyQwQ5wBpR4OZh2ZoSiXiQ4O7pPMwp
EsLsRnlqQIwNR0x5q195b8QGcc0JWy0vFCWwpjS5Ckxv6DzEfuz95EfWhKJcGoKHMWfvYf8AMNiE
4942H07GoOITH/8AHmafmlP0C7apehgdo2FfibPDft0uW9vatcabRsPSL1ml2PqkNicUkKSjmD6A
wmK+yRiCvwnNF5twyIbUEb9qsHqMSQd61gEA4xKN6B1WPbt5jeELkC8T0S5m2dN7CMtwyKvQ5uJg
JSaQFGOReqB1i5CVROJcIXLUzGQ3qF2YrIV3FC1dJkD4Zfl/se3o6fNfM/yt/wDYKvn/AO5P6yt+
xaLcCTtTXKOVwiu35xnzEw5iOGO0oxtyjy8DlbFfWVquzlOR9qRJP0/I3OAUY0hy8Txnco2LERGE
Qwb0RnenGAy1FnK0WL0LXKkcU7YJk2zUc+xCRlK5KP7y7Iy+ugX4b+mAXrntXBxRj9i/E87M8xzR
zPhjujH0nlWBque1L2YjtREaRxuXpNIy/NCELYaI9G5oBiVo5eTQHivBiB615XLUsu1y7UyltYlC
EMNpLn0ZsWXNxnMsAB2qceVL3pNrul5s+Wo7FwvKXtSkXJ9CZzLAL8TzP/6VvKI29qM5FojEo3rw
02x93DAdpR5D+nxDkfFvHCAR0vO7P7y7Ksju7OqZzLRFSUL90abEa24bd8kbNvh5aFJyw1HZFCMQ
0QGAHoTOTlsAMSVK/c4uZvMBH6orjL3ZnVcltJ65man2QMSUbBL3LnFensGQQiMBQdWHI8tpjAv5
k5ASLOzxidiEI4DM4+hPMSrZtHTYh/1Lm1Gc63rh1XJHHs7lwuNR8u2cxEeOSEYhoxDAded9tVrl
vh2zlrxkfs9AI+xaGufb7KD+KVZd/VlM5CnapTl7A/5pVPUhZHht8c+3JQh7Noapdpw6bvMywPDD
9GKneIYzNOzrXr5GLiP1KAOLOe1blLAghTuytQ1wB0yYP61GZoS5R/OJUjmaDolDZ9qjb21PSHwj
UojJSGTuOgRGJQgOiZzZlM7An2Jwaol8ATVRmKEyA+npgNyuHsHUYp/YNSmHUdEbcFXxI9RlqApJ
azjL6kdg6bvYod/1dG9XX3KN7mYa7kw+k4AHBTv8pHy7sBqMRhJuiH6R+pAn2ZE/QgNiO0YdSuD0
UiaV6BIYiqcYSqEI5lDYaIhDYYqd25IC3SqhKJMhcHCQPyqdyIpcqYojKYI9ag+MeE9sadEZezeG
mX6Qw9BC5hG8NEtmoVCtcyMInTc/RlRebHx2SLke7H6FGcfDIAjvUoDxM8TvGCjc9rCQ3jHrVU+W
yjxQ/RKMTgaFS5adRHwvnAqfLS8Vk03xNR0StnPA7DkmnS5DhmN49NHmLeMKXBtihKNQahC8z2r3
BeiNuRZfhZF4HisyOz3ekxIcGhXkXDwmtsnZs6nlyPwbx4D7stiHN2xTC6BmNqEol4kOCjGQcHEL
yJ/dSrakfqfo8+FbE6XY7D7wQkC4NQfTmEw4K8m6f9M7R0/ibA4/bjlIIc3y1QfvbYz296E4VB6f
PsUuDxDKQWuNDhKOYPoABS5GsJbCpctzAa7Gkht3hPDVoljEVi21ao1jJG9y8SbZrO0PpICE4Fwf
WFOyPGATHepi/FyS09WIRtyBucpPA9qFyPHal4Zxw7Crlok8EqA700g4QjMmVs4HFk4qD1Wz2f2P
r6TD5XX0HM/yt/8AYK578LZMvJu3YGRpq0yIOla7sjxB2VAK49anzbbhtl8jrgtWSAiNFoeKeSFq
0GAxOZ9CZzkIxGJNF5HIxnek7TuQDCI7ZMjzXMg3J+zCcjNu10JXf8MIipWq5E8vyeQJ4pIWrERG
IxOZO/0bksBmpQsTFu0KSvM77oqJOqNnFiaz3laYgCIwA9GZHAVKkSfL5aNTIYnchEQ8rlBgJeKe
+iEIhoigA9E5oBiVIWJCHLh/MvMXbPScEbViJjZj95cunVKe7FC3DAehNy4WiPp3BDmOYGkCtu2c
t5RlItEByUbt6Ojlrfh1YyO2iFrk5GHLk6Z3aggDxN9SELbmgBlIvItSp6pnMiMRUkoXJuOWieCJ
9vedyPL8vJoR+9nHAfmuhCAaIy9D9q/ES+4tuLb4Ej2keZkOCPDZH1y65JLAVJKN+dYh42I7T7yO
svcmXnLf1fLgSb1wHTGA1SFMWRuziBfvVnmfQjlbZaU63Je7DNCYDWLLRtRoXOckLUC1y7wjcPaP
qRvAMCNEHx0j8p68pRrdnwWhtlJQsgmRFZSOcjU9cyOAqoiWN2WuX6McOsI4xjxSHZgg/ilxHv6T
I4AOp8zLG4TL/CFO9LG7Jx2DDo0jx3OGPehat0JaI71GGLBurKWbMO1WrI9sh/rKZSgJGMIUYKUh
IyicYHBXOVhqF/TUEUQaXhioEl3UYDKp6CMiFJ8sOkyOMkCoz2UPQZnCOCJ9XRMbWVw9g6CxZaIz
AtHHarVoe+O/pjuipnaR1H6Ceqypn1XQEu1MOwdS7+j+RW+/6unmJjECKs8xakCJQDgZECoVy5cP
CImm3d0ROyf2KZOAITpjmjHJ3Hf0MFGOyiOyVejBRGcaIR2JxiKoEZh1BswVOEYkyoYjsK5fVaII
OB2LjiQ2aiDiyv2spEXI9+PQSPFA6o9oUZj2g/XkY+ODTgd8VuuRWiY4oPbuA7v7lPljU2JMP0TU
dE7X7u9xw/S9odeN8Y2jUbYnFAioIcKN+NZW8RticVa5yPgLQufoywPd0xvj7u40Lu4+yfTEHA0I
R5eVIl5WjtGYRjLCQZS5acm5iyXtyzYYFaiGuR4Zx2EdLYTFYnetM6XY0kPt6Tbl2xOwqVi6PiQ4
Zg5jajy0j8OVbRP7PRol2g70bVwNct0O8bUYkODQhfh5n4U62pbN3yBj4hWJ3rRcYXY0I27+nzrY
eEvvI/avxFniszrOA+sISiXicD0+fYHF7cMpBa4d42H0AuW+G9CsTt3FGMgY3AGkMCCtEmMSKjsz
WqJcHYjesE6TW5bxB7F5lmTlssQd6F1mM8ZHap2ZxjO3MNxB9O+JXl3Tr5eeH5VdPmR0XCGqnFRt
TSDhcZMrBwJrp708S4OBHTzEpikYHHsRuW7hhKBMYGJoyFnmA18YSGEuvv8AnyvW3/MFfQN1X6/M
/wArf/YKv8vBtWueGRMim+c3nxXT4YDFa7x4R4YjAfJIXuZBkX4Y4BkIwiIxGAFPQyMYG5OIcxcA
d5KF/mNFiH7u2BqkN+xGMGiMZSOZ2lfhP6YBf5g4zA1Rj3r8T/UJ/iOZOR8MewKnozOZ0xGJKGoa
LBLRicbh7NiFy/wgUhajSIG9N6Mzm7DIVJXn80fK5f2LRPFLuCF3mImNmP3dk0HaQEwDAYAeiM5u
wyAcozvnyOTlQCRacmrQDaoiYNrlY/uW06mw3rTEMBkPQmcywG1DmL4EYAvattXtKc0CM5kjlYGt
W8w+62JUohrPKW5aY6XjJxSTMVG1aiIwiGAHVMjQAOSnkw5WGApLzCvwtlgcJEeyFohht9F+GhnW
5LYFHkrVIRrcIyjs70IgMBQDr/hoyAhHivSfAbF5hHwocNof/V1TOVQMBtPejz3MARjUQgHBZ6az
n6E3JVOEY5k5BeW78xerekPZGDIRiBGIyCM4mtzgt7oDxS70IgMBgOubuPL8k8I7DcPiPd6CNkGt
wt3Zqd0eEcEewdUk5IOPvZajujHqRsx8V0t3ZoWoUM2hFlGAwiG6DR4Wf2ioQ9i0NR7Th1rVn3jq
PYFEZW4u3b0G5DGWIKeUgCcArvMczEATOmBBfNF8TRR3Yoyyy6DcOdAhLaExQiM0IjADoIzboEcz
j0n9IK4d4UbUfFMgDvQjO2LkvalKtVG/ZDWpFpR2FWIjOaY9DbAFJ85dG3o3Bac1uKp1HPaU4LjI
9fd1Lv6JVvt6b0DhIBE8rzE7dslzAGh7k1+7KbGgJp0PskPtVwfnDoBzQuDKhRVcBUpsghPZj2dM
til29Ed1FacZsre8EMgY4iNO1CErsYznFiCagsouXlHhl3KxdwEibcj24dNzlyXNs8P6Jr6C7yxw
tSeH6Mqo5Qvh/wDFFW7wpC8PLn24x6PNh47R1DszUZxwkHHWINQaFT5aRc2jwnbE4JjmrvJz8JDw
O4/kWifjtHRLu6JQlhIMUbNz720dJ3jI+m1R+8t8UD9n0ITwJxGwqPNW/Ha8Q96Oajz1r7q4ALoG
w4S7kCKg1B6fxNoPIeOO0KM4lxIOOmPN23eFLgGcUDbNfFblvRE6XbfDMbx0C9apch9I2ITh3jej
bn2g7CjYvUu26P7w2/IPNt/ew+kbFqFD7Udh6GOBxTGvLXM/dK1ivLzx/NfNAioOB6fPsh/+pDaN
oQnAuD6D8Ty4+LHxRykFGYpKOwsQdiNuZiNolwkfYU4Rv8u2r24HAhSBi8gHAziV5cYmU8NMQ59Q
Qs3YyFt2qDwo8vOBuaSdFxk12MvKwEwKA9yjZEn1YSGDpjUHELgBlZOIxMVqiQQcwmiCTsCnOUZa
7g4xuRPKRLE+HYhdukNEUAzP9lH+UU+Q8z/K3/2CuZnnK7cPrkerX5uneOIFBvUr146pS+hP8jtv
E6ZSYFW4DKI9BquzEI7ZFlo5O6bciaGIem0yKjre9cFTOZMq96Ny+W2RFSewIgx/C8h+d45hC1y8
BEZnM9p9JruFtg2lAzBMpfd2Rh2yXnXWleIZ8o9npOIvM+GGZPYhe5s6pH7mweEA/ahf5htfswGE
fRhyDclSEMz3L8RzhldlXybEHgGzJD/Wo3r4ERQxtY6Wwr6IzuFojEocxeBFkVtWjn+cR0GEnNqP
jMPE+W7FW7/M/DsWiZW7cXEictR3bkIQDRGA6rmgGKkGMeVgWdyNZGNNiHLcv96aBsIhCOMjWUsy
T6JhWcqQA2o113pmm+RXFW5M6py2k9fgGq7Phtx3lDlIl5zOu/POuKEYhgKAdWHLW3Nm2ZGeUSAG
xG9RtxwiAB3egc4I8zOJ8q0dNiGc55yZGc63LlZ/W30oWYFpXKE7I5lG8A0fDaGyI/L1zoreucFo
Z6jRQte1jM7ZHH0E7mzgh2lRjsFe3qi2DxTLDsUpjAcA7upO57Nrgj25oD2LQf8AxHolM5Cnatc8
Zccu9TvSLm6XHYOtcnlAaR25q7c2lgdw6YgKxb2yURvH0KYGXRRRhsqUJDELejcOEcE+3oMUR7Iq
myHTEbZKW+StXThCQJQnAvE1BC8qMh5pkJadwXLSOdfo6ZdgX+I9Qk0YOvMJoTXsTjtHU3BaBjLF
GB7k+3qjegYnGo6l39Eq329MycTEdAGiIbMCqb1I/pBXf8PTKBzRBoQtWcq9EoHMJjiMejUfaKP5
1eiUT2hWQMQa961xFbcSRvzQ1kQlGWl4hndVqTmp2ZYeKL47FNvFDjj2xqo3BhIA9Fu7hGfw599R
6C1eHhufDn/9K1jxWzqj3IyjSQGuHbFQuD2h9OaY1CuWDhAvAfmk9eHMjCPBc/RKoo3YfeWi43jM
K3eH3fMgRlukPD0w5mOA4brZxOacYHA+mEifhX6H82f96rhmpcpOtq68rT4b4o8rM0qbR2jZ1NP7
i6aH3ZdJBwNCEbB+6uVtHYfdUeat4YXY5EbUJDAhx0ebAfBnS4Nh2pwXBwIQu2qXrdYkZjYhNmkK
SjsPyDz7QofvI7d6EolwegwmHjKhC/DXeK1KluR+pC2a2JeGR9k7D1DetB4S+8gPrCE4F4nMeg/E
2QdJ+9gMO1kbtggk1bEHcV5cgNO0F9J2F+j8XFgDQxJbUn5a3H8RN5XSQ5iBjVTNqAiNReQFSpAx
jIGnEHbsWiZOg5A0UZQlqGLqMoSBkBxRzBTEO+KHkwHlDxkDDeSFHleRHmzNb14+GIzAKjytqdQW
kxQnZuG3I+0EBzTX7ZzFJLVZLtjE0I6KKtE0QT2B1UEdv9rqqnRXqU6OZ/lb/wCwVe/1JftH5zla
djkjaux7JZHpp8gAXKW4HVPWC2aA2dYklgMSV+H5cSu3pUGkUG9yoXeZFY1xJkTvyZZRiO4L8L/T
x5/MmjxrGPaV+K/qdz8RzGUcYxTCg9I0YyuXDhGP2nJUad/OQrC3/eveuHxTOJPpBGI1XDhF2QMv
ic3PwxApEnejeu8d6XtSOpuz0f4flom7zMsB7Md8itcmvc/MCOm2H0jM8R+ta7/HcJcamOkfV6Iy
kWAxXm3aWIn4cB7W+XQeTsSe6xNwgGQiAMOEiqje5865atcYF+56tTrOaAYlaIEx5aJ45Zz3BRtW
APMlSEQHbeQFqmdV2VZy9EZSLRFSSjzd4NFvhA+zHajzMxwDhsg/TLrmcy0YhyUedvAvLhsQzZEy
LzmXkerGxZD3LhAMiHiBi1Myhrrdk5kcgTkBl6EcraLaq3ZbIISA+BbpbGRPvJzQBagOK8TCO63E
8R72QiMIhh3dc3amzyfDDYbj1I9BKQxwHaVaskOIDXM/nHrTuA0gNEP0ioxzAqemUs8B2oPs1S7T
VSuy8V0mXdl0W+XFYvrn2BaIis+EIRGAHVlI4AOpXTjJ5nvUAcSHPf0g2ixhAk7HyVq9zDGUSWYM
6i4ZqrQcS6YobBUolSiVTFCOZqel9qMupbB2lDeT0CNm7OAOUZELXevGYf2i5Vi2MonoZT7VHfI9
L7ELQ7SsVpOMUdnS57SjI9yEnwTjOo6jnNFvCKBGBxGHUufoFW+1DZ0XJDEQClGcnuiRcHFslqkV
uwUu0K6N0T0h+9BsJkIRGGHQCicpVQCjHd9SjPZQ9AzBogfdMVEywnwkdqaU5HXJ+xlbtibwuA6S
cQQrNyA+7k05bYyomyKlZJc2pGNdmI6JRj4mePaFC57wr29ecR4gNUe0KFz3hUb81c5aRfQdUP0Z
K9ypoInXbH5stnZ0W+ZHs8M/0T15QlhIMUbUvHaOk9mR6J8scJcdvtxZRl7QpLtHQYmoNCpctMvO
14Ttjl6aVqWEhQ7DkUbdz720dM9+yXemjS5Dity2EIXI8PMWjTdIYjvWpmlhKOw9JtywKNi544YH
aOkxFJisDsKMbg448NyK/DzPBOts/Z0GJwIZfh5+GVbZ+zo/E2w8SfiwGzahOBeMqg/IKB7Uz+qe
kwngcNxR5W/UswJ9oIWbhe2fu5/Z1DdtB7cvvIbN4QlEuD12OBWuIJ5aZ4gPZO1fibLkNxCGY2so
xjIkSZpH6l5NoRnOAHi27Qua5qMh5wi1cwU5DHN1JkBIkDbsUgGnDAOPpX4m1qiIlhciWQjMxuxH
vCp7why1qP4OUg1yeL9hCjY/plwTv3x8S4dpoz0UrfMv5r8VXUbvMTlbkauCGZCFjmBetjuVjmOV
efL32jI46ScQUIW4xvXjUmVWXl2YRBOdsM3eUOZ5y955NJWoyFH3SRtcnDyQaE5+sJ/Pm+8k/Wo2
+bYCVBcwrvT4g4f2qp0V9BVcz/K3/wBgq/8A6k/2j85mUiBEVJOCnatjz72AlENEH9JOfV8iDKN2
6HI8L9YRI1XJeGAp3k5I+aI2eVgaaeLWe1ag87hoZyqW+xG5ek2yOZWmEfw3JYGRDyl2LRYixPim
ayPf6U2uX4ruchUQ7U0CZE/eX5DHdELRbDDPf6TyeXBNw+1hEd68ux8TmJeO4Xkyesrh8UiXr6Jy
WGbo2OSDRFJ3yKD9Ffhv6eBO9IkXb5L6WxKNdVw+KbMT6IzkWiMSvMuR0cvHwQPilvK3I8jyMjLm
DSVyI1CD7N682/8AEvyqSd+3f1zatU5ePjue9uCEYReRpbgMyjcuESvSHEQMM29H5I+4tF7hGcvd
Q5aBaIrdIyGxCIDAUA6/kj7i1W6cifdXnM9mHDa2E+8OrK7M7ojMkqfPXwTIyOgycE5eHIDL0Jma
nCMRiSvw7/HvNK9MYRAGCEI4RDBRsDxXMTsiMVLmPZPBaGyMc+/ryja++u8FsDFzR1GziamRzJJf
0EYHw2xrn9ilcOMy47OrKQqQKDeoQkzjjn2nqQtYxjxyH1IWo+K4REdmaERgAyJOAqVPmZCtwtH9
EYIMeG2HI3nrCAxmQFC1HGZEe5AbE6Mm4sB2qR5smJmwMxkdijy1qTwjFwe1B9i3CikN6nc20HQR
tWnJ37luHSAER1LQ3FQ702Wa06Y9rVWSBHswPTcO9RHaekyOQ+lGRqSt6BOdCnzx6OxeWDU4qvQb
ZywW49LDGVOiMnpmhIdN0j3CrZ/O6ZROcF5lsyjI5RdEXRKLmmpx0XO761c7B9fTRRkcR9a7OmMt
lENgqU2xSjsqOgbAHR3sQonYYkISMgADtVmdqQJgRIEbqFStmjhROYGkvtFEY5Xoah2x/wDfpu2M
A+uHYcfp9Be5U4QOqH6Mq/QrfMAOAdM+wqzzIwfy5/oyw6JQOEgR60bc/vLJ0SB3YHrwvexPguf/
AEno1Q+8t8UfyKlLfMDVHdMeIdMeYgOK34t8UJRqDUH00eZgHbhub47U4Lg4FC+KWrnDd3HKSF6I
eFylzc7NLqCcC1yFQdo2ITFDgQcj0jm4YYXYjMbU8TxNqty3oiYa7b4Zjft6DHCQrCWwo27ga9bp
Mbd6Y1CMJH/t5nhJwiTk/wAgMTgaFeROo9iX2dLA6ZisZbCpcve+9j4vyheRe/8A05e8OprgHsy8
Udm9CQqDgeuYmoIYry58ViZ4T7p2FW+YhWy48yO7arotASID25DNSAPiFVqiapiKjEInCBq4qxWm
5EXYmgkMkDIHypls9JQny8jO2RV8QemJuHUY5HMKHK2LZtRpEl/yIS5m1AWgAZzIBke9Ef02Ylyt
zx2SXY+9FebMvKWGZUoRkBE7qoylic05xOaJzyQOWaHLc25snwTzihO3ISgagj5hf5lp6SvyM+h3
9bmf5W/+wVe/1J/tH5y8zmZ6dkRWR7AvJsiVmz7TkPL1IiOHyMKEsQ2I29QzuSEIjGUiwUo8kTow
FyMXJO4mgRuc0fP5ifilPibdVMKAZL8NykTevmgIHCCvxf8AU5i9cxjbFYjtTAMBgB6U27RjG2PH
dJfuEVWJhY3gAz30QjAARGQ9G5LAZo2uVk0B47zOB2HBGxygIiPvb066uxaIdpO/0RncLRCJb8Py
cfEZjjm2NMgvwnKjyeXj47oLkvsQt2osMzme30RlMiMRiShORI5YHhjgJt9LJzwxiK7AAo/07kbJ
8yR4pXMDEjHhOClcpK/PxSAYDdEHrnl7Ba1H724P2QhENECkY5ko3+YrcPhB9kbvRizb+9nhuG1C
MOKeER70iuM6rkjquS2k9eNq1W9cpHcMyoclZNZVuzzAzQhENEUA6hlIsAHJRthoWbUo0I4wBiQR
g6EYhoig9A5oEebuh7cOGxHOUiWfejduVvXKzP1DuRJoBijMUnzJ0Q2xtjE96EI0ADAdc3xJ+W5Z
4RAes3z9T+gMjgA5RkT8TmZU/RQiMg3VjayhxzUrxFZmnZ0mRwFSpXjjcNOxH3LIb/EeiNmHiulu
7NMPDAfUpXTjcJPd1oj/AKYfvVuOUAZHty6NZDk0jHaVDzYjy4yEiBuXl8vdtxhM8Zl4h2BTgTq8
oCIkjF6COCdP7yjDc56AjPNmT9JmcgpvtcdnUgNkVbG5fWndQETUyZtyMtkB9PQFP9IqEc2duh8g
vLGVT0umOMaFEJzSjlGRz6RL1px2jocntRIwFB0mBxFE3Rc2mBf1K3+kOkHbD7UOalESuXCdJOQB
ailbvQEokY5jsVywS/lyMX3ZK72faFP9H7elkBmiOh1KBzFFKZ7PUnTHNSjsKlLuWo4gEKIelFet
yJLk57CrNGMRpI7F2K7ZPtcce/FWr/8A0pB/0ZUPTav5A6JdkvQWrwFDwT7JMylDDUGdTsz+8i8J
bpRwUJHxAaZdsadELnsXxon+kPD15Wz7Q+lCM/vLfDPtHQbccz5lg7JRxio3BhIP0Mag0Kly0sBx
W98T6Yg4GhUuWOA4rcjmEYTDxlQhT5O+XlANE+9A4FS5efjh4Ttj1POH3cvGBt2oEVBwPQYyDg0I
R5W54TW0d2xDm7eGF2O0bUJRLxlUHoHMWR8WOI96KE44H6CjbngV+Fu+OHgPvD5BpwOIOwo2p/eQ
x37+kXLZ03oeE7dxWmQ03I4jOMgjZvUuRwPvDb0sQ4OKoPgSxr4T3oEVBwPXMJh4lfhrwJgfBNqN
sKhzNo/CkWkD7JRlEA6w4I6CQOJqJsJZhUYdgQsznLyScHLRO1lotNcnvrGQRvTPl2tTSkziJO3c
oxu3oDlm1SuRd271O1yYjG1b8NyQeUm2lCF2OqVg8do0cDehfjdMeTuUNuUiTEd7owlbN+JHjEvy
Ii0ToPhjLEJ1jRMe5UoEwwKiU9qTwzty8JXAdN0eK2ce7+xDfKqfIeZ/lb/7BV7/AFJ/tH5wfABS
tWD598U4fDE7ypX+ZmWxO4bAjp8Iw+SxEpA2zk7t0i3bHnXpUEY4PvKEueIEZV0uSRuGxRtWhphG
gC13piIyBxKFvlpytcqfHLS1O3NPZi8z4rki8j6UzmdMRiU/FGz7IfSbh7MULnMREIDwWRl2phh6
MylQCpKNY2uUj4pyaWr8iFqwDb5SJYlm19+K0wiIgZD0Wq4anwwzkdwR5nn7mnl4N5Vo8IfaRiVE
yErXKRP3dIiYGGFWWmIAAyHojIlgKkoSJblYF4tjM79yM7so27cc5Fl5PLTnGBoYRDCRPgeXc5Xv
351uXDUn19c8ty5aApeubNwVA0Yig2lfiL1D7EBQAbfRmcizYbypXr3jNa+yEeZkOAUtA/TLrm5M
tGKlzd4PduUhHMA4BGUq3LlZn7Or+HsgyMiAWDgknwvktLcczquHbLv9COVhgRqvSyENnehPS1ix
w2okV1D2uiPLxLa6zOYgMUbwDDwQH5o6+m2NV68dFuO3aoWhERJ4p6cNRFcfQQsRxuGv6IxRbwWR
pHaeqScBVSuHxcxJo/ohCIwAbpjZBaV06e7NbBAfUvMl4rpMj0SvHw2+GH2lC3HxXCIjvxQiMg3V
dTun25U7Artx6E6R3dEJRqImveiMVbtlxKcnHYix2AnsCuyOGHRbOzFHoYdUQGMsUHwNCgekjYAF
AfmjoIBIBGIXm3Jm4Rg6u7gB0z7SrWzSAe9AhE7ETLE9O1B8DQp9iFt956ukmsU2S0jGSp0h8JUK
BW5TiMZQl9St/pD6+kkY6DTvQ5G9MR5mySBGRbVElwyNy7MRiBtqrl808yRI7Mld/RUx+YgFigFA
DtKBGfSEwDP0OhIYSD+pPnIqYOx0ZNlReTC0DekCZSbAAKGkADcFdtSNJATh2YFWr2QOmR3SU7fv
At25K3M1Olj2ih6JW/eFO1RMvEOGXaKdeUNtR2iqhPNmPaE2EOYDjZrirtnCN34sO3A9EoCksYne
MFC57RDSH5wx64n7F4aZfpDDo4fHHigd4UrWEZccBs94evpjdgHuWi8d4zCjOOEg/phOBadvij+R
CccD9ajfh95Zr2xzCjzNnxxDjeMwhcjniNh6TGVQcQjYmeE1tH7OnhpcgdUDvC4hXw3Inajys6Ql
Wyfs6dYrbuGo2HoBgdN2FYS+xcQ03Y0uR2EfIBO2WuRwO3cnFJCkhsPSL1mkx4gPaCFy0OKNYnBi
tN0abkaEO/SxDg4hMw/DywLtpO904PXMCSD7MhQgo8pzDGeW9lplFokUMRRU6HFJjA7UQaEYg9Dy
jqDUGDFeRysLkp3aShB9Mhm65eFyY8mZaOgkSj+ZI0ovwvNWpWrswGMWkG7QuYu2pmVsTIBkaspW
hM6CagGjpieFPp1xwKGgiuD4FfEp2YIEINhtQMg4zQEcGToXLZMZxwIQsc2RC9gJ5S9BX5jp8np0
sq/IcPTV69Vh1eZ/lb/7BV//AFJ/tH0NPmccpYlplc8ZGOlOXMlpylRao1BVVh8idGNdcQ75Izul
tkQHJ7lpsQ8rlAeKRzG8oSYzkMDLLsGHQOX5eJvX5UGkUBQ5r+oyEpYxtjLtTAMBgB6UzmWGQ2oe
Yca2rEXiTvkhdv8AFdyBwj6TXcLRCF/mSbXLDw2yW19oUZXI+Xy8fBaGfahEBgMB6Ly7FuV6/KkY
jwg/nFR5rnpRu82aQER9X5UL/MvpHgsnLtx9GZSoBUpiDDlYFiJODMjcMu1OQ7DhhFhQdtEZXrcI
iMnjanHXRhpwzxRlOt64TOWUQ+yOA655Xlj/AKtweyNgbNNEaYQx2lR5i5S1Ek24Zvg59Jql91aP
CPeO9DlYu2N2WwbEIxDAUA67n/8AHs+qUv7kbprahS2MidvVJFZkNHYN53KfNXiCJfdxi4jvkx9C
Z4yNIR2koWTxXbzyvT2PluQjEMAjKVAA5KM8LvMlo7Y2whGIYDrOcAjzJL2OULW2qCTHP1+hu8xI
cNsaYH60DLxT4pd/Vjaj4rp092aaPgshu89SVz2bXDHtzUbMcbpbuzQiMBRSI8R4Y9pUY5txHehs
tD6T1pEYmg7Sv0Y17VEnxSqe/onI1YUG9azAeYYuS1XKM7kIyMQSCQHClJq6jVXJHAy6H6XyCnuL
Dp3BHMCg6Ado6DIqR7FEbgo25eEcUu5aPLjpwZlK3HwniA2Or53gdB7CidpUQMohAnEUQtjAVPRX
ppigDiylqx6oORoU6MshQdUE+IUKqrhy0SA7GVv9IdMB70CvNi4ljSiaZlJgGMiT0Xf0Su2JXb0E
lE9wW+PUf19IPun61GGwKXYVUtQoCUf+500kBuUbdo+XEbMSrXnS1CsXOLH+9TgzkinbkoTzIr2i
hV+x7ktUeyVem5awjcGuHbgfQXbGAl8SHfQrUPHbOuPaFa5uIrA1/RPiQORr0Tsn7u8NdvcR4h15
QHiIeJ2HJRkfFhIbCKdEeYi5iS4iO4SHehKNRIOD0ysYW58Vs72Dj05JLWbuWyXR5Z+5vF4fmy2L
V+5umo2S6jYTjWJ3oidLsC04/b0jmI/dzpdG/agYka48VuW9VGm5Gk4nb0GMqgryLp32ycx0firI
4h95H3go3IF4yHyD8Ray8cRmNqEo4FTnc8EQpcnGOi2XaVymGxXDy9wgamlEOR6wV5tucwCXnAlx
3IXLR4h445g9JjIPE4goQk3kHwydtO46it3XBwnCsSKFGzdDXY0kDQ9qEZl4l9JZu7o1DFA4SGBR
jIMR0Qt2bs4CJ1DScNtFesXZid8NO1qYGcQrnNc5dlHnYvGzYmWcdksUZTum0ZlzoHCFxTFy3Lw3
InPei48OITnAo2zR6wOwowlWQoX3LRCBnLFgHK0xGlqESH2LGP6qeYAO5ao1jtGCdUQt3R51kZE8
Q7CtQui1IYwuFj3IXISE4SwIL9Mr05RjGORNSpWbbxuReks22N/Z6vWp1+Z/lb/7BV//AFJ/tH5v
M5yEYjElGdiWuEQz71piKo83zk/IsAfeS8Ut0QpeSZGDnTqxZMqfIzf5QRlK5wxj4p9rCg70eZ/q
J827L2CXA+xaYgRiMAAwWq5Jt2ZRt8oPJ5bCU5ByfsTxGu6fFclj3emcQlcmcIx+0qrTvnKPhtjt
TuZ3ZeKZqfSPIvI+GGZKHMc7LiNLXLxcDvXnc1/gtZRHozy/KcVzObPGK0WY+fzs/FWrnNC/zJFy
+fVHs9G8iwGJK0hxy0TxHDW2wqNuMhqw0j2RmSvxPNmUrcpEgS4XFdMY4L8RKIjIuYxxbtPX/C8r
4j95dxEB+VM7RjWUj9JJXmFxysSdMWbW2b4smFAMPR/hoFiazlkAgIh5eG3HaVxnVOVZS39ePLWS
1y7jL3Y5lQ5OzQENOWJEf70IRDAUHUM5YRDlNMHywHNDECL0baUAKAUA9AZHAYo85cB0QJjYhmTg
SyMpkG5Osm6IctH2uK4dkB+VT5g+HwWhsiM+/r+Tbfzb7wi2IejqFo+Nnmfzj6AyHiNIjeVa5cYy
4p9mfWnePhtBovtzWqXimdRPb0ynmzDtKjE+I1kd5UpezZGkfpHoER4bXEe1GWwOjOXimX61u0MP
FLsCEBUzIi25AbA3Q0sNiB06gS2KlykYyjdIq+CiHDsSQidpPRozIfplLOVAjHaEegyzOC39Bgcq
gJtqjbHtGvYpD85AHYFG63DhLsXmG8G2ZoGz4PCRuCvS2y6Jdh6BEbAPoQihcGIx6us4RwQihMYj
HqilI1KbIoxyy6so5EOUWxNHUv0JfUofpD6+iitHPSUzutNG2KmBqrn6JRI90/UhMbOgtiaDo0nC
X1hEdBlsVwHIuOkxOBXYmTAOQT9atXrUHiWc5DJGcrZ0gtw1RmAY6BqD0qKqFz3ogq7aAYPrj2HF
W7jUuRMJHeKjpt8wA5tSr+icU+R69rmPdlpn+iVuV7lJeEgmH6MnWiXjtEwl3dAujxWTqH2qM4+G
QcdcxZoXqj9IdEoYE4HeKo8tcfXZLV2dLx8dvih2hRmAz4jYfTGBzwOwoxuUu2y0x9RRgccYnYVK
xe+8hSQz3FG1c8cMN46n4m0HkKTG0ITjUHoMJBxIMVLl5ez4DtCHMWxwSpdH2p4lwcCOjZONYnej
GdLsC04/b0OA9i5i3syTjA/IDJz5czhlEoeWxhjLeoCzAQvxJOoIWrhOuOEwcRvWqUmux+kKNyzJ
pZjIjeFE3rcDD2tIYtuUbto6oSwPQYyqDQheXNvK9mWzd6DzbP3kattZNIVHijsKMWLAs7UWk5o2
5VMcFs2FaZ9x2qfMnGXBF9ma5jnbMyI8l4BGQ1A40H96nevSJkQwJzXFVRFwNO7xyl7oyUoEvOIZ
9sdqD5FQnuwRvEShaashQEoGAJl7xqiLtsCWUwGkEZ2fjW93iHciCWOGnNarRDHxQkHie0FCHMwl
yd043IVtv2KVzlJw5i3E4wNfUjBmIxfoHl3ZRA9kEshDmrPmSHtxLE9oQlGzMy2S0kKd+2PLEy+g
E0Qk/gBJPb8+v8qp04ehfodb+rzP8rf/AGCr/wDqT/aPzcb3MTEIDbn2KQsAxsgsCfaTRBqcUOZ5
8arpra5b2pHbJa7xaEfu7Q8MRuXEcclQqvpKp+ow9ahYtw8yUi24byo2LYAYcRGZ6PLsg3LpoNgK
F/nyGyth3PahCERGIwA9N5Vk67uemuneckRGWp/vLxd+yOQWi2O0mpPpNFuBu3peGIwHaUJ3R5/P
TpCERSL7yvP5g67521Eez0RlMiMRiStHLny+WBacyKy7EOV5GIlcdpSfwnaU/iuyrO4cSfRucEbc
aWI+KWOo7FDlrII1vGNwNpGnxKfN84IwtTA0wbiaP5UL90NAD4Vs4De3X/DctW4fHLKATDtlI571
pFOVgeIs+soABgKAejpWci0I7SUZ3D8SVZyK/ESHDGlsH6+uZyrkAMSdilzF6t66zjecIoznW5cr
L8nVFqyYmrNLVxH83ShHGR8R2n0P4aPhDG6d2z6F5jDybdLQD45k+roMpFgA5Kc/e82Qw922P7lG
EaCIYd3WJNAKlSvzBNq1W2+Dg0b1ehhD2LfFLtyVy9kOCPd1TLPAdqt2MZTLyP0lMMB027Psw45/
YpTOEQ6EpeK4TI96lM4RDrXPxzJke9RtZ3C3chEYCnWu3chwDuVuGUAZn6umYj7BZ96gISGgVYvi
rl/mItFjpkC7ui4qyhtQUhkzepFMo2x7OKEhi6BCZCAwjUp+gOaGi1FA5PTsXbP7ehlKPhrQha3J
ltKuHbM9E/0Sh2qUzhFgPUm2JjUGiMWw6WGJQj3ntTog1BFUYnJY9O81KcoTAqMexU6gkfFKvcjF
SH5svqUe0fWn6IbWkrtq/LTKUuEnBatQVMBQK5+jL6kBuP1IwOWHYUXyQjlH6+gEZF0JjPoJzNO8
obDQpuh9nR3qQ3lNmCQpjesFcsf9KRAH5pqFauuwJ0S78FrHityEx3ISGBDjolA4SBHrQifFbJge
7rztn2gQO3JRMvFF4y2vGitczEeA6Zj80/kX5vMxp+lH+7oY4HEK5y5oIcUB+aT1y3jjxQ7Qo3B7
QqNhz6Ic4HFu6dF1sH2lOMD0mHsXuIbpbPTx5mAxOm7+ic+7oHM2vFDxj3oqPNWKzjUbxmELkcJD
1dTTXyrhcH3T0iVuk41BzVcw0xvR5eReJrbP2dI5m2HnGkxtihOOBRjIOCjYuy/0ycxs+QGMg4OK
NqB1YmBOzYpvQvh0GQHYjdcCQxipXtQ0wLSGahZlNrNwtOJ8Nc1TDI9E45mJZ9q8k3KWyYin5UbF
4DzIhxIUfrnmLZI9+Iq6PlypJiRvC0TYnaCnFDtRiashFqyLAIWxQWo6e2RxKv3xOUTdIhAAtqUL
GUay7VCHsisuwLTH2sBsiELj/EgeEZmOaE7dYyqyjCA1TkWAUBoMQBxyyfeeoZgeVfPtxwJ3haL8
eE+GYrE960QIDVJOCBjMx1Ck7ZIBC1TOqW04p8R00QjbjqkcIjJcVbs6yP2fL6dXf87N6Dd0b+vz
P8rf/YKv/wCpP9o/Nsr96QjGI9aM5E+WDwQyAUbGgylI0C02hG/z7ZeC1/epXb0jO5LElbSnKZP1
H9A3RRPIqlAhCAJMiwAzQu3Q/M3Q5/NGxG5dkIxG1eVypAs+0WI+la213TjI/Z6eVu1hhKeJHYAt
WkwsZ6qSl3BkIxDRGA9IeX5X4l0+KUcIdpXl2D5vNHx3JPJnTyJncNZSJz3D0WubnZEVkewLzuba
1yorG3KhbbJGxyeqFmNDegwHYFptivtSzJ3n0bnJEQlo5eFLlwln2o2+SJhEHTGUQOPLhz9SPNcx
4YD4cDQRDcRltfetcw1iH3cR7Xb1/wANy9b0sT7g2rSC8jWUjiSjYtFrMT8WY/ZCEIBojAejMjQC
pKPM3AwFLQ3bUOWgeHG7IZDYhEBgKAdZzgvPl9zbpbBwJzkvOL+TaLWwcJSzk3VIiQJyBZ9m5Dmp
CQlMNDXIyOnbXb6F4gyuS4bcRnIqPJwL3LnxOZmMAMPpQjGgiAB3dEOXHhPFcP5oU+Z9nwWv0RmO
3r+TE8VzFsRH+9CJ8cuKXaat3egMjgA6nfPjulwO2gUYZgV7erG17MOOY+pTu5R4Y/b0knAKV843
DTsCt2BjclXsCAGAVvlhgTqn2BMtWMbQbvPWlPYFHV4jWXaVeunaIxO4dEpnIOoi7cjGc+IgnagY
kGORCjHbIJ2yAdRG5SlsCjI7a967UZHAIy29A2gMpTOSMjn0gAVyRjmyAUDtl0GR7kzo1yT7ZE/T
0XP0VHtCJHQxzQuAY+JU6DM4RwWnobahcHYemuAqmGJ6GPYUY7OkRyxPYmCZGWU4yftZDtHTZG0S
C8wji2haQ7AdEh+afqUe8fQonI4ozOQdGRzKp0GBNY4IBRgO0oEZVUZ7R0SluxUZbQgBmpg1DuoC
3GMvNkce1XL0iNRmzDJkxzQ2XY6f8Uf7lKIxxHaKqJNdUWPbmtMvFbJge7pIfhvBwPzo+gu2sI3W
uR7faUoe8CPWnjW/ysgf1T+RRnHCQBHRb5oewdMx+aVTDrytV03OOHbmOidqWEgwOw5FSsXT8blj
on2eyekxwkKxO8ISwIpIbx6YxOEgQe9S5aZ47fhO2K3Lyz9zdLw/NOxN+5un9WXUMJYHNHl7n3kM
D70ekSA+FcpLcVwUmKwkMVxUuRpMZg9Lv8C4e6MluQeko1iQjGQacKSHyDTIOF59qDQl4qZoA4Ia
aDou2pQAtXrZGuQo4wYqUJYxJDjcjyt0vdsjhJzipXSD5cKyk1AvLhcaeQkGfsUubtgeTM8W4oAS
DmJYOmAJOwLyrl+MZ4EFNPmYPsidR+hP577hGX5F53Lz1w+kdvRg4WuESbMjxxGR2oXI1DYDNSvc
pOJuRI1W5lE3rUZR/NoVK7pMbdoYGvEcFDlYye5ItIbyhZAe3y0dJbOe1SmRXBG8QxuUifzc0LcH
IkWA2BS8t5W7Q08VXKZvhXagbDsRndErFuPgM44vmHQ5HmLz2LjxEgNu1XOVjzIErBYOH1BHzZRm
XowanTKxeiJQkM8juVy2A4iSGOcVECAhGOAWyO1TAt6YEEvKkiVK3INKJYhNEITbTbzmc/0U1qPE
cZGp+fX+b+Z/lb/7BV//AFJ/tH5t/Cx+7t+yMyvNuERhGspy8MR+VHlf6YNINJ8wfHL9HYnPeSmj
3no4yw9aom61Oq5W5OVwqvQP6hzUf9KJ/aWi3CV26aRYUftQv8/HQMRAEv3oQhERiMAPTGUi0RiU
8SYWcASTEzO4CqFzmC0B4LUXiO2W1ACgGXpDZ5YxEBS5dd27Ajy/KAiI+8vSo/Yy0Q7Sd/ohCPHe
lSMB9q/Gf1CYlMYRwjHcnIFrlo4YSM0IQDRGXo3OS06xDlYn4kxjI+7FC3C0Ry44YWmBJO2RNAo3
buk3IRaOkFojvRBiY8rbLcTgzkN2xMKAZdYcvy41X5+qI2lM+qZrOZxJR5TlpAEffXA50g5DJ0IQ
wHpBYhS1Ct2W0+6EG8R4YR3riL3JVmdp645S0Wet2XuxUOS5ciJkGmRXTBCEQ0Yhh1JXJ4RDsMTu
CM7nDHG6xrTww3b0wwHoDIlgKkqXOyqPBytuoBO09qe4dV65xXDvOQ3DolOWEQ5Qal7mj+rD/wBk
IRDRiGHWMpFogOSvNlHhjU9nsA/X6GFiONw1/RGKt2R4bY1SH1dUyOAqVc5mXiunh7MAog44ntPT
GyPFdLd2aEY4CgU5kcNsaY9uaJOAqrnMS9stD9EIyOADrXnMuetbs+/KrbApSwEQok4yeR7+jy4l
pT+oYoyLknajpIlAV0FRlOAhpnkUAAakBBsGQjnI9EZblTGXTKHeEIDGWPUMzhH60RsRbBiR6lDt
foY4J4l1dhEvbEfpUDnXouHcoDeE2zodEbRRMmGabZUp+kg5hGJxHRXE1KfobahcHYenWcZYdi3B
OMUTsBP0KJ3jpsf4vqQsk8ERruNsC8sWoacG0hRu2A1q840+7LciNx+pQ7T9SAPepRGOPq6XTZSH
0on1KRywfs6DHOOHQBtRHun61Enajp3IQEhCVuTEnCquWDPVGTSG1xQp3Ub4xtTEqbHY/WgRgcFd
sZRlqj2SVyAdrgEx259IuRHFbOruGKBGBw69vmRQ2ZV/RlQqiFBovjTL9IOrnKyLmzLh26ZVHRK3
LCQb1o2pF52jpIOLDA9fXDx2+IfaozjhIOOi3zkQBbufDvnt8Mj1BL93foRkJjD1+nF62OOGO0xQ
nEuCEYHPA7Cpcve+8hSX2SClYn95ap2jI9QXIFrkMDtGxahjhIbD0SgcJBlLl7njt+E7Yr8THwml
wduaEhgajoMJYFeRPLwHaOgXrdJwxG0LVH1HL5BOzcD6gwUrRZsY1TOq5octy3OwhIRY2bloaonc
RIOpi1fszIqBIyiT2cJH0qMrtuVm4DgcJDNjgUeX5e/OxOUQb9i4wmf0NyAu3o2IwqLhEYkN+cWU
4cl/UITu2maAuRuGbbWVuUjIASDmJYs+SnbFqd23IAW5XTEShv4QXV6/IiRnMkkYKErMWmaEGiH4
vmLdv82J1SUpcvK7dJDEFhA9ya1GEB2OVXmJR3QOn6kLfOSN+xKhMvFHvQnCfmcrdrCRqQ+ToSxe
mnB33qf4edsxhScJ6hJ9jxBCPLW4yiRx3RNiQcsgp89cBmziH6ZwRjF3rO4S9SoWIx1eZIAgYtmt
EKCI8uHZmr3PXMvh2hSsjmvLnjjLtVy3ysNVmIeU5UAOQfaoQnHResAW7sDjGUVd/qEzp5WyCYv7
RAy3K9zWljcNDu3dFen8by8XYNdH2oCeJwhGslG9zI/Qg2H6SDBgEOYsQiJHxh2c7UDzBiIj2IZ9
qELcRGIwA/sTTpr8lp6Oi5n+Vv8A7BV//Un+0eh/mk3L0xCAzKNj+nlhhK7+RG/zcyS7yzlJC3Ae
Xy8fBaH1lPKgTDD0jHpqtyoE3QLduJnOVBGIclR5r+pzjCMaiy/7RX4bkZWo2mAeHEWQndnK9dzl
I0HZ6czmWA+lPKMiP3dqA098pFC9zDSvZN7Pr9IZSLRGJRcm1ykfFOR06uxC1YBtcpEsSA2sDehG
ERED0R5flBque1M+GKM7ktfMz75SOwBC/wA3q0CsLMmAB2kBMAwGAHpPIgDp9rbPdECqjBjJh8O3
p4bb+/VCUzrvNxXD9mxC1bJFiJ+JMe1+aChGIYDAdYWLEdd+dBsjvJRPiuzrOe0ocry4e6aykfDG
KFuHbI7ScT6QW7dbtykd29aXwrKRzOZR5qfgjw2Y7ve7+vqZ5ypCIzKlcnxXZVkdsjkjcn95crL8
nVjbtkEAtbgM5ZmW4Mo241apOZJxPofwocWYjVzFzAN7oKHMENZt8NiOVPabpjy48MWnd7Mh3qV9
mHhg/ujr+SMxqufojLvK4/vJnVLvy9Dcvnww4YnsxU75xuGnYOrGzHG4W7s1a5ceGPFIbhh1J3cY
2uCPbmpTNGCBl4p8R70LUPHdOkdmajAYRDKNoY3C3cgBgOtKWItDT3laBjcIigBgKdE3rG3HT3yx
RnAjSMHxU4C605RYUwUOWcTkXkSFbt7ZJlpyiG6ADlRCOUekyyihPZQ9LZoDNqoFGeyJQ3AqixZA
ajHeCp6cWNSodnRPuVvtRJ6WPctWUlrOEcO1aR39TSc1rGBx7UCcAmGJ6hG1EHEFCI70www6ZDcW
Ue0fWjs6LW7V9Sux5mWkXx5cScAXcJwVa5eJeUSZy3UYLuUO1SkczREZHBEZGo7+kEYoz3fX0mJo
JBNsUYjYjE+0E+alElwwZXRt0kepSlOWmLVJWkTZ8zgpRykG9agJeKHBLtjRWrvvg25fYrV52EJa
ZdkqdJicCGKlZl4rMtPaDUdeVuWEgxUYT8drgn2xRbxR4oneFY5qPhvA27nbRumNwCl4aZdtG9Bc
5U4eO1+icR3HonZmHjMN2HIo2b1L9g6Lg7PCemVvB8DsITS+8hwz7R6c2TS3crAnbs6BzFoPcj4h
tioc1a8URxDbHMITjgep5sR8Of3g37UCKg4dAvWh8W3WmYzCEmeMhUI8tcNDW0TmNg6eEtONYneq
hpxpIHb0eZEfBn4xsO1OC4OBHyAmMYi9AuJ5spWp+KJaixQnCRjIVEgWIQmeZumUcDrk47Kofib8
70R4TMk/WozM5DThKJaQ7EbJs+a4Y3LnFJ9tVCfKXZS5i6PjRIoH7ui6ec5SzftTLC9IHzYNmJBj
6ihfhZszBHjYSPe9V5PL2424QBIbhDDYykJSMq4kv1Zc1IA/0+bvCRqd8VK1dnpiC0DL8qlz39Nv
mWctJdxvyKuXuYAlcNCDTuVrkrEYw0cd1ve3qdu1COiftBXOb5mGicRotA1cyzULIjIMRGLihfNQ
5WLabEeMigM95UbVoGc7strsMz3K3ytkAaRxyGMpZko89yVzRMhr9ujTjtY5qHIwtNGcRG5dr9QF
EIWw0RQDpr0GExqicQntWoxO1g/9hK/Id3y3mQ3/AIt/9gq//qT/AGj81VInePhgPtRldnTKAwHQ
2eS8y6eyK3egpj1GToBPmvsTdFV5diNB47h8MRvWi0Df5yQaVwBz3EoXuZjOMThGcvsCELURCI2D
07mJnI4RigTpldyZ5CHbktRJncPikfs9Ibl2QjEbV53NSNvlh4LY4TI71qvxFvlo/d2cSWzJQjEM
BQD0JlItEYlaLB8vlsJ3Pal2IWOWt+Zc2P8AWShevkTvYilIvs9LLlOSmTPCc4ASETsJyQhCIldj
470otFjsasitILk1lLByjy1vw/vJ7NzIQgGiOtGzZ4788Iiuke8UZTOu/Oty4cT+RCzZL350AFdI
94qp13ZVnM4k+kNyWEUeZu/eTwHuxyX4WPhxuy2DYhGIYCgHWMpECIqScAjzc/BFxZBwb3l5n7q2
WiDgTtbqm3GTFnmQWIHbVedcYTlSEQ7Rjufb6EaGN2Z02wdqjyUJPKR18zdeu1m3oRiGiKAdBnKg
CGXMc4a7RGv1AoQjQCg6xkSwFSvMm7Sa5LYAPBH0MiPEeGPaVGxEvObRfOuKjAYRDdWd4+G3wR+1
XL8vbLR/RHTKWeEe0qMT4jWR3lWuXGM5PLsimClc9i3wx7c+iUsrQYHecesZHAB0Zy8VwmR+xWoZ
QBkegyOADo3JY3CZfkVu2MJSc9gUlL8yKtE5VRJwFUZbT0S2OpPi9el8JSqiNo+pNs6BsjUpsh0T
ObMidkSgBiozvQFy7IOdQcB8lK/y8BbuWw5EaAhXCNigPzR0HtChuPUdNnkgNmPan6j7ERnkq44l
auoyExgcUZnE4Jhl09x+pDtCPRY3yI+ha5Cu0YoWo37nlgMBqKMiXJxJxQUd0vtRHcE6jcGVD39M
Y76qUc2cdMZZgoSGBqpbqBRO8IhFs4qd23Ixm8QSNit+ZMymY17kytGeIcAnYMFetZTa5EdtCpNj
HiHaKosH1xcPtUJZsx7RTpjPK8NJ/SGHoJR9m/HUP0o49F7lRTV8W1urUBQuO5I4u3PolbBYljE7
CKqEj4maXaOvDmIeKyXO+J8QQlGsZVHRDmxSFz4d/wD+mXUje/d3OG525H05jhIVid4QJDSjSQ3j
o0n7q7huKbCzPDceoYyqDijYmeHG2Ts2dP8A9u6fUU4pOFYEbU5DTjSQ39P4i3l95EZhCcS8TgjE
4GhXkyrCXgls3fIDE4SDK5FuKBI7uppPhKb1HqAaQDGWkFnB7VIjgjIuYRpH1K9LLQaqcJF7j6oE
/UmPUvWhOJPLyLwJ4mNUWqCtVi6RCVGyPapyv2gBAavMjQE71dvylE3rhfTmAhGIcmgCt8v7geY3
lTvXIgiIaNMzgtVwa53i5Eg+OSN6NqML93xEBmGxSuTLCKEA5nOkbfuxO1CIA1HxHqkwiJTGA2rV
Gkh447D/AGGp19nWb5Vu63M/yt/9gq//AKk/2j80yscqRK9gZe6jO4TKRxJ6XCaRf0ThUDpo1O3o
qVRPIlzgFs6WiGtjxTOAUeT5C3CFsUlNwZSKEr8vMuZ0AHyDTa47mbV09q8Ln2709myMQtEAAM2D
P6RyDKR8MRiVG/zYErp+5sDikhzHNvIisLRwh3D0Wq5IR2OWdeZzB02I1hCJI1byhY5QUArKDMO9
Aniue1M1JPpZ8hyAcCl/mXaNsZttKFqyCLJfzbpHFNstWwrRbDBfh7BJmzzkKaR2oRiO05k7T1tF
qOu/LwQdtzla5kTvS8cwAO5CMY67kvDHBGcyJXZ+KQDd3pX/APHsmn58/wC5OKzlSEdpK4q3JVmd
/X/DR+7jW6fsUeUs0lMcRHsxQhENGIbqajU4RG9a5Em3bIlIuNMp+JsKt6EzkWjEOSjzhHxJ8HLQ
OLEs7LirckTKcjUknaemNh+CDTubKYAqfMSHD4bXYMT39eNoFgGncP5owHeVql95c4pd+A7vQxh7
Noaj2rbGyP8AmPVlLM0j2lC2PHOnecVGAwiG6bdn2bfHPty6Ll3K3wR+1SkPFhHtKjA+LGXaVKZ9
kOtUvFM6j39bQPFcIiEIjIMr13eIju6PLHiuERCERhEMFclKmmJNFHybxEpHA1p3rzp8UpRGrtK1
ANpCJzkw6YjOVSicpV6BHLNCIwCMVIZYjo1HGVemY3hTOwIEZF1CcJA0DjYVelckImUTGIzcq6JF
2LA7nQ2MG6Bvko9/VZaeo/Q2xNtVOo6Y5phkj0+tA7+mx+n0MwO1w61CgOSBX+P7URs6JQOYRBoR
0SmcqBPtopAdo7129APug/QidpfojLaFEe9FSsSrGUXKt2mkIkCr1qn82Rg9A1VG3ANGIYBWb2Tm
Ev8AFh0XeXJ+7k8R+bKoV6zVgdUX2SHSTHxQInE9ijMYSD9eF6HisyEv8PtISGBDjvUOYjU2jUfm
nFXbI8MmuW+wgP0ytt8O8NUTskMR12NQaEKfLnG2XjvicOidqWEg3ftUrN2l2wdEhtAwPTK3LCQR
tT+8tHTLeMj6fzv3cqTG/anGBRhLPA7Cp8td+8hgdoyIRtzpchQ79/Ub2hWJ3oiY03I0kOgxkOzc
UYXKXIU7QhfhSH7wfahKJcHA9Fc1pb4FyoPunoMc/ZOwryrh+LHLaPkA560HIpdH2o9TTLDJMekx
BOgVIyWLq9dFDMiAQkMQtcQ2qpG/qShU2bvjiKd618tLWMwMR3LQ5hmBhVfgr4GmRE9R8TdqEg8Z
jMKE7w/EWo4x9rtBVy8Ht62ENdPqdQ5O24hbOq72hW7VgAzBHZEBDVjmrl69Ix0D4cdssqI8zfrc
ng/XPNWY8QrciPaC12pPticR/Yh/Q7vlu0p+nmf5W/8AsFX/APUn+0fS0+VGUi0RUkqXLckWjhK4
PsRJNTietv8AQ4UREaRzAz6dZBI2DEoEWxaicKPLvKMhUppBj0CV17dnOWfcoiHmW7QylQHehG1E
Bs8z8gNu2QBhK4SA3YE8zKNk4sBHWPrQjCIjEYAek8uB13jhCNSmI8/nJYQkRwrzr58y/KpOUdw9
ELcAbl+VI24496PNc2XkBSJLiK0QHlWY+KUsZbgEIQFPSEyLAYko2OUn5XLfvb4DyMX9jtUYxiIc
tQxjXWfzj2phhko2bLG9PbkNqYEykaykcT1hbgNd+5SEB9ZUrkzrvXC8pmp7Aq+M+GOZKN29xXZY
PXSNg9KOWtFpyrMj2YoRjwwiP/RR5qfgDiyN3vd/XEbYMr1zhtgbdpRJ4pYyPvSKN2597drLdsHU
JJYDElCFtjr4YamYB/GAcUIRwH0+h8h2s2uK/IesRX4mQYClqOyIDP0yuSwiFGBH/cczLi3A4/Qo
wGEQw7utKcqRiHKBmKyIuXOweCPoTOWEQ6nfn4pvMv8AQtcvHcOqXf1YW8YWuKXbkhEeG0HPaeky
OADlTvT8V0v3ZKc9gp2qIPiNZdpULY8Frin25dELOdw17EAMBTrRi3DbDntKlLYCVEnGTyPf0Rjl
aGo9pVcFcNwcEiQAKUUZcUTDBjQqcos0Swfcrm9lDojHag2SjMdixUrh7AnQQmMqFCOWaERgOk75
BXD2dD2Lkrb46Sy1XL857pElaBmQEAcgEwxUN5T5AHowWCwVaJ9nWrlinHb1i+BwQ2dQPgu/ps/p
qdu4dIIaGxagQy04jFBH9P7VLt6AnylXoD4s5706hcHYekRwMh9abo05xP1q3uooSNAzK1dnMCAz
fYvh2dVraSxKF21hhIHEFTA8QGqPaKqFz3gCrc8I3QYS7RUKzcPhn8OX2dNVcsn2C8R+aevKBwkC
D3o2pHitEwPYMEYmokGI7VbmZcXLT0T/AEJsKqnRriWlbOr1ITjhIP14cyPYOm5+ifyIEVBwPRb5
yPgn8O8O3CScYHDpjzUfD4Lo/NJoe5AjA4emMZYFG1cPFHDeOgXrdLtuvaNijzdms4+OO7MITjUS
6g5iFZCkhtCEhgegXovqjiNoT+zIVC/Dy8JrbJx7Okwlgc0eXuPqh4Sfaj0eZD7yFRvWrA5jf6c2
5h4yDEHYiYgmxOsJZdnVY93SWzDdHl6jod9OTqi/EczbE5z8IkHp3o85ycfhH7yA9k7exN0iduRh
IYGJYrmfxE9XM2JRNuRLExKFu7cHmRGEjTsQJoNowVfWFK9M/DtB6+9kp3Y3fLuTOIQuCRu3bvFK
4cewIykWjGpJRFuX/b2JMB7x970BBwIYryxCTgvC5EP60Hxz+e2+SV+V49fmf5a/+wVf/wBSf1n5
mNy7IRhGpJR5fluCxmc5J+liqBwvCVXofrNidi+HDgj45ypCI3lHl+REec508M+YkHtw/RCMOYuR
tzx1EgBMOYjP86JBUYW5SvSJYQiHlI7AytfirQtC6HtWI+Jvz15vMHywRwx/uUrUw2ks5o6EY2zO
RzA+1CQsG4XYG5WL7gom9GGoVAjFvkGqREQMSU1vV5RoJRLavtQu3YiIygwJ7SfS/h+WAldPiJwi
jY5U+Zfl97eyj3ol9d2VZ3DUn0Xkcm054Sm/h3sjdvT1T96WJOyIUbl/VDl8RawJO9CMQBEYAek1
T9QDn6FK1bYwlpEQ5pmXbsUTfLiIAhCLxiw25nvTBCFuJndnSAGD7SclKUuK7Os5OT9fWEYDVen9
3BG7eOu/LxS2bgjcmQBgHLOdi/E3/GfDAYRHpTPGWEY7TsRncOq9cOqZ7cghykH00N6WTe6hGIYC
gHWM5loxqSjzd3GVLcT7MVr/AHFktH86W3qm27AVmTV9yNybm5Pb7MRhH0IjbY3rji2DtQ5WJ1Rg
0uYue9L3UwDAYAdIs4wttKZ3vQKfM+x4LXYMT39fyyeC2BO6BmMo961zDTucUtz4D0MLETWZ4v0Q
rfLx9sh/0QgBgOoZHAB1PmJ4zJl3ZI3ZeK6dR7MumHLx8V0sezNCIwAZWeX946pdkUZnCIdG7Px3
TqPZl0TkRS3wxO/rOclO775+gLQMZkR9aERkGT7FcvH95It2BGzEarhFdgdDl7g8tsJHAqUgQQQS
CE+0kojIyT7CqKU9mCdHaKpgowGPSYnYpTkK4BE9IG0qZ2lYsm0ifaE8Qw2FNIAjFVVclAdqkeqx
qE3QZEsFSSxCrgtMAS+LIRmCCNqI6aJk5wwTjAhx1Au9Ds6LZ2zC1F4yGYUbYkTHKvQFP9I/Wj06
vaiog4OhEYdEo5io6BEZsojJSG2o6DH3h9SjLZNh9SkHL6ZN6lOEi7Fw6DK9IhoEhtjjouWD+6mQ
P0TUImPigROJ3xXmRHEALke0cSjMYSAPr6bd0Ck+CZ7fD6AADgvRZ/zov0BxwXh5c+0+EqMZ+O3w
T7Y9Fag4q5y5oInVAfmk9cxlUSDHvUrEzx2Tp7Y5HolalhIEd6ly1z73lzpO0xyPSYyDiQYhS5eZ
47VAdscvTi9bDzh9IUZjCQfo8wfc3KTGw7VoH3N2sTsPV0n7u5huPRVGMqWp+HYCmiWmKxO9NL7y
NJjf0iUKXIVifsVfEKSG/o892j7YQlAuD6eVm/ATtyyOXYpXeRl5sRXyz4h2bUYXImMhQg0PUbNa
TQAEnu6ABiaBGEg0hiFDV4Ilz3IRiGiKABEGoOIKPN8hCuNy1EfTEIxkDEjEGh6fMsy0yX/fcv5k
s7kDpl3g0KazzEuX/MvCnri4RlZ0Xoj2rMhIfWrtvmhKMpeCOb7wU0qdqjyM7wFqdRK7Kg3RX4Hl
pfBj97MHxHY4QjCIDZgfJd39puZ/lr/7BV//AFJ/tH5l8y6eL2Y5lEyk1r2YDDpqqIbFRMQ6oNJ3
JxxRVU/UELYcmm31KPOf1u55UTWFgVuXNzZI8pykfwnICkbMKGQ/PIVFw55J9OkH2pUCjON3VzUq
RMaM+xDmL5N6/IVnOrevoZrfmZmQdu4ISlPgfCPCG7kIxckZyLn6fkBnKgC0ipfghCpPatd4ap5C
VdPpHKNnliRAfeXRRuxeRyrRtfvbtTM95WiAoMzj6EykWiMSVotfD5YeK5nLsXl2bZlM+zEVJ2yK
F/m9MrgrGLeH1+lDka5UhElnQ5aBNzm500QJhbhHPVmUZkDzCAKYAD8vRtnKkY4k9yN6+dV+dSco
jYB1gANd2dIQGJKN+60r88TkBsCNyfhjsxUecvxYD7m2SeHeRg/pSSWAqSvxMh8OLiyD+0uGs5Ug
NpKeVbkqzltPX8oVsWj8Q+9L3VHlbJa5cxI9mOZQhANGIYdQaazkWiMe9kLsq27dX9+Zq/c/oTOZ
aMakr8QQTevDTYj7kCaSK0jE1kRRyek3DlgN68vC9frI51xQhENGIYDrGcqAIRljL4t8d/BA+v0V
y8cBwQ7Birl32YcEPt6sLAxuGvYMVDl443CI/wCEYoRGAoOm5fxjHgh3Y9F28cBwR7lDlon7wvL9
EIAUAwUpGjBAnGXEe/rSINTwjtKjD3QrUPdeXRJsZcI7SowHshXScy47ERmhCM5aIRYhy1EwLAB1
KZL4qYxavREYE1PQYlMRSNT1GTCj9NAhqLAVWiBpiX6AZB9SfodE+pQG4onf1DGNN6I1OTkUx6Ik
BxmFg3Z0v04BVCzVEAJaQNq0yIOwqgosFgXQdHtQ7B0Wv0woWIeKZYOhG6DOecnavcoztkysXCwJ
xidnRc26ivV9XSY7VMnawRPq6NxUhvp2IH3Q6O5Qn3dBlsH1oN74dWyKklioiNqPFLipioXTaESw
pGg9QQhbAjEYAdAyF+Lf4of3Jtqny5/dyI/wmoU7JxszMQdoxHTKOYDjtFQoz2ivaKHr6442zqHc
hOOEg4UoYE4HeKhOSR58eKJ9+DfZ0w5jOBAk2cSUCMCHHXhfbhPBc7Dgem3zkPD4LvYUCKg1B6Y8
zAObfjG2KEhUGoPpzaPguVgd+zoMZVBoUeVmGGNqSNqf3luh37+oYyzw3KVib64YE+0OgxPaO1aZ
UlChQ5m2MaXBu2oSGBqOnz4hwaTGbKMrk9EDUE0fchHlLrTgWlAH60ZhjcgRqiDxdu9PCTXh4oGh
+QRu2IxF+GORkFK3cmLUoYifCmEwVUHt6JGVTKgdOhOWEakdilL3ijdI4pYHd1NV6Gm5/wBSFD3q
HkiRszFJGtR069JEdrU6NVuRhIYGJIP0Jzc80HEXRrf9aqFuXJA3JUErJMR+rVRncmYvUxaqELYo
M8z81t8zv8hr6TD0vM/y1/8AYKvP/wBSf1nof5iNu2RcvZAYBG5ekZSP0LBOVU0C2vgoxgHJ20AW
sHVONS2HU3LVChRM1Ra7vCMhmVpsw02x4pGkQN5U4f065a5jn4hvMkXET+aApXueE7lyWEpOzfmr
SLUiTuXmc3cjZhsfiWnlLfmEY3J4BaQfMnupEL8dzNYw8AydPclpCELEJyhKgZw57lE3ohwXZ3QA
DAZfIKl5HCIWmMdU3ZzLhgNulGuqZxkzekMpFojElHi8rlBjIeKfYhGDw5YYRcgn1IRiGiKAD0Oq
7IA+zHM9gUbvMaoWRUWjR+1kOX5WIMxsDiPqXmXT5l44yP2D0v4flh5nNSwiMI75HBGxy3xeeukC
dzxC1HvTxJncOM5VLnHo1EGRyiMSV+KvD4svDHHSO3rBgZXJUhEZlG/fOq/LE5RGwIzkWiKklefd
pYj91DB959N+Gh4I1uy/+lACkYj6AjfmGiKWxu29cWrVb1ykWyG0qlREd8pFG9c+9u1luGQ6hnMt
GIcrh4Z3AWOcLeDntUbcA0YhgPQ6H+BY47pBqZDCK8+5FpmkQaaR2dSMP3VninvlkFPmD4Q8Lb7B
iev5f7u0PMu7C3hijcufe3Tql9g7h6E6fFLhj2lMMYinaVGJxxPaercvnwx4Ifap3PZtDTHtOPTI
jxS4Y9pUYZgVO8qUnYsw7VEd5ParnM4g8EOwdFuwA5mXPYEwwHWtW8o8Uh2YdF65kGjHu6LVrKPH
Lu6JRmOCEWptKEY2wXqTKqnft2xC7Jg43omOOmvqUdsqqUdoQjtKYYCg6ZyGMj1CUGWZVE6wHFiU
ZeypAFnGIQ1yMhENXd0lSKj2J9p6oIxQMjpMsCpRukmMvDPegJ1BzVFs6mKpTtxVCU4qnDrjHeos
Y12rVJi+BjVMSy8SoQU+gOuzoiDTTIH1KPM3nlajGUdMal5ZquuPbH8iNuzPVMyiQGrQ1WKnIAmJ
NEDhQfV0gokdLqMxmGUpnP7E+1EZio7uiUtpZRltYqBfCQwWkXAZwLyiMVGPLAQhECsg5K8m+ALr
PEjCQ6IXQHlakJdz1T7VGWEb0TE/pRwRrS/F2/Oh1LvLnbrh2Sx+nrscDip2iGNssOw1HQZRpUXb
eVYtrHqUZxwkAR39BjKoNEbUzx2uEjdl15W5YSDLTc+8t8Mu7PolamOGSPKXW8y1h2U6SDUGhCny
svZ4rZ2xP5PT6QWkKxO8IE+ONJDeOhgWkKxO9C+ARds0uRGJCEo4HDo865F4bHAP0oi2GY4SrRRl
fIhdGEoU7i6jdtlwegX4YR8YGYTgvGQXkTLwnW2fs6WOClCpMi8BiylOTmUi5QuRi7Yjco89a4YO
Cwy3KF+OEw7fIJW7vKcve1AgynAau8sUZQtzkCXa3KLDuIXHC9AfnweP1IGUgDtA0/aowsAiEAzk
Gveyb6lxVu3MNwULYD1ULUcIhurO1EPcjxQ7QtJtG3HOUwyEuY+NcGXsq5y1m2IyA1QAGYRtCzPW
CzaStVwCzH8/H1IG/KV2WYDAIRsWowbMCvr+bX+YW+WP129DtVenmf5a/wDsFXv9Sf1n5iMuYuCJ
yjmja5T4Vs4yzKJJ1SOJTyVTXYtFsMNuaE7k9MTmcULdgcWdyeXYg93UcyiJyGhqlTFo6oA0PQ/Q
ZTLALRGLRzJWqI829lHII/1H+rXBY5SFQCWlJsohHkv6dH8LyUaARpKY/OKFyzIgheVet25DB2L/
AFom2BGR2Ba+auGZ9wFytEBotjCI+1RtWgWfjllEKPLWAJG2KuWqhIzEYy8QFWCADyIzPyHTFtZz
lgE8JSI9q4KR7lpgANpzPb6Qzm7DIVJ7F5/Mjy7A8Fk+KXahO+dNuP3dqDxHfmmFAMvQ6LcDdunC
IwB/OKHMc3pldFQwpH1o2rAIjHxzLxHZtWiAxLk+lFnlDFpOJ3nB0AY0R5L+n3JSmfvr71A7fyLR
brI1nM+KR2k9BnMgD61518U9iBy39b3py8EBiSvxF/ivyr+iNgRJoBUlPINysDQHGZH2JhTd6URt
1vTpAfahHGRrKWZKHLxoBW5LdsQAwFOsZywAR5i995PAe7FGTfAsFov7U9vd1dH7uBqB4pSyiEZz
+9uVmfqHd6EQt/fXKQGe89yjbiXtWS85F3ncNSepK5iQOEbTko2Qfj8want8R7gowjhEMOtK5LCK
jZNXPm3zgMeGPooxxjaGo9pwVu09AdUh2dUkeI8Me0r9EP3lRfxS4pdp6YQ9iyNUv0sui3ZZ4k6p
dy0R8dw6YjtUbYwiOi7dPht8EPt6929k+mJ3BGWwOhI4yeR7+i7exD6Y9g6JkYzLklQjckI6/CTm
oRtyEnmH0l1IbmUY7k21Tl7vSEFsXEV4U2HVFqKjaFWrNEQoDkmVegnJkQcShOOAFVEd/WHkzDxr
jUleVzAJicDsKeZJiMEKHScFQ+tVYp4syclnyCbPMpytRwyC0inYsU0sFplhlIYheXboMztVZdwV
JdDEsN64hpP0dUbFqiahz6k3RUKo7OhkRmFToIzjUKMezoY4GiMdhUPzqlP7qBxoFcAhIiTsQDtd
REgQSKOGULg8EAST3N0Sh7wIUdsaF9y1x8Voice5W+Zj7BEn/NPUtXxQROmfZJvQQkfDdGk9ow6D
Ihzb4h9qlZLvaLB/dOHTC9lc4JH1N9XoIXfYu8E/0vZPTDm7Yq7T7FGcaiQcd/TG9bD3LRcbxmFG
5HCQf04uikJUn27Vuy6BfhiPHHaEz/CuVg+R2LXcLQjihY5eRFu1QtgSnNSniabCp+bPVZidLGrJ
wXBRBqCjZkRonW3kexEEcQrE70RMNchSQ+3p1W6+WXI3JwCuLNG2TwyyVsCQJjQgZfIqrjs25Pi8
QvMhZAOyrepcfL25dsAtURK0dkDT1FarQJltlX0TtX+w+Hz1zP8AK3/2Cr3+pP8AaPy/XemIR3lk
LPKXrdq17V2X2I2uTvzuyI4rpoP8IRndkZyNXKZGUu8oxtcMdua1yOmGcpIiyRIjE4lEXCaYBPCL
z2ywVJkdi0SnqhsTYHoMjkjMvp2KtIjAdGqOIQ82ZkBgDgO7pxWJTBfiufl+H5QVc0lLsCHLf00C
xbzkzSlvdEzBkY45+tNEADYPkFUYwcN4pmgWqQ8uyC9amaEYhojL0nEXmfDAVJPYhd5rVcmfu7ID
CP6S8680rg8IDsPX6I2eTkNQpKbOyM7ktUhWVyRcuvF5fLCvD4p960wDD0hlItEYkqQEjZ5SHiug
sZgYshyXJRjbtQYzILyL9i0WxU1lI4k9BlLwipXnXA1uJ+HHbvPWBPFOVIQGJKPMcwdV6eAPsDYO
jy4EjloeOQ9s7AhGIaIwA9Kbky0YhyjzV0NOdIR92OSpW5LhgBmSuKtyVZy2nr//AGbJ/Wkhy9ot
duYkezHMqNuAaMep5UD8WeA2DMoXj91bJ8t/aOc/QmcqRjUoGNL17wg10W9vehCAYD/0/U0/urNZ
bDI/kUuZ9kcFvsGJ65jL7qwNcztk/CEblz7y4dUtz4Du9CZGgFSSpXp43Tq7sgrl4+0WHYOrGHs2
uI9uStWBmdUuwdMpnCIdG7Lx3TqPRcu+y+mPYE+MLAb/ABHonPFhRRcNKXFLtPWlLMCijE4s57St
PvkR9aEdgZTlsFFEHE1PaU5pl0CEai1AkkZErUX4puO5NKoomGCCLYyx6KlkGqU7udieapQKhWD7
lwkxOxUYpyQEIRxKBGdNWxODTORzRmKAYdSR7kSztkiSDGOABUezrNJwRmEIvqYuCU1ssZUKGuRk
2Dp4wJG0BPKBYY0QhagBk5QepAqnQCIGPQ/QIDE1KjIVyPTpfsVMMwq1jsWqJeOxV4TsPU095TRD
ppBicHRiH1M4I2o68Rn00RCBO0OiR4TkgOgFP7zFRiMgpgZLCukP3MhMAAmIdW/MgJtg4dNCIiNg
6b1o4Sa5HvofqRBwNFc5aVfLJhvbIqAl4oDRLtjTplAh3FO1RJxFCN4p1yYh5wIlHuKjcGEgD61u
Xlyl8O6Gj2Go+nplbzIp2jBRlLxjhl2gt15WzmKHYckNXjjwz/SHRK3LCQZT5WdNBOjs6kuXZrc+
K0frHpzCWBRsT8UMDtHSX+5nh+bLarkLx+LIaKFsqFFy6wR+pS5SXgvRNN4U+XvExMJaYBqshIVi
agrThKNYkbQq0nGkwdoQ5mFdNLkcyEJx8Jw6JQkHEgxWhgImsd4KfBas0SRIxNDjpbuUeYu3AIT8
AFTLsQuQjIROGoM/Yf8Ags/ouZ/lb/7BV7/Un+0fkNPSGUiIxFSSja5QeZcFDI4Ba71wyOQyHUM5
ipwVaRGAC1y8LsAjaieCJYRG5eYX057FEQtCBjjLM9vUEgUDisHO9eXgPQRtWgZTlQAKPMc6DzHM
YxtgcET3pyGMfCMgFog8ifEcgFG1bFfaO0/IHNBtWmMgInACspdiF3mAdI8NqX2pgGGQHpBbtx8y
9LwxFG7UMLvNnEgUtutcyZ3TjI4dw9CZzLRGJUowiYcthrNDI7kIhojKIxkVG9zL6RWNp+HvCYUA
y9Ibl2QjEfT2KN2/I2eVFRblQzUeXtwkIf8ATwhpyJZaLYqaylmT0GUiwGJWsuOXieEO2ojaFu6r
ms5UhHaV+I5mt6WEcoDd0HluXk1uP3twYfohCEA0Y4D02jHl7R4vzpbESaAZo8zN9ApZjk2cuuLF
o/Eni2Q2onCMB6ypX7v3t2pGwZDqGRy+1acJXHNwYiMMG7ShCIaMQwG70Mh4rdvEDCUvdRuXG824
xk2TYDqGXtGkRtJQsW/vb5Z958RUYRwiG6xmatgNpVvl51JPm3zvOXoo2Y43SxOwCpWiGMmhEdqj
AZDqSmcIh0bsvFcOo9mSuX8idMewdNvlo+2Xn+iEAMApSxJoBvNFqOMYue0oGXjnxSO89FqwD4jq
kNw69u17xc9g6LUMovI/Z0WrI9uTnsHRate9Jz2DonriJCWIIBdQtQgIQESSI0QGoVLmqJC7Fj3p
5VOxcMHG4Iyhw3BkUTOZlclvoFXiOxNKIHchKOEsE6EhTUKqVu68ZRPcj5cnhk4QuAvIbVGc4AOc
s0NVTGjJ+oI7SgQSCtAJZ1EbB1a9BiYknYKIgQ0ThUFB8HqvLAGkbk8KHNsELsKAYhF6HCiZAlAn
vCeDuMj0AIjYpR2dNFUYhVRjkjKQcDNaoHVH1hVx2L8pWoYHYowtByPFJquvMuSAj7uaGScdTu6A
dodbDtTHotnMFj2LsUgKuEQTVmZQ5a6SLkgAABRCQHDqIG9lSh6bF5qEm3L/ABYdH5t+Lf4o/wBy
u2npP4ke/wAXUnbZo3OMdufXZXOXJ+7lwn809Eb0H12y9Nm3uVu6PaFe0UPTKP7u+NQ3TGI9A/sX
/omPy9MOahkWk23JRmMx06ofeWzqgd+xRuD2sRsOfpxej4oYtmEJjA9BjIOCjy9604l93ebBsAiD
iCyjQOauniwfYocxZPxYlwNq5fnhbNqVyPxIkNUKAlMm1LhIkSQH6BdiQIGlwHDtW2JXlmlidYnI
Ht6NMQ5VkcvIDmLYzwITc1b0xOE41j6+j8Pyxl8U1jFnKjG7qexIfDnsBchla5iyPLGkGIjTSR2K
ViUvj26SGZ3/APB/mf5W/wDsFXv9SX7R+WEs7ZKfLMbVnBh7Xaq9TURQKuWHRERo+zFedzR0xdxE
+IoW7Y0Wo4AKrPl1dJPCtRwXBlieu1mB0jxTNIjvXBKHmEfEveIjcFptzNwD2jtTjhtA8Uz9iFuz
HtkcT8g1S9QqStMYGRNI28+0oXb513ctkez0pscsddz2pDCA7UbfLPK4fveYn9QWmOJrI7T6Fyzn
wgll5vMnTAeC3DDtJXl2Y67uUBl2oXuYOq6atiI+lfTK5PKEcVHmOeAlPG3ajUj1qMi3mT8FrEgb
054rkqzkdvToiW5aHiIoZHY6EYhgMB1dUqyNIwGJK/E8zxXpYRygN3R+E5fxGl25joj+VCEAw9NH
lrR+LcxPux2qNuGAX4YYCtw7tiERQCg6xmatgNpRvXB8WdTuCf8AcWTT86f93UfJPEGVuMtMB7M5
f3KtbkqzO/0OgeKQx90bUb0vuY0tRwEi9ZEdUl3tWKbjJlPmTWEXha2MMZddzWzy9Zb5/wByNydZ
3DqJ3ZeindyhwR+1QjjG3HUe04dWHLxxuGv6IxR00J4Y99FGGOkY9DlXOYOB4Ydg6LXL+zHjn3YK
3y2Q47mxhgOm7fI4Q0Id2PXuT9m20R259F2eUWiOi5PK0NI7Tj0GZAlKMWgN52p5aZDOIH2ozs26
GjyL/QF50i0yGpSirXp0jEp5VTQAAQMgNWRRAz6Yg1ZMVqGQojzF0NEsSEYm3GMfpQA8INAc0AWi
B7IwUpDMuq9QAne3R2yTdec6u2SIdh0gDA4qUSKkJ9J0nFPmmzVU4PQEd4dTfBlVVwRiYCQ3qR24
DoDKMJjVbuAiSItk7mWqYYnbimkGKhATNsgAOfCo+HzZUJGHanuMz4yWqBfaOqCghtDgqmKaa4fU
iO9EqR/NRCs3bVuUwW8IfAo2rkTGduZdw2NUDsR7eiW2PFHtjVRmPaDoXY+K1ITHZmrPMR8OBP5s
upC+K+WQ/wCicU4wPXhcFIz4JduXQYnCQI9a5nk508ueq2B7pbDp1R8ds649yjcGEgD39cxBaQ4o
neKqM8yGkNhGPRK3LCQZS5e4eIHPb1DD93d4o7pZj5BokeCdY7j0+ZbAl5R1SBrRG7FhA1kQaBCA
mCyABFFCMpGJjxDZRG2DpkKwO9GEw04Go7FauRLvEA9oRicDQo2JOwrA4uEYSwOG4o2JnjhR9oV2
5BtQoHWq9N7pHqX4Wx4JeORx7B0CUSxGBGKM5kmZxJqSo8rYueXZjHSYRArvUeYjNrgOJr61C/zX
Lxny02OuAYgHvKjctl4yDg/2vp6ff8uouZ/lb/7BV7/Ul9Z+VtcmNXup7RhCBzOxE3LsRcArIEBT
tRkJiJYSGfTrnWWUUREEnICq+JblF8HCDprY8y9758MexarkjLtVOjUO/qBsULYo2PVoFGxy1vXc
l3CI2yKFvnb8ZzofKsPL1kshCMCIRFIyw9QRIPcMF517hse7mULduIjEYAfIKkaj4Y5lGESZy2R8
MO1P4rp8Uzj6Uws6Raj47ssO5GFsaLD8UhFtfetNsMPQ6LR13MGjxEI3LzzkcBOun7F5NgEyPttw
hGR4rkvFL0vlWAJXT3iPayE7s/O5y5tct2QCBuA3uaueCOAiN6M5sbs/FIfVXp8i1S2PvJnAjYEI
RDRiGHV1M8jSMdpX4jma3ThEeGI6Pw9it6WJ90bUIRqTWUjjI7/TG4csBtOxG7d++uVluGxcIe5L
hgN5XEXuSrOW09fzK+VbPCDnLahy1o/FubMYxzKjbh4Y9T8PDGQ+JM+GMd688hoRcWY7BnLv9DK5
MtGIcowIOmhuSw0xxEQhGIYCgA6hMazlwwH5xUeWgfi3uF+3xFRtxwiGHWlMVlhAbZGgUOWBcj4l
6Qxr6KUh4maPahEnCpKneP7yRI/RFB1Z3sRHgh3YqEMrY1S7Th06IeO5wx71GAyHRd5iQoS0T+bF
T5iQrMtH9EdEpZgU7VGGeJ7TXrSmfZDoSOM+I96MtgdajjMmXrR829ESHsO8vUiLVuVyciSSWiKo
i3ptD80OfWUZ3Jmc5eIlPjuUAAIjYEBs6KVK3lajQZlUw6BEZobV2ocMjLMlaQWRjIUjnkVG1jEc
UkNNIxH0onUQDkEJDEVRlLwxDMNqrn1TuHREfnegY+E4o+6cCtAIG8rhMZDa6ckGR2dNVQU6KJwj
MnjFU4rOOW5V8UsV5oIO5VwWCpRYB96eIaKEY1YMEJyDzljtQMwI7AwVW1Q8KEBiVbFucTICuooS
uxbaxWi3Ft5qeqfo6JR7COiIn4c0Rvp3LemKfLNPHeCVviSFMDJk3UlaJc25EdyMThIMe9Tsms7b
x74+FQkQxIqN4oemUDhIEetaJeO0TCXd15Ygx4g20VUZjAjo5f8AqApD7q8cmkaE9S7yx8L+Zb/R
LP8AT6CVvCF4ao/pBgR0xvxplI78ihLPAjeMekgUkKxO8ISzwI3j05A8QrE700m1xpIDoIvlwQXj
jTertuEIw5aZIgM8cypRNGOWCotfNatMhpEhUB9oUbluQlCQeMhgQvxVvxRpMbRtX4W/LTbungJy
l0PCk41iU7gnAttQlClyOBFD2K4MJDxA7VPUaOtrLDo1fQsESof0/m4aKeWLuMdzq5ytwsInVA5M
V2/2mf0e70lPl/M/y1/9gq9/qS+s/KpzGIBZSuniJJWjURuWkTLdqqgIgknYhMw0R2yogb8zPcME
BatRG9qqQaMTnMirKVq1IzjGhO0qvUbb1Pzul4hMIkptPrWu9LiPhhHFSvW5+UN2Y3ol3mcSiZFh
vQNuJFgFzclQdyEBgA3yDREGU92A7U8SzVncIbuC02xTEnaUCQ3ozOZ0xGJWqRlDlhg1DM9ijKcR
a5YeC01TvKYBgKD0D5LyrMO2cqRb7VKZYSPiIDBaLIa2KSlIYrTANtPpHNAEbdhyB45eEetfhuWj
rvzrKeQ71QeZzd3IV+tG5dOu9LxSOW4dJsWDX25APpWmAYfX1QZAyJwjGpK/EXmNw+Ee6OgWrXFf
ueGOzeU8jquyrOe0+mJNAMSvOI+DbPwwcztRJLAVJR5qfgHDZjuzl1xy1o8UvGR7MUZYRgKDaUb9
3727Uj3RkOoZ4nADaSjbnxTk0uYngwxjAIAUAoB6GXLWYiQgM/akTpcdiEH1SNZyOJPVNw/d2OGG
+RxKnzB8EeC19p68py+6sU7ZOpXpeO8XPZkPRQt4wtccu3JaIeKZ0jvQhHCIYdQkeKXDHtKET7Ic
lSvHG4XHZ0k+xYDf4j0SI8UuGPaVb5eHim0fyqMRkG6LVgbdU+wdeNoY3JAd2aAGAU5yLABXIWoz
hO3AmOoBj6iUZyrKRcneeiA/eA1O5XLjiMYuoTiXAxTwm0dhXFLUdyIjVtiZ+1E5rTAYYpjRE7Fu
+zoGnJVFUXHqREabHUzdFZFnRiBn9C8y5IWo5CWJRt2+Js1p9fan6sz3dD7HPXpijIZYppDVErVb
kYlcRcqgbf6CURmqUOxcVE2XU2JndAniiTiohTEsiwHRGeLLU478VEFtoVOqWUoyDSemxkxxIb1V
6H2LViBijGVCqqlQrgJZpFl8VyJSppU4cvHTqFJnFkJa9QzBq6hej4ZgEd/SRhG7Fx+lHoNeG9F2
/OjRXbJOB1R7D1CMI3g/+KPXqrtg4QLxH5suidiVBMY7xUKD+O2PLmDi8aV6bfMxNbR4htjIgFCQ
qCHB67w8cDqj3KMxmOgwlgV5M2Gqh/Sy9YWu5KIJ8IkWdA8sDqia8VDWr4hlCEp+VdkKiVA+x1qf
4d31CXyAXgOGdJ/lUrrOwoAroJac5t/hW5eWB8SNYnaNiboFnmJn8LOhBNIHapC1chdhIMdJEqFX
bVwl4SeB/NyZDl78wOZtBg/tx2ocxzB8u0aCRzX4jl5xu2ZlpacQexOFK5aFZA6w2K1DLEJ0xAKY
RY7QqSfcU8W9QWmRZtlEIxBlI4AVKsnm4m5c0MbeEtGyTryuVEbVoeGUw8mQne5iVwD2BGLFCPNW
TCJ9uBdu0IXrExctywkP7O70/wA98z/LX/2Cr3+pL6z8qmDsOKnEFwCahEnAKgcKNuIeRyFShO9A
SumtasqdMoO2oMp3oHUCSWTHEdVjimKcp+gOq4JojvXA0rm3YjKZNyT4HBeVbi0malAEAHlOVBEV
JKjd54tHEWR/9SEYARiKAD0lSy470I9sgnnzECdkS5+hNbhKWwlgi0xallmUNUjdPtiIqe2RWm3a
Fq3kAHKi+ucpHwqNxtL1EHqBvWn0JnMsAhcvGn7mzDGXbmhf5upHgt4Rj3ehM5lojElACWjlziAK
kdqfwxHeShOXBYHhGBKEYhgPSGciwGJRDiFgVeJcnvX4fkjHT+8uu5CEIPcvzoNReR9aN260r08S
2G7p/D2vvJYyyiFpjiayODnq6jWRpGOZK87mOKZ8EWYRHZ0NEarsqQjtKlduHVfueOWzcPT/AIS2
SKPdkMhs70IxDRFAEOWjSON2W7YhEUAoB1jPE4RG05IznW7crM/YgXexYPdKf93UJNAMShdhxOdP
Lx2nAyZaX1SNZyOJPoWjLTduAiGZFMV59xzduRjSWMaOfpPV0w+8uHTAbyoWLf3k2i/7UlGEcIhu
sdPjnwwA2lW+ViXbiunaqehMjgA5RuyDSuEk9mS/NtDH849UR9myNR/SKFqPiuHSOzNCIwAYdEpn
IU7UNXjlxS7+i3bbhhxFTuHwWhpj259N2/k+iPYOv+baj9J6I2QeK6cNwUj75buHSHRBANueBVJa
YyqtNuVAuKRULFvgifEcyiBg7epBGcSQ9WyVfWpAYpzR1DSXJFSqYlN0tnkU0ouRmqnSMERENtkV
pHWkTmVipS2DrUR4xE7CpxJwDEhMcFgqDrknJVo9E8Sx2qrFMzstihblQyzTdbzI+I4700IEqJuN
xbEIipOCEX8IZDs60nbhJxUZGlfrp0FGLUk1U71zT9BYMTmFHy4GUoyy2FRlK1J9LyAQjAGUjkMV
asyqYRY9vTC6zm3Ieo49Eboxtl33ZqzeHhl8OX+Ko+rqC5EPO0RIfahIYEOO/r27xfi4D0yi7Q5o
aojLXGhbt6TCVYyDFSsT8dk6e2JwPoJWWaEuK39o6ReFHpM7MNMlzdi/bjGcICNszq8va0onoiLp
e5aJhqONPCfUmJ44cMu305icDRXrJ1aokCMtsTv6QYliMCELtsESwuAtjtHSL3LzMJjMZ9qtX5xE
LsY6ZgZkZqF22WnAgjuUr1+1A2+WiP8At8YE+9VXL9q1GELnjsw4Y9oQABBGGoNRQszvebeJYW7Z
pHfJkfLYRqYn3gjtKY0O/oYYptmxR8y9GyMDKT/YpW7eqUbETK7zQkzEYMRROZm5bfSTIuWO8qcc
Q7g7j06rE+B+K2axkhGJ8rmM7Uv/AKTn/wAGOZ/lr/7BV7/Ul9Z+VSh7wUhcHAS4KeP0ICAaOcti
4YvPOZx6xhIUKNyI4JYt1XQkcdnRQJ5Cioje5iTQjkpR5YNBSERVsSpA4uhG3b0WPauyB+haox1X
M5mp9EblyQjCNSSiLcZXSMGoCms6bEPzay9ac8xc/WKAu37hidsihpL7XK35Khd0bdvxYEpya9Ak
MQvLf4xwkaAIW4PdvSLGRpEOg5ctXr8TmZ8MBiShKcTd5g+CDvGHavNutO8cZNhu9DxniyiKkqN3
mAwBcQJ+zBAOBI+GJLI3eZOOFuODb1TD0j3JAH2Y5nsCHMc0ZQtCotzIEf1QjbsiVvlgW1jhEuwY
qNm0HuGkLccSdqN668r0sTIvp3DpEIVuy8MRitUq3JVlLPq6iDKRpGIxJX4nmR8U+GPuDo1SDk0j
EYko379b08vdGz0/CHuSpAb1UvclxTltJVKzlSIG1MS85F5Hf1iTQDFefL7qFLQ35lCxb+9uU/Rj
mULcMB1PJjIRgC96b1AFWHavxEgzhrUT7Mf7/Qyu3C0YBypc9zMfhxLW4kYgYdY3j4bVIduZUr58
MeC39p68+Yl91YBEBkZZlG7MNO4XPZl6KNkY3DVvdGKJNIxH1LWcbh1Hv6hmcAHRuS8dw6j9iMsr
QYDeem3YHhHHP7OhzgFd5gmsi0Nu5Rjmay7T0SlmzAbyoxzZz2nrEnKqlcONyRPdl0CyPDZiB/ik
hEYRDdNYGWxkNdvTHaV5MJcO7FkL18uZYRxQgLbROJICiY0DoXAxgagpzgMkyBmHzAUpNwklwMFo
jggNnW4g6oFVSuTofZTSy6dyJURtc9Fw7gE3VdYKNqIYyNVuPoZNimY6o7E8qECqeJxWKZQl7I8R
QW/qtIONqJgdJ2LiIYYVWqR1TybJOfDFbutID2lTEIS2h1VSi2SMiciVWh2FAycPsXDQ7SsQChCZ
0ykGD4FOIgHaB1JQw1AhQkfEzHtClA4SDFStn7y28f8AFHBQue8K9ufSQcCGKlZONo6a7MuvKI8W
I7RVQuY6hXt6PNiHnYkLkf8ADioXo0EwC3Tbv+zP4dz7D6AXIjjtlx2ZqM44SAI7+gwkxjIMQrtu
5LUNRMTjTL6OkzBczmXGxqIXQwhPhuHfkfkHMQiHOkluxEdL5ZhOPCcOrzFjmL4sWpQ4iSxPYpct
yNgXbk6C9Osu5CXP2L45C3ExjdsR06T7z4K/dMzK5Itb1tq0nGR3q/atnitjVaidgUqGMolpDsVQ
+/oNw4mkU+fRd5eIiIXZA6m4qb0+at8wKyjwT7sOmiF32bVSd/zqel/m+nTRP828zs/DX/2Cr3+p
L6z8rMZxDnNMD8F/oQhbFBn6CUCK5KVuQo9D1HPq6MGQBrLYEIgPdnhDEjtWq8TEbFG3AljtQEhq
mRUlOx9dF58oG5J30y8PqQjECMRgBh6OMRSBPEmFAsV8TmIWYD3jXuQlKUr52mVPVFavIhTOQB+t
QtRswnXwxiMP0mVzmfLiOZmGgAKRO4IyJcyLk9SIgc1DmOZxxiD1/LtASunAF2G8sjG3LzL58d1u
GG4BcLymfFM1JPodFuOueYfBC5dOqeO4FC1bJndPsw4iO1G7ded2WBnUgel8u3A3bxoIxwHaUOY5
sRldHhjEYHtKM7sRCy/DEhyVGMQDM0twRvXjqvTqdkdw6XxkfDHavNuVuyqd27q6pF5GkIZyKPM8
ydVw+CJwgPy9BnPAYDMlfiuY8Z+7hlAenMpUAxX4qfZCJyCJJYCpJRvTHAKWx9vXHLwwobh2DYic
IwFApcxd+8u5e7HIdQCuuZ0wiKl0bT6rcJGV6ZxnPKPd6IcpaLWYF5yG7FRt2w0YhgOqIRPHOkdq
jYt/eXOGLfSUIRwA62iH3l3hgO3NW+TjgOK4d39/o7l32Y8EO7FQsAsbhr2CpTDqQ5ce2Xl+iEZZ
RCBn4pcR7+gyOAqVPmJVN00/RGHQWqZcI71ZsCkLY1T7cum3axiDql3dcxHimREd6jEeyGV26Dpk
A0SNpXmXCZyJMpSNXVcekHWYNsK1TJuE4PVAxxNSF5cSwGCNy5ImL0dARpEYyUbYqIjHpcYhGc3A
PsrWM1RDofq6bnhNAd6bU4T9SR3IDYOictp65KgNnoXlmtsJYIyjnRTjtFEYnLJOcUdJwwUQfEaH
uVT3dbctixO9UQ2mvWZQlkQ3qRj3qJPZ6uh+5ETLAhn7VOzGsgWByKFkhpwDHuQ3qocmqAfHwy2L
yecGq3lcFSO3co3bUhOEsDEuOpetZA649kuicfZugSH6QxV2x7p1R7JdSEvYvDSf0hh6C7y4widU
ewjoIOBoVd5K4a2y9v8AR6ZWzmKduSGrxx4Zdo9BPl8o8UP0eiUo8JETXLBXxNjcnIyid4yTZrVI
MhrwuSM4diMJBwV5c28y2wk2zL07GoOIV2MY6bc+ODYMeppl4Smx2EdQSGIRJwtwMu9XOUMY3rES
XtTxIOOk/lUp/wBOuDkecIrZlSJP6P5ELMLkI34kgyEnBCnKceJ/iAYHsTjA5LSO/sWkeGNAmQit
MfDGg6LkD4ZSB9SlZ57XGEqRuA0id6d/M5eVbV6NYyHb0CcvurlJj7UJwLxkHBH/AAX5n+Wv/sFX
v9Sf1n5mN22OMIwkGIoQqpoCuZKqgSEIk6YZlRiLk7V0D7/CanctXJ3/AP7tyiaI1kmj4dyjzXMP
AEPC3+X00r9+WmEQ6YDRy8TwQ27z0vEkHcgRcMgPZJLKOmLSOJLsO4L3rhxkomdzTJ+GG3p1TOmO
1abfEEL9yLwhWuDphh1jbtY+1M4BGFoGNs/eXZYy7EIWxpiPQm3y5ZqGYD17cEZODKIec8TvUbf9
PlG/I+OYcwgPziEZMPMl4pAMPSnl+UD3PamcIrVIid+ftHEncvMvyMh7MG0j1IAB5mkIDajfvcV6
W2ukbB06pHsG0o371Zy8IPsjq6pVkaRjtK/E8wPinwxygOg3JlojNfib4aEfubZy/OPyDy4F7MPG
RgTsTCg2IctDwit2WwbEIjAUHW2zlSA2lFy85F5Hem/c2TX86X93UMpYBC4zXrrxsQzhHAzKFuOW
J2nM+hFm1W9doAMQNqY1uSrOW/rSvkcMeG39pU+YPgg8LWyniPXnzcz8K08bb/8AMVK/Px3uLsj7
I9ETHxGke0qMBkK9uandLGMBoh259W5fOHhh2BQsj2i8uwJhh0RsRPFdLd2aERgA3QIvw2hql2qV
2Rc3C77uh1cvZEsOwdeEMrY1HtPRa5cHxuSido6kpDJAYyIcIykXJQcER2lC1CkQmHVxqmy9Dbht
kjsHVIUt1OgHaT1yidnoSGcbkRJ9G/JcHiCY7FqhR6IxuSoA7lEO+/cmwOQy625V6Q+G1Bii+WfW
jL3T9aD5qUdh+voKqKKUpACAIbuXmZtiNiB2Fdi0nuKAiDvZCVifC7m2fCT2IR+7vNWB+zpt3APG
DCR+roheGNuVew0KtXvZlwS78OodPig049sS6jMe0Aevbu4A8Ej29Nnm4UDkT+x0JCokHHS3sXx/
zj0Eb0fFbxG2JxTjA4KUQ5cYDFStctIicD8QH3jxIX5BoTwGQlmojlbfmQBAlMYR7VC1EMIAADs6
OXvEGXmkxuRGcQyF6zISjLZluPp481aD3bHiG2HV0nuKY0K4g43IXzJtRLR3Dou3QW0xZRvDEFzv
Rly/Mfg+fA4rM2MZHaMD6lOF6YldhJjISdyMwUY3PiPR5uT60bTaJ0IkdoUokNcNH2hYd6coyzlQ
dAhANAeOWQC0clwX4ChwE+1StX4mNyJaQOKlZBE7MsbUxqj3DJNcOiRNNiYYKNjmuLliWBzihd5e
YuWzgYl/7Ln0TdevzZTo5n+Wv/sFXv8AUl9Z+ZeKQA30UrVmQu8wQwAqB2qVyZ4pFzl0OU7LRagT
Lbkhcv3Dbt4tgV5Nr4khStQhC3GVyRwjEUCHM8+BK4PBbxA7Uw9NGySRajlv6olOssop7bEH2clS
zE3DhI4BG9zM9cjlgB2LBMAAExDhCYiIWAeKRz7FG1bAjGIYAdVzQbURbkI2h4rmZ3BAye3y+Ufa
nvJQjEMBgPQaplgEWiRaNASdPeAMURrFqIDAkqF21ys7f9OcR87DXLfuWo2ZRsTPjkGHc/pDKRaI
qSvLsSEOX9uYfUdzoWbMTO7KgGJJ3krzrlbjNtZOGMj4RvXn363ZYDIDpM5YBefdwHgiMG39XVM1
PhiMSV+J5lzdPgiaiA7OgzkWiKkrzbkW5eB+HA4yO35ALVqt244G4bUIRyxKeNbkqQG0ppF5yrM7
z1jKRYCpJX4qYOkOLUT+0o2LZ+LdoGxiM5KNuOAz2nM9SUD9xardb2jlFHmLsRG5OkYj2Y5D0M79
0tCAc/kU/wCpc0OO791A+zDrRtQ8d0iI3DMqNi3Wc2hH7So240EesLdsPO7wjszVrk7Y4A0rn6I/
KUww9EIM8bQ1HtOClM5BRB8RrLt6hA8UuGPeowGQVy9lHgh3Y9Ny97EOCH29BOxEM078v+UIRGAo
OiUhieEdpUY4ECvb17t/HXJh2DonB3jaAjFtqYu+J6mklhKiDisaPuUh5Ruk4Udk5DSOQyW/rbvR
Q/Nconf1QNpUzv6LfXJVwnL0HhJJVaE5Ix73VaBDN80RkjIGsmAb6lpcVxBqtQPFHEdDP6Ag4FTt
mpgfoTSDHZ1p7mPqUTsKkHxD+roZbltEkYn2T9CMPUtM6b0M8lIY7k49SEgSCKghC3znHDK4MR2o
XLUhOBwIRnHxQIkO5CUcJBx3qUdoUrZ8cQY/4o4KFzMivaKHqXLB/dycfoyr15RzZx2hRluY9o6J
286ENjQuvJJcxDxO2P8Ad0nT44cUe0KNwe0MOuxwKlYqZQ8L5gqRuzMKUMSxHersZXvP1nUJ5kZa
t6lak8tQeG6SJ5a4bZfjhjEkbQVCxztoHWRETt0LnbE9HLyLmGgtscFWoxkRGerXHIhvTmMg4IYh
fiLA/wC2ulx+adnVY+IYJjQqIkXDtDs6JZGcm9SbNBpNKLcQ3Lyufti9EfvBSY71+J5GQvWZBwJe
IbqISu2tVuBecTuR8sNGJoE0y0t60x8OZR2YBQswHFIsVGzaiAW4pZk9B5mxH/ubQct7UULcYkzJ
Zs0Jcz8KGw+IoR5VxetijnxMpW5x0mBYx2FfAuztvjokY/UjD8RKUfzqn1lWzO5KcZy0yjIkhj/w
U5n+Wv8A7BV7/Un9Z+ZLk7VJiJYom7enIvVyU5qenSzvsRjK2ZCNCvhWjE7SyaUiI7AhG3Ei2Dx3
MgELNiIBHilmT8gF22HManpYKlZ/UnNSgh0V6BIR0Wc5yp6lGzboIjqmUi0QqxIs4CLHVM9gQucw
AAPBayiN/ocHkfDHB1+I5yflRHhgC1N6/D/022ZzwEmduwI83/WL5EI10O57Ng7lCzy4lHk7Hg1k
mIbPtQ5DnWfCE2YH1rbA4H0TzkBsGZUZ3njZBfyyaS2UXk8qImWDe73IGZ1zGf5E5ck4ALzrlZkU
zbpMpFgFG6X0jCJw9XV1yqTSMdpX4nma3D4YDCA6HJYDElZjlIHs8wj7EwoBl6c3JYDIYlSv3fvb
mXuxyCJJYCpK/EzHCKWgdm3rxsQPw41vEGvYjI0jEUH2I8xd+8uYD3Y5DqC1bPxZ0j9qFvGxZJMt
k59ufotAf8DysmmcrlwZdgTCgGA6rqXMSFBw2+wZqV8+CHDb7faPWc4KfNSLQi4i+wKV+fiul22R
GA9EZHAByjcPiuHUezJW7OIfVLsHVAxhZFf0ipSGOA7Sow3Oe09EjHxS4YjeVGGbVO89GmJ4pnSO
9G4S8bY0R+3ptWfZjxy7sOvMjFmHaVGGwVVyYLGMSQewI3LhckmciU/TROMXTXIiRG1cIEdrBU6r
9VusTsV2Xuxb19YbgSidp6ID80dcq6fQEGOpPHhOxNjRihKFSckwJDKM51EqowiGIIIHYhKQ0zGI
Gak2GHetAqMD2p80DIiEBlmehutIZSQOfWlD3okfR0Qltp6x09qAOVQiYyEJYkHNPtVarsK2UT5F
MSt6MrEmi9YHwlGzLgvEVhLOmW1eXLxWyYHu6J2/ZmNcftV6x7Mj5kOw0P09S1eApL4c+88PoLlo
4PrgNx6ZxjQQImN8JNq9TJxUHDpnYfhl8S2O3EegHOmJmLVJxjiQfyKMuVuEW2OoI3LhMpSzNU4L
EYEIXY+0OLtQkCxFQQocrzB0c5EM5wuNmN6lPS8rUoyB2DAq7zIjqlY0/qycSZAguDUH08+XvB4T
Ddm9S5eY1DGEhgYrwlYJ2VaKruoWxhGP19FuGAZz2lG5ejqtRH0lcJe3OsD9nR+EvH4N40JwEl+J
5UaR++EKONtFVu3Ary4SjEYmUi1FHl7J82WM7uT7AVG3EMMAhdlH/uJB3OMVWhFJDf0m9GxCNw1M
hEO/TPmbFuVy1eOp4AljvZcHLzY5yDD6VxRjAb5IXuZIuXR4QMB/Zxvnjmf5a/8AsFXv9Sf1n5kn
5x022LkqZ5f7pyz9DriLBGPL2nuHG5Kp7kTpJJNWCELNqRfEkMB3oT52Ru3M4CkQha5eAtwGQ+Qy
jc8LVV2NisAceho45nqUTURhzMiBkHZa5A3peyJ1ATAMBgB1fKgfOvZxjgO0oSJBmfBZjgN8ivNu
nXdPqj2ehFnlIxuyH3siDIR9SBtQPMc1MA4Axj2ALzP6jdLf9OJXDGMHwjEcUjvQv8yTb5UHhhtQ
tWoiMBkE8KXI1iRRHkefoRSMzQ97psYnwnIj0DCJnLYF53MEPjpbDvUrPLmTDG7HD1laYCucjiT2
p8ZHALzb1ZnACgA6XyCeVLMTRsSe1MMOo8zU4RGJK/E8xxXPYi7iA7Ok2oU5ePjn7x2BCEQ0Y0A+
QazXl7R4RlKW3ojy0PAC94jZsQjEMBQDrAQD3Z0gN+1COMjWctpWkVs2TxbJS2dQzOA+1BqX7+R8
MI/3gKNuOEfQw5LlT/3F+hOcIZyUbFoNGOeZOZPWFiPiuFidkc1Gza8c2hD8qjbGEQ3WjYh95dp2
RzKt8pbpHxXAPdH5VTD0ULALGZeX6IqtyuXzmdMf0R1JTOEQ61y8dw6pd6haGEeOX2dMYisLHFLZ
qPS3s2xhtlLBCI7T2nodXb8vbk0P0R17Vnfrl2DoNmP3nMcI3DNGW2g6jlRG2rKRwHyI71eni5ZD
pp0S3R6ANpCA2AdeWxlcPWYV2riNNqIhVsUCM0XAMclIS4SKxRnAh8xtXlAHWEH8USjJmYUUqULN
2rzCdUj9CoxRMnBejIA1br2pbaFAqnVqpw2SIHrUSMYn6i6foZQc+J07MBiQqJ0+xQkMMCUN6Bli
R9S3hCQwK1RkQYNpIoQylHmzqjcI+IMjhVCdsiUTgQrd4V0Fj+icVa5gYQOmf6MqdSUc8R2ioUZb
RXtFOvav4V0S7COm3fPhDwn+jJG0cbZYHbE4dMb8Q8rRB36c0JDA1B69y2cxluU48u3lAAsMiQ5C
cZZJkbcs02xRnA6ZxIMZDEELzro4rlqQn2xBD/QuZtwLwlCJY9pU7d2WmFvijOWGk4+pMLpun8wP
9aI5abXI1NuVJN6adykbtoPGe4ZLVcJhI1iJBhIblRMR6kSvNIPlW6yKlPaVGI7T2LTHMsAoBuKf
EVKzei4OBzB2hSsTGHhltHQeT5meuzciYgyrpTCcZO/hLrVEkdiafhK+EW7KK7bnI3YgAtIknuJT
xoCOIHI/PdPmfZ1q9anTu+bOZ/lr/wCwVf8A9SX1n5knC/MQBBxLKcLZ1QEjpO7o1ynxnCIQlaiY
2/flggb8jclmBQJrdmAHYE0YiI3D5GbNs/GuhhuCMpZ1JTRoOvCQlpANSoi3PVICp6seUsPAXPHc
3bAhCIMpyNGxJQEgPMlWXoZ/0/8Ap0JSIpcuRxG4LzP6hI27Ma+Tqqd8ijy39OsC4Y0MhSD/AFlR
5a8IWdZYTFG/WK86/wAyObtCoAyOw4hCMQ0RgB0+bapdjUHbuR5XmaXI0BOL960y6xscsdV0CpbU
y828fMu4ynL7EYwjo5cUlKYrLsCEIBohapAk5RGJXnXgBM4R2Df0ucEYhxaji7hyhECgw6hnLLAL
z71ZHwxDgAZdJ5eyWiPvbg+oIQgGiPkA5W1jL7yXuxQhENEYLhrclSA3piXnKs5bT1jOZaMQ5KPN
3BWQa1E+zFCzarduUG4ZlC3HAfX1DdJfl7JYDKcvtqjeu/fXKnYBkAPQm7KuUYjEnYpczfrzF6sj
7oNRHu6zmgGKlflhKkNwClfNYW+C325nrEmgFSrvO3KCotjZAZ96lfPiu1HZl6Od44eGB3BEjxGk
e0qMB7IbqW+XHtHVLsC2AKd+WNw8PYOiVw4RDrXPx3Dql39BkcAHK1mokdcvs6ZaS0pcI71C37ob
r3buUeCPdj0SgC8LA0Ab80I7B1GaLbc1Mj2cVT5ABdmIkhwM09lzEYyNAtiJzJKI6lKKfcOiA3hM
MR1y+BUtAYSy6zjxIm7SKLHhO1aIhwNhQIDAZZpiaIG2aNVatqBieIh0YFyInAoRIZCBwl9BTYVb
0NuewoHs68j7wEvsUonDFQlu+qnRqQfF6dQoRORr3rSKmIdCUkQjE4IwLgGrkqQid47FGyZHyrrx
0vQSxBUobQyMJZgxPaog+KHBLtjRcIptR5jmLkYWxiSW+teZy12N2Oekuyu2TgTrh2HHrzt+8Kdu
ShM1LMe0U6JWz7QZW7sjWB8q73Uc9JBqDQqVgkk2jQnYfQSMAwvQE231H2J0+3o8wY4S6LvJcyf+
35kMJk0tyYh+9eZdmI25QMTKWG5SM79qXKCEiDYLkBnYgsualzUTc/qJlH8LKJOkRp4h63Q53lj5
crMhhhXJDmbhaOjW3c9EYx5aUiNsh9jr4fLQA3klC/a4ZClyB9k+iMJAEEMQahR5eHJ2ZRtlwbQI
m35oKML1udmY9m4JBUgO+v1ojyx2sAhZiRGAxjtVRp35etfD+Jen4pxqIjYo6g8Y1KERgKDo0y4b
sfBNG1dB1j6d6d2bBC5EkzbjfauxcMSdgAQjbszL5sVpNbs6zP2f8HuY/lr37BV7/Ul+0fmLySDK
YxZfBixK4zigHfoFu1DVMmgULV0CMhkPks70y0Yhyp35lw7RG70UAZHQ+CjMBhIP0kRINxsMW7gp
XLstciWiGb1BDmL4e9IUHuj0JZcxbuzaRJYDNyrn9Ps6oSBacsHG7oeJYo8hzDCUzwyOb7SSmxif
Cdo6n4izSQ8TIWpsLsRQ+8q9JlItEYleXyxaz7dyv0KjRiMZHM9pWu7PTy8fBGDjV2lNEMAtUjXI
bSvPvlyfBDARC13aA0T+bHX7rhVZsmUbbiMJVltZCMQwHU1T7gMShfvdsI7O7p/DWPEfHPKIQhAU
z3n5BqxkaQG0oynW7Osz9iJJYCpK/Ez8IpaB/a6/kitm394cQZe6jOVIxClzN37y5gPdjkOoOUtf
eXPFLKMc1EwH/bWKW9kp5y9CSSwGJRvTD8tapaGUj72/rxsRxueI7IhC1apOfBAfao244RHWjysC
0rviOyIxVrkoUiazbKEcu9MMB6I6fFLhj2lRgMgAVCGMbQ1y7TQdR1O/7x0x7AtMfFcOkd6jEYAN
0W+WjmdU+wJh0EA1mdO/uQkH4ttC3TbtYi2NUu3LrynsCiDieI9pVy6cIRJ9QRuTqCTcm/rTigKr
0knJR964dR9JUs9AgJVlKkYjEo2JR0yEdWL0Urds6bFvxzzJV8wfyokCL7kYyiLl+6eFw+kIW4AA
D60I5NVCMcEel+g1xl0W9xdP1wm9XWJAcjAImZMYYsiCK7VxUAOaEoYHELUKFSBRBrEbFrt5IyEW
2naniChrD3BihInCvRTrOidhBUTjRuvbntBHqL/au0LScj0uMiCECn6JWzngmHai+JCByKIjFyCF
JiIu7OvKuDVP6UaMLYYoTjSUSDE7wXVu7HCcRL1qdv3uMfUp6yBbvR1DLijijy3LNa5eNDcAeR7K
oRvTa1CkLcaRG9tqMbz6b7DXlGW9W72UTpl2S9Bes5Sace/HpuWT4OYi43SGK0XPvLXDL1UPTC+M
Bw3Ow+gsCJ442+P106A6dEHwyoUxy6JA4gIQBkYSpKMTVjsRl/S43RygAJ89tRlm2lXbLP5kSwPv
RqEIXL0pRAYBywCr0eTcPwuY4TulkfSaL9qN2OyQEvrRlys5WJ5Rxh6iiYG3cGwEg/SFq5q75cvd
jX6U0rtwjZT8iEuXvmMTiJY/Qj5bymcZHqefbgZXrfu+7nRFoktuRt+F8aVXl8/OULz8DkCEkBZs
QA2sCfWmAb+17fOHMfy1/wDYKvf6kv2j8wlXC/8A6dAnNFk8IGRGwIW7Nszk7YUHagZ8V+XiPyU3
b0hCI20UrYnpsA8MI57yq+ijIZFWyC5Ar0GEAXzmaAdm1Ss2nETQ6aP2nFDnb8aD7uJ2+hlduFox
QjYtaBdOm2+J3oXBekObuYyBxJxRmTq1VJNS/SLkC0hUIcvel/3EBiaOiDQjpYoXYUi7uNqFyHiF
Jx6Nd6QiMhmewKM7onbsCotktq2OEI0BwhAYnuQv39Rbw25YD/COjUQZHKIqShzF4NL2IGrJg0uZ
n93D7SvjXiQMAKAIPMlRhCdIl6qN0l7kmBlL7EDtHSZnLBedf/wQDgDt6Ry9kvenmPZG1aRWRrKR
xJ+QGRoBUlfiJ+AOLUft6By0PCK3ZDIbEAKAUHWFu3W7cpHdvQtiuZO0rQPuLJ4vzpbO7qGZqcIj
aV5T/wDcX63p+5DYEIRpGIYeh/8A4dy5r4r8zgI7N5UbcfDAADu6xlIsAHJUuYljcLRByiCp3T4L
fBb3nM9YyOAqVc5u7R307oBS5ifiulwNkch6OMfZshz+kcE5wCleON06h2ZdTRHx3DpHeowGEQy/
Nsj/AJj0OcBirnMy9stH9EdNrlYlzGp2DagBgOgk4BXOYl+9lTsFOvbtDGcq9gTKcAWleIgOzNTO
c+HuHVjaGMiyphEMOo2ezrmUsPrRvypYtOIx2lXuc5iT3ATGENg3K9zU3E7oMYPsQ5OIAncNSPFV
PL2ay3yR5m795dqB7sch0412J+jb0FR3knofYD169BmDxwNRu9ATHFEyGmO0ogF5FCIHEMymZajQ
FUzK0iJJOaY03IkYlAE8OwLSDlQrTEmmQUdfiIqm6sgMwtLEEUQkesJe5L6Conepx2gHp7R9SHSL
jVwJWrMUK0yzfuWgVAUhDxNR00yddXVqVstcEm7kZRA+JRt6rQ5o2iXNmRHcahW72UZaZHdKiuTi
4MCCJDEVZEbCUAcEADQh6qwZESlK2A+NQok+IcMu0de3eBbQWkd0tvTG9EPKzISpszChfiHt3gBI
j/lPTKEsJBlpPitnTJ8adfzrcX5iyCYtjKOcUcQqdOrMUK+xS2Mp3ZxDwLWy1a715YnIRzrir3MX
G80kQtE4B8frU4HEE9AChdBcCQII7VamcZQBPq+SmJwkGPepxjCQsEvCYFCO1NfgJjazFOJShuQh
G8b9n/p3Kt+iVLynjch4oH+wdP7LU63M/wAtf/YKvf6kvrPzFKOAxPrUbNgcW5CXNy1y90YI2424
iJoWCIsWxBy5YY/JZXZloxDleVbjosQPD7x39Vh0V6oUOXMuPIBGNu5Fx4m4j/cvLtSrnIElCPsi
s5qNi14Y7fQwGRlgj/UOdaJEWtW/dH5SpX7hLHwjYFolgmyyPTG/bLGJdRuRkPPA4ojLqGJzQ1Ei
1IsdmlCdkG7qw04Anehd5hjLERAw71pjEzmaADAdpQu39JuDBhh39GqRA2DavOvO3swIZlc5m5hA
OBtOQU+ZvSeUzQbBkB1LIuyPliQookBgRh0apdwFSUL93DGEcx29Oi2NV6Xhi/c6cnVcl45b/kPk
QpbjW7L/AOlACgCpWcqQjtKrWcqzO/rGcsApczd8dzwg+zHJCza++u0i2QzkhbjlidpzPSSaAVR5
mdYQ4bEPeltRnNzduVmTj2d3oXtgSvTOm2DWq8y48uYu8V2R2mrdeNiNdVZ/ouhCFJz4IBRgMh9P
Wjy0SxlWZ/NH5VDlYUBrNsoBMMBh6KU5UEQ5WuXjuHVLvQtx8V06R9qERgAw6m2Nof8AMf7kZHAB
1rl4rh1Hv6BaieO6dI+1RhHCIboMjgA6u81diAdREG2dJiPFc4R3qNuOEQ3XkcrQbvPRDlol42Yu
f0pIR2DqPsRmcLY+kreeknMIzn95cqT9SboETICUsBmtU6k0jEYkoi7EQpqAGI7UZEd2ajy8Pvrm
Q9kflUOTiCZCLyOQ7VotxEbEC9yYDajsUoRtkW4DTbi1O0lSvXTqvTxOQ3BRtxIHE57FGIwiG9XR
vRcdU9iiNg6Jnd19WAGKMXojHEGnb6E25EAkURie4ogh04DHYmURiMSEIWgxxdEEvIGqBmWYP2ri
iaLS2NE+IOBQkDw5g9YjYq4hEdO/pux2Rf1V6IHKQ+sdIOxGOzpI2Ih8cEWFUWzC3LDNCfsvqDKP
E0ZYdqc13ryiaXot/ijUfapQxcUB2qXLa4iRiNWx4o25huMxm3vAsU4AAybFAHKivPMHlxJowzjI
flU4M0bg1jtz65iajZ2KO2PCe0dBicDQqfKz8do6H7KxKGovOHDPa46RKmi6GI/OHoCYBrV/jhuO
YRGByTHo3GhTepaGDfSocvDw2xxb5HFACpOCjy8S4t+IjORxULv/AFoxJ7fCU9ydN396ELJdvFJe
U9MirJJeUBol3fJtM4iUTkQ4ReyLU8p2+H6E/LEX7eRdpd4K+JGNqOZkfyItx3peKf5P+D/Mn/8A
bX/2Cr3+pL6z8wsjzE5aY7BimtRbac/lB5WxNz+8b0LAOTgAuO3KPaCOhoh07MjcgRBsSfsUrcJn
SfFIU1FRhAPORYBRg3xJVmfQujeu05PlS7/9SYyTyLWY+COQCYYdGk45JjiOmJtTMYkjUAaFW7vv
AE9SVqyYyuCjeKT92Cu2OapG2Wi9eLchyVuQvX5jh0kNAn3ytV2ZuXTUkk6R2DoMmJbIKVy/cBnE
tCzHik4U7/MRlZsxD8ekSPc6FmzE2+VgXiD4pHaU8qRCaOCwXCArZ0/DtHVOWQTbEZzLAYDaV53M
YexbZgO3pc1kaRiMSVK7Mk3blSTkNnyEW4Vu3OGAGPb3IRFZGspHElEmgGK/EzFBS0N23r6P3Ns1
IzlsdGZwiMAjzFzxz8I92PU/DwLQFb0hlHYhd06bdvhtRI2Z+hlduHTCIclH+o3gdAcWRIv3gZdc
ylgA5UuYl4ruA2RyUrx8Fvht9uZ6xlIsBtUuYu0lKr7IspczPxXTQbIjD0cOX98vL9EdBPs2Qw/S
PUlM+yHWuVJ3CZy71CwMbhr2DFADAdErnsWuGO859PlQk1y8dEWxqoWhTSA/b0xj7Nkaj2nrmRwA
dG4cbh1FXLsiwhEn1BG5MkuTOROzqyK1HG5J+5N0CMS8jgAjV7VkvI+9P+5TjAv5dCRgTsCu6oiM
LWYQhapbgcMu2SlzJc27Dt+dJT53mHlGBa3AVruC824GMqscUdPiag3qXMXpa70/UFrkDqNJMWft
QjAaYjADqS0jhjR96qsananx6lUUd1Oi5Ibgg/W0jDYhMU2qN2Jd8etUsniQhcqYjLJG5tyTXDpu
A0XvB0JAaTmq+tDej9CJjUmrBRt3BqGCcksMkJAvHZmmxGCYdYquaAz6xic3HrRicQW9Stk+yft6
hHvdDhExxZwvMyJfvWrJgUwqJGvQ+a0nxRwKhGR4ZgkAGlF2K3ejjCQl+VCUaggEd6Aq04l4b2cF
c1KY+L5huDuNQgTsdGHL2pXZAOREOwV6HMWpWjKeqOoMTRlauyIEoyaIOMnxAQlEvE4Ede5bOFzj
j9A6YzyvBpH84MzoEAablJH87pMS9CJAjFxsUbmZFe3PrysvpujitT2SRs8xExuxJBB+sHYmlEEe
orOPb0Ae7mjcIeMPryRJxOKlflhbHCNsjgiTUmpKt3bFiUrcXEZYYlRlfuxg+MSSSArN7lpSk/Dc
1bdoTmstqv8ALvmJAf29p1G+bKdenV5n+Wv/ALBV7/Ul9Z+c3R86E2GAiKFPbhKEdkseqY2fvZ0B
2KRuPIzxJxqnAomxPXHPcyBKZ+7ict6nasxjrkGdEytGUdsaqV2ViUbUQ5nIEDudHUCAFpjQIRA1
SOAQ5nmB8aXhj7o9EYWQXnSUhUiOasclyAuk263rtzhEjsEXTDponHjCbPoiBUk0VmEsREP0GcyI
xGJK0WHhY9q4DU9i45RtwGJJqSrvLf0iUbMZObnMXTpAHqJ+hCMwJGEjqnE6gSD4nQGxSv35CFuA
ckqULFyVrlhSMYltW8shdgWmMCh51wyiKgZOnNUzIE+pYMEIWRwA8c2oAo2rMQG8RzJ3rVJzsADk
ocxfDD93A4jt6dcu4bSvPvgavZjiw+QmcsAjzFzxT8IHsx6By8PAK3SNmxAAMBh1hYt/e3PoG1C3
HAfWhGJ+FaPG2Z2dThD3J8MBvQsBiZcV+W16shGIYD0NuxZIjy8XM5ip9SjCIaMQwHXjYiaeK52D
JaYeOXDADeowGXWhywdjxTOyIUOWhTU0p7oxP2phgPR3OYyJ0w/RClM4AOhI+KfHLtPUt8uPaOqX
6Meid3KHDH7egkFjgO0qMT4sZHeelvY5MV2GZ6STgFK7ncL9w6+iPimREIRGADKcH4rxEI/arkQP
EGJ3KnS5wQAxmWCjAeyG6BZci3EPID2icApWLX31yWmAziCvKjU4E+9I4oD99zBoPdCEYRJlPAZy
kczuUxFvxF0VJyfJCEqlmOyqaDnMPUDs6miDahWROACEgeAeEDPenNAM15YmDJnpsWiyHlM6RLLt
QiMBntKd2apUi/hTmp6oG9SO/oJ2nrhRG0qMTWMgH7es79zpyWG1Y03qkgxUdNSMwmlUIaEx6OxC
QFK0Rlg1XQHikCcdiOuQEjgFTrtlmmyCrUIjrXR+cT66ojYaKMtoB6G2qMtlOghSmMsFOLcJr2Fe
XLDM7FIS3EFB1XBduCF0F4jLYtL1xLdFuvFa4D3YfQrfMwnouwmAGpIiWQQuwiIwoJCIbtRtxYRG
BCuedZN3zGAkC0gyF3lbsbkSHIBqO0ZK6Q4naa5EjEaf7lYsSuSNq5LTKBLiuDdeF0BzEsezNOOg
mHjg0o/4S6icBMCQIxGaBPiFJPtHTO2W0XOKG0HP0A5q2ZC5biwYOP8AFuTka98eIJpUOz+5PjE0
TDHYhboGLybb0QsD2az3yP5FRcq3ukU3HouWDiQ8e0YKUJBpRJBG8IRek4kf2lb+yeC5j+WvfsFX
v9SX7R+eTKRaIqSpiJaFvhjvQgADdlidgTSNDtRMS46tENTaRi5ZRsULChUhGBhpzwdSu3yBbjmV
5NkmHLg0hHPfJCdom4R4o/kWgg68GzdDm+ajx424HLf6OcYeIgspzu2pMSTraleq68yIrmFpGKjz
nMxa1CsIn2imGS1zNDQbzuQ5jnJShYhXyjRz2Bfh/wCmWCAKCTOfVgEea/q/MEAV0guex/yL8PyF
v8PygPFIUJ3yK02xxHxSzKlduSEYRDyJwAXk2SY8pbPCPeO09VhitUy3ahasQldmco1Wm7DRcmXk
DijKRwwG1edzGA8EGYDpM5lohR5i4JMPBCTN2t8ib9zaNdkpdBMQ8zSI3pj4pVkdp6xnIsApX7v3
tzI+yMghah97coGyGZQgO0naT0mci0YhyV+JmHnM6eXhs3pjWci8jv8AQ+Vb1GUqEwqQclWtyVZn
7OvKZwiHRuzDTuEk9mSlc9iHDHtzPWMpUAUr1w1nxE7ApcxLxXPCNkcvRkR8c+GPaVGAwiFCx/1J
cXYK9W5zBwkdEP0YqUs2p2qIzNT2nojbyhxHplcOEQSjdmGu3yZzJxrh06QHM6IRAYAdeEcrY1Ht
6IctE8NiLy/SknOMi/UEAWzJUQ5IjVTuywjUqd2+BC2fu4sxX4mIBkB4No/KjzV7gckgHGqF27L4
cPBbH2qMrkXMcE7dieRACcVBTOH2dDTmBI5Z+pcJdlLl4kPeOqU6vGCMLR+HaDOMZSyCt8sCXucd
yW5XD4B4Y/oheZIgxEdMIjLow4Wqrk/ekwQ6o7yidp6I7yT6CEd6iNgHVMk8nMdqAagGC4ThtRlM
ggZBNEURhJnGxCMU4Y7kxTMml60ZRPCMQmwQnGhH1pzUrBlTqlCcqxj9KL8IOAX29UoS96IPqoiN
o+pD80kdA6AfX0NiEYYOSH3qVueMfqUJ4huIZqMuknNSBxx6LnLnC4Nce2NCo2L1vzLUdM41Y8Qk
HHqUpQHCS4BULtHhwS+xEqPMctMwmMRlIbJBTIGm4xhet+6SPqXl3BxWyJwltAND9ChejWNyIkO8
dYxzIQfxR4T2jpucuaeXLh/RKDlrd0MH97d0i7Gs7R1DszQkMJBx15R94EetSlbu23egEpAt6kJ3
rcb4eujjkO0fkRiQYnMYV7EZSibl6b6HrEfUnK/ETDQHgf2ju7FG0H0kvcnsGanytuRNsAShI4kE
bt6PLk8diZcfmyqOkc3bHBdpNspKxdJ0jWATuKcYH+2zehp888x/LXv2Cr3+pL6/nn8HZl8S54tw
TgapZPtRu3S7qmCotUccwmPSCuDxSxbJRkSzUCdjKTAAZOoi7w2geG2MBvKELYGr2pJitflR1bWD
+llbmHBDMUYt8OReJ6lFpZ9pUrsyI24h9Gcj+RCGloCg04KfJ8vcEb7NO5KkYD8quCEJc3cf4d6b
ygP1iGXmf1G8bhx8qBaATQjGJ9m3GjocxzhMOXHhht7ELdqIjEZBOckbMBotTOk6gxl2daPORkBF
2Lmr+pCfMvbsguHxkEI2bcYUZwACtUnO4VJQv3QwFYQz7+kykWAxK1zDWR4Bt3qnyGPL2/vJ4n3Y
7UIRwCINSKlSBYQt0i+ZXmyAnsIRNs4Yjqgn7q0fXJGcqCNSpczc8U6RHux6hg7WLPFdlkSMkeZn
SI4bMRgI7fQmTPI+EfadyPN3SJDCDAgPma+gjYHhjxXNm4LTD7yfDHvzUYDL6+tGwA8I8Vw/UFDl
o011nuiEAMBh6P8ANsj/AJj0TuZR4B9vU0QpO4dEe9RgMIhlbs+z45dgw6DI4AOjcl4rhfu6bPKQ
PFelxN7oxdADAUHSADS2HPb6C5e9+TDsClclSMAZHuVy6fFemSOx0IjAdSZGOmimT2BG2cCENRMy
MHWHSWoAdIO07kIgs7udgC82ZoaWwcANvaUZRk4hwsMSdgUpRtjzpECIxx2o3HeUAIMBTUUBIHgr
K5IeKR2IQgWGdWP0IyM5SMsSS/c6LQDkuaZqgY7ep61HbIk9WiL4gdNsbvQWxvQHVY1CYBuiURQD
NEXg8WrIJoHhyZE4pxQjNNI4LiCExgU+aNvEHJCUS0jjE7EYmJANKpq0oUJbR1iENNS6EziQ5TY9
GPUtz2Ex9dVE729anDYx+xN1NqBiWZEPU5b1GZDSFCdyIOBcKENlHUfdOPQVVUwUbtotOLgHKtKr
lpCUo8xy8TZuhsRE0Ka2JGR9qX5ApwlhMU7UQcRigrMifhXj5V0bpUB7irBttrEJav0XC5YHEQAP
d15QyuDUO3PptXX4ZcEu/BSu3ASLZBAGLktT1q1zdofiJGLThRxJqji2KXNC8RcrI2C+hvdGSjeA
8QacdhzCnYkXMCTHbpPotfMWokx9rAt2hWuS5CyIW4y0eYB4tsqLzuRlrnEfdXA4l6kBLl7sWpp0
nSOxCJA86dbh+xaLcZRu2Y6LhOEvaiR60L0S9ubRuxOcX+tRv8vMThIO4y3HovWBSRi8DvCIkWnA
1B2hWJXLsY3iNJiSASRT/hxzGz8Ne/YKvf6kv2j88ESOq/LwwGKlemSZTLoTNmQt5FloYxbI06aJ
pUO1btvQ5RkM0BqGo+yMULtwl8ghGDR1etRMsSK/IJQkOJuE71K1cDGJp00WuQ08pbPHc2n3Qhzt
m9ajaDR8sgg91aqM+VvQtwniWqPoK87nuYlezMRQE7yXQEpRtQiGiMPUF5XI2yTKgnMV/wAMQhzX
9RkblzEWzgO1AAMBgAnNAMVLlLDwt2nNy8SwYI+WfgWuG3vbPrDl5ykbMOMQGDrQQ2mjIzmWAQvX
/CPu7eAG89LmgGazFqJ/WTCgHyEzl3DaUbtyt25UvkMgp8w2q5LhtR2yKm9wk3HfvTyNVblckZW4
0EHLVGxfiI0lcxGQ6g5e2WuTxOwIQjgF5Y+5tHjPvS2dQWofeXKDdvQ5W2XtwOq/KOEj7pQF46rp
HBYhj/cFKVqMLdp+COlyBvJQHN6THOjH6FG/ZLwl9HVM5YRDlCEw8Wcs4EYfaShGIaMQwHXlclhE
Izn95c4pfkUrp8Nvhh25nrGcsAjdmeKfFIqd+XiuHh3RGHo5TPsh2QMvHMmUu0qU8wKdqjHNnPae
o5HDZFD+cei7dyfRHsj/AH9EOXHtF5H80IAYDpv83jC38O39rdJkcAHRuSxuEnu68iMSGHaVCGwV
U4xLTvHQOw4p8oD6T1C2QdAe8CD2qVveiTkmE/LcgABiST2qUZyMm9o0Hcn1AuWDZlCFs8dz/lGZ
KMbcR5UKQJxJzkowmMcZEswUrsItGAMbMTnvVoymxjxS7SoyZzHB9u1UAGZ7etXpPYrUdw60z3dD
KAzAHXKgOxAdDKtOpqx2IkxbsRm3AdvQNYZxR1jU4IMGIVuXvUI7kLcvChKhEsEJgMRmNq46oEIn
bigBgOvcJqRUdyi+ThuvI5xIKB2IbJRPTVRIPCaHpF6OAxW0HNAn2Sx7CotQEu6HS+8N3qMWAbPo
nfjzJt8wBIysyArICjdpZVQIyQkMJBOnBYhWbV+Wq5y4kBczlEtircT4rbxPrp14XhQ2i5O40ZPk
c+iUc2cdoULnM/dBp3M6xrnvV+/y8hHlrsyYW5x8X59MCviW4y/RJH0EKdo/AvHxPHTUbSKKPPeY
C0oQiQWpI11bmRtcvMczzM4vZFsiUe2RBUf/AOIQhKxMt5kPZO9OOvWqlc5e0IzmXfFuzqc1V6xP
/LHo18tcIHtQNYntCFjmQLPMmgbwy7OjmbcPCZaux0DGRDVFc1+H/qMpzrwXX1MNhQ8vmrZ1YDUA
fpTjA/8ACinpeY/lr37BV7/Ul9Z+djOZAiMSVK3yukzwEsWRvX5GU5YkoX7o+Bb25lCIiGFGRuWo
iNwbEYXYtsPU0yWiyRCzGty7LCPYMyvJvRNyxjbvWwTEjfsK8uAJmaNsXm3y8zhH8qfE5bAvxF3w
W8Bk/wAiN22Guxr2owkGkKEdALaOWgeO5t/Nio8n/T/6bc0WwwuaTKHb8MH61+K/q1ydr3LZYMN0
clHlIXDcuPUPqn9CJtRHKWP+rd8bbhkjy/JCXM3iePmLlfUhOfHfOMsh2dEr12WmEA5Ku/8A8P5a
VvlQCJczIZfm5KUYyIE/Exx7evckJCIjFyDmvxUC4iOIDFC/eiABW3A49/SeXnzEISiHkZEADvR5
azceET4sBPsQjEMBh8hco3ZD4cKWxtO1GUqABytNp48vYeMAfaL1l0xbF6LXPw0Ee4dJuSywG07E
bt3724XO4bELdv7ydI7t6EI5YnaekzkaBG6YmfMXqQbCMTgaqV0tK/OluOGqZz7Ap8xzEzcuTLyk
ejTDHahyd+YPLyfUcokB0bvLy8y2MZDBOKjp4CJRiQIgkgGfdihqrclWZ37PQC0fBb4pbzsWmH3l
w6YDtUYDIfT1hbFYW6z7cgo2B7dZbohADAUHo7dgYE6p/ox/L0WrIwfXPsGH09QzOSD+KXFLtKlI
Y4DtKjDYK9qcqfMHPhh2DpnIB5kaYjeVCJ8UuKXaekW4lpXDpQiMg3Xt2tp1HsHRCxEvCxHi/SKM
yKzL93U1EPE0TZO8ewqRCaEo6o1kDkjdgak6LROZPiko29Ra2OzVI5lS1UEaQOQfEjerhJ8QYHNR
txyGKqH7VTqN1arHoEdpAUQMh1jvl0RG0hAbB0N1ShuIWL9BkcvrTnDqMzk7VWIc4rSNq1SHCKoE
Ui1SsOFmBQl7MauoiQ16S+KJjWBQtOz4dqNu6PoQlE8JQB4DvXDIHs6r9DKUdoKlE5EI9a5D3olv
V0Wpdx+rpDI1aTOO6qjLaAiY+IYLRcFTRu1S96BeoxCFMaSGzJGBq2CiDmPpCrn0dyB6IXW4ZDSe
5UCqjH2o1H29MuzBcp5hpd1C8I4hjQlW7lmD/wBMvSjajKQImJyHiY5dYxOBDIQOMOE9M/6fbkPK
M9cyMYxoWQhENGIYAdBhBpW5xNub1BkA/wDcrnnTEbUQ4hNjAy9mhYYqfM3SCZhrcc4h1dhiWcdo
qrUjiBpP+Et6Q378ZC6QAZQLO21f9tfnA7JtIfQy4uYg24FRu3Lk7lyBcYAOOjmRIkkTNTj0i3ai
ZSlgArNrmS92Mavj2f2l3/JX+e+Y/lr37BV7/Ul9Z+ddV+bHKIxK8u09uz7oz7VxY7VCxCoJ4jsC
hZtBhEV6TGcRqyKPCZW8iOp5nL3Ty9o7aBuxS5Y3zfuChkBREjjvH6E8nJO9DmebhqlLwxlgFphE
RGwBvkRHs5lGNiOiBDk5E5qPL2gTAVuTGER2qNu9f8izbHw4Wyx7ahye5SNqJuRfg81wWUviQtQ/
6dpxI99frQiOQlO+P3komUnP5zJuY/7exnDB0LdmNc5HE9NTw6g4VrlbUgLhYSgMgOrqESYjNqIX
LwMLI20MuxShbGmIAYBRlcjKYkGaKib0CLc6x1O1d2CmAB5JAMD29Gi2Wu3uGJ2DaiX8WJzKPN3I
fEkWgTkN3yIWYFs5ndsTkiMIipOAAV6zysjb5aAOqYoZ9+xVTAOUfMOmI9bqoeAwJVuFh2gGkd+f
TrI+DaPA+ctqMpYCpRvy8U/CNg6hmf8A8ezUfnSUr11nJ4AKAALl3JbyywyFcuiqEOXAIAbURVH8
LaNyQGqQFAzr8La/p+hxpiQGjHJ9itQ5g6rwjxkbejyRQyHHLDTFecXMIuLYP7Q9AZPXLtTz8cqy
KN3GFt4w7cz1jLE5DejKRrLimSpcxLG54Qcoj0bq5fPtlo/ox6J3spHTHsHUt2BgOOfYMOi3bxjD
jl9nRoHinwhRgMAOm1yuo6bZ1yiMD2pul8rQ+n0Fy5lFoRV2/L2IkjtyUpyrO9Kp7UIjABuoc2yU
Z+68T3IyOJUxqaNwvIjxHc6iNAIt+F8k/pi+DfSmGB6LcdsvqR9SbqFAb+i2N49BTvK1SaJOZWoT
EhuQBTdV8x0aZCi0xHQxDhNEaRsTROK0yqEL0QzCgydOzTGBQhItMCmx0HqXqAgR4TUFRnt6KdFV
ToZXbe1/oKHWCnD3ZEeoojYUDtAPQ+ajLJ2KMcgS3YejhHhqq4ihitUK27lOwoiQrEMSoj2sXRBq
Bh0HcpR7wsFMYmPEO5P6ugS2ZI+7Ko71RNtDFWfw5J5i7MW7gd9MgXduxeVOZuStSgRLY0mCjdv3
4wBAID1Kjy+mduMjpjdk2lztrROOpIezcDjtA6DPGZpCO0q3zNxzemWuyf3ugWrX396kGy2lcrOz
F52rg1ECpcuV5ca8ny5eWyc/tboL4NXsXMWoF427nD+iR/d8hu81yxtyhdL6SSCPoQPMXIW45gHU
UDcErshjqNPUE3L2YW94FfX/AMNuY/lr37BV7/Ul9Z+dKYqXMwkbg2bAtMqHNRt2Q8jicgFq+8vy
8Uz9nVMLkQQdq+H4ZZI3J8Mcjt7EOZla1D2AfrQs3pixbIYRiA7Ixs8Il4pZlOalDmL0dNmNQD7R
QADAUHyFzQZlDkv6f8W/OhnGoHYv+8uGUpV0uXD7UTO3GUsjIAqcbV0QjckZHQOKuS1Sec85z4j9
PoIBneS8wYjEDpfoHLSiHBcFbAFPTtaqs2uXGq6ZDXLI7lHk7dZwgJSIDgHeVc/GTFvyQ7yzG7aj
Z5R7HLv4geOXfko2yZ8xcNIxrIshe58aYCotA1PahGAEYigA+QuCAZUBkWDoyBFyeJ05ojmRKxYB
+6AIf9IqcAfLtkNE7TuVA68y5VskJNXJG5e1R5eOBFNRUbNmOmEcB0DlbR451nL3YoQjQCi0D7u3
WR2nqfhrRacxxSHsxQs2/urLVFATn29F22T8Pl+CA2U4vp6DOMXiMSo+/KpOxRHLW/MFxrcgxYDa
ThTpMhWXsjaV5ZHHKtyZYsPdQiMBQegEG4LdTvK0w+8ucMe9CAwH1oCcgBtJAT25CQODF1XpDH4d
qpG2SjYjjMvLdEIAUAw9Hoj4rh0jvQiMIhkW8UuGPaVGA9kN0uclO+cbhp+iME5wU75xuGnYKDoJ
xhboO3plcOEQ6uczMVJYUr0mWwOjOXimXJ68pnIIE4y4j3q3ysTW4Xl2BB/DbD956X6DtZC2cTxS
7+vTodnUCDSQqNnoWjU0YBfFOndmtdtyYYOq0OfVkXqyiN3RDdVdnWYYrSKyOATyLlGRPiwCtgmp
T7fRYJ3IQJNRmEwoBhtVU4r0mMg4KEBhl1CS5QlDwFnCfb0SHvE/SEOmnUl+c0vWpDaH9Sjucerp
7CChWooQjHPEK4BSbU7l5kCzjDsQJDTBAkBgd6lB8Qok0Iogc5dDbV9HQQcwylbw0lh0tnD6lVGe
JoIjeV5nMmOm28pbHIZXI8vM+TCkonM5FSjKRIiWAJw6J8relqu8swjI4mBw9TdSNwB5QIIAxO1C
YPCarz5D4FqloHM5lThmQW7ULszSAaXaEecvCsqW4+7FR5bl637r9sYjEq15ew6v0nquGg2lDkeU
AMph7tx20wf7VPl4xNuF62CAdsf/AEf+D7/MlevzH8te/YKvf6kvrPzqYyDg5LXYPlz9QWjxXD4p
dcG9bEyMHUL1wgWbfhsxDBGRaFuAUrnsCkBuQUb/ADQMbQrGOZQjANEUAHyL8NYj5jfeS9kDYpcz
oj50tgpHs9JZicXJZaTgcURliOpCeT1VK3JjgA+tEDxS8UlbkZaADWRyVyHI2/MvTLyvXa/Ujdvy
cyyFB6kIRIG+RZRuyi/M3A8pnIbB8iNydThCO0rz7s3I8MfZj2BExukvkaj1KJGibni1A1WvmZBo
+CEQ0R3LZuQjEE7sVEXYShZFZSIZ9yjbtjTCIYAdGo1JpEbSUbk/vblZH7ELcfvJ0G7ehEV2nf0f
HvwEvcBeXqCNvlw5AfVMtEdrK6BGJnPGYf6HQnehqnVxE1xorvMzkOXFp9cZSDtkyv8AMWjLy7kz
IGWKbAbVG3GgCEYh5FgAMyVC1IDzDxTI2yr0GUiwGK/EGjkxsQONMyFxVuSrOW09d8WNQEXkBIey
9UZTIEayJdEWbUrvkgiM6adRU4WpC1AeI26EDtQPmSl+kSVp1+TKIoRR/Uo8pzJEoz4YTzfGpPQZ
CssIjeU8qHGZ3qXMSFZlojZEekJxjaDD9I49EY+zbGo9pw6gtR8Vwt3ZoRjhEMFoHiuHQO9CIwAZ
GR7kNsqnv6Y2R7XFIbgoxAZw57+mNp6zP0IRGAp142s7kgO5ADAKYBeFkae/NG4RWZcdgW/pZRhk
T9ARl6Gh7VXDL0Do3MWyRkMSVpmaSRGXVfatMaksAh4a5OtVxiGo3XMzkEZbcOhohxvQuTGHo9IA
EszmnJWuXtFgFuNQmmKHMIGJeOz0DFYdyMdhQOO0BQmAWJFeluh+m3PaDF+wqO+ilHKJB9fT+kCO
9PmgRlVPkT9aoWFS31qhpNx2bFx1yJRYtMOWQOEo1LKO0gFPmjIZh0D0CYHjFe0dIJwzG5FvCajs
QhCRDmgBzXkiZ0wAia49qncBad46Y9gRkMJgS9Y6IwmWhzI8snYcY/T1IS5q3Kdq5w6ogHSewspR
/p/NxjK5UxJ4t40yaQdR5ZxGYFIuOifJxk1nVrm2BljpUrs6W4DAfQAp89zA+Le8MT7EcGCmZxke
SmXjMCkCVO750xdZrUbZIjqPhpmp37U5X+ZuiPnCZ1CWn3cCrb2hIwMLRhKrGY4iOx1etw8MCMN4
f/hB2fNPMU/8a9+wVe/1JfWf7AfgLEv9Qj6kN+CjzPNigrC2ftTAMBgPkWgS0xOLYrRbAjEelhby
jF/X0fnxq6ZF+gCOKbUZSZiep8efl2hWUs+wK3K3a1QhIaYispNmrfLjl7tq7NuK4BEfWfkJuTIj
CIckoz/dxpAbtvTplsXhJG0BeEvsZQuaXuzDykcezqeefurdLYyJzKMjgEb02D+HcEbPLn8VzI9m
B4In86X5ETdvGuFm0TC2O1qlapF5HADBQsWjI3pVvyo36I6SASxxCaVQgImpoBmhcnytzSQ4IGr6
nR5vmLZiLdLQmGJO1j0+SC1i3W/IbvZQ5kxaIDWhuwduu+YQsWIxEsDVy+8BRlcvfGuB527bkxhi
HKI8y5K2z1k4+kqXNczMW4yDxD1PcVITrCYIl0ACpUZ3jW1EzAG3DoJ/d2aDfL+5RsRNZ+L9EIAY
DD0cp7BTtQfxS4pdpRJwCN0hjcL92XUnc9m3wR7c+h8rQ/5pdELWMfFLpcrWKCUmG0xj1JH2bYYd
p9A/s2h9JV267aIkol+K6ce1COQDKnSTsU7mzhCC1Ma4FPt6zs6EhRvEiGcGqDYHIp+tvK124HSA
zs4XHjsQIyT9RkBtKDZSTmiIyHXO006BMhwcNnegLk3l7sVwx9ZUrYwGCcdR+iibEoSBWFUAoW4+
zVCYxAfuW5ByQ2AyKcHu9BREhR4dVyWL5IaDpnmDUfSmNDs64kPYl9BQOwuiMpR+qq3dAKlHeehz
iFIAgmND2qUT4RgEYyqDR1auSIaQIKoXhNwVERwFOjsQEsMOjUzmBfuVAWVQejQfEKg7kbkvDaGo
9uS2ykfpKhy8fDZDd5qVPm9RhOENNmPvSiM0YyDGJYg7QhKJaUSCDvVjmneU4tP9KNJdJsXgDCdC
+W9Su8zEysu0L9kvpLtDWCzOhMXZmQ8JcuEeX5vjuUFu9sy41DmeSjG+ARO9iZHSdWqKjoBjythp
XIS9qZ27WQlINEYDajEgGOYIoyt83Ev/AEyzMR0DaYsS2wHBS5u8cBwgYyJwAVzmhKcucuTeUIg8
LnUJvlXBQjqlPXHTOUi5Mhgf+DDdR1T5s5j+WvfsFXv9SX1n5/PK8sRPmJUJygjckdUpGu8lR5rm
ovI1hA5Ld8w278YkvSR6HCE4lxL6Cm2oBARBNw5oylIR2A4k9iYhkIRDylQKN7nbpc1ELf2khARs
ibZ3OL60eZNm3bMcDGIEidgR/qPMDTHC1A7PkBncOmMaklGunlweGG3ees4xVyBkNMW0ua1THEdA
5a3ia3JbAhGNAKBfh7DX+Zzg/DH9JvqRs3LotWpeKFoaQdxOKx6POYSkzRjKuOaJOJqeq4LEYFR5
L+oXCJxPBdmcXyJKBiQYnAjDoFq399cpHdvUeWtnhidV2e0oAUAw650xM5YCOGKlc5qxaInJxoHF
KR9+ckbQFqUIexai0dR2kry7EfKfxafqWq7MybAHAdiIFAM00RUYrUayxV7nZUFw6IDNo4n1po+O
XDEbygDkHkd+alekKypH9H0kLNSBxy2MMOjRHGZbuzQiMAG6ZTGLcI3qMM8Sd5RmcIgkrVLxTJke
/BOclO77xIHYOkiNZS4R3qUmpAaR9vTKZyC1mkpnUe/rk7FK4QxmSVoBY3ZCLbRmo5i2HPai/d1H
wzVuJxlxHvQjkKk7k0fAMEB1XiGgMZZLSA5WuIDHFUJrkEBiUI57OsNVBsQjF47jgoXIVEg7oDey
A6o3AoyOQJ6DXrncxUYoRhQmiclyt61DEL6wnHW1DFF8SqB0AVLcjDY49fQ64sZBh3oiOQD9vWfo
DYk4JziMkZbcELcsR4Tt3J/X1roGLavVXotyywPfTpIQ/OH1URBxGCDqVyMmEg5AzKEohyPEN21a
L3hlR8GfAoQPisyafZkVCcKsQShMZ9BCqgDkpQOBDIioaixBTxDbDkt4Qhp0m4QZbSBgvMPhtDUe
7BCN0Hyg87sgo2bMRC3ANEBR56zFrXMnjGQuY/T0X+QkcGu2x9Evs6k+XF06JNwSrGhcb8lxA23z
PFA9iE4QeQxMaghXoTkR5jRgDlHCWlRnydbBqZZylv7EwwUeVsH41zEjGMdqld5vT5UIsQQ4/RAz
JQtxlKcTOcrNicjKFmBOBGQAoELceKRrOZxkVTEVHchLb/wqPpK/IuY/lr37BV7/AFJfWfn2V6/I
RhGtVK1yp8ixgDHxEdq1kvI4kmpQv3vBCsY7Uww+YjC9ATG9SjENalWHQBIAvSq8zzowt5knBARv
i62OnBcFoE4apIQstO/IcUhhFSlI6pnB071Vu1C6TGRAIkSQyFy5ISmRwwGMihznP8NoVt2ckBEM
BgB8gn5g1RiCSOxAW6A49nXE7cjFRm4F1qxeqMqajSIOZRMy92Z1TO/Yrl2Ba/c4LQ3nPuRnM6pS
JJJzJ6ST4RUrdkOuF+AmSZwjqjI57VK5LCI9a/EShq5i7SEfdistRrIjb6DhLFGPMc35fL2H1aKB
+0vVEclr8gYG4QSTmadDBMRXNMBnirdgeK7IRH5VOyLhuCctVQzI3T4bXDHec1GyMZ4tkAgBgKej
JOAU7uJmadgwXhKOLWwzbyvM5iQhDaSy02LokdxT4jaoW/Zt8cu3IdELIrrPF+iMUy0w8UzpCERg
Oho3AZDECqM4TB0CkTtQicc229MLUcZmvYEAMB1yB4pUHehHYFb5cHhsxeX6UlK4fbL9w6sYAO7A
smwagBRAxl9SHVLByowNI7EZYE/Upylg9ECMQpyliBwrVv6rZBao4DBOcUYn2DTvRGQLodNOiQ2R
Urx8Uiw7AnRlk5A7uvKJzDLScRio7AT0m5OkMsnQEe9gVK2DULixVCFQ9GztVcVqyyGawMTm6jMM
G+lTneGqJiNI3p4fd3A4GwoylQE0CIlMDcnBfejI49crTKTRi79qcgSgcJRxWqOGe0JxlghPbiN4
6xicJAj1oxORY9yAB8KB2h+jeoywYshpNSgSeI1ZODghejWBGmQUC2mX/UG5arcn8yAEiMyMCp2b
gqMO/JRjsFOnvZdwPQRGQD1Ypw0lp0kbDj9SN/mZxjGHEYDxS7kZswwA2ALS3Fd4pbojB1PlYxEO
Ztkymc5xJoe7ov2oh7kB5lvthX6uizzIrGJa4NsJUKjcgXhMCUSMweoYziJROIIdGfJy0nO3Lwnc
Ni8nmYG3KNADh/hQfVc5W4fi2X/5o5P9ajc5aYuzu/d2x4n3jJXOe56bXZ1uSP0RiFASjQluX5c1
EcjK4NrJhxXZVuTOJPSeRkCJmOuBbhI/tfX5+p8r5j+XvfsFXv05fX8+TvQjqlEOAV/3MyQDSApE
IECpUYxGqRNAo+d97MORs+ZJGI+JCsUYyoYlioxzNVU9LQiZHYA6eQFuO2VPoQF29ql7sULPKWfi
ZzzHaUZX7nnXIYDEOqfIbsZHSDEgntT3YSjD2ZkHSew9faUDkQX7EeYmGhGluJ+tGRwCjzQFLc9W
kY6cH+np1iEtDtqY6X7VoGAx7fQ37x8Nq23fI/3I3ZH/ALazXtkFLmbhpJhbjsHoZcvyhE7wHHP2
Ybu1C1aMbb1kMyn6DIqNu2CZSLRG0oEzhrIfTX61cv8AMwjqA02mq20rREtOdIsqkCNsPImnerl2
zcFwijjIekFuPimW7kIgANgjKRYCpU+YuRIxIJGOxarktNofd2g7Ab96MrUzHaha5ppWRQkB5fWv
OkGle42OIBwHRO7lHgj9vQZ+zaDDtPR+F5ZtbPcn7u5ebYuOZYxrpKnaMXul+MbSpxuvwM5O09Mr
juIcI2eghDKPEQpTlhEEnuV68cb0zp7HohAZBuo6aXjOQxUZgMWquzqlExJbYnK0DOiG+qwLbVqB
7Ue3qMESADEVLYpuiROBRl7U693Wn6lb3hz3qZ2AqO9z6+u6HMQy8Y+1UzVQyA2laIZYdqczKeRc
7SmeiZvUgJOHwWJTCp9ajcuROknE4LhIi2IWo13FUOOWxMfEEZHFmigJE1z2BUBIBqTmpMNIyC+t
OC/V3Bb8kTE+LFVkjbIEb0B6wtORwUrZzqO5EdacgKSLgplGW5vVTodMMHdCY4oihYuoz3MUYCmo
Mo2YhosX7k0PZqhCVQDQrYSEbc/FHA7QmzGI6O9DsHRp5bT5sAZAEsSBkiPw0xKPicMO4lPPl7g2
8JI+h1xAwuRwcEdxU7s4G7eieG17PauP728dUxsAwCt81AajCk4+9E4hSvWhL8dlYkMwduDMrfNQ
DC4CJRORFCFesgNbkfMtfoyr9GHRG3Ivc5Ym2f0cY/R1tN+3GYycOhDy20+ExoVC9YlLTZY25wFA
3vBW5XxK8IUhbjEiAOGo5LzbjT5iXini24dTlucjcFqXLS4pnDScin2/8Ea/IX9E/wAr5j+WvfsF
Xv8AUl9Z+fJW5YSDJwPhzLg5BRhp1TlghduRjK9Krth8ykHNfDP3tSAhE3NNwepPDSY+8SwX/ccx
ANlGpXBbN07ZYLTahG1HcFxzJ3PRNbo1ZSyiNqPL8qaH7y7nI/kUZWQ8yQAFbneDXJRBkBt+Qi1K
QFqPFeJyAwCiIRMLMPA4Yy39Wqoqo27Z0wtsbkh+yhGIYDBaB93DxHInYpWbgeEwxG5E8vzAjaJw
mHI9Sib+rmLkfFqLRJ/RCh/TeTjG3Y5VxKMABHUccNnop2rJ+PzEybkh7EI0CHKwHwbTaztKYegP
9P5WRif31wY19kK3CETCUazGT7d5QnAOY0I3dADFsygA7HIo3Jj4kI/DB2nE9M78vBHht9gxKuxh
OQ5YEREQSInTmQidBiZTJ1H2mpT0ki3DAsDvQwKlytg6YCk5DEnYo2hIyG8unlgtg3qJhESqIxfM
ktRAbAApSOWCjDvJ7aqUsSMO1BxxGsu1XeYP7uJl6lOciXuEyk6ndNDhHtRkaryoloSiXjlTolI5
BAnGVT6C5d2nSOwK4AWnd4I9+KA9m2HPb1XxkcFKc5cW9CILgAuj1Sc8lUd6E4lxmpSuDUfZfAIQ
HhitNBM7UYmsZJ4GiqAmJpuT6q7EZzwZmUoSHiiWKICYd6EfZCr0bupLtUQNgVw7mURu9AxqEZ26
2sxmE9G7HChKDavaZcKwqgMHTjxAOnNNiiQcE0s8FF1KNKMyx6HBVcQnJMiMHwCaJrmU4GOC05YJ
wmGKY4hN0Ph2o2oYsSCcERNxLacFpkGPRG7A1ifoRnHxRaXcokIHq1TZIgYGoUo+6fr6JDvCc4Kc
ASYA4HYUY5AquLLVLEEkd6aAYmiuGbFni28KOqjvp71CUaEUPYhOOINQu1OuPAVXcrN0+HVpl2Sp
0tKAl2h1O6bRefsgsATmFf508UBeEYDH4ctv1KgIOSEeXtSntkAWHerXKms4uZ/pSquUugfEeUTL
80MW6B5hblr3DcGzZLuQnbOqEg8ZDA+goG6srTO7EgZsQVGJBDYA4gPT/g7T5q5j+WvfsFXv05fW
fn0xI4sjvQu80xMfBEfb8zG9zNyNq2M5Fn7FK7ENbHDb7BmUTZtTkHoYxJH0LRG3djE5SJiPpT3J
24dsiT9AVLtoja5/IpXNduYgHIBL91ERfn5Yj4h7XcF5HLR8uzn70v0j0CUSxGBVuciSRQk7vkFz
mLmQaMc5SOAClzvOyPkykZ6JUjI+8VcNqQlCPDERwYdHb01yQOeQQArOdIhGyZf9xc45PRREYnVd
OmJZaDjm+L9JmJCN648bTh6tsClcuHVORJkd59CLYIiDjI4AbVahYhEXpwDkUFc6oDE5n0EpnCIJ
Pcr/ADUpBtRmDI1JJ4QjOeJRWtu4rhiB3I0QuWpGMo4EITl95Gk2QsxrK5TuzUoigjA17lDl7VZX
JY7BmVCzbDRgBEd3o5SxIw7VGJNTWR3lfh7Om9zMgRpBB0b5soW+fi8ydUyIhyxfHFTu8nZNiywE
Y0BLZtvRMSzZZp5lxvVg27ZnbtyJkQKBxn0RtvSPEfs6I2hXTxS7ui/aBbVA/UvL3soWIQYRqSE0
cc1MXYxNycCLZliOzohZGMy57B6CR3UUY7vrVvlgeGzHVIbypXTjM07B1CSjI5rzLs9UpYRihKB4
TQg70xpkeo5TgPsZRiw1kcToDCMsEYYVRl6lqNXyKdExmABgCjAY7GWmRLhV6DM4BTnvYOtMaDMp
hgOpToLLiwJQAllRaYycyIFFGLUbH0FU4+lU+HPdQKoMhtFVUPHYtTgbmRlaidLrQPEaFk+kgbTR
NKYPZVapyNME8JsRtRjcHeME4rHpM5niOCnGJ4AaJ1GzMAEeEhUFJB+/oA30K823wzjSYFH3oyg5
EcUz9GqBYoeY04ZuoX7VJAAtu2dOmXu1UdgVMOinS3RCeWCnHaH9XSUS3CaE9iBHhlTvQOS1AdqZ
XSGaZ1AbCmHigQW+tCMt/wBCp0diIl2INlTos3czECX6QoepzNmETKekSjEVJMSChGPLXIg+1KJi
B61a5aIAMIgSIzOfQbILXrZ12j+cBge1St3QYyiWlFAD1LyS12x7MZez2K5Y5gvC6DK2B7Jjl6v7
fN861+YeY/lr37BV79OX1n+0ZuyrdlS3DaV5nM3DJvBD2Y9gULMKQFZyNAAo2rcQIxDdQg4KXN8n
FpisoAYoxkGkCxHREaTxGm9W7MqSIeQ3n5BcHNjXY5SIMLWIJIdyFDlrPweTA4YRLa8nIGSiX4Xq
EJRLg59R0btw0GQQv3RpEiI2onIP4kebiNUoQbU9KCpUuatRkWJjy0J1hAScBo4alE35arpA1nf0
XLouCF+Q02RmZbgo3OYnq0DTEZBYehtcsxlqa5fYUjajxF+1G7J9MgBCJwAGz0JEvCRXsVzmrMxD
loyJjGbmUzsDU+hduXUZVKtjWY2ZlpxdgaUdG+7xkwh2bkbNsgTvAjuzV3mrgr93b/8AqPpIwJaF
seZcJoGGDo8vyAErEaTvZzP5m7ep8wR5YuHhjiWO0nMp/F2obT9SEhXcM1ZvxtvbvkCMhk+1C1bF
TWctp6J3WpItHsCMjgKo3JRDzL4ZKV7mZiEYB9OZ7Arpsxly3JF4jDXNsXOQWsjgFYnFOiSF+NlE
G7cPATjEDondIpDhi/oIQZwS57kZGkYgk9gV689bs2j2YBRjsADdQR2rXeOi3jvKaA0wHhG5AyyQ
nbx9oLTMEFUcrYiRxHYE4kQXLrU7nagJFpxwKrJ2xKIkWAwCkI4DoOxqFSunOgTDDPuRApXBPJ32
vReXCu9PIkDZmmwGxU6G6kjuUjcv+TciWgKMVO/5kZ27YcirsrfaOtu6hE8xiiBgMCqFcUQe5P5Y
TACI2CicY5L4hdsk0Qy2qrKiqHCe2WOwoA0ZAPiEXxfoDZVUScQeiMo8RxYZIvQkFAPhUnaiRtQJ
PGfUExy6GlgIlGcSKFmQMqnYvLHjlRhkFrn4pfUn29Yz2LSCzsQSoxlU4U39QxOA+1NbZ41DBigC
eGVD2qiEo4S+gqmKjcY8Q0lC9HGJqENu1P0OKMgdo6L3Lk1gROPZL/29DYvWwI+bAiZFHMT/AHqq
Fy3b0WyxErnC/YpcxflGd4jTHTURBxx/4Fdnyhvmeno+Y/lr37BV79OX1n+0Ur16QjbgHJKlcFLU
aWxu6BzvMEgT8Ft2Haeq5LAYlT5H+nH825eH1RT5nahK4X/NCjznMxY/uoHLf8hMuRiNekCQ958H
7FK9fOqciXIwBfDoAJeGYWq2a5xz6canJQt2ogRjUnaUZaQ8A7qM+fun8INUhYctM41jsRnC3G3Y
5eohEMBkO/plBmtWHhAfWU6r6A3ZVEcI7Sp8xYaWHmQk7SjsooTLPIA0wr6GNmHju0O6OZVvk+XL
2bGJGBl/cm6mwDFPlkrcy2gTAOr86lFqPDCA+gJ7bxrotAHaWVqyPYiATtOfoyTQBC3AtPnpESIx
8uOI7+iETSMWoEbVoVwJTyLlAyDxeo2hcvc5QaeWswAjH85m+joIHinwjvQgMIhlGzE8Vwt3BaqG
TUGQbap3JXDcc5UiBuQt6NHLxiIxhGjNme1RbCRUbVkxjIhyTh9C0i/Buwq3YBfywz7VKQxAooiX
iNT2+gnLKNArgBad3gj34oRytBz29QlOaxjihbB4Y5IAYla7jOciomywI8TYLihVeYIsDhk691TY
cbVOdVSgOXR5szxSDgKVXCiY7VKWZKAkUwjw5JowJ7AhKQI2utQnpfJNO5KQ2DBfDiBvKGqoqqY9
crXOhMyQ2NFehOQAMCA/YoDZXooq9Z1wYsx7Ohj3FbejYmGO1b9qJKYdGFUXYpyGO5VrvGKZ3bDa
iS4O0LGnYuGj4yK0/WtLcQoyErktI2BMxK02yI7ty4osNqAOYx3oymHgSWOSyG9aIGmZRA8JWm3G
q8y4dc9mPVx6Zx2gq3I4s3qordwZ5/ol0+3oBWrPAqUiXi+1McJ4dqbNNsT4rQcck0+ISx7EDD2U
+R6S9dJp3oHarRPhuA25d+HoRHmrUboj4dQdnQuQ5WGoYOH+tARDAYAfO7Z7P+BlOnd6fmP5a9+w
Ve/Tl9Z/tEbF4nQchRG5yp82A9g+L6Fb5aVoi4ZcQkMAFC3GgiAOrL+n8nJhhemP2Qn6BzXMBrMS
8In2imAYDD5DcmD8QgCPayEJBwfE61RrA/R0CVuRBCMbgacRiESCCoueDJB8cVfka3bxER2AqFiz
F5GkQMhmShZjWZrcn70uid+/IQtWwTKRV27C2bVucibZP1laZhgMDkeo46jLTHwjAK7KctItNqA8
Uny7EIxoIhh6CfM33Fu2Hkwcq/HlXt25gQEz4hAY+tGWWAWPQUSiJnELvUZDGJBUbFk8JAM2NSVC
UqwtfEl3eH0gtRIBuFq7M2Vjk7BEhy0SJkZSk3D3N1REZlWuSANu7AEkSDanLuEb1+Yt244yKFqN
o+T+6OdBWUui/wA3cYW7IoezFXLVotakayL13Ju6v1SQjVs3x70wNI00nYrF0lh4SO1P0W7T56iO
z0BlsCD4mpUOXB4bEXl+kVK4fbP0DqE7wpS3n6FKRxJWs10j6VKvAKAKUo0EcStJwBqom4SIxwAR
aJZ6UWltMmYg5piKIMgWerKmJWo4nAKJiHc1QJrL6AqOexMQR2rirvXCRIZAqtsqkD6l+cfQABCJ
FHopE8QlEgbApxOMCR9PonCwVQsCqBgqesr7eghVwyKcGuaAR2LBULJxVMqhcVFqjWIxKN0ipzW1
ZJgQTsCp6k7aCcdiqxifUnMPVRUifWn0knfVMKbhRcQZsfQVVy3/ANOZA7CoyzjIP30VuW4P3U6G
2ptqMThJGDO1QhtGKLIAihQD4O3YnxGfYUwLwlg+IKZUTSzTZSp6kNsVw0lEiQ7Qrd6OFyIl6x/Y
F/7H0/sFX0vMfy179gq9+nL6z/aQ3IwiJnGTB/X1TGB+Pd4YDZvRkS8jUkqqHMXxp5eOAPtoQgAI
igA+RQ5qJlJy0o5BkSmNYnIrXbqM459EpEMAMVWTbSKLgBmRUkOQBvQCjBntwLRiMZyK8y5EDmLg
GrcPd6eVtXSfw0rmq9GPtCLUT8vEW7RAFuAGloRDDBCF6r+ta7J1w2ZhMelxj0MMVoo58W3sQfB1
e5xtMb5EYDIxg9fQn+l8kDIwHm81LCMYioiSmPFM4smIZCqoiehgHJphVRldiwngMx2jokTgvOuB
r1/iIzEfZHpL8OSuvatjRCWIBbiIUjMvIlycXJ6tu1EPK7ONsf4jX6FGXLtb5i0AIEUBAyLKNnmC
0eXoIjOWBJXM8zzVzQLcYxjHGRdyaNuWuBrcAEP8SvwgWIiNR21DphgKV+qX2L6K/sy+xSm24GWX
auGJJxaIchW5zhciNQdwBH6VG7gXIbs6J3CKYR7vQRh7xUrkqRtxJPcrl0+K9It3qMB7IbqTiMcV
KIrIGg2goyLxerZKXeHRnKgdF8Zl2RnPgicjj6kwqdpxTyiFgEdB7ihQs9UwAlE5FaiB2BOtOAFS
ny2JhFjkgTjtTHFAA4qpWKoOuyY5UCtxFGiPWndcwAK6yzqtPQU6KhYJgKquGZVMNnUoFWnoNyYU
ByUbYzyRizSGIKzQJcDMjFBpah7JzTTwKIB7lWhThj2LYok4CpKMp+1gN3obsMpxEu8UUw1Gf1VQ
Hun6DXo7EUJ4sjKTUepRNoiocgKtYnFAjtUZHaQtJ7EA9cHT5pk6JkGMJVXauxG0fFZkR/hlxD/i
Q/yN+pzH8te/YKvfpy+s/wBqJ3rp0wgHJUr0qQwhHZFYKPM84NNrGNs4y7UIwGmIoAPkZjOIkDiC
HRvcpEW7w9kBgdylbuxMZxLEFOE9sgS91fEkInPaoxsxaxA1lLB9pX4blA4/eXfeO5CzYgTKWJyA
3lCR+JfbxHAfo9S1KIh5OriJ8TnBkZnAYdgRKcHuRl4Lm3IppjsOR6NRz6BMR00YAZkZpzig4MOV
tl7tzb+aN6hZtREbcAIxiMgPQ/1CzdkLZ56DQuTLBgK/Wpfhh8INEHaQKnocUTSLLUMKIlRv3awt
cXfkrk/6hbMheMjbkTSGMm3I+WXg9CVYtzGoSmC24VTZeju+VLTKTR1bBIsVyUOViBZaTkYykw4p
HN1XFV6nK3bpOiwdUm2sjb5aBszm+ueYjsHcpx5WLzGNyR4Qd52qz/StQnzF4+ZzVwVaLuwJ7FG2
BwWB/wAy/DD7zmZCI/RjWRX0V+ophnSv1S+xRtuzZnB96rO0W3yf/lCtxAgQZAPGEicdslGzbwxJ
O0onA4DvUYnH0Du4gG9a8mJ47509wxQnKI8yeBOXVGzBebY7aLRcptyKaAMnyCe5HSMo5ntRkQ5y
2IkrVLDIIyLgDYhKEn3riDS3Jse0Kn0KiJzCEwHdOPDs6GOWCAQOxU6uCdOSyoDI7kCI9y1C3I9z
UURARtgbalXbPMAXJR8JiGCnfZjMuyDsqt3FYH1hZrVqYbCvEF4x61QpupvCb0bjqPsUZYSGBTyH
E/iR34Jk70HQPaIomVDgg/0rAdV1h1LdyJYsYuPWtOqhoUYioZ0BMkRzITWnYZnMp8wvqUjbJGrE
bChcFRLxBOD2ozMgwWHBMYIAYxd+xAijqT4g9DqRIxCjqBBbNAjvRtEtG/Fh+lGo+h/+GFfkFPl/
Mfy179gq9+nL6z/adzQDEr8Hy0v+3tnjkPaKwUec5yNcbds/WVu+TG7bAjfGB2qVq9ExkNuau8zM
ajEUB2laTYMpbjQrTcazZyswo/6RUeXsDVOWJakRtUbVsVbilmT1JXbkhGEAZSkcAAjO2W5e0TGz
E4yyM2T+900xWi6NUTtqtfLl2xjmtMnDZFNEEqPL2Yg6A8p786proZsl5MIxjGIrCPsgUc9vooaf
HhGWytVVoyqNyaQ7N/RRGEjVUOKhbjWWMgvL0DgGqQAYkbOxA3BpkBqI2BXOcNbdkaIn86X5B6SV
qYeMgxClyF64ZDlSRbBNCJV+pEMxGSYrd0ajhGrISIIi7kqMLsmBxAxZaeWgRGAoABxHaVe54fEv
3C8oiphFzpClLEykS+1GwS9vl4iMY75DVIrb+TehIk0w396MpCh9k1dfvB2ThEK2YzeQkDpNwzNN
0QoTvEGZ2bFC0zgVkfQE7EZkVkXUOWieG0AD2mpQtjCI6rCq8su6qASqADsCp0AbUc2U4yp2oxGD
06rHNFpPEpym6alN0Y9DRiT2JhAgbSGXHIB+1DVKUtrJhaJOPF/enaEAMmQMpkROQovBKZ2yKgbb
POFQNxVMU8oEDaypQdFCVQn1rFP9KZgvyLNVMlj6wnDepMQD3rw+orwlZhUl9a8YVJA96bEdQjMd
WmKYl1XqUz9NE7D0CuI6a4FN6kJR8D8Y+1arfFF3luUbo9qhG9aJtioyGVAUJGrogCurhQMvDKhI
29B9fQCdgbot34Y25CXqKjcjWMwJA7j/AMDK+nr86P6fmD/+2vfsFXv05fWf7TG9zMmHsxzkdyNm
1chGzcGEKyAORKcqPO8zHgH3UDnv+UwPLxj5kSTImhPehGYhI6iZA9uS02bMLcdrVTkh83U5zhqj
OhOalpt6YAAiTu79JJLAVJKlyHJy/wC1gfiTH7yQ/wDpCB2IAR0gJ/p6WUREtmSpedyvmz9mRA+1
eTYh5Vol5COQ3lHRECEaRGdyW39EIyPFOZ9ZKNy/DRzHMHVIHxCPsg+ihK2JO7GQGAURsWmdQngd
UfpCdcMtPYULAk0Tuc1Q1A+ZhGJqe0rzJyJmaQgCQ5NPVVG0SZXZ1unf7qs2machrn+lKvpYc/ai
SJDTeI3YJ8JjPamkGKYpkBdYmR8OJWi0Pikd0f70eY5iWiyC8pnGW4Kdy1DRZh8OxDOcz7R7ELl0
yly1yIjzEBWoHDJtoKnzVu7C1/TYRN2JiOKQA1EdnYrl+4TKdyRkTie0LGuTY9oWkjim5Ib7EBFy
P1h+VA+U43WQVCzbBjckWAk0R+rCq5YW/hXOX+HzFsFmIzU7xrqLR7B6BhjIgdyMj4YRc9wV/nJ1
eRI70ZbT1QTR3qteLlBuGQVW7k3Q0ixGBCABftVFv6tVpy6N/TwhzsFUwiQN4ZcUgETxTbHTgg9k
jUHBmgRpFMAogkkEYCiBIBBwJqR2omFyAuHKJ+sAKM3eJjijZiNVwUpQBW7MoaJO2p6Jo0uToNqt
3STIyBBJXmTDiNGW7YuHwmq0YAVKbywdp/8ARXB4JIRFSV8SR1EZJwdUTgc+jWGEd6aYxwITLhiS
uOBHRmnchM79DssEwJWJTavWqGNdoVQGXh+leEokiVVUkdy8YVJj1rEevpdvTSjmKt0QOw/agMig
DgUQe5ADJSMqkEA9hRlaBnA4xGIdEkEFwQO9RItnUC+oDYjGOZBTHuT4nH1Iw1GMgXicnG0Li8Qx
RAx6In2BQ71p2YdiY4FQgfFZJtnsFY/R/wADKjof+yHMfy179gq7+nL6/wC0uq6dd6XgtDE9qN/m
JP7scojYFg+xDIbFCOrVOAYjZ8olcuyEIRDykaBS5PkR5XLmkrp8UuzYEDdmwycpxciBuVKQGG9A
W4mROAiHKle5wmM7raLUsYxG0dNz+mcpLTZhw35jGZ93sHTb5awNVy6RGI+3uVvkjAPbH3jcWs4l
0ZCJu2cdccu3oMzgE9m3KevMDIL4sfKjnKdF5HJxjKR+8vSDud25a7kjKRzKP9S5q29uFOXEhQy9
7u9HOzc8MwQe9G5ylw3CP3ZDURt3ImE40MTQqhZcPDLZgFS1I7wHCiZH4g9mOSE7vEZF4xzPavM8
Vwnw5hRFyIlat/EunKhoPWmGA9LOzdDwmGIRv8nKV2MaygcQNzLTcoRnmEx9YwTQBJH0JrQa6RxX
NnYvxPNkxtYxj7Vw/kUYWbEzbwtwhEmICsctzMGsWHtiEg/G7SMguc5blp+dZ1DRMMWOMovuKn/R
+YhM34AQtXI4GALh+zBUw+ooRYY17M6LSA0I0GoOKb8UABGmY/vQIAI3xn/9NFy8THTx04RAH7VE
3DPyeZgTOEGrLAEu3rUZRIIlxACpY9dzgFDmrXMmU7NzRLlQ3Ex2Kc4w8i1c4AJeMutEPGfEdiFV
TqaXYjBNrDFPM6iNqpn00VT1AMelk0YudypEjeU8pgHPF/pT8ctrYJ42ae8WUYAxGosWyURCcphu
KoxWsRBjIY46SjZvF5RPsux7WRnCsQGG5DlowHnxxlkAhraUdrMQoSB+8FCNnRK0S8fEH3Kc5GpJ
dAxxyXL3JVJxA7FAMzS7UQ9XWkxptQq9ApHArTqYoReoKrkHCqcd4UwWfEKI3hCI8MaKQ2BwhEZq
IwcOVKJwNFKOwshKURKUtqkIxAlkQM0RsotcjTYiYSIIwRByWmOOfYi0gTkEYyDEU6H0pjRYp26X
osAmTufWqyl2Kki28Kjd4VRFYAjtROk+teEhVdeJUkF4h61j0y2siNhZEbCol8vpREvEcCjCVRIP
CRQPciB/6ZDJRkWkIl9JwRnJoxyGS1Cj4hEHEYKcs4hz2FEgOBVah3hPtROS8qTEkd9UAcYHSezo
uWDhcjqHbH/3/wCL3Mfy179gq7+nL6z/AGlM5uYyA0npZRsk8FwsgRgfk4FokWxJ7gGYXC75Icz/
AFEys2PZgfHP14BW+T5Kx5dmxFuCOJOJJUbVmxORJYyMSIjeSrdm3bj5kRx3GGqUsy/TcjalpuSi
RGWwnNTt3wfMBOonE7+n/wDiPMxa/dDWonGEDn2no0yAIORX4vk4EEnjhEUrmArVqVqcIzqDKJD7
1HleXuPexuEYQHuxT3bkpbiadEec5wGPJgvGOBuN/wDSo27cRGEQ0YigAHpbvNXgIXIDhmKEyyG9
MQo6w8Q9DuClbhCegk+GOT7WT3AxFRA496ccUmx/IgNOqUiwAfU5wQjOt+7xXT/9Pd6dihc5exLy
rgeUoRLa+5SvXYa5W6ThIZHcV5dm1Gzb92Aqe1kDC3qIqdXgj+mfsQv/ANRmeZuDC3hajuAV2dqE
YQtQlIRiGAYPgF578erUTvd1a5uH3d6IMmyKDYSDhMpXDHUzM1fUm1H9Ekj60AzDImv0pjMDc8vq
irXNC00bR1aiNAPeaqGrytYIEdNZDvX4Y89ctzI0+JgGVy1Kfmi2Wjcd3Hb1j5NLlwi2Je7qo64y
J6ZAknAg5qxbtyEoQjrJGDlTuk+OVBuHWcdDmoGKJPVrj0cESewKkW3mi45htydpzG7BDRbjEHOS
EJmIJDuAiGJ2E0Rtxt65kM7Zo2rwMpAsYxYN60bVomMcoHFlGESPNnVyMAtRmTmXzUpQAqAI7inJ
rmhCEtMblCrplJzqNejl9TlsO9cQIfNiVc5hiwDRJUiJERckFQjen8N6zKtW7c4zANREuQg0JMC5
LJg7SyQlN6jwozgX2g0ZbjQrUDpHrJ7kZu4AcvROKHBE1kcmQiQdKiTRjVDTIgHYpQkXcEOUJKJA
elXdEyDDJnKlLIl1By1BgHXiI7QwUy7AkqI3KhA71JsypTzdliXzQkMwE59ni+lbFG4BXAlEzrGA
dlpACjdgGE3BAyICEcMHOwIDNsUGLxlgUIxxNPWvEAcyUYSGH2plqahREgx3pgnAcLEhu5M5WK/K
sAqMqk9xTRlJ0SZHsUi2BqRvRBzwRhmD9a+pEd8XRCL4qgeK/NGJyQagjgoaQzDJEYHIq5CWJi3q
UYywfTIbiml922K0d8TtC8q345ZoS9qBUeYh4Lgr2oHZRWbuUZAS/RlQ/wDFynRzH8te/YKu/py+
v+0phej/AIgzqU7DXLYwj7TepGEg0o0IPQLlstKNQVDlr8tYlQE/J5WrsdUJBiCjdha8y5iDc4tP
YFuTmAJOZCaIA7Oqb1trd+AJ1mgIG1ECrLzb33HLNKQ96XsxTDLplYgPxPNAeAeGJ/OKlzfMzA1D
TatQGkDs3IykXJqT0R5znAY8mC8Y4G43/wBKjbtxEYRDRiKAAemH9Psye1Y8ZGEp7O5CN0ExgNU9
jZDvUrnli5AGQiIVkXwop3eeL3TEmNqBaMRi3ahqJzWL7WyC/wD4hfiDbt0sgjE+98hqHV3m7EBc
jKI12s5GODALzOasR5SyDgI6bkvXghZsQEIDIZ7z0S5C0RLmeZi0hjotnE9+XQYXIkwtz06jhxV+
hQhP7y2WO8HArtUI4xkOEdnYmjalGJ9udI/SFq53mNcxXy7P5Vr5a1CIHtTaU/Whf5fmYuSfhu2H
YhGVqRmOL4fHTadKtXYUlepIZgjFXLcKm3IRMttOtOycSHidhGCFnmfvIcGrMttWmJ4i0AoWxkPQ
EEY9DE/SuCBI3B0BoI3lguKQ7lwRnNsWWoW6bZVUXkBE5AYKIuSxxeVfUhPl4REdoAKlabXc2YI2
78WkfAQxDqGV2WBOxa9Zfa5V2/I8VoHvKM5FzKpJUJQLF2xUvNm5ADPWjJwXRmRVwQO9OYtHM1Xn
10Q9ps1O8Q4nJwFGUgdAPFHcoQhIStwrjmmNRsqhytvT5UqAkVAOKGBOQ2KQwpVRuQwdiECcCFEF
mBCL1cUzVylNJCiGoTVRHsxFAjHEYKbSAqUbh4pGgO5NKsWrij5YDEavWhGTCAxzwXFAUo9XP0rS
HINexRtB2JruTGAk2MitcPCTgtLucAnlLTqwADoGVRkVoJOkYL4YLjPBE3RXJGAzLuvGexOQQAGq
pPmGB3uqTqowd5OXUnLOKetUclAAuXPbgpvlEfWgDIAhQP5xbPIKZOynrTKEvaJIfsZOfYr9KNVG
ftRcE7QmlUCpG5MAzKN2AYycS7lGIxJYIcIJDOjdgGMcRuQG1a7kjEnAblrgdQGI2dDqEg07l6In
cgcnwDrXCPlXAWY+EkJr0dJ98YIHxDbFEO4x39Dp37k2SKZGOeSoKqJ2oPUjPtVoTPho+5ah7NO5
G7H7y34xtCuWJ4B9PapR2JlZuGstOmXbGh/4eP8AMPMfy179gq9+nL6/7TMUb3KtC6al8CpebZmA
D4gCyEY2pykcAIlfjecgbZH3ds49p+X67khCIxJLKX9P5GWq7cpduxNIx2DeU21WrUB8ScRO7LMy
I6RyHKcFy5F7l3MROUUZEuTUkquWHQL98EcnaPEffPuj7UIQAjCIaIFAAPTfgbBP4m4HnIexE/ah
etEAipO3tKuXiMZCP6vQRuU4zGlpyAj3ryYjTZhW9PIB8BvKhZtARhAARA3fJTpxyV4/1AmV6Uid
eRGWnd0RjAPeuA33aru/7IUZyB4RxzBAOxC7znMGT+GFur/QtPKW4Fjwzu1IGSPm3+H3LdB9io4B
24ntWkEiOxAW7kogbCQjftxibdzxXLxxbZmrUb+ixAAjzI0JJzDq9CN2N63clqhMYtv68eaYeZOL
NvGatwkeGHFJPl0YdThhJtoCrwt7y45+pRiBKRODmipCMTvxCecmDYRDISuk68eI0bsURbjpqGJD
BC9AA3JlojI7fUpT8yhNYMNKnzMhhQD85SncOqRxJQtW+KE8YndsVyc6GUi4GSoajBctcmWeDMK4
LhPror9yVIzBZ9y1CLx7Ubsjpjb2vipX8IUA3tmozJPlgjWA2CjbsuLEKMaOckwNUOWtMLU5OaOa
4oH/ANk5UoxNXLLHtQlThr6k4OmQxf8A90avLIN9oQGnTEZlCGoPhiniQ4qtNx4zGLB/tRjbdjiS
njlghG5MRnEMRKn0qXlS1zlsqyMpdpWhw8SxG5MaB00cgx7lOOw4pn7EKuXdacOEpsskdbVwCiGY
hiCq4nMqYxYE6t6A1Y0UYCm3eUY4vgDkUWZwhO5W4cNyImHpmpRILA0bYm0F9q1xjwnaQnkGai02
/wDEdyfWdW1gjA0dtRxpimFTsT6RpxA1DNGJDE0KrMlREnMQSxrmjqFJMPpTgU7UI5nFnUnpq4Qi
GdNV3wVuWyQXFLHZVTaXC1duPag+DoHIACp3KUSHEg2KKswlEzhrBnEZxBrioixSoBj7UVKzfiDD
iO93yRjbGoH2J7NxRlYJtT90+ElaeYiYHKccENY1RIocP/dFi4xbNFMeimOSqQTuyWmVJfWnIDjB
1KRIMgxogz7VOBoYlipR2hipNgQiM+i9yxNQROPfQ/8AF3mP5a9+wVe/Tl9Z/tQ0ogjYU8YRB2gf
LzKZEYipJoFONs6oR4YRFWA9pXbvFK5M8AOONSuWjetnydeqQOYjX1JhgOmzebhlEh946fPvvb5O
Jqc57oqFixAQtwDRiPTz5rl5GPNHEE8Mmoy0X7UoxJ8QrErVfiY+bLXAH3Tn0G2Zj8VeGm1bzr7R
3BOSNcjQHEkqFkAayNVyQzkfk9syMRzmoeWPaMfa7lbsxobkhEE5aiytcqC8bUBAE5sGV6E4k8tc
lwTxFcijan4oEgrbsVU1q3K4dkQ4QnzFyHKwxIkeL1JoQPMy9+Qoo2LOm1b/ADR4YhEg8EOGPdmg
bVwsMYk0UbXNjyZmmv2UJRIlE1BGHVhZB8EXI7VenCJmXApkqj1mqwKcmI7aoaiSczHAetCMbZlL
evN4AXbQ1ULMrmgYk7gFOMZaoRJaRzARjajqG12UZAaZW6yG5EWeG3GgahlvK8mfHG5w8WMVGyAw
tBidpKNexcubknDFnOwph3lcxs1CQbciCCxzDo8zMkWYBtWTntV+fL3TKyZkwkaOFG1eLxjU1xWm
URK3ZJEBiSnjDSQocvbBEIeIGjkIso6ZGMZEagDimOBRp2owBYklgU6d8BTtWi8C0cJBEWw7jNwh
cBNsDuoiKB8wSnJJOaaMU4YAogyQEqiFZIADSdyAcs+KGPajKDuMHKchQEYuZBy2SmDFjDELXbJh
LNkzy04EhEmJ30Wu2RsIKcx49gwWqR7K4KN23iMkNUZCWxnCYBojJRuD2S6BFwDcSAvLtntKEthd
CcC9O8FSlI1yUpbaqGkg0Y7VpyapRbAUWqeESCAtOWS8w10glu1gpyiGOB9aZhuUZGjyOHYFKYLm
IDbi6OZ3qM4NqJIL/miiaZBhGpG2rKnhwAei1W40JZhuFVG2QYglidi4rcZYcRf8qF23SL5ZMhGp
Jogbk9BxEQAU8pBiaH/2VcdqaAAgcS7YIEhxtBdDUK4M2xSEXBNGCcgq5fPZH1hRu2pASqK1FQyj
KchMDEaQMVbuWyI6xWRoAp6mnC2HEgzEqXlESID+XOrjcjyt60LnLg/dzHEAfcluyQ8mNyFuUQ+s
eGTkM+xOCJx2hFsTnn0OEZbV2IPkpOamq3rzIBpZ70JSDSZiVLURExOJ2IyjgvqVqR8M/hy/xf3/
APF3mP5a9+wVe/Tl9Z/4EXLFqQjKTY4UKJtASlEkS04Fk8qHdRfib8WldA8sHxCI/L1I8jy7XDZk
Tcu5CXuxTqPN848OUBeMcDc//tUbVqIhCIaMQGAHyFrkRMbJBwgBQDABS5nmZNAUAGMjsCuc5OTX
pHhBPhiDwxWlnnE8Mt6sXpF5TgCfk8+dMZXbEwBGQc6NI8O5RnEtKBBB7ChOzLVKIGsbCQp2ZYSG
Owq5rHlkSMCSOEmNHHcvM5nmYWbeL5nsdPCFzm5jOTaH+habEY8vD3YDSU9yRmcyS/Q5pd5gU3Q6
KFOp8hemZaQ9t9mzq8zzwuOHcxOwbEYWLsrZk76SR9SnZ5mZhesCrmst9UJkaxKgIyQgYs5Y7hmr
lu1xAS0wG4IXi0gMYtkpXrYEfM8ERkUZmR1GhL4q6HqGY9q2qYjJnty1bME0jUYq0Ilw7nsCvG00
gDVRsGLTkWDOrVi1SNkNKYap3KEYzLSIBkaMo8pCWq1EDU2BlvRFCBkpciIMCdWujscisMNi1QJi
YnEUUXrSpC3bUYSOkElinenagSWao2umu8JjmAS6Itkk7S4CE5S0NgxTeYe0LVIElNCIATk4qq4l
QdAP0KU8hVSunGZ+hYYIYjcsXWk59ADVARKqFpAZdqLKtVgPUiIGmxHU7J3l60wqDtQaIrmyoYvs
QNwOJbF8KcoA5Ap565DJ6ppgxfaEfJmNJyOC0vGL4kf3oucUYnwTIqhKNTtWh3MqfapC4CbctmS1
6iWyA/vROQwH2qcJnTrZidyfUADm4QjA+EuW3hXIk8UgNPrVA77FHOWov3gIxJo32pj9anqwMaDv
Ctx0hwQX7FVn2lXHAOkP2VCAYuSwD0UYNwgRH0IwPtlmROtgCaDtQlKsjmdmS46jAblLl6ExcMS2
JdRgQGMh6kWrk6IJwwClEE+VMETjk7UKEbjwl4Q+anbugG3I6XORFYlSEGnGvBKu6iJsE2p/9OXh
K08xA2z77cKek45EYqhrsz6g2ZoA4KQkSJxDls1i4K4JBt6A9o49AkKEVHaFbvD24g9//EtvkPMf
y979gq7+nL6/+BEoHCQYoy5Y+daJfyz4h2KELlgw0nVcEotQIRFAKAdEr16Yt24B5SkWACu8vyGm
1Zk8Y3CHmcnGxDXISlLEDLtUOe/qAMhM6rVrAaRnLtQhACMYhgBgAPkhsXcRWEs4y2oxgYaMp6mp
2Mo8vft6LIPHeHh05so24UjACIG4fJ9Mg4ORR5j+mmMBMvO3KgB/NYKVrmDE3rk9UjDY1A56L1wQ
JtykJ25xykzSB7VpInIbC6ETalKT0iA/0BA2+VlHYSNI+lA3Dat7iSfqChqlanaFSXP1MpXfPtnK
MWIYDLNTs3InVbLSbopXaVYvgMH0y/xJ9vSTsXMTNNQP1qO4Eq3zQpAnTcH5po6jHlpiVphMMXdS
nIGMzRmpRSjINgR3qhXLQIYiUiZALTE+sMrzlrlxvLGZZDW7bDgvNlA/ib4IjE4CO2qMnOo1dgAp
nWPxMi2gYiIWYUL0QJSPCX2FD6FJ2wRGpiC2KZw+/NazUCkgMULtuVDiCw+1HE7WRjKL2t+XYmiG
fEuUDOY0g5LQJyIFGyVYP2rTECI3J3ZNIuVplWlEQGJ6AqVKwTYLUxLZpnYBCEJ1JqBsUYygYhnD
0cJiCmIKx9aeRBfaU4IHZ/7ram8O8pxP6AmkR2otUpxF+8IgjSegl19SboAapTAVUISGJUaZii4c
NikZYuGQL0USIDS2YU2GmQDgjoiBdmCQ7A0UrgkZSAJriUxDoSjAASweiMjbcDFlWJB2hEQ1mGbV
C4okdoQnblpmMCuIxkR7S1XJAy2qN61JpxXxYMfzXb6U2ERgAoXohzDEbkOIxIFQWH1oW7NQfEWy
UZjIoGMwJe6SzetGZIJbhFDgnGDv9KjKJBDLTgNpXlimlovnRR7YpmUpgsQHbsTlAjcUbUYxMiGM
pYuD+RaPBzEBUDMbVCF6OptT7aqUYAXAHeMtm5arBlama6J+E9i0czA25P4x4SgaTjlIFUPdmm2L
sQkMkJRDE5H1IxAAmK6RmE7epSM5GgcIHI9E7JxtSp+jKv8Axc5j+XvfsFXv05fWf+BTgMTj0SnM
6YxDyJyARsctLTysDl7ZGZ3IeYCBFyTkwUYMZSjUAYHU2KtWj7EQPo+czC5ESiaEEOFqjy1tz+aF
8O3GP6IA+rq/jLUXnENcAzCNy1FmqXonmQIjJDRRsFbuO8wNMu0dJO5XSaE/lT7IyUonAOATgm5i
bXLUtNsFy8So3pFrZL6kblstAUB2shauOY4kjYhOYEgPDHKIQnpAY1AxbvdOcQEQWOxA3pGRhwx3
DYFgg5bUGTxLx2iqEn0kVC0OYyGJan1oxgXPYQh5ZMGOOCYzALMautRkd4CEIQLKkaJvD2UXFLoB
IdkQYB2xZVQiKLUK7yUCA+1i6YRIbamYA96eZpuTSuRcZO/1Lg1TOyI/Kns2Jdp/uQBa3CWFG+tG
ciZ3M3NENdiBkMeEfWo6rcWjQOHojHQA+YoVbFuYnO45EMwFpmDGTsxTmcR2qkoncCnEX7FSMguI
S+pVVGBTFmVQVUlkHiO2qeIAPemonlOp7fyqM4TcxyTaiO5ahLgFI4LUCC2VEOKOGDqNqFTKprsT
mFNrhQLZN6lIgOWK+1AYmPD2DJTINWITsrcJ0lFw5D5ouaMadyJbFExABEiDvzQ1xD9lFc0UjqLD
c6lLTrIkxCINvSexSjHB1LziQAzMn8+cO0j8ilbtTM4UIJRjI6QBihCNYmWkFRjJpRkHBbehCAAM
szgGXDctyPf+RRjfiNNwExIqKKNuzqMplhAGhKFi5b8uUw4bNkLkX1YSGbhC7cBi1SE8Jxlvfchb
EtXvHKiyXCeFkQQ4NELlqRjOoBxoaISnPzbZpKB2blDTeiNVTUu2xRjFjciHBGccmRt3YiQzcOvM
5K5pepgfCtHMwNqeGseEoSBE4nMYqhfdmnZE5oTGIzUozq9QStUPCTUIRGAp0CBpG7Ex7xUf8U6+
h5j+XvfsFXv05fWf+BkrUw8ZggjcV+IsPPlSXH5u4pyzTHiCBlH7y43+GKp89cxckWAgVGN6Jdql
HT4XoqqfKyNLlY9o6ZiHiZSG0xCnOVBpIBK8yQBBNO5FlKx7BDjcgCoSGUkBkjXuQaQdsM0arQBq
BNRjjsTgyrkQR9qeJAGFTghbFwnaxoiJAydEQiGKqWRqq1WAbd0NGiJOJRbKpVAFwxJ7ExDDN6Kp
Hc6e7I95AQ03YEfpOV8N7hyEYt9JRNq0I75OU929C2+xgvi8xKZ2AkpoWZTKEbfLiI2kf+yGiTuz
vkuVlOAnFjwsM0SKAknSjtQfOqZWwcACzpwHapWomUdwTm5IE7/7lwXyO9PDmPpP5Vw3gR2lYwlu
YfaFW1A9wVeWB2sCuLlpx9f5FxQuR7wqSmO0Aql5u0FUvQ76LhuwPemidXYV4JHsqvDP1KoI7lWn
rVCmJouGUh2IiUzpIYumIffVE2paHxqtM5gB8V97XYwTCWoZYLSAdRoHwVGO0OrtrmJxtgkSiSQx
OanOF6EjGJIiCKlkZSgduSv2JRrICQfdRcUQwFexXZxHDKRI7FdhfYAw4e1EAUyUq4Gibcg1FCMi
4iKK3MigLFthRlIGKt24CsZvXYxCtXrYaUJAgsrNy4XxHrCF6JaftDbvRgagjFFpcOxG3ceJJcHI
un1CT4CJB+1AEbye1GOMZbk7syMQKCiAOL5rypRNwRbSHAYbKozETGRPEKHfkqOwWm7EEbCF5nJz
01rAnhWjmoG1N21jwpwRchtjiuHvGaqgBQBaX6YXY4wkJDuKjOOEgCO//gs/9heYH/7e9+wVe/1J
fWf+Bpt3YiUZYgoXOUJly4lqlaDmQGbKELQ0whWZFGA+e73KynAXJRfRLEjch5cJucmog+OYPRbv
RNYEE9ijcjUSAI6JdhULcw4EgWyLbUwiANiNuJ0kElkxWueAYEbULsJNE5H/AN1wuW2UqiLwBGWo
l0QSIx2Cv1ozjOWrcnNTtK1RAfaAqkjvTEodO9FqriomtgnuTiJ9TICRAGG1PKYH0fWnldiZZjV+
RDybZmd0fyprVjTvlRNKdu2Pp+lHzOal2RJ+xUE7p3prfLtsMgqaYDcP/dcdyR7HXET/AIisInuc
/SqRPcwVIetRiWFcguWFcDguGkdmKEjg9Qol3JwVSgdkSpk1VYg9y8I9S8IWDKhIVJkKlwrxheyV
xW4kLi5cHuC4uXDfohVtGPYsZBcN2Q7x+RcHMEd64OZ9ZP5VS7A9tfsVRCfcP7lxcvAndFcfJv2A
hcfKzj2E/kTGF2PePtVJ3I9v9y4eZbtiVw81bPbRPG9aP+JcJjLsKpbkewqtue9Mbc33gqsZDtBT
H6Vi3YSF4yR+kU2zYswRgViU5g5OOKBiBGQwVCCtU5NuTi5h/wCtqGB70K6R614ye5QGJgQXwcIk
iiInFi9HXDQjBB8RmgbRacMNq8u9Fp7nYsiQQ8S2nNssU8iQ9AsHLIaKSJHqWshxmqZ09aeMq9yI
Aqz7UDEmuS03og91Fr5K5pz0E8JQhzds2pu2oYLVE+ZHIxxXCXIy6tsZ23gf8OH0f8Vq9XmP5a9+
wVe/1JfWf+B5NuEYmXiIABPz29+HHlONJetP5t5tmsfkRtRsQYhjIgGX6xqs5WJHgns3HoPLS8dn
Ds6J9itgYklgmlEg7wyJJ0ydwiZXJStxOD0TmILLTECIyZVJTkranZMy3rYmxVB3CqYQLJiQO0n7
E87jDd/euO6HGRl+RHy4eZLJov8AWh5fLyJ2mgX7u1E5nFfE5t90P7l+8uy7U1vlu+QVBC33f+64
7kiPzXXEZd5XsnudUie4ALhiO9ZDsCqSe9VDqg6R2qxIgSaBoe1QEbYDhyQEGiBlRQBq2K907lcg
DQQLFTBqR6XBVAXhHqXhWapKQ71S5JcNx+1eIHuVREritCS4uWHqCaXLeqIVbUo+tVlOI7Vwc1OP
ePyL4XPkbjL+9fD52JB2yP8AegI8xbkfX9cUeC1c3gD+5EXeVhPdpf6ijr/p2qW6H9xR8z+nSBzY
f3BET5W5A7Q35Qv3sBvb+9cPMSj2v+RNDnMcyU1vmoS7SE0LsD3rGB7z+RNoB7CmlAgpjEjY6zCE
LcZTJyAJQmbZJOMRUoRnGVuocyBDLVAu1FguFOQN5zXm2JEE4qE78pSAI1ZstduYMRmSH+tE4iNO
9FgGUpgMCSR9aEJBxt2qYkGOSPlsJHuyRFwVODVUrRDklw6EiBdjIYHELVysjbIr5c8ChDm7ZtSw
1DBPEi5HaMVT1dN7lyaSAnEbxQ/8MW+auY/lr37BV7/Ul9Z/4mytXYiUZbV5WkzhL7ue7evOiXmf
FsZWrMpMbofsKEZu93hiytxJ2l9i4pi4AMclKNoNXxfkRi1dqO1YLenZhvITN6qoUk/YAgHA7f7l
WQA7Pyr4l6PZqH2LgGv9GL/Wvh8vInIlgPoRbRAfnVIT3eaA3Qp9SrK5dPavh8qTvkFQQt7m/wDd
cdyX+F18SUid5VdJ+lUie4AKkfXVZDsCqSywdUCMuuyiN6twlh5a0x7ARsQGJZ3QCpir0pFzoVzt
+VVC8IXhCwZUcd6pOXcU0b0xucql495X3gI3hVEJdyIlZtzBxDL4nJQl/hH2r4nIAdkQv/xpwG4N
9RVNcR2ql6cd7/3JrfNyHbJcHO/8x/Kg94XRkXKtm9pILs2NNqjbNIydz2B0PLiHJLyavrTppQBB
xdCfLfCu7B4T2hS5bm4+TdjQk+E9hWuBocGzRBBqnZnRt3A8SiYycnIo2bsRoJcSaoBQjbcamdxi
gBgUdQeIqykY+JqITPqRicMCFrsFiGoc+8owvy8sNm7A+pBwJaRjHMSwVwxEZwgATCWPcieUkYSB
rCWCFvm7ZtyJYTai1QkLkdsf7kys3PZ1CMuyVP8AhBT575j+WvfsFXv9SX1n/idDmnY25AacjqWq
U9JzBxUZRBAcCDY0zUIXZmULZBc1qoCWABMqtTtKlAXCLdKu5+hNCMpnaqR0t3IvntRlIt9H1pzc
HY9foXCDc/Ri/wBa+Hy8yNpona3bH5zrVc5kA5CFPqVZXbp2Ovh8se2QVIwt93/unncLfmg/3J7h
P+KTL2T3akwie4ALhiB21WQ7Aqk9G7rgHElVTrHqw7VEAORCvYqTMQuHAAuUXGCoFfluCu/pekER
U59VorUY8Kf5Bu6jMF4QqxHqXhCzCpKXrTmRkwo5dcsxx1/srv8AsCGaLZ5dHMyuw1AC2aYijUWv
k7onHO1LH1Ly+YibU89WCeBBB2VCaYY5FUxGxA3BqalaqM7RMZwNCKFCU4gyjgcyGR0TY4kSIB+t
CAlq945UQw71KYAD4jemFQeFaZgghTs2iGOBkHLbBVCzeaBiC8hQSG/sUYSAk2M45hTu3AJ2YmoI
cgYLzOQuaQamBrEoWf6jaNmZLC7EcK12JC7A4GNSrN7OcQT24H/hG/yynyvmP5e9+wVe/wBSX1n/
AInHlRESi2uZOIA2JrYMgoztWzIx2B1I3oGNssHZg6cvwgahkRsK1cvymrfpGK8qVjyImsSM0+so
iV2VMQCUOCdw7ySuDlwNhIXijbG4fkTTuyI2AkLjcneVgD3P9a4YfQypEBbOxYn1+jp065+EJzh7
IWCqFqGa29LKA3okEgxiMEPOgD+cpC2WgcAsaDMpoxG85K+exTP5x6+qRVAw6aLVIO4bod+hk0qA
jFSF77uQYHJUzqOz0LdGzpdfWUAMsPRHsXKH84j/AJV3/Z1L4Ad4Q+1OY+VM+1Gi4gOYt5H2gv8A
trsrch+7kaIQ5uDE+3kniWJrTeg4ds07V2LhPDsWsgxP0IMHtHIdqiWI1sHlhVDRUbckXLtlvTyq
tUcURIYEhio3bRaIPFHIhTjOtq7ExPYVzFkkvZkRE7k3MQFxywoERy2qAkaAEgK3y9+WqcSSCMgS
7f8AFrmP5e9+wVe/1JfWf+JwlOUoTAZ4nLegbkpTI3sPoQhagIxGQCA2yCuQl4AIkhCMQABgFOUg
CYwp3qM88wvC75oNbZUYJiejBUT9USlEiJwPoAZB7bVWu0e7oAyzQhHAJ5FUoEYu+xAdWHarnYAg
FKvYtRwB+lYUyCvyzJH1KfaetVNkKlP0UxVCmlRUW5PE45LwpygHa0MShGAwDAdiY0I6MVTqUVcF
IGWkJneJwToD1r61SkR9PRXoCcJuqVyp/P8AsKJ7Opdf3IfamIcJ4Ftya9BjlIUK+GRet+6cVG6x
iRSdqeEonFlCYtmGpxOOIG8LVZkD2JwHidi2JzVsAvLIY5FSibhbIOjbucFxy5yKEQXGJKpWqM5D
cgYlw7Icve+HIA8WUmV/iBhMRNP0V5swRYB4X2IRtxAOZ/4t8x/L3v2Cr3+pL6/+KMBvV+Wxo9F0
bTGKj2BBN1nyCYdL7E0sAny6AcVXoou1MmyAVFpGOfS7LU9c1TDqxCu7XCG04rTLOIR5ctKJqBvy
ResslenKhMj9SPaeu4NVxEBNisEzJnrsQc0KxxTgrHoxwyRue1KgTBV4jsXgK1Qq2IW/oxR0GuSG
sVwLrehEZL6EfpTmgTDBUC4oqoW5OPCesVyp/wDuAI93Un/px+uXTUOnjRab8BIe9mieWlqH/Tkq
g2Ln0IC9DzIYCQT25ASOIwKLDUCmkGIXDivMlSW1Rnb4okMY9hQBGjU3EVHSXG0Ik4DFAM4k49aE
pReoEt4cOF+H8s2YxOmJypRAguDgf+DrfID6Buq/zDzH8ve/YKvf6kvr/wCKMSA4DuuYltn9XRp9
64PoUR2egMYliRQqI1ai1TvVKHeiDiE21GZoMAE4Djo2J8VvVQsG6GGJX5xQfEo1wxKpH1rBNMaJ
bVSsTgQh1Ar9K6k5xQMpNwgV2qN0YwNTuQ1AnXV+1T0eEavWn6zksEYWw5wdapusE86DYmhA9q0E
1yWo1MU8T3JzRVLnp0J8BmUREOdqYxBC8y1hmENhT+IJ4nuT1dDYqIgUJzVExVaBYrcqI7xRM/av
LbAuD1+WP/3YfWpAdSX+mPrPVqnjQoxuRE4oy5eeg+4ahfFgbcnpOOCYnz7WRzCAfTdORonjxBVD
LUFol3FEC49KRRhdeMy5pgUDGoFd7uyL0ZASDk4qyZFyAQ/YW/4K1+dH+Rcx/L3v2Cr3+pL6z/xR
JU5e9OR+notR2zJURv6lFGBLGVA60ku2YVCyaNd6cLSSxGBWKxdaMk5LBC5Ni4dNGLEJ41C1AtJb
Qq9H2rVcPcp3LYfIFGN08Ma12owDEjZihaFCSy1xIkwyxXlXPu5YHYU3U7lfOeo9Dvm6D1cMUBci
XiaEKRg4fVjt624LyrdI5lAgPLaVWQA2FNHwgoRI4skdJYFRMijHAMhHai2EQ5RmB2LimOxFsMke
xR0+KZr0kSDxIqiQXGScYJ8DtTmo2rFlQrFbE5kAmjhtWkcUk5iQFxBVpvTxKcl129awdl2H1qQ7
OoX/AOl9Uj0OqFEpwa7OnBjtTMJx2FPaJtT2ZL40HH/Ugvgz82GJhJab8fKnm+C1WiCDg2CaUW3o
j6USzlarBIepWm4aDPsXCXAoSNqnZP7sgj/FX/i3zH8ve/YKvf6kvrP/ABRmdkT9Sgfec+sonYFY
G6R+lQHUqpX7hYu1uITmp6M1XhIzVGI2hYIk4hOMUB7EA5WmJYCgTug2yqonBaQyVcNqrKuxcBEX
zKImaIkHiIIUpDElRO2hWuOMSmIcHFa9ocFAnxCh6Y9iJ3K+/vlPijg2S0zNBV1xTBRnDAgkdbRH
xSWs+KSNuJoKEqqc4DFFvCRTo3qpdCRwClbhjLE7kBtUtXd0VzCNsyY4xRBy6NUayKx7eh4rh4ts
VrhF68QK4oMtUQwTxLgYpye5TmPYDv2B1ru1BqibcxqPsp/WE0RVcYptWNevbOy5D9pTH/rHqf8A
6J+voYYoVbeg1dqDJ0BGpzT59FQmjUbCiWNq57ww9SOuIvQylHEInlrpcY25LRzdpjmRgjKxIbWC
ch0QAxESUWBIIwRjatkjUSdwLKRufeXGJGxv+CFP7F8x/L3v2Cr3+pL6z/xRuk+6VaGyKmdxQHuQ
+tDcOqAME0imiHQ1w4UJRpGQdaYl9yOovN2AGZQlKYBlhHYmlWqJjiTVPigDgKlHYFRYoXPWiFGM
ckH8RYoxBoUyEvZjUlSIzPR+iWU47ajpG5S7FcuDHUfrQZVzDqMInikcNyrXeoj81Dq40FApE+zE
sidvQCcCgJdxRIk/Rgq9GluFHUOIYdDxyQdOyc0C0RlxZJpYoBVUTHaynOgmI1G1BGEo6hkjdAYA
tIBED2TRTtZSHemaipQ7ULkRUhpKQlQnArVbnrBxC8yAYe1FAivWidk4/tBT/wDWfUG+1L6+gumV
axKM4YFMS4kcEX2qgoAK9SqeJ7k12GmWUxRcLX7ezNPZmbVweyaLTzMNccpIStyEZ7DQoA0REyDa
usCcwQhOBeJqD/xb5j+XvfsFS+5xOOPev3C/cL9wv3C/cL9wv3C/cL9wv3C/cL9yv3K/cr9yv3K/
cr9yv3K/cr9yv3K/cr9yv3K/cr9yv3K/cr9yv3K/cr9yv3K/cr90v3S/dL90v3S/dL90v3S/dL90
v3S/dL90v3S/dL90v3S/dL90v3S/dL90v3S/dr90v3a/dr92v3a/dr92v3a/dr92v3a/dr92v3a/
dr92v3a/dr92v3a/dr92v3a/dr92v3a/dr92v3a/dr92v3a9hewvYXsL2F7C9hewvYXsL2F7C9he
wvYXsL2F7C9hewvYXsL2F7C9hewvYXsL2F7C9hewvYXsL2F7C9hewvYXsL2F7C9hewvYXsL2F7C9
hewvYXsL2F7C9hewvYXsL2F7C9hewvYXsL2F7C9hewvYXsL2F7C9hewvYXsL2F7C9hewvYXsL2F7
C9hewvYXsL2F7C9hewvYXsL2F7C9hewvYXsL2F7C9hewvYXsL2F7C9hewvYXsL2F7C9hewvYR+6/
xeHvQ+57sEfuO/BH/wDl+HteNf8Ax/2r/wAFf+Cv/BX/AIS/8Ff/AB/ev/jl/wDHf4kP/wAD7F/8
d3If/wAuxyX/AIP+Jf8Axy/8DvwX/wAb3L/4/uX/AMd3r/45f/HL/wAD/wClf/HL/wCOQ/8A5f8A
4sV/8av/AI1H/wDl/wDhX/xy/wDjl/8AH92C/wDjvtX/AIC/8Ff+Cv8Awv8ACj//AC7v+1f/ABi/
8D/Ch/8Ay7/Hiv8A41f+Fhn4F/8AHL/49f8Ax6/+PX/x6/8AjPtR/wDwMMl/8Yv/AIxf/GL/AOO7
l/8AHr/41f8Ax6/+PX/xy/8Aj1/8b3r/AOMX/wAYv/jF/wDGL/49f/Gr/wCN71/8b3L/AONX/wAa
j/8Ay9f/ABq/+M70f/5f/gw70f8A+Wd6/wDjO5f/ABi/+NX/AMf/AIcF/wDF96/+L7kf/wCWL/43
/Cv/AI9f/Hr/AOPX/wAev/j1/wDHY54I/wD4v+BfuF+4X7hD/wDGwPix/wDZf+Ev/A71/wDH/wCH
Ff8Ag96/8DuQ/wD5f3Yr/wCP71/4P2L/AMJf+Ev/AAl/4S/8Jf8Ag96/8HuX/wAb/ixQ/wD5b34L
/wCK7sV/4C/8FD7r/B4e5ewvYXsL2F7C9hewvYXsL2F7C9hewvYXsL2F+7XsL92v3a/dr2F7C9he
wvYXsL2F+7X7tfu1+7X7tfu1+7X7tfu1+7X7tfu1+7X7tfu1+7X7tfu1+7X7tfu1+7X7tfu1+7X7
tfu1+7X7tfu1+7X7tfu1+7X7tfu1+7X7tfu1+7X7tfu1+7X7tfu1+7X7tfu1+7X7pfu1+6X7pful
+6X7pful+6X7pful+6X7pful+6X7pful+6X7pful+6X7tfu1+6X7pful+6X7pful+6X7pful+6X7
pfuV+5X7lfuV+5X7lfuV+5X7lfuV+5X7lfuV+5X7lfuV+5X7hfuF+4X7hfuV+5X7lfuV+4X7hfuF
+4X7hfuF+4X7hfuF+4X7hfuF+4X7hfuF+4X7hfuF+4X/AI6/8df+Ov8Ax1/46/8AHX/jr/x1/wCO
v/HX/jr/AMdf+Ov/AB1/46/8df8Ajr/x1/46/wDGX/jr/wAZf+Mv/GX/AIy/8Zf+Mv8Axl/4y/8A
GX/jL/xl/wCMv/GX/jL/AMZf+Mv/ABl/4y/8Zf8AjL/xl/4q/wDFX/ir/wAVf+Kv/FX/AIq/8Vf+
Kv8AxV/4q/8AFX/ir/xV/wCIv/EX/iL/AMRf+Iv/ABF/4il/+N4ZeDsz3bV//9k=" transform="matrix(0.2551 0 0 0.2551 0 0)">
</image>
</g>
</g>
<g id="copyright__x0028_replace_TODO_x0027_s_x0029_">
</g>
<g>
<polygon fill="#FFFFFF" points="663.415,39.034 616.903,70.026 663.577,99.242 710.108,68.813 "/>
<path d="M707.052,71.21c0.476,0.582,1.298,0.997,2.054,1.419c0.246,0.136,0.649,0.253,0.673,0.486
c0.034,0.315-1.021,0.899-1.307,1.084c-11.106,7.224-22.025,14.173-33.153,21.38c-3.245,2.104-6.71,4.393-10.055,6.541
c-0.422,0.27-1.164,0.824-1.533,0.824c-0.395-0.002-1.11-0.563-1.531-0.824c-14.561-9.014-29.028-17.896-43.656-26.873
c-0.315-0.192-1.329-0.656-1.348-0.973c-0.023-0.418,0.909-0.697,1.234-0.86c0.521-0.258,1.008-0.41,1.346-0.747
c-0.734-0.706-1.678-1.158-2.616-1.756c-0.254-0.162-0.728-0.396-0.748-0.673c-0.029-0.39,0.831-0.833,1.31-1.16
c1.51-1.026,2.871-1.946,4.371-2.952c11.726-7.853,23.346-15.717,35.098-23.586c1.952-1.307,3.827-2.771,5.831-3.925
c0.472-0.271,0.896,0.104,1.31,0.375c3.625,2.386,7.352,4.792,10.951,7.101c7.869,5.055,15.55,10.057,23.472,15.101
c3.343,2.128,6.872,4.405,10.28,6.616c0.321,0.209,1.366,0.809,1.383,1.01c0.024,0.308-0.542,0.514-0.785,0.672
C708.805,70.022,707.775,70.653,707.052,71.21 M663.731,39.735c-0.102-0.195-0.312-0.283-0.485-0.41
c-0.325,0.063-0.355,0.418-0.747,0.41c-0.297,0.216-0.514,0.512-0.936,0.6c-0.161,0.299-0.559,0.361-0.746,0.636h-0.188
c-0.309,0.327-0.719,0.551-1.047,0.858c-0.404-0.004-0.404,0.395-0.822,0.375c-0.232,0.29-0.511,0.535-0.936,0.635
c-0.281,0.241-0.485,0.561-0.934,0.635c-0.264,0.246-0.527,0.496-0.933,0.598c-0.133,0.332-0.573,0.351-0.749,0.637h-0.187
c-0.38,0.317-0.681,0.716-1.234,0.861c-0.202,0.256-0.53,0.389-0.746,0.633h-0.224c-0.249,0.253-0.476,0.522-0.898,0.6
c-0.178,0.281-0.52,0.402-0.748,0.637c-0.449,0.071-0.618,0.426-0.934,0.634h-0.225c-0.248,0.251-0.475,0.522-0.897,0.598
c-0.104,0.094-0.119,0.28-0.336,0.263c0.02,0.194-0.104,0.244-0.298,0.223c-0.141,0.197-0.328,0.347-0.637,0.375
c-0.057,0.068-0.064,0.186-0.111,0.264c-0.377,0.02-0.418,0.379-0.822,0.371c0.019,0.195-0.105,0.244-0.3,0.223
c-0.199,0.15-0.304,0.396-0.636,0.414c-0.09,0.23-0.313,0.336-0.598,0.375c-0.047,0.137-0.107,0.264-0.337,0.223
c-0.056,0.066-0.063,0.184-0.111,0.259c-0.268-0.042-0.216,0.234-0.524,0.152c0.021,0.194-0.103,0.245-0.299,0.223
c-0.047,0.276-0.34,0.31-0.598,0.375c-0.042,0.157-0.086,0.312-0.336,0.261c-0.15,0.488-0.839,0.434-1.047,0.859
c-0.146-0.035-0.094,0.133-0.261,0.075c-0.007,0.058,0.013,0.14-0.038,0.151h-0.225c-0.279,0.217-0.457,0.539-0.897,0.598
c-0.192,0.205-0.385,0.411-0.708,0.486c-0.01,0.054,0.012,0.136-0.039,0.147c-0.453,0.007-0.484,0.438-0.898,0.486
c-0.005,0.058,0.016,0.14-0.036,0.151c-0.294-0.07-0.228,0.219-0.524,0.148c-0.09,0.258-0.292,0.406-0.598,0.449
c-0.383,0.324-0.715,0.704-1.27,0.859c-0.157,0.191-0.28,0.418-0.635,0.412c-0.245,0.253-0.496,0.499-0.897,0.598
c-0.44,0.545-1.221,0.748-1.683,1.271h-0.223c-0.215,0.232-0.49,0.438-0.711,0.598c-0.153,0.109-0.354,0.152-0.523,0.26
c-0.287,0.186-0.529,0.613-0.935,0.598v0.188c-0.413,0.051-0.491,0.432-0.935,0.449c-0.269,0.256-0.494,0.553-0.934,0.635
c-0.253,0.262-0.495,0.527-0.936,0.598c-0.211,0.313-0.512,0.535-0.934,0.637c-0.264,0.26-0.526,0.521-0.936,0.635
c-0.16,0.313-0.594,0.354-0.746,0.673c-0.045-0.042-0.068-0.106-0.188-0.073c0.028,0.215-0.086,0.285-0.299,0.26
c-0.048,0.289-0.383,0.293-0.637,0.375c0.021,0.194-0.104,0.244-0.299,0.224c-0.032,0.315-0.366,0.331-0.637,0.411
c0.021,0.195-0.103,0.245-0.297,0.223c-0.053,0.285-0.366,0.309-0.637,0.375c0.029,0.216-0.087,0.289-0.298,0.263
c-0.089,0.246-0.354,0.316-0.635,0.374c0.02,0.193-0.105,0.242-0.3,0.223c-0.152,0.199-0.309,0.391-0.635,0.411
c0.02,0.194-0.104,0.246-0.3,0.226c-0.036,0.074-0.073,0.149-0.112,0.225c-0.691,0.229-0.986,0.857-1.756,1.008
c0.05,0.234-0.155,0.206-0.262,0.262c-0.074,0.038-0.139,0.202-0.188,0.225c-0.051,0.023-0.139-0.027-0.187,0
c-0.034,0.018-0.058,0.133-0.112,0.148c-0.249,0.078-0.455,0.207-0.449,0.375c0.015,0.379,0.736,0.345,0.861,0.708
c0.19-0.013,0.344,0.007,0.336,0.188c0.195-0.019,0.318,0.03,0.299,0.226c0.294-0.059,0.256,0.219,0.561,0.149v0.151
c0.175,0.024,0.32,0.076,0.374,0.223c0.422,0.1,0.673,0.375,0.935,0.634c0.169-0.019,0.278,0.022,0.299,0.152
c0.348,0,0.509,0.188,0.636,0.41c0.335-0.048,0.325,0.246,0.635,0.224v0.15c0.257,0.065,0.55,0.098,0.6,0.375h0.26
c-0.054,0.141,0.104,0.07,0.075,0.188c0.315,0.006,0.427,0.219,0.599,0.37h0.262c0.283,0.315,0.675,0.521,1.009,0.786
c0.306,0.017,0.463,0.186,0.599,0.373h0.262c0.278,0.469,0.991,0.504,1.308,0.936c0.336,0.014,0.438,0.26,0.635,0.41
c0.307,0.018,0.463,0.185,0.597,0.373c0.318-0.08,0.263,0.213,0.562,0.15c-0.056,0.168,0.109,0.113,0.076,0.264
c0.408,0.074,0.67,0.299,0.934,0.522c0.195-0.021,0.319,0.028,0.299,0.223c0.191-0.015,0.345,0.005,0.337,0.188
c0.621,0.116,0.877,0.593,1.456,0.749c0.005,0.27,0.465,0.083,0.413,0.41c0.293-0.07,0.228,0.219,0.522,0.148
c-0.051,0.152,0.082,0.117,0.076,0.227c0.297,0,0.469,0.125,0.56,0.335c0.662,0.159,0.983,0.661,1.645,0.821
c0.083,0.303,0.499,0.273,0.599,0.562h0.262c-0.047,0.122,0.119,0.028,0.073,0.151c0.438-0.017,0.41,0.435,0.86,0.408
c0.201,0.387,0.771,0.4,1.01,0.748h0.225c-0.054,0.143,0.102,0.072,0.072,0.188c0.328,0.01,0.457,0.216,0.637,0.374
c0.416,0.105,0.686,0.363,0.935,0.636c0.17-0.021,0.278,0.02,0.298,0.148c0.718,0.193,1.061,0.76,1.797,0.934
c-0.053,0.152,0.079,0.121,0.075,0.227c0.168-0.021,0.276,0.02,0.297,0.148c0.374-0.024,0.437,0.262,0.636,0.41
c0.316,0.021,0.462,0.213,0.635,0.375c0.196-0.02,0.32,0.029,0.299,0.227h0.225c-0.005,0.104,0.112,0.084,0.112,0.184
c0.307,0.018,0.463,0.187,0.598,0.375c0.308,0.028,0.484,0.188,0.636,0.373c0.181-0.018,0.303,0.021,0.299,0.188
c0.328,0.007,0.457,0.217,0.636,0.373c0.314,0.009,0.429,0.223,0.599,0.375h0.261c0.39,0.559,1.234,0.659,1.645,1.195
c0.306,0.02,0.462,0.186,0.599,0.375c0.17-0.021,0.279,0.02,0.299,0.148c0.355-0.006,0.479,0.219,0.637,0.411
c0.576,0.17,0.934,0.562,1.494,0.747c0.368,0.58,1.266,0.629,1.644,1.197c0.386,0.086,0.699,0.247,0.898,0.522h0.261
c0.08,0.181,0.319,0.204,0.374,0.411c0.294-0.06,0.255,0.217,0.562,0.15c0.406,0.514,1.163,0.68,1.607,1.158
c0.317-0.082,0.26,0.213,0.561,0.149c-0.056,0.167,0.107,0.116,0.072,0.263c0.532,0.077,0.725,0.498,1.234,0.598v0.148
c0.189-0.014,0.346,0.004,0.336,0.187c0.419-0.007,0.411,0.413,0.859,0.375c0.056,0.193,0.34,0.159,0.375,0.374h0.225
c0.043,0.133,0.129,0.219,0.335,0.188c-0.05,0.148,0.083,0.117,0.075,0.223c0.296-0.07,0.229,0.219,0.524,0.15
c-0.052,0.15,0.08,0.117,0.073,0.225c0.577,0.06,0.72,0.552,1.271,0.634v0.151h0.524c0.007-0.058-0.015-0.14,0.036-0.151
c0.418-0.044,0.457-0.464,0.896-0.485c0.008-0.055-0.012-0.137,0.037-0.148c0.598-0.201,0.913-0.686,1.534-0.861
c0.247-0.263,0.523-0.497,0.934-0.598c0.114-0.236,0.31-0.387,0.636-0.41c0.237-0.398,0.822-0.45,1.046-0.86
c0.395-0.007,0.396-0.403,0.823-0.375c0.02-0.129,0.129-0.171,0.299-0.147c0.231-0.293,0.507-0.541,0.935-0.638
c0.264-0.258,0.514-0.532,0.935-0.634c0.134-0.205,0.32-0.352,0.636-0.374c0.303-0.22,0.479-0.567,0.935-0.638
c0.271-0.336,0.724-0.493,1.045-0.783h0.188c0.047-0.287,0.368-0.303,0.635-0.372c-0.021-0.196,0.104-0.246,0.299-0.224
c-0.021-0.197,0.105-0.246,0.3-0.227c0.159-0.189,0.318-0.379,0.635-0.411c0.129-0.194,0.305-0.343,0.598-0.374
c0.27-0.266,0.504-0.566,0.971-0.635c0.138-0.189,0.293-0.356,0.6-0.374c0.405-0.428,0.949-0.721,1.495-1.009
c0.091-0.283,0.405-0.342,0.709-0.412c0.091-0.232,0.313-0.336,0.598-0.373c0.004-0.193,0.116-0.281,0.337-0.263
c0.094-0.231,0.313-0.335,0.6-0.372c-0.023-0.209,0.139-0.234,0.336-0.226c0.084-0.188,0.332-0.216,0.412-0.411
c0.405-0.104,0.652-0.367,0.934-0.598c0.398-0.011,0.388-0.433,0.822-0.41c0.278-0.234,0.508-0.515,0.935-0.598
c0.146-0.204,0.304-0.395,0.635-0.414c0.038-0.072,0.073-0.148,0.111-0.223c0.468-0.094,0.625-0.496,1.122-0.561
c0.007-0.057-0.015-0.138,0.039-0.149c0.379-0.07,0.465-0.431,0.896-0.446c0.006-0.058-0.014-0.141,0.037-0.151
c0.227-0.06,0.281-0.29,0.598-0.263c0.239-0.26,0.504-0.492,0.896-0.598c0.26-0.276,0.529-0.543,0.973-0.635
c0.17-0.152,0.283-0.365,0.599-0.374c0.095-0.252,0.33-0.366,0.634-0.411c-0.027-0.326,0.434-0.166,0.449-0.447
c0.459-0.103,0.611-0.512,1.12-0.561c0.007-0.058-0.013-0.141,0.039-0.151c0.395-0.065,0.446-0.476,0.896-0.486
c0.006-0.056-0.015-0.136,0.037-0.149c0.411-0.161,0.649-0.495,1.12-0.596c0.254-0.408,0.844-0.48,1.123-0.861h0.188
c0.152-0.198,0.305-0.393,0.635-0.41c0.139-0.198,0.31-0.363,0.635-0.375c-0.033-0.207,0.104-0.245,0.301-0.223
c0.125-0.223,0.315-0.383,0.634-0.414c-0.019-0.193,0.104-0.242,0.299-0.223c-0.007-0.094,0.084-0.09,0.112-0.148
c0.399-0.123,0.639-0.41,0.936-0.638c0.395-0.126,0.684-0.36,0.933-0.634c0.405,0.006,0.422-0.377,0.823-0.375
c0.295-0.228,0.508-0.538,0.936-0.635c0.173-0.162,0.299-0.373,0.634-0.373c0.039-0.076,0.073-0.152,0.112-0.225
c0.205,0.043,0.228-0.097,0.299-0.188c0.215,0.025,0.279-0.094,0.337-0.225c0.266,0.053,0.199-0.224,0.486-0.148
c-0.028-0.328,0.433-0.166,0.447-0.45c0.475-0.136,0.697-0.522,1.196-0.634c0.007-0.058-0.012-0.14,0.039-0.15
c0.423-0.036,0.463-0.457,0.896-0.486c0.007-0.055-0.014-0.137,0.036-0.148c0.524-0.088,0.709-0.514,1.198-0.633
c-0.053-0.154,0.079-0.122,0.074-0.227c-0.426-0.371-1.006-0.588-1.384-1.01c-0.326-0.023-0.522-0.176-0.635-0.41
c-0.433,0.021-0.438-0.387-0.86-0.375c0.007-0.105-0.127-0.072-0.074-0.223c-0.448,0.023-0.42-0.428-0.861-0.412
c0.048-0.12-0.119-0.027-0.073-0.15h-0.224c-0.34-0.444-0.997-0.574-1.346-1.008h-0.225c0.046-0.122-0.121-0.028-0.074-0.151
c-0.195,0.022-0.321-0.029-0.301-0.224c-0.209,0.01-0.366-0.033-0.335-0.26c-0.14-0.043-0.106,0.091-0.226,0.073
c0.021-0.194-0.103-0.245-0.299-0.224c0.052-0.152-0.081-0.117-0.073-0.225c-0.999-0.246-1.44-1.053-2.431-1.309
c0.035-0.147-0.129-0.095-0.075-0.262c-0.415,0.006-0.423-0.401-0.86-0.373c-0.146-0.328-0.605-0.34-0.784-0.635h-0.224v-0.15
c-0.434-0.041-0.563-0.385-0.935-0.485v-0.151c-0.269-0.065-0.571-0.102-0.636-0.373c-0.788-0.168-1.08-0.84-1.869-1.008v-0.15
c-0.475,0-0.48-0.467-0.934-0.486v-0.148c-0.258-0.066-0.551-0.098-0.6-0.373c-0.182,0.008-0.324-0.025-0.336-0.188
c-0.313-0.01-0.426-0.222-0.598-0.375c-0.449-0.012-0.514-0.408-0.934-0.447c0.012-0.086-0.006-0.145-0.038-0.188
c-0.294-0.031-0.47-0.179-0.598-0.373c-0.195,0.019-0.319-0.029-0.299-0.225c-0.122-0.021-0.189,0.012-0.224,0.074
c-0.088-0.213-0.423-0.176-0.411-0.486c-0.171,0.021-0.279-0.02-0.3-0.148c-0.336,0-0.462-0.213-0.635-0.375
c-0.432,0.007-0.404-0.444-0.86-0.41c0.006-0.105-0.127-0.074-0.075-0.227c-0.426,0.017-0.429-0.394-0.859-0.371
c-0.014-0.1-0.119-0.105-0.075-0.264c-0.114-0.028-0.045,0.129-0.188,0.076c-0.407-0.387-1.047-0.547-1.382-1.012h-0.225
c-0.055-0.194-0.337-0.159-0.374-0.371c-0.192,0.006-0.35-0.023-0.337-0.227c-0.293,0.07-0.227-0.219-0.521-0.148
c-0.192-0.406-0.785-0.41-1.01-0.784c-0.298,0.094-0.342-0.211-0.411-0.263c-0.163-0.121-0.454-0.136-0.524-0.371
c-0.267-0.07-0.587-0.088-0.633-0.375c-0.435,0.008-0.422-0.43-0.861-0.414c-0.035-0.213-0.318-0.178-0.374-0.371
c-0.332-0.018-0.49-0.21-0.636-0.41c-0.43-0.082-0.684-0.339-0.935-0.602c-0.169,0.021-0.276-0.019-0.3-0.147
c-0.218,0.022-0.332-0.065-0.335-0.263c-0.307-0.019-0.463-0.184-0.599-0.375c-0.313-0.035-0.539-0.156-0.635-0.41
c-0.43,0.02-0.422-0.398-0.858-0.371c-0.149-0.304-0.572-0.327-0.712-0.637h-0.223c0.045-0.121-0.123-0.027-0.075-0.148
c-0.413,0-0.419-0.404-0.859-0.375c0.051-0.151-0.083-0.117-0.075-0.227h-0.226c-0.218-0.379-0.769-0.424-1.008-0.781
c-0.489,0.016-0.496-0.453-0.936-0.488v-0.149c-0.267-0.069-0.588-0.085-0.635-0.374h-0.224c-0.27-0.329-0.778-0.418-1.01-0.785
c-0.311,0.012-0.398-0.199-0.636-0.26v-0.152c-0.305,0.082-0.229-0.217-0.523-0.146c-0.08-0.257-0.363-0.311-0.635-0.375
c0.051-0.151-0.081-0.117-0.075-0.223c-0.221,0.019-0.334-0.068-0.336-0.264c-0.516-0.157-0.941-0.403-1.233-0.785
c-0.336,0.049-0.326-0.246-0.636-0.225v-0.148c-0.307-0.019-0.461-0.188-0.599-0.373c-0.184,0.008-0.325-0.023-0.337-0.188
C664.365,40.274,664.292,39.763,663.731,39.735"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#231F20" d="M670.498,47.063c0.198,0.773,0.747,1.196,0.86,2.055
c0.178,0.07,0.116,0.383,0.298,0.449c0.044,0.438,0.177,0.795,0.337,1.121c0.028,0.746,0.461,1.086,0.374,1.942
c0.109,0.093,0.112,0.286,0.224,0.375v0.41c-0.034,0.122,0.097,0.077,0.112,0.151c-0.009,0.446,0.239,0.85,0.299,1.307
c0.021,0.17-0.044,0.388,0,0.563c0.021,0.077,0.133,0.146,0.15,0.224c0.058,0.27,0.034,0.65,0.073,0.973
c0.013,0.089,0.016,0.526,0.074,0.747c0.023,0.085,0.133,0.137,0.15,0.225c0.073,0.379,0.031,0.801,0.073,1.196
c-0.032,0.12,0.099,0.077,0.114,0.147c0.032,0.43-0.102,1.024,0.148,1.234c0.039,0.252-0.096,0.328-0.148,0.486
c-0.346,0.071-0.315-0.234-0.485-0.334c-0.044-0.121,0.058-0.094,0.073-0.152c-0.113-0.672-0.133-1.436-0.15-2.203
c-0.362-0.187-0.114-0.983-0.297-1.347c-0.031-0.118,0.033-0.142,0.073-0.188c-0.23-0.698-0.257-1.226-0.373-1.944
c-0.373-0.34-0.119-1.301-0.485-1.645c-0.043-0.117,0.058-0.092,0.074-0.15c-0.059-0.028-0.057-0.117-0.149-0.111
c-0.004-0.381,0.016-0.789-0.224-0.934c-0.08-0.32,0.014-0.813-0.264-0.936c0.019-0.178-0.003-0.318-0.149-0.338
c-0.046-0.119,0.122-0.027,0.077-0.147c-0.221-0.688-0.563-1.259-0.749-1.98c-0.068-0.032-0.104-0.099-0.187-0.112
c0.02-0.518-0.36-0.638-0.448-1.048c-0.161-0.077-0.325-0.149-0.299-0.411c-0.138-0.012-0.174-0.125-0.337-0.111
c-0.051-0.428-0.929-0.326-1.271-0.225c-0.05,0.012-0.03,0.094-0.037,0.149c-0.472,0.028-0.666,0.331-0.71,0.785
c-0.151-0.052-0.118,0.082-0.225,0.073c0.047,0.223-0.088,0.262-0.186,0.337c0.016,0.504-0.298,0.675-0.3,1.16
c-0.12-0.047-0.029,0.12-0.149,0.075c-0.101,0.572-0.197,1.146-0.412,1.607c-0.014,0.584-0.123,1.071-0.298,1.493
c0.265,0.316-0.285,0.757,0,1.085c-0.282,0.614-0.109,1.684-0.375,2.318c-0.045,0.156,0.061,0.164,0.076,0.262
c-0.27,0.434-0.097,1.008-0.15,1.568c-0.087,0.9-0.09,1.928-0.075,2.881c-0.179-0.006-0.103,0.242-0.187,0.335
c0.206,0.214-0.154,0.568,0.074,0.784c-0.064,0.264-0.104,0.498,0,0.748c-0.158,0.225-0.053,0.688,0.112,0.822v2.057
c0.058,0.133-0.126,0.021-0.112,0.111c0.432,0.467-0.025,1.822,0.338,2.355c-0.156,0.322-0.082,0.979,0.074,1.232
c0.044,0.441-0.092,1.064,0.074,1.383c-0.015,0.441,0.004,0.844,0.15,1.121c-0.282,0.436,0.292,0.945,0,1.383
c0.202,0.52,0.215,1.232,0.298,1.871c0.009,0.053,0.112,0.012,0.112,0.074c0.037,0.871,0.161,1.654,0.375,2.353
c-0.022,0.571,0.055,1.042,0.224,1.423c0.031,0.117-0.033,0.141-0.075,0.186c0.109,0.352,0.293,0.941,0.226,1.232
c0.087,0.049,0.036,0.238,0.186,0.225c0.224,1.223,0.587,2.307,0.823,3.515c0.135,0.076,0.12,0.302,0.261,0.372
c0.049,0.861,0.491,1.33,0.599,2.131c0.079,0.034,0.063,0.16,0.187,0.15c-0.284,0.059,0.218,0.461,0.299,0.598
c0.026,0.045-0.013,0.137,0,0.188c0.033,0.123,0.225,0.152,0.149,0.41c0.012,0.052,0.094,0.03,0.15,0.038
c0.049,0.236,0.078,0.494,0.336,0.523c0.029,0.481,0.486,0.534,0.674,0.857h0.26c0.212,0.467,0.813,0.156,1.197,0.075
c0.884-0.661,1.356-1.733,1.83-2.802c0.048-0.122-0.118-0.028-0.074-0.151c0.221-0.237,0.381-0.54,0.374-1.008
c0.115,0.028,0.046-0.13,0.188-0.075c-0.01-0.322,0.128-0.494,0.075-0.857c0.435-0.252,0.116-1.256,0.523-1.534
c0.016-0.158-0.055-0.401,0.073-0.45c0.056-0.141-0.103-0.07-0.073-0.184c0.199-0.362-0.081-1.201,0.336-1.348
c0.085-0.439-0.024-0.844,0.224-1.234c0.006-0.105-0.127-0.072-0.074-0.224c0.29-0.475,0.116-0.968,0.223-1.644
c0.032-0.197,0.192-0.373,0.225-0.561c0.067-0.387-0.063-0.736,0.149-0.973c0.049-0.146-0.121-0.076-0.074-0.223
c0.22-0.643-0.063-1.365-0.149-1.945c0.021-0.396-0.005-0.743-0.15-0.973c-0.049-0.151,0.081-0.117,0.076-0.223
c-0.23-0.439-0.151-1.318-0.151-2.243c0.003-0.677-0.069-1.392,0-1.833c0.039-0.23,0.287-0.445,0.075-0.635
c0.054-0.229,0.112-0.286,0-0.521c0.21-0.258,0.115-0.62,0.15-0.937c0.026-0.222,0.179-0.45,0.224-0.71
c0.05-0.291,0.006-0.573,0.076-0.82c0.069-0.248,0.235-0.453,0.188-0.713c0.072,0,0.047-0.101,0.147-0.072
c0.031-0.543,0.298-0.854,0.3-1.422c0.261-0.313,0.345-0.803,0.561-1.158c0.082-0.691,0.457-1.088,0.673-1.645
c0.142,0.055,0.072-0.102,0.188-0.075c0.076-0.431,0.497-0.522,0.597-0.933c0.327-0.086,0.48-0.344,0.71-0.525
c0.185-0.001,0.352-0.023,0.375-0.186h0.709v0.113c0.565,0.07,0.738,0.533,1.084,0.821c0.066,0.281,0.146,0.551,0.41,0.636
c0.257,0.729,0.688,1.279,0.824,2.13c0.005,0.042,0.068,0.03,0.111,0.038c0.291,1.539,0.701,2.961,0.822,4.672
c0.125,0.086,0.074,0.348,0.188,0.449c0.043,0.117-0.059,0.09-0.075,0.148c0.077,0.125,0.251,0.453,0.075,0.598
c0.114,0.071,0.02,0.353,0.148,0.411c-0.079,1.538,0.173,3.56-0.148,4.784c0.096,0.402,0.098,0.898,0.299,1.197h0.223
c0.055,0.059,0.08,0.146,0.113,0.223c0.169-0.021,0.278,0.021,0.299,0.15c0.41-0.012,0.571,0.227,1.047,0.149
c0.207-0.141,0.469-0.229,0.747-0.3c0.12-0.553,0.565-0.779,0.522-1.494c0.115,0.027,0.046-0.129,0.188-0.076
c0.045-0.389,0.084-0.785,0.149-1.158c0.121,0.046,0.028-0.121,0.15-0.074c0.199-1.23,0.656-2.207,0.934-3.363
c0.245-0.215,0.375-0.547,0.785-0.6c0.062-0.049,0.128-0.095,0.146-0.186c0.31-0.079,0.687-0.09,1.086-0.074
c-0.002,0.141,0.18,0.094,0.15,0.26c0.513-0.049,0.411,0.51,0.785,0.6c-0.003,0.437,0.162,0.707,0.374,0.934
c-0.06,0.406,0.204,0.493,0.223,0.821c0.068,0.034,0.104,0.099,0.188,0.112c-0.034,0.309,0.2,0.348,0.223,0.6
c0.208,0.214,0.881,0.595,1.009,0.037c0.421-0.104,0.456-0.591,0.86-0.709c-0.055-0.305,0.354-0.146,0.299-0.45
c0.161-0.089,0.438-0.059,0.411-0.335c0.239-0.074,0.622-0.002,0.636-0.299c0.322-0.017,0.598-0.078,0.785-0.227
c0.459,0.057,0.926-0.077,1.309,0c0.237,0.049,0.364,0.285,0.598,0.227c-0.004,0.129,0.153,0.094,0.15,0.223
c0.188-0.094,0.159,0.105,0.261,0.15c0.236,0.108,0.46,0.331,0.598,0.598c0.07,0.135,0.36,0.24,0.485,0.412
c0.061,0.078,0.036,0.177,0.075,0.223c0.096,0.113,0.196,0.104,0.3,0.188c0.06,0.048,0.076,0.177,0.15,0.224
c0.043,0.028,0.138-0.023,0.185,0c0.074,0.034,0.08,0.239,0.301,0.151c0.186,0.235,0.543,0.301,0.934,0.334
c-0.013,0.064,0.029,0.072,0.075,0.076c0.036,0.162-0.041,0.205-0.075,0.299h-0.859c-0.082-0.344-0.663-0.186-0.71-0.561h-0.225
c-0.098-0.301-0.41-0.387-0.598-0.6c0.025-0.225-0.06-0.338-0.262-0.334c0.057-0.17-0.111-0.116-0.076-0.264
c-0.131-0.08-0.307-0.111-0.298-0.336c-0.487-0.271-1.184-0.725-2.206-0.561c-0.197,0.031-0.439,0.171-0.634,0.224h-0.338
c-0.05,0.012-0.03,0.094-0.037,0.151c-0.519,0.177-0.8,0.596-1.233,0.859v0.26c-0.305-0.005-0.211,0.387-0.523,0.375
c-0.036,0.248-0.352,0.221-0.411,0.447c-1.129,0.197-1.171-0.695-1.644-1.156c-0.044-0.281-0.091-0.559-0.375-0.6
c-0.032-0.252,0.021-0.595-0.223-0.636c0.019-0.17-0.02-0.276-0.149-0.299v-0.263c-0.289-0.398-0.56-0.811-1.384-0.672
c-0.203,0.123-0.363,0.283-0.598,0.375c-0.041,0.284-0.15,0.496-0.299,0.672c-0.055,0.143,0.102,0.072,0.074,0.187
c-0.181,0.28-0.313,0.609-0.375,1.009h-0.112v0.561c-0.119-0.045-0.028,0.123-0.148,0.076c-0.041,0.545-0.264,0.908-0.299,1.457
c-0.122-0.045-0.028,0.121-0.149,0.076v0.408c-0.374,0.674-0.395,1.699-1.048,2.096c-0.038,0.072-0.073,0.147-0.111,0.224
c-0.457,0.017-0.776,0.169-1.196,0.224c-0.137-0.023-0.416-0.265-0.523-0.074c-0.408-0.094-0.701-0.605-1.195-0.373
c0.015,0.365-0.244,0.615-0.299,0.973c-0.036,0.24,0.053,0.486,0,0.745c-0.027,0.136-0.188,0.233-0.225,0.375
c-0.104,0.401-0.031,0.878-0.149,1.271c-0.358,0.303-0.288,1.033-0.635,1.346c-0.055,0.143,0.103,0.072,0.074,0.188
c-0.106-0.008-0.073,0.126-0.225,0.074c0.046,0.256-0.13,0.293-0.075,0.561c-0.15-0.052-0.117,0.082-0.225,0.074v0.225
c-0.173,0.125-0.25,0.347-0.335,0.563c-0.111,0.086-0.333,0.066-0.298,0.297c-0.53,0.009-1.1,0.555-1.57,0.149
c-0.141-0.04-0.108,0.094-0.226,0.075c0.046-0.119-0.12-0.028-0.075-0.15c-0.43,0.045-0.561-0.212-0.859-0.297
c-0.157-0.416-0.508-0.641-0.598-1.123c-0.483-0.451-0.687-1.182-0.935-1.868c-0.06-0.054-0.146-0.077-0.186-0.149
c-0.044-0.117,0.057-0.092,0.073-0.15c-0.287-0.299-0.308-0.861-0.523-1.232c-0.727,0.283-0.215,1.802-0.636,2.393
c-0.044,0.158,0.062,0.164,0.075,0.26c-0.385,0.338-0.072,1.375-0.448,1.721v0.447c-0.137,0.305-0.223,0.96-0.187,1.199
c-0.206,0.328-0.237,0.834-0.3,1.305c-0.261,0.126-0.223,0.551-0.223,0.936c-0.21,0.104-0.231,0.395-0.226,0.709h-0.112
c-0.072,0.441-0.37,0.652-0.373,1.16c-0.218,0.244-0.344,0.578-0.448,0.936c-0.142-0.057-0.072,0.101-0.188,0.072
c0.001,0.462-0.406,0.52-0.448,0.936c-0.421,0.166-0.464,0.709-0.897,0.861c-0.734,0.088-1.301,0.324-1.831,0
c-0.04-0.023-0.094-0.198-0.149-0.227c-0.035-0.016-0.12,0.025-0.149,0c-0.13-0.106-0.111-0.417-0.411-0.411
c0.031-0.229-0.187-0.212-0.3-0.299c0.063-0.235-0.113-0.234-0.224-0.298c-0.084-0.391-0.222-0.729-0.486-0.936
c0.001-0.338-0.182-0.489-0.374-0.635c-0.001-0.471-0.271-0.676-0.3-1.121c-0.063-0.035-0.06-0.14-0.186-0.113
c0.036-0.261-0.131-0.316-0.074-0.596c-0.321-0.315-0.261-1.013-0.598-1.311c-0.041-0.139,0.092-0.106,0.073-0.225
c-0.371-0.908-0.61-1.955-0.935-2.914c0.126-0.701-0.511-1.643-0.41-2.317c-0.209-0.376-0.145-1.028-0.375-1.383
c0.066-0.28-0.098-0.907-0.224-1.235c0.235-0.253-0.24-0.6,0-0.857c-0.261-0.631-0.22-1.354-0.411-1.908
c0.091-0.811-0.351-2.104-0.225-2.951c-0.184-0.414-0.072-1.122-0.225-1.568c-0.053-0.67,0.133-1.581-0.15-2.021
c0.051-0.259,0.094-0.354,0-0.599c-0.02-0.082,0.074-0.053,0.076-0.111c0.007-0.59,0.023-1.285-0.076-1.719
c-0.084-0.379,0.129-0.645-0.035-0.973h-0.149c-0.069,0.469-0.216,1.127-0.15,1.496c-0.342,0.652-0.066,1.926-0.374,2.617
c-0.053,0.141,0.103,0.07,0.074,0.185c-0.121-0.046-0.027,0.122-0.147,0.075v1.232c0.031,0.122-0.125,0.052-0.113,0.151
c0.123,0.411-0.136,0.779-0.149,1.122c-0.008,0.203,0.1,0.297,0.073,0.447c-0.015,0.1-0.124,0.174-0.148,0.299
c-0.122,0.664,0.046,1.445-0.224,2.057c0.135,0.152,0.053,0.518,0.075,0.785c-0.392,0.505-0.052,1.742-0.374,2.315
c-0.047,0.122,0.119,0.03,0.073,0.151c-0.214,0.223-0.138,0.732-0.148,1.156c0.033,0.122-0.124,0.052-0.112,0.152
c0.058,0.504-0.105,0.79-0.075,1.271c-0.236,0.485-0.2,1.243-0.298,1.868c-0.314,0.558-0.158,1.588-0.562,2.057
c-0.108,1.023-0.341,1.928-0.673,2.729c-0.055,0.141,0.104,0.07,0.074,0.186c-0.23,0.156-0.165,0.607-0.41,0.748
c-0.013,0.076,0.008,0.117,0.074,0.111c-0.141,0.121-0.103,0.422-0.297,0.486c0.062,0.423-0.246,0.479-0.227,0.859
c-0.128,0.021-0.171,0.131-0.148,0.301c-0.129,0.019-0.17,0.127-0.15,0.299c-0.153,0.019-0.209,0.137-0.186,0.336
c-0.234,0.289-0.578,0.469-0.748,0.822c-0.336,0.148-0.614,0.357-0.973,0.486c-0.117,0.043-0.09-0.061-0.149-0.074
c-0.396,0.347-0.857-0.041-1.31-0.076c0.007-0.105-0.125-0.072-0.072-0.223c-0.631-0.169-0.896-0.699-1.159-1.235
c-0.013-0.05-0.095-0.03-0.15-0.036c0.075-0.287-0.202-0.221-0.148-0.486c-0.077-0.049-0.194-0.058-0.263-0.113
c-0.029-0.606-0.352-0.918-0.449-1.457c-0.057-0.029-0.055-0.117-0.148-0.111c0.059-0.484-0.127-0.721-0.337-0.936
c0.036-0.47-0.12-0.751-0.298-1.009c0.087-0.609-0.405-1.255-0.375-2.057c-0.004-0.042-0.068-0.03-0.111-0.036
c-0.244-1.153-0.381-2.408-0.749-3.438c0.143-0.159,0.011-0.476-0.073-0.636c-0.28,0.19-0.496,0.448-0.485,0.934
c-0.151-0.052-0.12,0.082-0.226,0.074v0.262c-0.165-0.053-0.1,0.123-0.111,0.227c-0.129-0.005-0.095,0.152-0.225,0.147
c0.037,0.423-0.283,0.491-0.299,0.86c-0.369,0.241-0.458,0.764-0.896,0.936c-0.051,0.01-0.031,0.091-0.038,0.148
c-0.243,0.082-0.502,0.146-0.674,0.299c-0.798-0.012-1.406-0.211-1.718-0.709c-0.51-0.301-0.577-1.043-1.047-1.383
c-0.04-0.285-0.232-0.416-0.225-0.749c-0.119-0.153-0.193-0.356-0.225-0.598c-0.059-0.028-0.055-0.118-0.15-0.112
c-0.075-0.346-0.066-0.778-0.335-0.936c-0.059-0.648-0.423-0.996-0.374-1.754c-0.058-0.031-0.055-0.121-0.148-0.113
c0-0.584-0.219-0.953-0.224-1.533c-0.114-0.012-0.012-0.236-0.188-0.188c-0.021-0.914-0.118-1.748-0.074-2.729
c-0.355,0.081-0.42,0.453-0.524,0.785c-0.19-0.078-0.032,0.191-0.186,0.15c-0.021,0.514-0.309,0.763-0.374,1.232
c-0.374,0.412-0.504,1.066-0.933,1.422c-0.137,0.439-0.579,0.569-0.86,0.859c-0.124-0.012-0.232-0.008-0.263,0.074
c-0.142-0.008-0.339,0.039-0.374-0.074h-0.375c-0.988-0.443-1.348-1.52-1.531-2.766c-0.006-0.045-0.068-0.033-0.112-0.038
c-0.016-0.445-0.221-0.702-0.226-1.16c-0.275-0.282-0.283-0.835-0.372-1.308c-0.272-0.252-0.144-0.457-0.226-0.86
c-0.066-0.03-0.102-0.095-0.188-0.111c0.015-0.498-0.354-0.615-0.521-0.933c-0.281,0.064-0.397-0.164-0.562,0
c-0.151,0.051-0.117-0.082-0.224-0.076c-0.498,0.15-0.832,0.464-0.859,1.084c-0.689,0.656-0.777,1.914-1.533,2.504
c-0.138-0.057-0.29-0.121-0.411,0c-0.418-0.142-0.849-0.271-1.008-0.672c-0.013-0.051-0.094-0.031-0.15-0.038
c-0.115-0.509-0.649-0.595-0.86-1.009c-0.331-0.017-0.489-0.207-0.635-0.41c-0.162,0.012-0.303,0.001-0.299-0.151
c-0.205,0.151-0.502-0.007-0.71-0.073c-0.425,0.086-0.595,0.08-1.01,0c-0.355-0.032-0.353,0.295-0.672,0.299
c-0.04,0.209-0.325,0.173-0.337,0.412c-0.191-0.057-0.16,0.112-0.336,0.072c-0.25,0.525-0.617,0.93-1.01,1.309h-0.186
c-0.036,0.066-0.14,0.061-0.113,0.188c-0.457,0.103-0.771,0.352-1.385,0.299c-0.063-0.086-0.096-0.202-0.072-0.373
c0.272-0.039,0.517-0.105,0.785-0.151c0.137-0.298,0.688-0.183,0.746-0.558c0.175,0.047,0.126-0.127,0.299-0.076
c0.098-0.427,0.521-0.527,0.637-0.934c0.266-0.132,0.544-0.254,0.635-0.561c0.285-0.028,0.602-0.021,0.635-0.299
c0.616-0.116,1.475-0.102,2.094,0c0.086,0.188,0.33,0.217,0.599,0.223c0.127,0.195,0.326,0.322,0.561,0.411
c0.015,0.21,0.131,0.316,0.372,0.298c-0.018,0.232,0.189,0.238,0.338,0.3c0.008,0.152,0.047,0.28,0.225,0.263
c-0.087,0.41,0.385,0.264,0.373,0.598c0.582,0.218,1.041-0.094,1.121-0.598c0.165,0.053,0.1-0.127,0.225-0.111
c-0.034-0.234,0.132-0.269,0.074-0.523c0.296-0.129,0.191-0.659,0.562-0.712c-0.029-0.639,0.425-0.796,0.599-1.233h0.186
c0.052-0.01,0.03-0.092,0.037-0.148c0.485-0.094,1.088-0.105,1.57,0c-0.028,0.115,0.129,0.045,0.075,0.186
c0.249,0.039,0.222,0.352,0.447,0.411c0.087,0.538,0.449,0.798,0.413,1.459c0.11,0.089,0.113,0.284,0.223,0.374
c-0.031,0.604,0.297,0.85,0.299,1.419c0.177,0.187,0.1,0.626,0.338,0.749c-0.068,0.789,0.307,1.138,0.374,1.793
c0.236,0.25,0.483,0.488,0.708,0.748c0.952,0.154,1.299-0.299,1.722-0.672c-0.058-0.193,0.112-0.162,0.075-0.338
c0.512-0.546,0.673-1.445,1.156-2.018c-0.049-0.225,0.125-0.225,0.076-0.45c0.139,0.04,0.059-0.138,0.186-0.11
c0.04-0.704,0.567-1.223,0.674-1.982c0.17-1.201-0.067-2.618,0.074-3.85c-0.02-0.078,0.055-0.07,0.112-0.074
c0.098-0.6-0.189-1.586,0.522-1.57c-0.007,0.121,0.064,0.162,0.15,0.188c0.39,2.787,0.083,6.27,0.372,9.157
c-0.03,0.12,0.099,0.077,0.112,0.151c0.027,0.176-0.061,0.458,0.076,0.521c0.051,0.909,0.082,1.835,0.374,2.504
c-0.022,1.146,0.505,1.738,0.635,2.729c0.06,0.028,0.058,0.117,0.148,0.111c0.101,0.86,0.563,1.358,1.011,1.869
c0.479,0.122,0.92,0.187,1.458,0.075c0.148-0.088,0.172-0.303,0.411-0.299c0.064-0.619,0.738-0.632,0.709-1.347
c0.182-0.117,0.28-0.317,0.299-0.598c0.288-0.035,0.324-0.323,0.338-0.634c0.15,0.051,0.117-0.082,0.225-0.076
c0.08-0.609,0.658-1.104,0.373-1.832c0.185-0.383-0.146-0.764-0.15-1.196c-0.059-0.028-0.055-0.117-0.148-0.111
c-0.023-1.173-0.388-2.005-0.374-3.214c-0.009-0.057-0.112-0.015-0.112-0.074c0.047-0.339-0.203-0.639,0-0.896
c0.188-0.053,0.197,0.076,0.336,0.072c0.127,0.889,0.22,1.774,0.449,2.653c0.031,0.12-0.033,0.144-0.074,0.188
c0.289,0.459,0.088,1.406,0.561,1.682c0.193-0.092,0.223-0.352,0.225-0.635c0.188-0.087,0.217-0.333,0.223-0.598
c0.381-0.232,0.301-0.922,0.637-1.197v-0.672c0.124,0.012,0.041-0.184,0.225-0.113c-0.023-0.385,0.184-0.537,0.147-0.934
c0.442-0.156,0.2-0.998,0.563-1.232V69.19c0.267-0.254,0.268-0.78,0.298-1.27c0.121,0.045,0.028-0.121,0.151-0.074
c0.181-0.742,0.366-1.477,0.559-2.207c-0.044-0.194,0.033-0.354,0.15-0.746c0.056-0.191,0.127-0.381,0.225-0.486
c-0.115-0.11-0.066-0.383-0.076-0.598c0.45-0.326-0.141-0.662-0.073-1.16c-0.15-0.123-0.223-0.326-0.226-0.598
c-0.378-0.592-0.541-1.4-0.71-2.205c-0.059-0.027-0.055-0.119-0.148-0.113c-0.155-0.781-0.314-1.508-0.449-2.129
c0.075-0.275,0.009-0.523-0.187-0.711c-0.023-0.176,0.063-0.461-0.076-0.523c0.25-0.281-0.223-0.621,0-0.859
c-0.312-0.911-0.057-2.385-0.297-3.365c0.225-0.358,0.044-1.127,0.297-1.457c0.028-0.115-0.126-0.045-0.074-0.188
c0.036-0.202,0.163-0.309,0.15-0.561c0.151-0.346,0.392-0.604,0.486-1.008c0.36-0.427,0.81-0.762,1.72-0.638
c0.053,0.147,0.199,0.202,0.371,0.226c-0.036,0.313,0.246,0.305,0.227,0.598c0.062,0.051,0.078,0.145,0.224,0.113
c-0.181,0.61,0.353,0.879,0.338,1.27c-0.006,0.129-0.089,0.43,0.073,0.676c-0.102,0.657-0.182,1.916,0.074,2.465
c-0.026,0.254-0.068,0.18,0,0.41c-0.32,0.453-0.005,1.539-0.224,2.094c0.177,0.404-0.064,1.178-0.149,1.758
c0.193,0.232-0.078,0.475,0,0.821c0.033,0.12-0.123,0.052-0.112,0.151c0.019,1.264-0.354,2.135-0.45,3.288
c0.045,0.38,0.314,0.534,0.3,0.973c0.179,0.022,0.068,0.329,0.263,0.335v0.299c0.393,0.643,0.715,1.353,0.936,2.17
c0.058,0.027,0.055,0.117,0.147,0.111c-0.001,0.437,0.164,0.709,0.374,0.934c0.046,0.123-0.119,0.029-0.074,0.15
c0.291,0.243,0.12,0.949,0.486,1.121c-0.022,0.17,0.02,0.277,0.148,0.299c-0.043,0.367,0.195,0.455,0.15,0.822
c0.338,0.285,0.17,1.076,0.522,1.346c0.006,0.092,0.009,0.182-0.073,0.188c0.079,0.033,0.064,0.158,0.186,0.147
c-0.175,0.099,0.086,0.335,0.15,0.638c0.104,0.494,0.227,1.132,0.375,1.494c0.035,0.475,0.141,0.879,0.297,1.234v0.261
c0.128,0.071,0.06,0.339,0.188,0.411c-0.135,0.146,0.02,0.418,0.074,0.598c0.033,0.117-0.031,0.141-0.074,0.188
c0.233,0.668,0.172,1.48,0.373,2.02c-0.254,0.432,0.251,1.068,0,1.494c0.11,0.348-0.185,1.26,0.188,1.533
c0.312-0.144,0.179-0.501,0.224-0.787c0.086-0.561,0.348-1.217,0.224-1.941c0.263-0.088,0.131-0.568,0.3-0.749
c-0.053-0.362-0.078-0.784,0-1.345c0.021-0.145,0.127-0.268,0.149-0.412c0.035-0.227-0.029-0.47,0-0.672
c0.009-0.064,0.092-0.092,0.111-0.15c0.115-0.369,0.007-1.049,0.076-1.57c0.056-0.434,0.299-0.668,0.073-0.936
c0.303-0.682,0.103-1.52,0.224-2.277c0.022-0.127,0.131-0.232,0.15-0.338c0.159-0.826,0.017-1.697,0.15-2.542
c0.052-0.085,0.073-0.2,0.148-0.261c0.046-0.156-0.06-0.164-0.075-0.261c0.398-0.599,0.099-1.896,0.413-2.579
c0.05-0.152-0.081-0.117-0.075-0.227c0.217-0.795,0.262-1.777,0.447-2.539c0.066-0.266-0.048-0.598,0-0.896
c0.03-0.193,0.1-0.342,0.15-0.637c0.072-0.423,0.101-0.807,0.075-1.232c-0.019-0.081,0.054-0.072,0.111-0.076
c0.007-0.342-0.052-0.75,0.149-0.896c0.022-0.121-0.012-0.188-0.076-0.225c0.233-0.59,0.189-1.457,0.45-2.02
c0.021-0.082-0.071-0.052-0.075-0.111c0.11-0.129,0.052-0.424,0.225-0.485c0.045-0.122-0.121-0.03-0.074-0.152
c0.296-0.868,0.416-1.641,0.56-2.728c0.06-0.441,0.148-0.905,0.374-1.233c-0.061-0.47,0.159-0.663,0.15-1.083
c0.135,0.035,0.017-0.183,0.188-0.112c0.018-0.544,0.229-0.892,0.447-1.231c0.047-0.123-0.122-0.031-0.075-0.151
c0.144-0.22,0.267-0.454,0.3-0.785c0.145-0.142,0.324-0.247,0.337-0.524c0.325-0.121,0.352-0.543,0.709-0.636
c0.042-0.082,0.18-0.067,0.15-0.223c0.406-0.067,0.858-0.136,1.233-0.075c0.223,0.035,0.573,0.216,0.748,0.373
c0.025,0.025-0.021,0.123,0,0.15c0.025,0.032,0.368,0.23,0.41,0.301c0.05,0.082,0.018,0.246,0.075,0.297
C670.346,46.989,670.422,47.024,670.498,47.063 M656.743,54.724c0.049-0.147-0.124-0.074-0.077-0.223
c0.255-0.73-0.19-1.725,0.077-2.468c-0.115-0.481-0.127-1.067-0.15-1.646c-0.092-0.059-0.105-0.189-0.188-0.26
c0.035-0.658-0.303-0.942-0.597-1.271c-0.2,0.05-0.245-0.054-0.338-0.112c-0.237,0.084-0.489,0.158-0.56,0.41
c-0.297-0.047-0.187,0.312-0.449,0.3v0.335c-0.119-0.047-0.028,0.122-0.148,0.075c-0.279,1.081-0.101,2.618-0.149,3.926
c0.127,0.216,0.226,0.671,0.075,0.933c0.23,0.232,0.066,0.857,0.297,1.088c-0.199,0.506,0.248,0.992,0.151,1.643
c0.27,0.578,0.447,1.246,0.56,1.982c0.094-0.008,0.092,0.082,0.15,0.113c0.102,0.543,0.167,1.126,0.486,1.457
c0.335-0.131,0.148-0.518,0.224-0.898c0.022-0.124,0.13-0.215,0.147-0.336c0.086-0.523,0-1.125,0.15-1.607
c-0.075-0.225,0.009-0.156,0-0.449c0.117,0.029,0.047-0.129,0.188-0.075c-0.06-0.376,0.129-0.999-0.111-1.195
C656.829,56.134,656.542,55.186,656.743,54.724 M682.645,64.145c-0.103-0.731-0.334-1.336-0.375-2.13
c-0.094,0.008-0.09-0.082-0.149-0.113c0.044-0.365-0.192-0.453-0.149-0.822c-0.211,0.001-0.088-0.334-0.261-0.373
c-0.169-0.977-0.565-1.727-1.159-2.28c-0.662-0.2-1.025,0.253-1.42,0.636c-0.104,0.103-0.061,0.219-0.15,0.375
c-0.061,0.107-0.238,0.195-0.299,0.297c-0.051,0.092-0.029,0.236-0.075,0.338c-0.118,0.26-0.356,0.6-0.3,0.936h-0.111
c0.035,0.419-0.297,0.473-0.224,0.934c-0.122-0.047-0.028,0.121-0.149,0.074c-0.076,0.797-0.395,1.352-0.449,2.168h-0.113
c-0.053,0.143,0.168,0.008,0.113,0.149c-0.387,0.614-0.206,1.406-0.336,2.243c-0.02,0.141-0.13,0.268-0.149,0.41
c-0.121,0.889-0.067,1.533-0.149,2.393c-0.074,0.76-0.075,1.703,0,2.467c0.05,0.502,0.059,0.988,0.226,1.42
c0.054,0.143-0.104,0.072-0.076,0.188c0.085,0.107,0.085,0.191,0,0.301c0.304,0.492,0.095,1.5,0.485,1.904
c0.034,0.9,0.26,1.611,0.525,2.28c-0.019,0.059-0.119,0.03-0.076,0.151c0.234,0.371,0.275,0.54,0.373,0.969
c0.127-0.025,0.123,0.078,0.188,0.113c0.059,0.518,0.407,0.738,0.523,1.195c0.179,0.072,0.373,0.126,0.41,0.337
c0.234-0.06,0.234,0.116,0.375,0.151c0.121,0.043,0.028-0.123,0.15-0.076c0.381,0.229,0.884-0.059,1.269-0.075
c0.039-0.075,0.073-0.152,0.114-0.224h0.188c0.123-0.375,0.417-0.58,0.483-1.008c0.166,0.053,0.099-0.128,0.225-0.113
c0.035-0.701,0.427-1.046,0.449-1.758c0.134,0.037,0.017-0.182,0.188-0.111c0.157-0.513-0.043-0.923,0.298-1.234
c0.08-0.459-0.021-1.143,0.299-1.531c0.035-0.149-0.129-0.096-0.074-0.262c0.125-0.127,0.061-0.438,0.074-0.674
c-0.646-1.424-0.271-4.177-0.373-5.905c-0.004-0.059-0.096-0.03-0.075-0.112c0.024-1.059-0.052-2.016-0.148-2.953
c-0.044-0.005-0.106,0.007-0.112-0.037v-0.448c0.057-0.005,0.129,0.005,0.112-0.075C682.815,64.124,682.628,64.233,682.645,64.145
M656.254,65.116c-0.207-0.078-0.154-0.553-0.485-0.299c-0.229,0.563-0.266,1.713-0.598,2.168c0.15,0.464-0.19,0.943-0.335,1.346
c-0.012,0.1,0.145,0.031,0.111,0.149h-0.111c-0.076,0.562-0.363,0.907-0.375,1.534c-0.119-0.047-0.028,0.121-0.148,0.074
c-0.021,0.29-0.119,0.504-0.074,0.859c-0.152-0.051-0.119,0.08-0.227,0.074c-0.031,0.118,0.09,0.387-0.111,0.336
c-0.063,0.672-0.283,1.188-0.522,1.682c0.003,0.061,0.097,0.029,0.075,0.113c-0.23-0.029-0.064,0.337-0.225,0.373
c-0.029,0.115,0.129,0.045,0.075,0.186c-0.193,0.07-0.132,0.394-0.338,0.451c-0.077,0.508-0.216,0.953-0.522,1.231
c0.015,0.495-0.412,0.938-0.074,1.308c-0.207,0.088,0.145,0.356-0.076,0.488c0.211,0.5,0.336,1.082,0.375,1.755
c0.078,0.628,0.303,0.837,0.298,1.646c0.139,0,0.089,0.188,0.188,0.225c-0.019,0.615,0.201,0.995,0.3,1.496
c-0.113-0.038-0.067,0.082-0.075,0.149c0.346,0.287,0.168,1.101,0.522,1.382v0.262c0.313,0.41,0.313,1.133,0.635,1.532
c0.043,0.433,0.186,0.762,0.412,1.008v0.3c0.182,0.117,0.28,0.318,0.299,0.598c0.244,0.045,0.27,0.305,0.3,0.563
c0.181,0.055,0.304,0.169,0.337,0.373c0.172-0.049,0.124,0.124,0.299,0.076c-0.057,0.165,0.108,0.113,0.072,0.26
c0.48,0.088,1.383,0.201,1.609-0.188c0.392-0.057,0.498-0.396,0.822-0.521c-0.011-0.195,0.016-0.359,0.225-0.336
c-0.071-0.295,0.219-0.23,0.148-0.523c0.151,0.051,0.118-0.082,0.226-0.076c0.243-0.754,0.462-1.529,0.783-2.205
c-0.014-0.096-0.12-0.102-0.074-0.26c0.321-0.49,0.203-1.418,0.225-2.208c0.411-0.235-0.083-0.942,0.188-1.345
c-0.043-0.199-0.1-0.23,0-0.41c-0.403-0.508-0.092-1.727-0.412-2.318c0.238-0.25-0.239-0.573,0-0.822
c-0.341-0.621-0.182-1.42-0.522-2.055c0.067,0.004,0.086-0.035,0.074-0.111c-0.223-0.937-0.6-1.721-0.635-2.842
c-0.28-0.219-0.202-0.796-0.45-1.048c0.019-0.058,0.118-0.03,0.075-0.147c-0.205-0.133-0.273-0.4-0.262-0.749
c-0.264-0.246-0.287-0.733-0.374-1.157c-0.271-0.238-0.197-0.824-0.485-1.047c0.006-0.293-0.07-0.504-0.225-0.637
c-0.015-0.271-0.063-0.511-0.224-0.635c0.017-0.059,0.118-0.033,0.076-0.15c-0.227-0.248-0.386-0.563-0.377-1.045
C656.389,65.831,656.267,65.53,656.254,65.116"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#231F20" d="M644.856,61.454c-0.442-0.082-0.246-0.801-0.522-1.048v-1.195
c0.231-0.341,0.116-1.028,0.484-1.231c0.345,0.203,0.276,0.82,0.449,1.195C645.286,60.089,645.277,60.976,644.856,61.454"/>
</g>
<g>
<path fill="#FFFFFF" d="M91.251,84.491c0,1.907-0.953,2.86-2.86,2.86h-16.72c-0.917,0-1.622-0.284-2.118-0.853
c-0.495-0.568-0.742-1.237-0.742-2.008c0-0.77,0.256-1.439,0.77-2.007c0.514-0.568,1.21-0.853,2.09-0.853h5.5v-28.93h-4.895
c-0.88,0-1.577-0.284-2.09-0.853s-0.77-1.237-0.77-2.008c0-0.77,0.257-1.439,0.77-2.007c0.513-0.568,1.209-0.852,2.09-0.852h15.51
c0.917,0,1.622,0.284,2.117,0.852c0.495,0.568,0.742,1.238,0.742,2.007c0,0.771-0.247,1.439-0.742,2.008s-1.201,0.853-2.117,0.853
h-4.895v28.93h5.5C90.298,81.631,91.251,82.584,91.251,84.491z"/>
<path fill="#FFFFFF" d="M126.451,84.491c0,1.907-0.954,2.86-2.86,2.86h-2.365c-1.21,0-2.072-0.532-2.585-1.595l-13.64-28.435v27.61
c0,0.88-0.275,1.577-0.825,2.09c-0.55,0.514-1.21,0.77-1.98,0.77s-1.448-0.247-2.035-0.742c-0.587-0.496-0.88-1.201-0.88-2.118
v-35.09c0-1.906,0.954-2.859,2.86-2.859h2.145c1.209,0,2.072,0.532,2.584,1.595l13.86,28.875v-28.05c0-0.88,0.284-1.576,0.852-2.09
c0.569-0.513,1.238-0.77,2.008-0.77c0.771,0,1.439,0.257,2.008,0.77c0.568,0.514,0.853,1.21,0.853,2.09V84.491z"/>
<path fill="#FFFFFF" d="M160.936,49.841c0,0.771-0.248,1.439-0.743,2.008c-0.495,0.568-1.201,0.853-2.117,0.853h-8.415v32.23
c0,0.917-0.284,1.622-0.853,2.118c-0.568,0.495-1.237,0.742-2.007,0.742s-1.439-0.247-2.007-0.742
c-0.568-0.496-0.853-1.201-0.853-2.118v-32.23h-8.965c-0.88,0-1.577-0.284-2.09-0.853c-0.514-0.568-0.771-1.237-0.771-2.008
c0-0.77,0.257-1.439,0.771-2.007c0.513-0.568,1.209-0.852,2.09-0.852h23.1c0.916,0,1.623,0.284,2.117,0.852
C160.688,48.402,160.936,49.071,160.936,49.841z"/>
<path fill="#FFFFFF" d="M190.251,84.491c0,0.771-0.257,1.439-0.77,2.008c-0.514,0.568-1.21,0.853-2.09,0.853h-20.79v-40.37h19.03
c1.907,0,2.86,0.953,2.86,2.859c0,0.771-0.256,1.439-0.77,2.008c-0.513,0.568-1.209,0.853-2.09,0.853h-13.31v10.78h9.46
c0.88,0,1.577,0.284,2.09,0.852c0.514,0.568,0.771,1.238,0.771,2.007c0,0.771-0.248,1.439-0.743,2.008
c-0.495,0.568-1.201,0.853-2.118,0.853h-9.46v12.43h15.07c0.843,0,1.531,0.284,2.063,0.853
C189.985,83.052,190.251,83.721,190.251,84.491z"/>
<path fill="#FFFFFF" d="M218.026,65.957V54.461c-2.347-1.173-4.84-1.76-7.48-1.76h-7.425v15.07h7.425
C213.112,67.771,215.606,67.166,218.026,65.957z M224.901,84.766c0,0.88-0.293,1.604-0.88,2.172
c-0.586,0.568-1.283,0.853-2.09,0.853c-0.733,0-1.393-0.293-1.979-0.88l-13.365-13.42h-3.465v11.44
c0,0.917-0.284,1.622-0.853,2.118c-0.568,0.495-1.237,0.742-2.007,0.742c-0.771,0-1.439-0.247-2.008-0.742
c-0.568-0.496-0.853-1.201-0.853-2.118v-37.95h13.145c4.95,0,9.35,1.412,13.2,4.234v18.041c-2.676,1.907-5.812,3.19-9.405,3.85
l9.68,9.734C224.608,83.428,224.901,84.069,224.901,84.766z"/>
<path fill="#FFFFFF" d="M260.541,83.171c-4.217,3.006-8.727,4.51-13.53,4.51c-4.914,0-9.442-1.503-13.585-4.51v-6.05
c0-0.88,0.284-1.577,0.853-2.09s1.238-0.77,2.008-0.77c0.77,0,1.439,0.257,2.007,0.77c0.568,0.513,0.852,1.209,0.852,2.09v2.97
c2.494,1.247,5.116,1.87,7.866,1.87c2.787,0,5.39-0.623,7.81-1.87v-7.865l-20.515-6.38V51.161c3.96-3.007,8.195-4.51,12.706-4.51
c4.62,0,8.873,1.503,12.759,4.51v5.665c0,0.917-0.284,1.623-0.852,2.118c-0.568,0.495-1.238,0.742-2.007,0.742
c-0.77,0-1.439-0.248-2.008-0.742c-0.568-0.495-0.853-1.201-0.853-2.118v-2.585c-2.273-1.247-4.62-1.87-7.04-1.87
c-2.457,0-4.785,0.623-6.985,1.87v7.371l20.515,6.379V83.171z"/>
<path fill="#FFFFFF" d="M291.671,66.671v-12.1c-2.494-1.247-5.115-1.87-7.865-1.87h-7.37v15.84h7.37
C286.556,68.541,289.177,67.918,291.671,66.671z M297.391,69.751c-4.144,3.007-8.672,4.51-13.585,4.51h-7.37v10.67
c0,0.917-0.284,1.622-0.853,2.118c-0.568,0.495-1.237,0.742-2.007,0.742c-0.771,0-1.439-0.247-2.008-0.742
c-0.568-0.496-0.853-1.201-0.853-2.118v-37.95h13.09c4.914,0,9.442,1.503,13.585,4.509V69.751z"/>
<path fill="#FFFFFF" d="M329.071,84.491c0,0.771-0.256,1.439-0.77,2.008c-0.513,0.568-1.21,0.853-2.09,0.853h-20.79v-40.37h19.03
c1.907,0,2.86,0.953,2.86,2.859c0,0.771-0.257,1.439-0.77,2.008c-0.514,0.568-1.21,0.853-2.09,0.853h-13.31v10.78h9.46
c0.88,0,1.576,0.284,2.09,0.852c0.514,0.568,0.77,1.238,0.77,2.007c0,0.771-0.248,1.439-0.743,2.008
c-0.495,0.568-1.201,0.853-2.117,0.853h-9.46v12.43h15.07c0.843,0,1.531,0.284,2.063,0.853
C328.805,83.052,329.071,83.721,329.071,84.491z"/>
<path fill="#FFFFFF" d="M359.871,84.491c0,0.771-0.257,1.439-0.77,2.008c-0.513,0.568-1.209,0.853-2.089,0.853h-20.791v-40.37
h19.03c1.907,0,2.861,0.953,2.861,2.859c0,0.771-0.257,1.439-0.771,2.008c-0.513,0.568-1.209,0.853-2.09,0.853h-13.31v10.78h9.459
c0.881,0,1.577,0.284,2.09,0.852c0.514,0.568,0.771,1.238,0.771,2.007c0,0.771-0.248,1.439-0.742,2.008
c-0.496,0.568-1.201,0.853-2.119,0.853h-9.459v12.43h15.07c0.843,0,1.531,0.284,2.063,0.853
C359.605,83.052,359.871,83.721,359.871,84.491z"/>
<path fill="#FFFFFF" d="M394.136,83.171c-4.144,3.006-8.672,4.51-13.586,4.51c-4.803,0-9.312-1.503-13.53-4.51v-32.01
c4.218-3.007,8.727-4.51,13.53-4.51c4.914,0,9.442,1.503,13.586,4.51v6.16c0,0.917-0.284,1.623-0.853,2.117
c-0.568,0.495-1.238,0.743-2.008,0.743s-1.439-0.248-2.007-0.743c-0.568-0.495-0.853-1.2-0.853-2.117v-3.081
c-2.494-1.247-5.116-1.87-7.866-1.87c-2.786,0-5.389,0.623-7.81,1.87v25.85c2.42,1.247,5.023,1.87,7.81,1.87
c2.75,0,5.373-0.623,7.866-1.87v-3.465c0-0.917,0.284-1.623,0.853-2.117c0.567-0.495,1.237-0.743,2.007-0.743
s1.439,0.248,2.008,0.743c0.568,0.495,0.853,1.201,0.853,2.117V83.171z"/>
<path fill="#FFFFFF" d="M432.032,84.931c0,0.917-0.285,1.622-0.854,2.118c-0.568,0.495-1.237,0.742-2.008,0.742
c-1.906,0-2.859-0.953-2.859-2.86V69.696h-16.5v15.235c0,0.88-0.285,1.577-0.854,2.09c-0.567,0.514-1.236,0.77-2.007,0.77
c-1.907,0-2.86-0.953-2.86-2.86v-35.53c0-1.907,0.953-2.86,2.86-2.86c0.771,0,1.439,0.257,2.007,0.77
c0.568,0.514,0.854,1.21,0.854,2.09v14.575h16.5V49.401c0-0.88,0.283-1.576,0.852-2.09c0.568-0.513,1.238-0.77,2.008-0.77
c0.771,0,1.439,0.248,2.008,0.743c0.568,0.495,0.854,1.201,0.854,2.117V84.931z"/>
<path fill="#FFFFFF" d="M492.641,84.491c0,0.771-0.258,1.439-0.77,2.008c-0.514,0.568-1.211,0.853-2.09,0.853h-24.199v-8.25
c0-1.65,0.402-2.86,1.209-3.63l19.305-18.315v-2.916c-2.201-1.247-4.527-1.87-6.984-1.87c-3.154,0-6.289,0.898-9.404,2.695
c-0.588,0.33-1.176,0.495-1.762,0.495c-0.807,0-1.457-0.275-1.951-0.825c-0.496-0.55-0.744-1.21-0.744-1.98
c0-1.027,0.531-1.87,1.596-2.53c3.924-2.383,8.012-3.575,12.266-3.575c4.51,0,8.746,1.503,12.705,4.51v6.105
c0,1.577-0.533,2.878-1.596,3.905l-18.92,17.985v2.475h18.48c0.879,0,1.576,0.284,2.09,0.853
C492.383,83.052,492.641,83.721,492.641,84.491z"/>
<path fill="#FFFFFF" d="M521.516,80.091v-25.85c-2.271-1.247-4.621-1.87-7.039-1.87c-2.459,0-4.785,0.623-6.984,1.87v25.85
c2.199,1.247,4.525,1.87,6.984,1.87C516.895,81.961,519.245,81.338,521.516,80.091z M527.237,83.171
c-3.887,3.006-8.141,4.51-12.76,4.51c-4.51,0-8.744-1.503-12.707-4.51v-32.01c3.963-3.007,8.197-4.51,12.707-4.51
c4.619,0,8.873,1.503,12.76,4.51V83.171z"/>
<path fill="#FFFFFF" d="M558.807,84.491c0,0.771-0.248,1.439-0.742,2.008s-1.201,0.853-2.119,0.853h-16.5
c-0.879,0-1.576-0.284-2.088-0.853c-0.514-0.568-0.771-1.237-0.771-2.008c0-0.77,0.258-1.439,0.771-2.007
c0.512-0.568,1.209-0.853,2.088-0.853h6.215v-26.51h-5.719c-0.881,0-1.576-0.284-2.09-0.852c-0.514-0.568-0.77-1.238-0.77-2.008
c0-0.771,0.264-1.439,0.797-2.008c0.531-0.568,1.219-0.853,2.063-0.853h5.719c0-0.88,0.283-1.576,0.854-2.09
c0.566-0.513,1.238-0.77,2.006-0.77c1.908,0,2.863,0.953,2.863,2.86v32.23h4.563c0.881,0,1.576,0.284,2.09,0.853
C558.549,83.052,558.807,83.721,558.807,84.491z"/>
<path fill="#FFFFFF" d="M588.122,56.606c0,0.99-0.184,1.815-0.551,2.475l-15.125,27.225c-0.551,0.99-1.338,1.485-2.363,1.485
c-0.844,0-1.551-0.284-2.119-0.853s-0.852-1.256-0.852-2.063c0-0.403,0.129-0.843,0.385-1.32l14.904-26.95v-3.905h-17.434
c-0.881,0-1.578-0.284-2.09-0.853c-0.514-0.568-0.77-1.237-0.77-2.008c0-0.77,0.256-1.439,0.77-2.007
c0.512-0.568,1.209-0.852,2.09-0.852h23.154V56.606z"/>
<path fill="#231F20" d="M77.615,109.606c-1.518,1.083-3.142,1.624-4.871,1.624c-1.769,0-3.398-0.541-4.89-1.624v-2.178
c0-0.316,0.102-0.567,0.307-0.752c0.205-0.186,0.446-0.278,0.723-0.278c0.277,0,0.518,0.092,0.722,0.278
c0.205,0.185,0.307,0.436,0.307,0.752v1.069c0.897,0.449,1.841,0.673,2.831,0.673c1.003,0,1.941-0.224,2.812-0.673v-2.832
l-7.385-2.297v-5.286c1.425-1.083,2.95-1.624,4.573-1.624c1.664,0,3.195,0.541,4.594,1.624v2.04c0,0.33-0.103,0.583-0.307,0.762
c-0.205,0.178-0.445,0.267-0.723,0.267s-0.518-0.089-0.723-0.267c-0.205-0.179-0.307-0.433-0.307-0.762v-0.931
c-0.818-0.449-1.663-0.673-2.535-0.673c-0.884,0-1.722,0.224-2.514,0.673v2.653l7.385,2.297V109.606z"/>
<path fill="#231F20" d="M83.536,110.24c0,0.317-0.099,0.568-0.297,0.752c-0.198,0.185-0.436,0.277-0.712,0.277
c-0.277,0-0.522-0.089-0.733-0.268c-0.211-0.178-0.317-0.432-0.317-0.762v-8.731c0-0.317,0.103-0.568,0.307-0.752
c0.205-0.185,0.445-0.277,0.723-0.277c0.277,0,0.518,0.092,0.723,0.277c0.205,0.185,0.307,0.436,0.307,0.752V110.24z
M83.931,98.419c0,0.462-0.237,0.693-0.712,0.693h-1.505c-0.475,0-0.712-0.231-0.712-0.693v-1.148c0-0.462,0.237-0.693,0.712-0.693
h1.505c0.475,0,0.712,0.231,0.712,0.693V98.419z"/>
<path fill="#231F20" d="M94.248,101.667c0,0.277-0.092,0.519-0.278,0.723c-0.185,0.204-0.436,0.307-0.752,0.307h-1.98v7.543
c0,0.317-0.102,0.568-0.307,0.752c-0.205,0.185-0.446,0.277-0.723,0.277c-0.277,0-0.518-0.092-0.723-0.277
c-0.205-0.185-0.307-0.436-0.307-0.752v-7.543h-1.979c-0.317,0-0.568-0.103-0.753-0.307c-0.185-0.205-0.277-0.446-0.277-0.723
c0-0.277,0.092-0.518,0.277-0.723c0.185-0.204,0.436-0.307,0.753-0.307h1.979v-2.178c0-0.317,0.103-0.568,0.307-0.752
c0.205-0.185,0.445-0.277,0.723-0.277c0.277,0,0.518,0.092,0.723,0.277c0.205,0.185,0.307,0.436,0.307,0.752v2.178h1.98
c0.316,0,0.567,0.103,0.752,0.307C94.155,101.148,94.248,101.39,94.248,101.667z"/>
<path fill="#231F20" d="M104.207,110.082c-1.003,0.766-2.271,1.148-3.802,1.148s-2.798-0.383-3.801-1.148v-8.573
c0-0.317,0.103-0.568,0.307-0.752c0.205-0.185,0.445-0.277,0.722-0.277c0.277,0,0.519,0.092,0.723,0.277
c0.205,0.185,0.307,0.436,0.307,0.752v7.267c0.62,0.264,1.202,0.396,1.742,0.396c0.568,0,1.148-0.131,1.743-0.396v-7.267
c0-0.317,0.103-0.568,0.307-0.752c0.205-0.185,0.445-0.277,0.722-0.277c0.277,0,0.519,0.092,0.723,0.277
c0.205,0.185,0.307,0.436,0.307,0.752V110.082z"/>
<path fill="#231F20" d="M113.473,108.775v-1.465c-0.62-0.264-1.201-0.396-1.742-0.396c-0.567,0-1.148,0.132-1.742,0.396v1.465
c0.594,0.264,1.175,0.396,1.742,0.396C112.272,109.17,112.853,109.039,113.473,108.775z M115.532,110.082
c-1.003,0.766-2.271,1.148-3.802,1.148c-1.531,0-2.798-0.383-3.801-1.148v-4.079c1.003-0.765,2.271-1.148,3.801-1.148
c0.634,0,1.214,0.06,1.742,0.178v-2.06c-0.62-0.264-1.201-0.396-1.742-0.396c-0.567,0-1.148,0.131-1.742,0.396v0.099
c0,0.33-0.102,0.587-0.307,0.772c-0.205,0.185-0.445,0.277-0.722,0.277c-0.278,0-0.519-0.092-0.723-0.277
c-0.205-0.185-0.307-0.442-0.307-0.772v-1.406c1.003-0.765,2.271-1.148,3.801-1.148c1.531,0,2.799,0.383,3.802,1.148V110.082z"/>
<path fill="#231F20" d="M125.967,101.667c0,0.277-0.093,0.519-0.277,0.723c-0.185,0.204-0.436,0.307-0.752,0.307h-1.98v7.543
c0,0.317-0.102,0.568-0.307,0.752c-0.205,0.185-0.445,0.277-0.723,0.277c-0.277,0-0.518-0.092-0.722-0.277
c-0.205-0.185-0.307-0.436-0.307-0.752v-7.543h-1.98c-0.317,0-0.568-0.103-0.752-0.307c-0.185-0.205-0.277-0.446-0.277-0.723
c0-0.277,0.093-0.518,0.277-0.723c0.185-0.204,0.436-0.307,0.752-0.307h1.98v-2.178c0-0.317,0.103-0.568,0.307-0.752
c0.205-0.185,0.445-0.277,0.722-0.277c0.278,0,0.519,0.092,0.723,0.277c0.205,0.185,0.307,0.436,0.307,0.752v2.178h1.98
c0.317,0,0.568,0.103,0.752,0.307C125.875,101.148,125.967,101.39,125.967,101.667z"/>
<path fill="#231F20" d="M133.867,104.458v-1.485c-0.594-0.264-1.174-0.396-1.742-0.396c-0.541,0-1.122,0.131-1.742,0.396v1.881
h1.742C132.692,104.854,133.273,104.722,133.867,104.458z M135.926,110.082c-1.003,0.766-2.271,1.148-3.802,1.148
c-1.531,0-2.798-0.383-3.802-1.148v-8.415c1.003-0.765,2.271-1.148,3.802-1.148c1.531,0,2.798,0.383,3.802,1.148v4.099
c-1.003,0.766-2.271,1.148-3.802,1.148h-1.742v1.861c0.62,0.264,1.201,0.396,1.742,0.396c0.568,0,1.148-0.131,1.742-0.396v-0.099
c0-0.33,0.103-0.584,0.308-0.762c0.205-0.178,0.445-0.268,0.722-0.268s0.519,0.089,0.723,0.268
c0.205,0.178,0.307,0.432,0.307,0.762V110.082z"/>
<path fill="#231F20" d="M145.193,108.775v-5.802c-0.62-0.264-1.202-0.396-1.742-0.396c-0.568,0-1.149,0.131-1.743,0.396v5.802
c0.594,0.264,1.175,0.396,1.743,0.396C143.991,109.17,144.573,109.039,145.193,108.775z M147.251,110.082
c-1.002,0.766-2.27,1.148-3.801,1.148c-1.531,0-2.799-0.383-3.802-1.148v-8.415c1.003-0.765,2.271-1.148,3.802-1.148
c0.633,0,1.214,0.066,1.742,0.198v-3.267c0-0.317,0.102-0.568,0.307-0.752c0.205-0.185,0.445-0.277,0.723-0.277
c0.277,0,0.518,0.092,0.722,0.277c0.205,0.185,0.307,0.436,0.307,0.752V110.082z"/>
<path fill="#FFFFFF" d="M161.705,110.24c0,0.317-0.099,0.568-0.297,0.752c-0.198,0.185-0.436,0.277-0.713,0.277
c-0.277,0-0.521-0.089-0.732-0.268c-0.211-0.178-0.316-0.432-0.316-0.762v-8.731c0-0.317,0.102-0.568,0.307-0.752
c0.205-0.185,0.446-0.277,0.723-0.277c0.277,0,0.518,0.092,0.722,0.277c0.205,0.185,0.307,0.436,0.307,0.752V110.24z
M162.101,98.419c0,0.462-0.238,0.693-0.713,0.693h-1.505c-0.475,0-0.713-0.231-0.713-0.693v-1.148
c0-0.462,0.238-0.693,0.713-0.693h1.505c0.475,0,0.713,0.231,0.713,0.693V98.419z"/>
<path fill="#FFFFFF" d="M173.307,110.24c0,0.317-0.103,0.568-0.307,0.752c-0.204,0.185-0.445,0.277-0.722,0.277
c-0.277,0-0.519-0.092-0.723-0.277c-0.205-0.185-0.307-0.436-0.307-0.752v-7.267c-0.594-0.264-1.175-0.396-1.742-0.396
c-0.542,0-1.122,0.131-1.743,0.396v7.267c0,0.317-0.103,0.568-0.307,0.752c-0.205,0.185-0.446,0.277-0.723,0.277
c-0.277,0-0.519-0.092-0.723-0.277c-0.204-0.185-0.307-0.436-0.307-0.752v-8.573c1.003-0.765,2.271-1.148,3.802-1.148
s2.798,0.383,3.801,1.148V110.24z"/>
<path fill="#FFFFFF" d="M183.742,101.667c0,0.277-0.092,0.519-0.277,0.723c-0.185,0.204-0.436,0.307-0.753,0.307h-1.979v7.543
c0,0.317-0.103,0.568-0.307,0.752c-0.205,0.185-0.445,0.277-0.723,0.277c-0.277,0-0.518-0.092-0.723-0.277
c-0.205-0.185-0.307-0.436-0.307-0.752v-7.543h-1.979c-0.317,0-0.568-0.103-0.752-0.307c-0.185-0.205-0.277-0.446-0.277-0.723
c0-0.277,0.092-0.518,0.277-0.723c0.185-0.204,0.436-0.307,0.752-0.307h1.979v-2.178c0-0.317,0.102-0.568,0.307-0.752
c0.205-0.185,0.446-0.277,0.723-0.277c0.277,0,0.518,0.092,0.723,0.277c0.205,0.185,0.307,0.436,0.307,0.752v2.178h1.979
c0.317,0,0.568,0.103,0.753,0.307C183.649,101.148,183.742,101.39,183.742,101.667z"/>
<path fill="#FFFFFF" d="M191.642,104.458v-1.485c-0.594-0.264-1.175-0.396-1.742-0.396c-0.542,0-1.122,0.131-1.743,0.396v1.881
h1.743C190.467,104.854,191.048,104.722,191.642,104.458z M193.701,110.082c-1.003,0.766-2.27,1.148-3.801,1.148
s-2.798-0.383-3.802-1.148v-8.415c1.003-0.765,2.271-1.148,3.802-1.148s2.798,0.383,3.801,1.148v4.099
c-1.003,0.766-2.27,1.148-3.801,1.148h-1.743v1.861c0.621,0.264,1.201,0.396,1.743,0.396c0.567,0,1.148-0.131,1.742-0.396v-0.099
c0-0.33,0.102-0.584,0.307-0.762c0.205-0.178,0.446-0.268,0.723-0.268c0.277,0,0.518,0.089,0.722,0.268
c0.205,0.178,0.307,0.432,0.307,0.762V110.082z"/>
<path fill="#FFFFFF" d="M204.631,104.043c0,0.316-0.103,0.567-0.307,0.752c-0.205,0.185-0.445,0.277-0.722,0.277
c-0.277,0-0.519-0.093-0.723-0.277c-0.205-0.185-0.307-0.436-0.307-0.752v-1.148c-0.542-0.212-1.056-0.317-1.545-0.317
c-0.488,0-1.002,0.105-1.544,0.317v7.345c0,0.317-0.102,0.568-0.307,0.752c-0.205,0.185-0.446,0.277-0.723,0.277
c-0.277,0-0.518-0.092-0.722-0.277c-0.205-0.185-0.308-0.436-0.308-0.752v-8.712c0.964-0.673,2.166-1.01,3.604-1.01
c1.439,0,2.641,0.336,3.604,1.01V104.043z"/>
<path fill="#231F20" d="M212.689,108.775v-1.465c-0.621-0.264-1.201-0.396-1.743-0.396c-0.568,0-1.148,0.132-1.742,0.396v1.465
c0.594,0.264,1.174,0.396,1.742,0.396C211.488,109.17,212.068,109.039,212.689,108.775z M214.748,110.082
c-1.003,0.766-2.271,1.148-3.802,1.148s-2.798-0.383-3.802-1.148v-4.079c1.003-0.765,2.271-1.148,3.802-1.148
c0.634,0,1.214,0.06,1.743,0.178v-2.06c-0.621-0.264-1.201-0.396-1.743-0.396c-0.568,0-1.148,0.131-1.742,0.396v0.099
c0,0.33-0.102,0.587-0.307,0.772c-0.205,0.185-0.446,0.277-0.723,0.277s-0.518-0.092-0.722-0.277
c-0.205-0.185-0.308-0.442-0.308-0.772v-1.406c1.003-0.765,2.271-1.148,3.802-1.148s2.798,0.383,3.802,1.148V110.082z"/>
<path fill="#231F20" d="M226.074,110.082c-1.003,0.766-2.271,1.148-3.802,1.148s-2.798-0.383-3.802-1.148v-8.415
c1.003-0.765,2.271-1.148,3.802-1.148s2.798,0.383,3.802,1.148v1.723c0,0.33-0.102,0.584-0.307,0.762
c-0.205,0.178-0.446,0.268-0.723,0.268c-0.277,0-0.518-0.089-0.723-0.268c-0.205-0.178-0.307-0.432-0.307-0.762v-0.416
c-0.594-0.264-1.175-0.396-1.742-0.396c-0.541,0-1.122,0.131-1.743,0.396v5.802c0.621,0.264,1.202,0.396,1.743,0.396
c0.567,0,1.148-0.131,1.742-0.396v-0.496c0-0.33,0.103-0.583,0.307-0.762c0.205-0.178,0.445-0.267,0.723-0.267
c0.277,0,0.518,0.089,0.723,0.267c0.205,0.179,0.307,0.433,0.307,0.762V110.082z"/>
<path fill="#231F20" d="M236.429,101.667c0,0.277-0.093,0.519-0.277,0.723c-0.185,0.204-0.436,0.307-0.752,0.307h-1.98v7.543
c0,0.317-0.103,0.568-0.307,0.752c-0.205,0.185-0.445,0.277-0.722,0.277c-0.277,0-0.519-0.092-0.723-0.277
c-0.205-0.185-0.307-0.436-0.307-0.752v-7.543h-1.98c-0.316,0-0.567-0.103-0.752-0.307c-0.185-0.205-0.277-0.446-0.277-0.723
c0-0.277,0.092-0.518,0.277-0.723c0.185-0.204,0.436-0.307,0.752-0.307h1.98v-2.178c0-0.317,0.102-0.568,0.307-0.752
c0.205-0.185,0.446-0.277,0.723-0.277c0.277,0,0.518,0.092,0.722,0.277c0.205,0.185,0.307,0.436,0.307,0.752v2.178h1.98
c0.317,0,0.568,0.103,0.752,0.307C236.336,101.148,236.429,101.39,236.429,101.667z"/>
<path fill="#231F20" d="M241.122,110.24c0,0.317-0.099,0.568-0.297,0.752c-0.198,0.185-0.436,0.277-0.713,0.277
c-0.277,0-0.521-0.089-0.732-0.268c-0.211-0.178-0.317-0.432-0.317-0.762v-8.731c0-0.317,0.102-0.568,0.307-0.752
c0.205-0.185,0.446-0.277,0.723-0.277c0.277,0,0.518,0.092,0.722,0.277c0.205,0.185,0.307,0.436,0.307,0.752V110.24z
M241.518,98.419c0,0.462-0.237,0.693-0.713,0.693H239.3c-0.475,0-0.713-0.231-0.713-0.693v-1.148c0-0.462,0.238-0.693,0.713-0.693
h1.505c0.476,0,0.713,0.231,0.713,0.693V98.419z"/>
<path fill="#231F20" d="M250.665,108.775v-5.802c-0.594-0.264-1.175-0.396-1.742-0.396c-0.541,0-1.122,0.131-1.743,0.396v5.802
c0.621,0.264,1.202,0.396,1.743,0.396C249.49,109.17,250.071,109.039,250.665,108.775z M252.724,110.082
c-1.003,0.766-2.27,1.148-3.801,1.148s-2.798-0.383-3.802-1.148v-8.415c1.003-0.765,2.271-1.148,3.802-1.148
s2.798,0.383,3.801,1.148V110.082z"/>
<path fill="#231F20" d="M264.05,110.24c0,0.317-0.102,0.568-0.307,0.752c-0.205,0.185-0.446,0.277-0.723,0.277
c-0.277,0-0.518-0.092-0.722-0.277c-0.205-0.185-0.307-0.436-0.307-0.752v-7.267c-0.594-0.264-1.175-0.396-1.743-0.396
c-0.541,0-1.122,0.131-1.742,0.396v7.267c0,0.317-0.102,0.568-0.307,0.752c-0.205,0.185-0.445,0.277-0.723,0.277
c-0.277,0-0.518-0.092-0.722-0.277c-0.205-0.185-0.307-0.436-0.307-0.752v-8.573c1.002-0.765,2.27-1.148,3.801-1.148
c1.531,0,2.799,0.383,3.802,1.148V110.24z"/>
<path fill="#FFFFFF" d="M74.209,128.673l-1.801-6.177l-1.822,6.177H74.209z M77.873,133.98c0,0.291-0.102,0.539-0.307,0.743
c-0.205,0.205-0.439,0.307-0.703,0.307c-0.528,0-0.865-0.251-1.01-0.752l-1.029-3.544h-4.852l-1.029,3.564
c-0.145,0.488-0.482,0.733-1.01,0.733c-0.251,0-0.482-0.099-0.693-0.297c-0.211-0.198-0.317-0.442-0.317-0.732
c0-0.066,0.013-0.165,0.04-0.297l3.742-12.593c0.092-0.29,0.205-0.491,0.336-0.604c0.132-0.112,0.35-0.168,0.653-0.168h1.426
c0.303,0,0.521,0.056,0.653,0.168c0.132,0.112,0.245,0.313,0.337,0.604l3.722,12.593C77.859,133.835,77.873,133.928,77.873,133.98z
"/>
<path fill="#FFFFFF" d="M88.089,133.842c-1.003,0.765-2.27,1.148-3.801,1.148s-2.798-0.383-3.802-1.148v-8.574
c0-0.317,0.103-0.567,0.307-0.752c0.205-0.185,0.446-0.277,0.723-0.277c0.277,0,0.518,0.092,0.723,0.277
c0.204,0.185,0.307,0.436,0.307,0.752v7.267c0.621,0.264,1.201,0.396,1.743,0.396c0.567,0,1.148-0.132,1.742-0.396v-7.267
c0-0.317,0.102-0.567,0.307-0.752c0.205-0.185,0.446-0.277,0.723-0.277c0.277,0,0.518,0.092,0.722,0.277
c0.205,0.185,0.307,0.436,0.307,0.752V133.842z"/>
<path fill="#FFFFFF" d="M97.396,130.535v-3.802c-0.62-0.264-1.201-0.396-1.742-0.396c-0.568,0-1.148,0.132-1.743,0.396v3.802
c0.594,0.264,1.175,0.396,1.743,0.396C96.194,130.931,96.775,130.799,97.396,130.535z M99.455,136.99
c-1.003,0.766-2.27,1.148-3.801,1.148s-2.798-0.382-3.802-1.148v-1.624c0-0.33,0.102-0.583,0.307-0.762
c0.205-0.178,0.446-0.267,0.723-0.267c0.277,0,0.518,0.088,0.722,0.267c0.205,0.179,0.307,0.433,0.307,0.762v0.317
c0.594,0.265,1.175,0.396,1.743,0.396c0.541,0,1.122-0.132,1.742-0.396v-2.891c-0.528,0.132-1.108,0.198-1.742,0.198
c-1.531,0-2.798-0.383-3.802-1.148v-6.415c1.003-0.766,2.271-1.148,3.802-1.148s2.798,0.382,3.801,1.148V136.99z"/>
<path fill="#FFFFFF" d="M110.82,133.842c-1.003,0.765-2.27,1.148-3.801,1.148s-2.798-0.383-3.802-1.148v-8.574
c0-0.317,0.102-0.567,0.307-0.752c0.205-0.185,0.446-0.277,0.723-0.277c0.277,0,0.518,0.092,0.722,0.277
c0.205,0.185,0.307,0.436,0.307,0.752v7.267c0.621,0.264,1.201,0.396,1.743,0.396c0.567,0,1.148-0.132,1.742-0.396v-7.267
c0-0.317,0.102-0.567,0.307-0.752c0.205-0.185,0.446-0.277,0.723-0.277c0.277,0,0.518,0.092,0.722,0.277
c0.205,0.185,0.307,0.436,0.307,0.752V133.842z"/>
<path fill="#FFFFFF" d="M122.066,133.842c-1.002,0.765-2.27,1.148-3.801,1.148c-1.571,0-2.858-0.383-3.861-1.148v-1.485
c0-0.317,0.103-0.568,0.307-0.752c0.205-0.185,0.446-0.277,0.723-0.277c0.277,0,0.519,0.093,0.723,0.277
c0.204,0.185,0.307,0.436,0.307,0.752v0.178c0.621,0.264,1.221,0.396,1.802,0.396c0.567,0,1.148-0.132,1.742-0.396v-1.465
l-5.465-1.703v-3.94c1.003-0.766,2.244-1.148,3.723-1.148c1.491,0,2.738,0.382,3.742,1.148v1.485c0,0.316-0.103,0.567-0.307,0.752
c-0.204,0.185-0.445,0.277-0.722,0.277c-0.277,0-0.519-0.092-0.723-0.277c-0.205-0.185-0.307-0.436-0.307-0.752v-0.178
c-0.594-0.264-1.155-0.396-1.683-0.396c-0.489,0-1.043,0.132-1.664,0.396v1.109l5.464,1.703V133.842z"/>
<path fill="#FFFFFF" d="M132.441,125.427c0,0.277-0.092,0.518-0.277,0.722c-0.185,0.205-0.436,0.308-0.752,0.308h-1.979V134
c0,0.317-0.103,0.568-0.308,0.752c-0.204,0.185-0.445,0.277-0.722,0.277s-0.519-0.092-0.723-0.277
c-0.205-0.185-0.307-0.436-0.307-0.752v-7.543h-1.979c-0.317,0-0.568-0.103-0.752-0.308c-0.185-0.205-0.277-0.445-0.277-0.722
c0-0.277,0.092-0.519,0.277-0.723c0.185-0.205,0.436-0.307,0.752-0.307h1.979v-2.178c0-0.317,0.102-0.568,0.307-0.752
c0.205-0.185,0.446-0.277,0.723-0.277s0.518,0.092,0.722,0.277c0.205,0.185,0.308,0.436,0.308,0.752v2.178h1.979
c0.317,0,0.568,0.102,0.752,0.307C132.349,124.908,132.441,125.149,132.441,125.427z"/>
<path fill="#FFFFFF" d="M152.814,133.842c0,0.277-0.092,0.518-0.277,0.723c-0.185,0.204-0.436,0.307-0.752,0.307h-8.712v-2.97
c0-0.594,0.145-1.029,0.436-1.307l6.95-6.593v-1.049c-0.792-0.449-1.63-0.673-2.514-0.673c-1.135,0-2.264,0.324-3.386,0.97
c-0.211,0.119-0.423,0.179-0.634,0.179c-0.29,0-0.525-0.1-0.703-0.298c-0.178-0.198-0.268-0.435-0.268-0.713
c0-0.369,0.191-0.673,0.574-0.91c1.413-0.858,2.884-1.287,4.416-1.287c1.624,0,3.148,0.541,4.574,1.624v2.198
c0,0.567-0.191,1.037-0.574,1.406l-6.811,6.474v0.891h6.652c0.317,0,0.568,0.103,0.752,0.307
C152.722,133.323,152.814,133.564,152.814,133.842z"/>
<path fill="#FFFFFF" d="M163.209,132.258v-9.306c-0.818-0.449-1.663-0.673-2.534-0.673c-0.884,0-1.723,0.224-2.515,0.673v9.306
c0.792,0.448,1.631,0.673,2.515,0.673C161.546,132.931,162.391,132.706,163.209,132.258z M165.269,133.366
c-1.399,1.083-2.931,1.624-4.594,1.624c-1.624,0-3.148-0.541-4.574-1.624v-11.523c1.425-1.083,2.95-1.624,4.574-1.624
c1.663,0,3.194,0.541,4.594,1.624V133.366z"/>
<path fill="#FFFFFF" d="M177.406,127.961c0,0.277-0.093,0.518-0.277,0.723s-0.436,0.307-0.752,0.307h-6.336
c-0.317,0-0.567-0.102-0.752-0.307c-0.185-0.205-0.277-0.446-0.277-0.723c0-0.277,0.092-0.518,0.277-0.723
c0.185-0.205,0.436-0.307,0.752-0.307h6.336c0.317,0,0.568,0.103,0.752,0.307S177.406,127.684,177.406,127.961z"/>
<path fill="#FFFFFF" d="M190.87,133.842c0,0.277-0.092,0.518-0.277,0.723c-0.185,0.204-0.436,0.307-0.752,0.307h-8.712v-2.97
c0-0.594,0.145-1.029,0.436-1.307l6.95-6.593v-1.049c-0.792-0.449-1.63-0.673-2.515-0.673c-1.135,0-2.264,0.324-3.385,0.97
c-0.211,0.119-0.423,0.179-0.634,0.179c-0.291,0-0.525-0.1-0.703-0.298c-0.178-0.198-0.267-0.435-0.267-0.713
c0-0.369,0.191-0.673,0.574-0.91c1.412-0.858,2.884-1.287,4.415-1.287c1.624,0,3.149,0.541,4.574,1.624v2.198
c0,0.567-0.192,1.037-0.574,1.406l-6.811,6.474v0.891h6.653c0.317,0,0.567,0.103,0.752,0.307
C190.778,133.323,190.87,133.564,190.87,133.842z"/>
<path fill="#FFFFFF" d="M200.176,129.545v-6.732l-5.287,6.732H200.176z M203.898,130.574c0,0.264-0.099,0.502-0.297,0.713
c-0.198,0.211-0.442,0.317-0.733,0.317h-0.633V134c0,0.317-0.099,0.568-0.297,0.752c-0.198,0.185-0.436,0.277-0.713,0.277
c-0.277,0-0.521-0.089-0.732-0.268c-0.211-0.178-0.317-0.433-0.317-0.762v-2.396h-6.118c-0.277,0-0.519-0.102-0.723-0.307
s-0.307-0.445-0.307-0.723v-1.405c0-0.251,0.106-0.508,0.317-0.772l6-7.683c0.198-0.25,0.462-0.376,0.792-0.376h1.069
c0.277,0,0.518,0.102,0.722,0.307c0.205,0.205,0.307,0.446,0.307,0.723v8.177h0.633c0.291,0,0.535,0.105,0.733,0.317
C203.8,130.073,203.898,130.311,203.898,130.574z"/>
<path fill="#FFFFFF" d="M211.066,131.98c0,0.198-0.066,0.39-0.198,0.574l-3.544,4.871c-0.159,0.225-0.383,0.336-0.674,0.336h-0.771
c-0.607,0-0.911-0.257-0.911-0.772c0-0.171,0.04-0.336,0.119-0.495l2.336-4.852c0.159-0.33,0.423-0.495,0.792-0.495h1.96
c0.264,0,0.479,0.083,0.643,0.248C210.984,131.561,211.066,131.756,211.066,131.98z"/>
<path fill="#FFFFFF" d="M232.371,133.842c0,0.277-0.092,0.518-0.277,0.723c-0.185,0.204-0.436,0.307-0.752,0.307h-8.712v-2.97
c0-0.594,0.145-1.029,0.436-1.307l6.95-6.593v-1.049c-0.792-0.449-1.63-0.673-2.515-0.673c-1.135,0-2.264,0.324-3.386,0.97
c-0.211,0.119-0.422,0.179-0.633,0.179c-0.291,0-0.525-0.1-0.703-0.298c-0.178-0.198-0.268-0.435-0.268-0.713
c0-0.369,0.192-0.673,0.575-0.91c1.412-0.858,2.884-1.287,4.415-1.287c1.624,0,3.148,0.541,4.574,1.624v2.198
c0,0.567-0.192,1.037-0.574,1.406l-6.811,6.474v0.891h6.652c0.317,0,0.568,0.103,0.752,0.307
C232.279,133.323,232.371,133.564,232.371,133.842z"/>
<path fill="#FFFFFF" d="M242.766,132.258v-9.306c-0.818-0.449-1.663-0.673-2.534-0.673c-0.885,0-1.723,0.224-2.515,0.673v9.306
c0.792,0.448,1.63,0.673,2.515,0.673C241.103,132.931,241.948,132.706,242.766,132.258z M244.825,133.366
c-1.399,1.083-2.931,1.624-4.593,1.624c-1.624,0-3.149-0.541-4.574-1.624v-11.523c1.425-1.083,2.95-1.624,4.574-1.624
c1.663,0,3.194,0.541,4.593,1.624V133.366z"/>
<path fill="#FFFFFF" d="M256.19,133.842c0,0.277-0.089,0.518-0.268,0.723c-0.178,0.204-0.432,0.307-0.762,0.307h-5.94
c-0.317,0-0.568-0.103-0.752-0.307c-0.185-0.205-0.277-0.446-0.277-0.723c0-0.277,0.093-0.519,0.277-0.723
c0.185-0.204,0.436-0.307,0.752-0.307h2.238v-9.543h-2.06c-0.317,0-0.567-0.103-0.752-0.307c-0.185-0.205-0.277-0.445-0.277-0.723
c0-0.277,0.095-0.518,0.287-0.723c0.192-0.205,0.439-0.307,0.743-0.307h2.06c0-0.317,0.102-0.568,0.307-0.752
c0.205-0.185,0.445-0.277,0.723-0.277c0.687,0,1.03,0.343,1.03,1.03v11.603h1.643c0.317,0,0.568,0.103,0.752,0.307
C256.098,133.323,256.19,133.564,256.19,133.842z"/>
<path fill="#FFFFFF" d="M266.744,123.803c0,0.356-0.066,0.653-0.198,0.891l-5.445,9.801c-0.198,0.357-0.482,0.535-0.852,0.535
c-0.303,0-0.557-0.103-0.762-0.307c-0.205-0.204-0.307-0.452-0.307-0.743c0-0.145,0.046-0.303,0.139-0.475l5.366-9.702v-1.406
h-6.276c-0.317,0-0.568-0.103-0.752-0.307c-0.185-0.205-0.277-0.445-0.277-0.723c0-0.277,0.092-0.518,0.277-0.723
c0.185-0.205,0.436-0.307,0.752-0.307h8.336V123.803z"/>
<path fill="#FFFFFF" d="M280.267,137.346c0,0.317-0.099,0.568-0.297,0.752c-0.198,0.185-0.436,0.277-0.713,0.277
c-0.277,0-0.521-0.089-0.732-0.268s-0.317-0.433-0.317-0.762v-16.137c0-0.317,0.102-0.568,0.307-0.752
c0.205-0.185,0.446-0.277,0.723-0.277c0.277,0,0.518,0.092,0.722,0.277c0.205,0.185,0.307,0.436,0.307,0.752V137.346z"/>
<path fill="#FFFFFF" d="M301.968,133.366c-1.519,1.083-3.142,1.624-4.871,1.624c-1.769,0-3.399-0.541-4.891-1.624v-2.178
c0-0.317,0.102-0.568,0.307-0.753c0.205-0.185,0.446-0.277,0.723-0.277s0.518,0.092,0.723,0.277
c0.205,0.185,0.307,0.436,0.307,0.753v1.069c0.897,0.448,1.841,0.673,2.831,0.673c1.003,0,1.94-0.225,2.812-0.673v-2.832
l-7.385-2.296v-5.287c1.426-1.083,2.95-1.624,4.574-1.624c1.664,0,3.195,0.541,4.594,1.624v2.039c0,0.331-0.103,0.584-0.307,0.763
c-0.205,0.178-0.445,0.267-0.723,0.267c-0.277,0-0.518-0.089-0.723-0.267c-0.205-0.178-0.307-0.432-0.307-0.763v-0.93
c-0.819-0.449-1.664-0.673-2.535-0.673c-0.884,0-1.723,0.224-2.515,0.673v2.653l7.386,2.297V133.366z"/>
<path fill="#FFFFFF" d="M312.264,125.427c0,0.277-0.093,0.518-0.277,0.722s-0.436,0.308-0.752,0.308h-1.98V134
c0,0.317-0.103,0.568-0.307,0.752c-0.205,0.185-0.445,0.277-0.722,0.277c-0.277,0-0.519-0.092-0.723-0.277
c-0.205-0.185-0.307-0.436-0.307-0.752v-7.543h-1.98c-0.316,0-0.567-0.103-0.752-0.308c-0.185-0.205-0.277-0.445-0.277-0.722
c0-0.277,0.093-0.519,0.277-0.723c0.185-0.205,0.436-0.307,0.752-0.307h1.98v-2.178c0-0.317,0.102-0.568,0.307-0.752
c0.205-0.185,0.446-0.277,0.723-0.277c0.277,0,0.518,0.092,0.722,0.277c0.205,0.185,0.307,0.436,0.307,0.752v2.178h1.98
c0.317,0,0.568,0.102,0.752,0.307S312.264,125.149,312.264,125.427z"/>
<path fill="#FFFFFF" d="M320.165,132.535v-5.801c-0.594-0.264-1.175-0.396-1.743-0.396c-0.542,0-1.122,0.132-1.742,0.396v5.801
c0.62,0.264,1.201,0.396,1.742,0.396C318.989,132.931,319.57,132.798,320.165,132.535z M322.224,133.842
c-1.003,0.765-2.271,1.148-3.802,1.148c-1.531,0-2.798-0.383-3.802-1.148v-8.415c1.003-0.766,2.271-1.148,3.802-1.148
c1.531,0,2.798,0.382,3.802,1.148V133.842z"/>
<path fill="#FFFFFF" d="M333.549,133.842c-1.003,0.765-2.271,1.148-3.802,1.148s-2.798-0.383-3.802-1.148v-8.415
c1.003-0.766,2.271-1.148,3.802-1.148s2.798,0.382,3.802,1.148v1.723c0,0.33-0.103,0.583-0.308,0.762
c-0.204,0.179-0.445,0.268-0.722,0.268c-0.277,0-0.519-0.089-0.723-0.268c-0.205-0.178-0.307-0.432-0.307-0.762v-0.416
c-0.594-0.264-1.175-0.396-1.742-0.396c-0.542,0-1.122,0.132-1.743,0.396v5.801c0.621,0.264,1.201,0.396,1.743,0.396
c0.567,0,1.148-0.132,1.742-0.396v-0.495c0-0.33,0.102-0.584,0.307-0.762s0.446-0.268,0.723-0.268c0.277,0,0.518,0.089,0.722,0.268
c0.205,0.178,0.308,0.433,0.308,0.762V133.842z"/>
<path fill="#FFFFFF" d="M345.647,134c0,0.277-0.105,0.519-0.317,0.723c-0.211,0.205-0.456,0.307-0.732,0.307
c-0.277,0-0.521-0.106-0.733-0.317l-4.534-4.534V134c0,0.317-0.102,0.568-0.307,0.752c-0.204,0.185-0.445,0.277-0.722,0.277
c-0.277,0-0.519-0.092-0.723-0.277c-0.205-0.185-0.307-0.436-0.307-0.752v-12.791c0-0.317,0.102-0.568,0.307-0.752
c0.204-0.185,0.446-0.277,0.723-0.277c0.277,0,0.518,0.092,0.722,0.277c0.205,0.185,0.307,0.436,0.307,0.752v7.405l4.06-4.059
c0.21-0.211,0.462-0.317,0.752-0.317s0.535,0.102,0.732,0.307s0.298,0.446,0.298,0.723c0,0.29-0.106,0.541-0.317,0.752
l-3.386,3.366l3.861,3.861C345.542,133.458,345.647,133.709,345.647,134z"/>
<path fill="#FFFFFF" d="M355.843,134c0,0.33-0.102,0.584-0.306,0.762c-0.205,0.178-0.446,0.268-0.723,0.268
c-0.277,0-0.518-0.089-0.723-0.268c-0.204-0.178-0.307-0.433-0.307-0.762v-7.267c-0.594-0.264-1.174-0.396-1.742-0.396
c-0.541,0-1.122,0.132-1.743,0.396V134c0,0.33-0.102,0.584-0.306,0.762c-0.205,0.178-0.446,0.268-0.723,0.268
c-0.277,0-0.518-0.089-0.723-0.268c-0.204-0.178-0.307-0.433-0.307-0.762v-12.791c0-0.317,0.103-0.568,0.307-0.752
c0.205-0.185,0.446-0.277,0.723-0.277c0.277,0,0.518,0.092,0.723,0.277c0.204,0.185,0.306,0.436,0.306,0.752v3.247
c0.542-0.119,1.123-0.178,1.743-0.178c1.532,0,2.798,0.382,3.801,1.148V134z"/>
<path fill="#FFFFFF" d="M365.11,132.535v-5.801c-0.593-0.264-1.174-0.396-1.743-0.396c-0.54,0-1.122,0.132-1.742,0.396v5.801
c0.621,0.264,1.202,0.396,1.742,0.396C363.936,132.931,364.517,132.798,365.11,132.535z M367.169,133.842
c-1.002,0.765-2.27,1.148-3.802,1.148c-1.531,0-2.798-0.383-3.8-1.148v-8.415c1.002-0.766,2.27-1.148,3.8-1.148
c1.532,0,2.799,0.382,3.802,1.148V133.842z"/>
<path fill="#FFFFFF" d="M373.229,134c0,0.317-0.103,0.568-0.307,0.752c-0.205,0.185-0.446,0.277-0.723,0.277
c-0.277,0-0.518-0.092-0.723-0.277c-0.204-0.185-0.306-0.436-0.306-0.752v-12.791c0-0.317,0.102-0.568,0.306-0.752
c0.205-0.185,0.446-0.277,0.723-0.277c0.277,0,0.518,0.092,0.723,0.277c0.204,0.185,0.307,0.436,0.307,0.752V134z"/>
<path fill="#FFFFFF" d="M390.375,134c0,0.317-0.104,0.568-0.308,0.752c-0.204,0.185-0.446,0.277-0.723,0.277
c-0.277,0-0.519-0.092-0.724-0.277c-0.203-0.185-0.306-0.436-0.306-0.752v-7.267c-0.594-0.264-1.175-0.396-1.742-0.396
c-0.542,0-1.123,0.132-1.743,0.396V134c0,0.317-0.102,0.568-0.307,0.752c-0.204,0.185-0.446,0.277-0.723,0.277
c-0.687,0-1.029-0.343-1.029-1.03v-7.267c-0.594-0.264-1.175-0.396-1.742-0.396c-0.542,0-1.123,0.132-1.743,0.396V134
c0,0.317-0.102,0.568-0.307,0.752c-0.204,0.185-0.446,0.277-0.723,0.277c-0.277,0-0.518-0.092-0.722-0.277
c-0.205-0.185-0.307-0.436-0.307-0.752v-8.573c1.003-0.766,2.27-1.148,3.802-1.148c1.043,0,1.967,0.178,2.771,0.535
c0.806-0.357,1.729-0.535,2.772-0.535c1.53,0,2.798,0.382,3.802,1.148V134z"/>
<path fill="#FFFFFF" d="M398.731,131.98c0,0.198-0.066,0.39-0.198,0.574l-3.545,4.871c-0.157,0.225-0.382,0.336-0.673,0.336h-0.771
c-0.607,0-0.911-0.257-0.911-0.772c0-0.171,0.04-0.336,0.119-0.495l2.337-4.852c0.157-0.33,0.422-0.495,0.791-0.495h1.961
c0.263,0,0.478,0.083,0.643,0.248C398.648,131.561,398.731,131.756,398.731,131.98z"/>
<path fill="#FFFFFF" d="M419.917,133.366c-1.518,1.083-3.141,1.624-4.871,1.624c-1.77,0-3.398-0.541-4.891-1.624v-2.178
c0-0.317,0.104-0.568,0.307-0.753c0.205-0.185,0.446-0.277,0.724-0.277s0.518,0.092,0.724,0.277
c0.203,0.185,0.306,0.436,0.306,0.753v1.069c0.897,0.448,1.841,0.673,2.831,0.673c1.004,0,1.94-0.225,2.812-0.673v-2.832
l-7.386-2.296v-5.287c1.426-1.083,2.951-1.624,4.574-1.624c1.663,0,3.195,0.541,4.594,1.624v2.039c0,0.331-0.102,0.584-0.307,0.763
c-0.205,0.178-0.445,0.267-0.723,0.267s-0.519-0.089-0.723-0.267c-0.204-0.178-0.307-0.432-0.307-0.763v-0.93
c-0.819-0.449-1.664-0.673-2.535-0.673c-0.884,0-1.723,0.224-2.514,0.673v2.653l7.385,2.297V133.366z"/>
<path fill="#FFFFFF" d="M435.559,125.367l-1.762,8.792c-0.094,0.475-0.404,0.712-0.932,0.712h-0.852
c-0.475,0-0.779-0.231-0.911-0.693l-1.921-6.831l-1.92,6.831c-0.131,0.462-0.441,0.693-0.931,0.693h-0.851
c-0.516,0-0.832-0.291-0.951-0.871l-1.723-8.633c-0.014-0.066-0.02-0.139-0.02-0.218c0-0.264,0.1-0.481,0.297-0.653
c0.198-0.171,0.43-0.257,0.693-0.257c0.501,0,0.805,0.271,0.91,0.812l1.348,6.673l1.939-6.633c0.08-0.277,0.178-0.462,0.297-0.554
s0.324-0.139,0.614-0.139h0.653c0.291,0,0.494,0.046,0.614,0.139c0.118,0.092,0.217,0.277,0.296,0.554l1.861,6.672l1.367-6.712
c0.105-0.541,0.422-0.812,0.949-0.812c0.252,0,0.473,0.086,0.664,0.257c0.191,0.172,0.287,0.389,0.287,0.653
C435.579,125.216,435.573,125.288,435.559,125.367z"/>
<path fill="#FFFFFF" d="M444.133,128.219v-1.485c-0.594-0.264-1.176-0.396-1.742-0.396c-0.543,0-1.123,0.132-1.744,0.396v1.881
h1.744C442.958,128.614,443.54,128.482,444.133,128.219z M446.192,133.842c-1.004,0.765-2.271,1.148-3.801,1.148
c-1.533,0-2.799-0.383-3.803-1.148v-8.415c1.004-0.766,2.27-1.148,3.803-1.148c1.529,0,2.797,0.382,3.801,1.148v4.099
c-1.004,0.765-2.271,1.148-3.801,1.148h-1.744v1.861c0.621,0.264,1.201,0.396,1.744,0.396c0.566,0,1.148-0.132,1.742-0.396v-0.099
c0-0.331,0.102-0.584,0.305-0.763c0.205-0.178,0.447-0.267,0.725-0.267c0.275,0,0.518,0.089,0.723,0.267
c0.203,0.178,0.307,0.432,0.307,0.763V133.842z"/>
<path fill="#FFFFFF" d="M455.458,132.535v-5.801c-0.621-0.264-1.201-0.396-1.742-0.396c-0.568,0-1.148,0.132-1.742,0.396v5.801
c0.594,0.264,1.174,0.396,1.742,0.396C454.256,132.931,454.836,132.798,455.458,132.535z M457.518,133.842
c-1.004,0.765-2.271,1.148-3.803,1.148s-2.799-0.383-3.801-1.148v-8.415c1.002-0.766,2.27-1.148,3.801-1.148
c0.635,0,1.215,0.066,1.742,0.198v-3.267c0-0.317,0.104-0.568,0.307-0.752c0.205-0.185,0.445-0.277,0.723-0.277
s0.518,0.092,0.723,0.277c0.205,0.185,0.309,0.436,0.309,0.752V133.842z"/>
<path fill="#FFFFFF" d="M466.864,128.219v-1.485c-0.594-0.264-1.176-0.396-1.742-0.396c-0.543,0-1.123,0.132-1.744,0.396v1.881
h1.744C465.688,128.614,466.27,128.482,466.864,128.219z M468.922,133.842c-1.004,0.765-2.271,1.148-3.801,1.148
c-1.533,0-2.799-0.383-3.803-1.148v-8.415c1.004-0.766,2.27-1.148,3.803-1.148c1.529,0,2.797,0.382,3.801,1.148v4.099
c-1.004,0.765-2.271,1.148-3.801,1.148h-1.744v1.861c0.621,0.264,1.201,0.396,1.744,0.396c0.566,0,1.148-0.132,1.742-0.396v-0.099
c0-0.331,0.102-0.584,0.305-0.763c0.205-0.178,0.447-0.267,0.725-0.267c0.275,0,0.518,0.089,0.723,0.267
c0.203,0.178,0.307,0.432,0.307,0.763V133.842z"/>
<path fill="#FFFFFF" d="M480.249,134c0,0.317-0.104,0.568-0.309,0.752c-0.203,0.185-0.443,0.277-0.721,0.277
s-0.518-0.092-0.725-0.277c-0.205-0.185-0.305-0.436-0.305-0.752v-7.267c-0.596-0.264-1.176-0.396-1.742-0.396
c-0.543,0-1.123,0.132-1.744,0.396V134c0,0.317-0.104,0.568-0.307,0.752c-0.205,0.185-0.445,0.277-0.723,0.277
s-0.518-0.092-0.723-0.277c-0.205-0.185-0.307-0.436-0.307-0.752v-8.573c1.004-0.766,2.27-1.148,3.803-1.148
c1.529,0,2.797,0.382,3.801,1.148V134z"/>
<path fill="#231F20" d="M80.09,149.127l-1.762,8.792c-0.092,0.475-0.403,0.713-0.931,0.713h-0.852
c-0.475,0-0.779-0.231-0.911-0.693l-1.92-6.831l-1.92,6.831c-0.132,0.462-0.443,0.693-0.931,0.693h-0.851
c-0.515,0-0.832-0.291-0.95-0.871l-1.723-8.633c-0.013-0.066-0.02-0.138-0.02-0.218c0-0.264,0.099-0.482,0.297-0.653
c0.198-0.172,0.429-0.257,0.693-0.257c0.501,0,0.805,0.271,0.91,0.812l1.347,6.673l1.94-6.633c0.079-0.277,0.178-0.462,0.297-0.554
c0.119-0.093,0.323-0.139,0.614-0.139h0.653c0.291,0,0.496,0.046,0.614,0.139c0.119,0.092,0.217,0.277,0.297,0.554l1.861,6.672
l1.366-6.712c0.105-0.541,0.422-0.812,0.95-0.812c0.251,0,0.472,0.085,0.663,0.257c0.191,0.171,0.288,0.39,0.288,0.653
C80.11,148.975,80.104,149.048,80.09,149.127z"/>
<path fill="#231F20" d="M95.178,149.127l-1.762,8.792c-0.092,0.475-0.402,0.713-0.93,0.713h-0.852
c-0.475,0-0.779-0.231-0.911-0.693l-1.921-6.831l-1.92,6.831c-0.132,0.462-0.442,0.693-0.931,0.693H85.1
c-0.515,0-0.832-0.291-0.95-0.871l-1.723-8.633c-0.013-0.066-0.02-0.138-0.02-0.218c0-0.264,0.099-0.482,0.297-0.653
c0.198-0.172,0.429-0.257,0.693-0.257c0.501,0,0.805,0.271,0.911,0.812l1.347,6.673l1.94-6.633c0.08-0.277,0.178-0.462,0.297-0.554
c0.119-0.093,0.323-0.139,0.614-0.139h0.654c0.29,0,0.495,0.046,0.613,0.139c0.119,0.092,0.218,0.277,0.297,0.554l1.861,6.672
l1.366-6.712c0.106-0.541,0.423-0.812,0.951-0.812c0.25,0,0.472,0.085,0.663,0.257c0.192,0.171,0.288,0.39,0.288,0.653
C95.198,148.975,95.191,149.048,95.178,149.127z"/>
<path fill="#231F20" d="M110.266,149.127l-1.762,8.792c-0.092,0.475-0.403,0.713-0.931,0.713h-0.852
c-0.475,0-0.779-0.231-0.911-0.693l-1.921-6.831l-1.92,6.831c-0.132,0.462-0.443,0.693-0.931,0.693h-0.851
c-0.515,0-0.832-0.291-0.951-0.871l-1.723-8.633c-0.013-0.066-0.02-0.138-0.02-0.218c0-0.264,0.1-0.482,0.297-0.653
c0.198-0.172,0.429-0.257,0.693-0.257c0.501,0,0.806,0.271,0.911,0.812l1.347,6.673l1.94-6.633
c0.079-0.277,0.178-0.462,0.297-0.554c0.119-0.093,0.323-0.139,0.613-0.139h0.654c0.291,0,0.495,0.046,0.614,0.139
c0.119,0.092,0.217,0.277,0.297,0.554l1.861,6.672l1.366-6.712c0.106-0.541,0.423-0.812,0.951-0.812
c0.251,0,0.472,0.085,0.663,0.257c0.191,0.171,0.287,0.39,0.287,0.653C110.285,148.975,110.279,149.048,110.266,149.127z"/>
<path fill="#231F20" d="M117.552,157.602c0,0.687-0.343,1.03-1.029,1.03h-2.257c-0.687,0-1.03-0.343-1.03-1.03v-1.663
c0-0.687,0.343-1.03,1.03-1.03h2.257c0.687,0,1.029,0.343,1.029,1.03V157.602z"/>
<path fill="#FFFFFF" d="M123.551,157.76c0,0.317-0.099,0.567-0.297,0.752c-0.197,0.185-0.436,0.277-0.712,0.277
c-0.277,0-0.521-0.089-0.733-0.267c-0.211-0.179-0.317-0.433-0.317-0.762v-8.732c0-0.317,0.103-0.568,0.307-0.752
c0.205-0.185,0.445-0.277,0.723-0.277s0.518,0.093,0.723,0.277c0.205,0.185,0.307,0.436,0.307,0.752V157.76z M123.947,145.939
c0,0.462-0.238,0.693-0.713,0.693h-1.504c-0.476,0-0.713-0.231-0.713-0.693v-1.148c0-0.462,0.237-0.693,0.713-0.693h1.504
c0.475,0,0.713,0.231,0.713,0.693V145.939z"/>
<path fill="#FFFFFF" d="M135.154,157.76c0,0.317-0.103,0.567-0.307,0.752s-0.445,0.277-0.723,0.277
c-0.277,0-0.518-0.092-0.723-0.277s-0.307-0.435-0.307-0.752v-7.267c-0.594-0.264-1.175-0.396-1.743-0.396
c-0.542,0-1.122,0.132-1.742,0.396v7.267c0,0.317-0.103,0.567-0.307,0.752c-0.205,0.185-0.445,0.277-0.723,0.277
s-0.518-0.092-0.723-0.277c-0.205-0.185-0.307-0.435-0.307-0.752v-8.573c1.003-0.766,2.271-1.149,3.802-1.149
c1.531,0,2.798,0.383,3.802,1.149V157.76z"/>
<path fill="#FFFFFF" d="M145.588,149.187c0,0.277-0.093,0.518-0.277,0.722s-0.436,0.307-0.752,0.307h-1.98v7.544
c0,0.317-0.103,0.567-0.307,0.752s-0.445,0.277-0.722,0.277c-0.277,0-0.519-0.092-0.723-0.277s-0.307-0.435-0.307-0.752v-7.544
h-1.98c-0.317,0-0.567-0.102-0.752-0.307c-0.185-0.205-0.277-0.445-0.277-0.722c0-0.278,0.092-0.519,0.277-0.723
c0.186-0.205,0.436-0.307,0.752-0.307h1.98v-2.178c0-0.317,0.102-0.567,0.307-0.752c0.205-0.185,0.446-0.277,0.723-0.277
c0.277,0,0.518,0.092,0.722,0.277c0.205,0.185,0.307,0.436,0.307,0.752v2.178h1.98c0.317,0,0.568,0.102,0.752,0.307
S145.588,148.909,145.588,149.187z"/>
<path fill="#FFFFFF" d="M153.489,151.978v-1.484c-0.594-0.264-1.175-0.396-1.743-0.396c-0.542,0-1.122,0.132-1.742,0.396v1.881
h1.742C152.313,152.375,152.895,152.242,153.489,151.978z M155.547,157.602c-1.003,0.766-2.271,1.148-3.801,1.148
c-1.531,0-2.799-0.383-3.802-1.148v-8.415c1.003-0.766,2.271-1.149,3.802-1.149c1.531,0,2.798,0.383,3.801,1.149v4.098
c-1.003,0.766-2.271,1.148-3.801,1.148h-1.742v1.862c0.62,0.264,1.201,0.396,1.742,0.396c0.567,0,1.148-0.132,1.743-0.396v-0.1
c0-0.33,0.102-0.583,0.307-0.762c0.205-0.179,0.445-0.268,0.723-0.268s0.518,0.089,0.723,0.268
c0.205,0.178,0.307,0.432,0.307,0.762V157.602z"/>
<path fill="#FFFFFF" d="M166.477,151.563c0,0.317-0.102,0.568-0.307,0.752s-0.445,0.277-0.723,0.277
c-0.277,0-0.518-0.093-0.722-0.277c-0.205-0.185-0.307-0.436-0.307-0.752v-1.148c-0.541-0.211-1.056-0.317-1.544-0.317
s-1.003,0.106-1.544,0.317v7.346c0,0.317-0.103,0.567-0.307,0.752s-0.445,0.277-0.722,0.277c-0.277,0-0.519-0.092-0.723-0.277
s-0.307-0.435-0.307-0.752v-8.712c0.963-0.674,2.165-1.01,3.604-1.01s2.64,0.336,3.604,1.01V151.563z"/>
<path fill="#231F20" d="M176.515,157.602c-1.003,0.766-2.271,1.148-3.802,1.148c-1.571,0-2.858-0.383-3.861-1.148v-1.485
c0-0.317,0.102-0.568,0.307-0.752c0.205-0.185,0.445-0.277,0.723-0.277c0.277,0,0.518,0.092,0.722,0.277
c0.205,0.185,0.307,0.436,0.307,0.752v0.178c0.621,0.264,1.222,0.396,1.802,0.396c0.568,0,1.148-0.132,1.742-0.396v-1.465
l-5.464-1.703v-3.94c1.003-0.766,2.244-1.149,3.722-1.149c1.492,0,2.739,0.383,3.742,1.149v1.484c0,0.317-0.102,0.568-0.307,0.752
c-0.205,0.185-0.445,0.277-0.723,0.277c-0.277,0-0.518-0.093-0.723-0.277s-0.307-0.436-0.307-0.752v-0.178
c-0.594-0.264-1.155-0.396-1.683-0.396c-0.488,0-1.042,0.132-1.663,0.396v1.108l5.465,1.703V157.602z"/>
<path fill="#231F20" d="M185.761,156.295v-5.801c-0.593-0.264-1.175-0.396-1.742-0.396c-0.541,0-1.122,0.132-1.743,0.396v5.801
c0.621,0.264,1.202,0.396,1.743,0.396C184.586,156.691,185.168,156.559,185.761,156.295z M187.821,157.602
c-1.003,0.766-2.271,1.148-3.802,1.148c-0.633,0-1.214-0.066-1.743-0.198v2.555c0,0.317-0.102,0.567-0.307,0.752
s-0.445,0.277-0.722,0.277c-0.278,0-0.519-0.092-0.723-0.277s-0.307-0.435-0.307-0.752v-11.919
c1.003-0.766,2.271-1.149,3.801-1.149c1.531,0,2.799,0.383,3.802,1.149V157.602z"/>
<path fill="#231F20" d="M197.087,151.978v-1.484c-0.594-0.264-1.175-0.396-1.743-0.396c-0.541,0-1.122,0.132-1.742,0.396v1.881
h1.742C195.912,152.375,196.493,152.242,197.087,151.978z M199.146,157.602c-1.003,0.766-2.27,1.148-3.802,1.148
c-1.531,0-2.798-0.383-3.801-1.148v-8.415c1.003-0.766,2.27-1.149,3.801-1.149c1.532,0,2.798,0.383,3.802,1.149v4.098
c-1.003,0.766-2.27,1.148-3.802,1.148h-1.742v1.862c0.621,0.264,1.201,0.396,1.742,0.396c0.567,0,1.148-0.132,1.743-0.396v-0.1
c0-0.33,0.102-0.583,0.307-0.762c0.205-0.179,0.446-0.268,0.723-0.268c0.277,0,0.518,0.089,0.722,0.268
c0.205,0.178,0.307,0.432,0.307,0.762V157.602z"/>
<path fill="#231F20" d="M208.413,151.978v-1.484c-0.594-0.264-1.174-0.396-1.742-0.396c-0.542,0-1.122,0.132-1.742,0.396v1.881
h1.742C207.238,152.375,207.819,152.242,208.413,151.978z M210.472,157.602c-1.003,0.766-2.271,1.148-3.802,1.148
s-2.798-0.383-3.802-1.148v-8.415c1.003-0.766,2.271-1.149,3.802-1.149s2.798,0.383,3.802,1.149v4.098
c-1.003,0.766-2.271,1.148-3.802,1.148h-1.742v1.862c0.62,0.264,1.201,0.396,1.742,0.396c0.568,0,1.148-0.132,1.742-0.396v-0.1
c0-0.33,0.102-0.583,0.307-0.762c0.205-0.179,0.446-0.268,0.723-0.268c0.277,0,0.518,0.089,0.723,0.268
c0.205,0.178,0.307,0.432,0.307,0.762V157.602z"/>
<path fill="#231F20" d="M221.797,157.602c-1.003,0.766-2.27,1.148-3.801,1.148s-2.798-0.383-3.802-1.148v-8.415
c1.003-0.766,2.271-1.149,3.802-1.149s2.798,0.383,3.801,1.149v1.722c0,0.33-0.103,0.584-0.307,0.762
c-0.205,0.178-0.446,0.268-0.723,0.268c-0.277,0-0.519-0.089-0.723-0.268c-0.204-0.178-0.307-0.432-0.307-0.762v-0.416
c-0.594-0.264-1.175-0.396-1.742-0.396c-0.542,0-1.122,0.132-1.743,0.396v5.801c0.621,0.264,1.201,0.396,1.743,0.396
c0.567,0,1.148-0.132,1.742-0.396V155.8c0-0.33,0.103-0.584,0.307-0.762c0.205-0.178,0.446-0.268,0.723-0.268
c0.277,0,0.518,0.089,0.723,0.268c0.204,0.178,0.307,0.432,0.307,0.762V157.602z"/>
<path fill="#231F20" d="M233.123,157.76c0,0.33-0.102,0.583-0.307,0.762c-0.205,0.178-0.446,0.267-0.723,0.267
c-0.277,0-0.518-0.089-0.722-0.267c-0.205-0.179-0.307-0.433-0.307-0.762v-7.267c-0.594-0.264-1.175-0.396-1.743-0.396
c-0.541,0-1.122,0.132-1.742,0.396v7.267c0,0.33-0.102,0.583-0.307,0.762c-0.205,0.178-0.446,0.267-0.723,0.267
c-0.277,0-0.518-0.089-0.722-0.267c-0.205-0.179-0.308-0.433-0.308-0.762v-12.791c0-0.317,0.103-0.567,0.308-0.752
c0.205-0.185,0.445-0.277,0.722-0.277c0.277,0,0.519,0.092,0.723,0.277c0.205,0.185,0.307,0.436,0.307,0.752v3.247
c0.542-0.119,1.122-0.178,1.742-0.178c1.531,0,2.798,0.383,3.802,1.149V157.76z"/>
<path fill="#FFFFFF" d="M246.546,157.602c0,0.277-0.092,0.518-0.277,0.723c-0.185,0.205-0.436,0.307-0.752,0.307h-8.712v-2.97
c0-0.594,0.145-1.03,0.436-1.307l6.95-6.594v-1.049c-0.792-0.449-1.63-0.674-2.515-0.674c-1.135,0-2.263,0.324-3.385,0.97
c-0.211,0.119-0.423,0.178-0.634,0.178c-0.291,0-0.525-0.099-0.703-0.297c-0.178-0.198-0.267-0.436-0.267-0.713
c0-0.37,0.191-0.673,0.574-0.911c1.412-0.858,2.884-1.287,4.415-1.287c1.624,0,3.149,0.542,4.574,1.624v2.198
c0,0.568-0.191,1.036-0.574,1.406l-6.811,6.475v0.891h6.653c0.317,0,0.567,0.102,0.752,0.307
C246.454,157.083,246.546,157.325,246.546,157.602z"/>
<path fill="#FFFFFF" d="M256.942,156.018v-9.306c-0.819-0.449-1.664-0.674-2.535-0.674c-0.885,0-1.723,0.225-2.515,0.674v9.306
c0.792,0.449,1.63,0.673,2.515,0.673C255.278,156.691,256.123,156.466,256.942,156.018z M259,157.126
c-1.399,1.082-2.931,1.624-4.593,1.624c-1.624,0-3.148-0.542-4.574-1.624v-11.524c1.426-1.082,2.95-1.624,4.574-1.624
c1.663,0,3.194,0.542,4.593,1.624V157.126z"/>
<path fill="#FFFFFF" d="M270.366,157.602c0,0.277-0.089,0.518-0.267,0.723c-0.178,0.205-0.432,0.307-0.763,0.307h-5.94
c-0.317,0-0.567-0.103-0.752-0.307c-0.185-0.205-0.277-0.445-0.277-0.723c0-0.277,0.092-0.518,0.277-0.723
c0.185-0.205,0.436-0.307,0.752-0.307h2.238v-9.544h-2.059c-0.317,0-0.568-0.102-0.753-0.307c-0.185-0.205-0.277-0.445-0.277-0.723
c0-0.277,0.096-0.518,0.287-0.723c0.191-0.205,0.439-0.307,0.743-0.307h2.059c0-0.317,0.102-0.567,0.307-0.752
c0.205-0.185,0.446-0.277,0.723-0.277c0.686,0,1.029,0.343,1.029,1.029v11.603h1.643c0.317,0,0.568,0.102,0.752,0.307
C270.273,157.083,270.366,157.325,270.366,157.602z"/>
<path fill="#FFFFFF" d="M280.919,147.563c0,0.356-0.066,0.653-0.198,0.891l-5.445,9.801c-0.198,0.356-0.481,0.535-0.852,0.535
c-0.304,0-0.558-0.103-0.762-0.307s-0.307-0.452-0.307-0.742c0-0.146,0.046-0.303,0.138-0.475l5.366-9.702v-1.406h-6.276
c-0.317,0-0.568-0.102-0.752-0.307c-0.185-0.205-0.277-0.445-0.277-0.723s0.092-0.518,0.277-0.723
c0.185-0.205,0.436-0.307,0.752-0.307h8.336V147.563z"/>
<path fill="#231F20" d="M288.245,157.602c0,0.687-0.343,1.03-1.03,1.03h-2.257c-0.687,0-1.03-0.343-1.03-1.03v-1.663
c0-0.687,0.343-1.03,1.03-1.03h2.257c0.687,0,1.03,0.343,1.03,1.03V157.602z"/>
<path fill="#231F20" d="M297.452,156.295v-5.801c-0.594-0.264-1.175-0.396-1.743-0.396c-0.541,0-1.122,0.132-1.742,0.396v5.801
c0.621,0.264,1.202,0.396,1.742,0.396C296.277,156.691,296.858,156.559,297.452,156.295z M299.511,157.602
c-1.002,0.766-2.27,1.148-3.802,1.148c-1.531,0-2.798-0.383-3.801-1.148v-8.415c1.002-0.766,2.27-1.149,3.801-1.149
c1.532,0,2.799,0.383,3.802,1.149V157.602z"/>
<path fill="#231F20" d="M310.44,151.563c0,0.317-0.102,0.568-0.307,0.752s-0.446,0.277-0.723,0.277
c-0.277,0-0.518-0.093-0.722-0.277s-0.307-0.436-0.307-0.752v-1.148c-0.541-0.211-1.056-0.317-1.544-0.317
s-1.003,0.106-1.544,0.317v7.346c0,0.317-0.103,0.567-0.307,0.752c-0.205,0.185-0.445,0.277-0.723,0.277s-0.518-0.092-0.723-0.277
c-0.205-0.185-0.307-0.435-0.307-0.752v-8.712c0.964-0.674,2.165-1.01,3.604-1.01s2.64,0.336,3.604,1.01V151.563z"/>
<path fill="#231F20" d="M318.539,154.295v-3.801c-0.621-0.264-1.202-0.396-1.742-0.396c-0.568,0-1.149,0.132-1.743,0.396v3.801
c0.594,0.264,1.175,0.396,1.743,0.396C317.337,154.691,317.918,154.559,318.539,154.295z M320.598,160.75
c-1.003,0.766-2.271,1.148-3.801,1.148c-1.532,0-2.799-0.382-3.802-1.148v-1.624c0-0.33,0.102-0.584,0.307-0.762
s0.446-0.268,0.723-0.268c0.277,0,0.518,0.089,0.723,0.268s0.307,0.433,0.307,0.762v0.317c0.594,0.264,1.175,0.396,1.743,0.396
c0.541,0,1.122-0.132,1.742-0.396v-2.891c-0.528,0.132-1.109,0.198-1.742,0.198c-1.532,0-2.799-0.383-3.802-1.148v-6.415
c1.002-0.766,2.27-1.149,3.802-1.149c1.531,0,2.798,0.383,3.801,1.149V160.75z"/>
</g>
<rect x="44.52" y="552.441" fill="none" width="766.263" height="35.289"/>
<text transform="matrix(1 0 0 1 44.5195 560.8198)"><tspan x="0" y="0" font-family="'Helvetica'" font-size="9">Proceedings</tspan><tspan x="50.021" y="0" font-family="'Helvetica'" font-size="9"> </tspan><tspan x="53.179" y="0" font-family="'Helvetica'" font-size="9">of</tspan><tspan x="60.685" y="0" font-family="'Helvetica'" font-size="9"> </tspan><tspan x="63.843" y="0" font-family="'Helvetica'" font-size="9">the</tspan><tspan x="76.353" y="0" font-family="'Helvetica'" font-size="9"> </tspan><tspan x="79.511" y="0" font-family="'Helvetica'" font-size="9">18th</tspan><tspan x="97.024" y="0" font-family="'Helvetica'" font-size="9"> </tspan><tspan x="100.183" y="0" font-family="'Helvetica'" font-size="9">Annual</tspan><tspan x="128.199" y="0" font-family="'Helvetica'" font-size="9"> </tspan><tspan x="131.357" y="0" font-family="'Helvetica'" font-size="9">Conference</tspan><tspan x="177.878" y="0" font-family="'Helvetica'" font-size="9"> </tspan><tspan x="181.035" y="0" font-family="'Helvetica'" font-size="9">of</tspan><tspan x="188.541" y="0" font-family="'Helvetica'" font-size="9"> </tspan><tspan x="191.699" y="0" font-family="'Helvetica'" font-size="9">the</tspan><tspan x="204.209" y="0" font-family="'Helvetica'" font-size="9"> </tspan><tspan x="207.367" y="0" font-family="'Helvetica'" font-size="9">International</tspan><tspan x="256.894" y="0" font-family="'Helvetica'" font-size="9"> </tspan><tspan x="260.052" y="0" font-family="'Helvetica'" font-size="9">Speech</tspan><tspan x="290.57" y="0" font-family="'Helvetica'" font-size="9"> </tspan><tspan x="293.728" y="0" font-family="'Helvetica'" font-size="9">Communication</tspan><tspan x="356.241" y="0" font-family="'Helvetica'" font-size="9"> </tspan><tspan x="359.399" y="0" font-family="'Helvetica'" font-size="9">Association</tspan><tspan x="405.416" y="0" font-family="'Helvetica'" font-size="9"> </tspan><tspan x="408.574" y="0" font-family="'Helvetica'" font-size="9">(INTERSPEECH</tspan><tspan x="475.58" y="0" 
font-family="'Helvetica'" font-size="9"> </tspan><tspan x="478.738" y="0" font-family="'Helvetica'" font-size="9">2017).</tspan><tspan x="504.251" y="0" font-family="'Helvetica'" font-size="9"> </tspan><tspan x="510.568" y="0" font-family="'Helvetica'" font-size="9">ISSN</tspan><tspan x="531.574" y="0" font-family="'Helvetica'" font-size="9"> </tspan><tspan x="534.732" y="0" font-family="'Helvetica'" font-size="9">2308-457X.</tspan><tspan x="581.261" y="0" font-family="'Helvetica'" font-size="9"> </tspan><tspan x="584.419" y="0" font-family="'Helvetica'" font-size="9">©</tspan><tspan x="591.052" y="0" font-family="'Helvetica'" font-size="9"> </tspan><tspan x="594.208" y="0" font-family="'Helvetica'" font-size="9">2017</tspan><tspan x="614.224" y="0" font-family="'Helvetica'" font-size="9"> </tspan><tspan x="617.382" y="0" font-family="'Helvetica'" font-size="9">International</tspan><tspan x="666.91" y="0" font-family="'Helvetica'" font-size="9"> </tspan><tspan x="670.068" y="0" font-family="'Helvetica'" font-size="9">Speech</tspan><tspan x="700.585" y="0" font-family="'Helvetica'" font-size="9"> </tspan><tspan x="703.744" y="0" font-family="'Helvetica'" font-size="9">Communication </tspan><tspan x="0" y="10.8" font-family="'Helvetica'" font-size="9">Association. All rights reserved. For technical support please contact Causal Productions (info@causalproductions.com).</tspan></text>
<g>
<path fill="#FFFFFF" d="M615.494,564.789v6.127h0.584v-2.625h2.137c0.272,0,0.521-0.038,0.745-0.116
c0.224-0.077,0.416-0.191,0.575-0.344c0.162-0.151,0.285-0.335,0.369-0.553c0.086-0.217,0.128-0.464,0.128-0.737
c0-0.275-0.042-0.521-0.128-0.738c-0.084-0.218-0.207-0.401-0.369-0.554c-0.159-0.151-0.352-0.266-0.575-0.343
c-0.224-0.078-0.473-0.117-0.745-0.117H615.494z M616.078,567.792v-2.505h2.008c0.405,0,0.734,0.103,0.985,0.309
c0.252,0.206,0.377,0.521,0.377,0.944c0,0.423-0.125,0.737-0.377,0.943c-0.251,0.206-0.58,0.309-0.985,0.309H616.078z"/>
<path fill="#FFFFFF" d="M620.789,564.789v6.127h0.54v-2.582c0.005-0.218,0.042-0.419,0.108-0.604
c0.063-0.186,0.156-0.347,0.273-0.481c0.118-0.135,0.26-0.239,0.431-0.317c0.167-0.077,0.36-0.116,0.577-0.116
c0.219,0,0.4,0.035,0.545,0.104c0.146,0.068,0.263,0.163,0.347,0.283c0.086,0.121,0.148,0.262,0.182,0.424
c0.034,0.164,0.051,0.341,0.051,0.528v2.763h0.542v-2.849c0-0.263-0.025-0.5-0.079-0.713c-0.051-0.21-0.14-0.39-0.266-0.535
c-0.125-0.146-0.293-0.259-0.502-0.339c-0.207-0.08-0.466-0.119-0.777-0.119c-0.313,0-0.603,0.081-0.866,0.243
c-0.261,0.164-0.446,0.379-0.549,0.648h-0.017v-2.464H620.789z"/>
<path fill="#FFFFFF" d="M627.275,566.361c-0.339,0-0.638,0.062-0.897,0.188c-0.259,0.126-0.478,0.295-0.656,0.507
c-0.176,0.212-0.313,0.458-0.401,0.742c-0.093,0.283-0.138,0.585-0.138,0.905c0,0.318,0.045,0.622,0.138,0.904
c0.089,0.284,0.226,0.53,0.401,0.743c0.179,0.211,0.397,0.379,0.656,0.501c0.26,0.123,0.559,0.185,0.897,0.185
c0.338,0,0.636-0.062,0.896-0.185c0.259-0.122,0.479-0.29,0.656-0.501c0.179-0.213,0.313-0.459,0.404-0.743
c0.089-0.282,0.138-0.586,0.138-0.904c0-0.32-0.049-0.622-0.138-0.905c-0.092-0.284-0.226-0.53-0.404-0.742
c-0.177-0.212-0.397-0.381-0.656-0.507C627.911,566.422,627.613,566.361,627.275,566.361z M627.275,566.814
c0.256,0,0.482,0.055,0.678,0.163c0.193,0.109,0.355,0.252,0.483,0.43c0.129,0.176,0.227,0.379,0.294,0.604
c0.063,0.226,0.098,0.457,0.098,0.691c0,0.233-0.034,0.465-0.098,0.69c-0.067,0.225-0.165,0.428-0.294,0.604
c-0.128,0.178-0.29,0.32-0.483,0.429c-0.195,0.109-0.422,0.164-0.678,0.164c-0.258,0-0.483-0.055-0.68-0.164
c-0.192-0.108-0.354-0.251-0.483-0.429c-0.128-0.177-0.226-0.38-0.293-0.604c-0.063-0.226-0.098-0.457-0.098-0.69
c0-0.234,0.034-0.466,0.098-0.691c0.067-0.226,0.165-0.429,0.293-0.604c0.13-0.178,0.291-0.32,0.483-0.43
C626.792,566.869,627.017,566.814,627.275,566.814z"/>
<path fill="#FFFFFF" d="M631.034,566.489v-1.329h-0.542v1.329h-0.771v0.456h0.771v3.026c-0.005,0.379,0.064,0.637,0.207,0.776
c0.143,0.142,0.394,0.211,0.754,0.211c0.081,0,0.161-0.002,0.24-0.008c0.081-0.006,0.162-0.009,0.24-0.009v-0.455
c-0.154,0.018-0.308,0.025-0.464,0.025c-0.193-0.012-0.316-0.067-0.363-0.167c-0.051-0.1-0.072-0.238-0.072-0.416v-2.984h0.899
v-0.456H631.034z"/>
<path fill="#FFFFFF" d="M634.583,566.361c-0.335,0-0.634,0.062-0.896,0.188c-0.26,0.126-0.478,0.295-0.656,0.507
c-0.177,0.212-0.31,0.458-0.402,0.742c-0.093,0.283-0.137,0.585-0.137,0.905c0,0.318,0.044,0.622,0.137,0.904
c0.093,0.284,0.226,0.53,0.402,0.743c0.179,0.211,0.396,0.379,0.656,0.501c0.263,0.123,0.562,0.185,0.896,0.185
c0.338,0,0.638-0.062,0.896-0.185c0.263-0.122,0.48-0.29,0.658-0.501c0.177-0.213,0.312-0.459,0.403-0.743
c0.092-0.282,0.137-0.586,0.137-0.904c0-0.32-0.045-0.622-0.137-0.905c-0.092-0.284-0.227-0.53-0.403-0.742
c-0.178-0.212-0.396-0.381-0.658-0.507C635.221,566.422,634.921,566.361,634.583,566.361z M634.583,566.814
c0.26,0,0.486,0.055,0.679,0.163c0.196,0.109,0.357,0.252,0.485,0.43c0.13,0.176,0.228,0.379,0.291,0.604
c0.067,0.226,0.1,0.457,0.1,0.691c0,0.233-0.032,0.465-0.1,0.69c-0.063,0.225-0.161,0.428-0.291,0.604
c-0.128,0.178-0.289,0.32-0.485,0.429c-0.192,0.109-0.419,0.164-0.679,0.164c-0.257,0-0.483-0.055-0.676-0.164
c-0.195-0.108-0.357-0.251-0.486-0.429c-0.128-0.177-0.226-0.38-0.29-0.604c-0.067-0.226-0.101-0.457-0.101-0.69
c0-0.234,0.033-0.466,0.101-0.691c0.064-0.226,0.162-0.429,0.29-0.604c0.129-0.178,0.291-0.32,0.486-0.43
C634.1,566.869,634.327,566.814,634.583,566.814z"/>
<path fill="#FFFFFF" d="M639.261,570.411c-0.251,0-0.47-0.051-0.653-0.151c-0.182-0.1-0.335-0.232-0.454-0.394
c-0.121-0.164-0.207-0.351-0.266-0.563c-0.056-0.212-0.088-0.429-0.088-0.652c0-0.23,0.028-0.453,0.084-0.673
c0.054-0.221,0.14-0.417,0.26-0.588c0.121-0.173,0.274-0.312,0.456-0.417c0.185-0.106,0.405-0.159,0.661-0.159
c0.257,0,0.476,0.055,0.657,0.163c0.179,0.109,0.326,0.252,0.441,0.43c0.114,0.176,0.198,0.376,0.247,0.592
c0.051,0.218,0.079,0.435,0.079,0.652c0,0.224-0.03,0.44-0.093,0.652c-0.059,0.212-0.147,0.398-0.268,0.563
c-0.119,0.161-0.271,0.294-0.447,0.394C639.7,570.36,639.497,570.411,639.261,570.411z M641.226,570.555v-4.066h-0.54v0.764h-0.017
c-0.053-0.12-0.126-0.234-0.221-0.344c-0.096-0.109-0.2-0.203-0.321-0.287c-0.12-0.082-0.254-0.146-0.399-0.192
c-0.146-0.047-0.302-0.068-0.467-0.068c-0.32,0-0.605,0.058-0.854,0.175c-0.249,0.118-0.459,0.276-0.626,0.481
c-0.171,0.201-0.299,0.438-0.387,0.712c-0.089,0.271-0.133,0.559-0.133,0.862c0,0.313,0.035,0.609,0.111,0.887
c0.072,0.278,0.193,0.52,0.354,0.726c0.165,0.207,0.372,0.366,0.623,0.484c0.252,0.117,0.557,0.176,0.91,0.176
c0.324,0,0.609-0.083,0.853-0.248c0.242-0.166,0.428-0.386,0.556-0.661h0.017v0.601c0,0.538-0.111,0.949-0.335,1.236
c-0.224,0.285-0.587,0.428-1.09,0.428c-0.15,0-0.295-0.016-0.438-0.047c-0.144-0.03-0.274-0.081-0.391-0.149
c-0.118-0.069-0.219-0.159-0.3-0.271c-0.084-0.112-0.14-0.25-0.168-0.417h-0.542c0.018,0.235,0.081,0.437,0.193,0.604
c0.111,0.17,0.254,0.31,0.421,0.416c0.168,0.109,0.358,0.19,0.57,0.24c0.212,0.052,0.431,0.079,0.653,0.079
c0.361,0,0.666-0.049,0.913-0.146c0.249-0.097,0.451-0.237,0.604-0.421c0.156-0.184,0.268-0.406,0.34-0.668
C641.189,571.177,641.226,570.882,641.226,570.555z"/>
<path fill="#FFFFFF" d="M642.255,566.489v4.427h0.539v-2.358c0-0.235,0.042-0.451,0.121-0.647c0.08-0.198,0.195-0.368,0.34-0.512
c0.145-0.143,0.319-0.253,0.52-0.329c0.198-0.079,0.422-0.11,0.668-0.101v-0.539c-0.4-0.017-0.745,0.071-1.033,0.265
c-0.289,0.196-0.503,0.473-0.641,0.832h-0.017v-1.037H642.255z"/>
<path fill="#FFFFFF" d="M647.79,568.497v0.702c0,0.219-0.042,0.413-0.124,0.584c-0.083,0.172-0.195,0.319-0.335,0.438
c-0.14,0.12-0.305,0.211-0.495,0.274c-0.187,0.062-0.385,0.095-0.592,0.095c-0.125,0-0.248-0.021-0.368-0.061
c-0.12-0.04-0.227-0.1-0.315-0.176c-0.093-0.077-0.165-0.169-0.221-0.275c-0.054-0.105-0.082-0.227-0.082-0.364
c0-0.218,0.065-0.383,0.193-0.501c0.128-0.117,0.293-0.209,0.495-0.271c0.198-0.063,0.422-0.11,0.664-0.142
c0.243-0.032,0.478-0.066,0.707-0.107c0.075-0.012,0.161-0.032,0.257-0.06c0.098-0.029,0.165-0.075,0.197-0.138H647.79z
M644.906,567.844h0.542c0.012-0.36,0.126-0.622,0.345-0.784c0.214-0.164,0.496-0.246,0.84-0.246c0.159,0,0.31,0.014,0.449,0.039
s0.264,0.072,0.368,0.141c0.107,0.07,0.19,0.163,0.249,0.279c0.063,0.118,0.091,0.269,0.091,0.452c0,0.119-0.015,0.216-0.049,0.291
c-0.03,0.074-0.075,0.133-0.131,0.176c-0.059,0.042-0.128,0.073-0.216,0.089c-0.086,0.019-0.187,0.033-0.301,0.045
c-0.291,0.033-0.579,0.069-0.86,0.106c-0.284,0.037-0.538,0.101-0.766,0.193c-0.227,0.091-0.407,0.225-0.545,0.398
c-0.136,0.174-0.203,0.415-0.203,0.727c0,0.221,0.036,0.415,0.114,0.577c0.079,0.164,0.185,0.297,0.318,0.403
c0.134,0.105,0.291,0.184,0.473,0.231c0.178,0.05,0.371,0.073,0.578,0.073c0.218,0,0.404-0.02,0.561-0.061
c0.156-0.038,0.3-0.097,0.422-0.171c0.123-0.074,0.234-0.167,0.335-0.278c0.098-0.112,0.199-0.239,0.293-0.382h0.021
c0,0.114,0.005,0.218,0.02,0.313c0.014,0.094,0.044,0.175,0.092,0.245c0.045,0.068,0.109,0.12,0.193,0.157
c0.08,0.038,0.191,0.056,0.329,0.056c0.076,0,0.137-0.003,0.184-0.009c0.049-0.005,0.104-0.014,0.168-0.024v-0.456
c-0.052,0.023-0.108,0.036-0.173,0.036c-0.209,0-0.316-0.109-0.316-0.328v-2.35c0-0.297-0.049-0.539-0.145-0.725
c-0.098-0.187-0.224-0.331-0.38-0.435c-0.153-0.102-0.327-0.174-0.522-0.209c-0.195-0.038-0.388-0.056-0.583-0.056
c-0.258,0-0.492,0.027-0.704,0.085c-0.212,0.057-0.396,0.146-0.553,0.267c-0.157,0.12-0.283,0.273-0.374,0.462
C644.978,567.365,644.922,567.586,644.906,567.844z"/>
<path fill="#FFFFFF" d="M652.957,568.703c0,0.229-0.028,0.456-0.084,0.682c-0.054,0.227-0.14,0.428-0.258,0.605
c-0.117,0.177-0.266,0.321-0.449,0.434c-0.185,0.111-0.402,0.167-0.662,0.167c-0.285,0-0.525-0.056-0.724-0.167
c-0.197-0.112-0.357-0.257-0.48-0.434c-0.122-0.178-0.212-0.379-0.266-0.605c-0.056-0.226-0.084-0.453-0.084-0.682
c0-0.251,0.025-0.492,0.076-0.721c0.047-0.229,0.131-0.43,0.252-0.601c0.119-0.172,0.281-0.309,0.479-0.413
c0.202-0.103,0.449-0.154,0.746-0.154c0.26,0,0.478,0.056,0.662,0.167c0.184,0.112,0.332,0.258,0.449,0.434
c0.118,0.178,0.204,0.381,0.258,0.606C652.928,568.246,652.957,568.473,652.957,568.703z M649.412,566.489v6.058h0.538v-2.472
h0.021c0.056,0.16,0.137,0.3,0.242,0.421c0.106,0.12,0.23,0.221,0.366,0.299c0.137,0.082,0.285,0.143,0.444,0.181
c0.162,0.041,0.321,0.061,0.48,0.061c0.338,0,0.631-0.062,0.88-0.185c0.248-0.122,0.457-0.291,0.622-0.506
c0.165-0.214,0.288-0.463,0.37-0.746c0.079-0.284,0.119-0.583,0.119-0.896c0-0.314-0.04-0.613-0.119-0.897
c-0.082-0.283-0.205-0.532-0.37-0.745c-0.165-0.215-0.374-0.386-0.622-0.512c-0.249-0.126-0.542-0.188-0.88-0.188
c-0.36,0-0.682,0.083-0.963,0.252c-0.286,0.169-0.489,0.408-0.615,0.716h-0.018v-0.84H649.412z"/>
<path fill="#FFFFFF" d="M654.328,564.789v6.127h0.542v-2.582c0.005-0.218,0.042-0.419,0.105-0.604
c0.066-0.186,0.155-0.347,0.273-0.481c0.117-0.135,0.263-0.239,0.431-0.317c0.168-0.077,0.363-0.116,0.578-0.116
c0.218,0,0.399,0.035,0.544,0.104c0.148,0.068,0.263,0.163,0.35,0.283c0.086,0.121,0.145,0.262,0.179,0.424
c0.035,0.164,0.054,0.341,0.054,0.528v2.763h0.538v-2.849c0-0.263-0.025-0.5-0.075-0.713c-0.053-0.21-0.142-0.39-0.268-0.535
c-0.126-0.146-0.293-0.259-0.5-0.339c-0.21-0.08-0.47-0.119-0.777-0.119c-0.314,0-0.603,0.081-0.868,0.243
c-0.263,0.164-0.444,0.379-0.547,0.648h-0.017v-2.464H654.328z"/>
<path fill="#FFFFFF" d="M659.262,567.536h0.721v-0.91h-0.721V567.536z M659.262,570.916h0.721v-0.909h-0.721V570.916z"/>
<polygon fill="#FFFFFF" points="665.525,568.393 665.525,570.916 666.107,570.916 666.107,568.393 668.527,564.789
667.832,564.789 665.826,567.897 663.809,564.789 663.123,564.789 "/>
<path fill="#FFFFFF" d="M670.999,568.497v0.702c0,0.219-0.042,0.413-0.126,0.584c-0.083,0.172-0.192,0.319-0.335,0.438
c-0.14,0.12-0.305,0.211-0.492,0.274c-0.189,0.062-0.385,0.095-0.592,0.095c-0.126,0-0.249-0.021-0.368-0.061
c-0.12-0.04-0.228-0.1-0.318-0.176c-0.093-0.077-0.165-0.169-0.219-0.275c-0.056-0.105-0.084-0.227-0.084-0.364
c0-0.218,0.067-0.383,0.196-0.501c0.128-0.117,0.293-0.209,0.491-0.271c0.201-0.063,0.423-0.11,0.665-0.142
c0.242-0.032,0.479-0.066,0.71-0.107c0.072-0.012,0.158-0.032,0.256-0.06c0.098-0.029,0.163-0.075,0.198-0.138H670.999z
M668.116,567.844h0.539c0.012-0.36,0.126-0.622,0.343-0.784c0.219-0.164,0.498-0.246,0.842-0.246c0.159,0,0.31,0.014,0.449,0.039
s0.266,0.072,0.368,0.141c0.107,0.07,0.19,0.163,0.249,0.279c0.062,0.118,0.093,0.269,0.093,0.452c0,0.119-0.018,0.216-0.048,0.291
c-0.03,0.074-0.075,0.133-0.134,0.176c-0.056,0.042-0.129,0.073-0.216,0.089c-0.084,0.019-0.184,0.033-0.298,0.045
c-0.294,0.033-0.579,0.069-0.863,0.106c-0.282,0.037-0.536,0.101-0.764,0.193c-0.226,0.091-0.407,0.225-0.544,0.398
c-0.136,0.174-0.206,0.415-0.206,0.727c0,0.221,0.038,0.415,0.114,0.577c0.077,0.164,0.185,0.297,0.318,0.403
c0.134,0.105,0.294,0.184,0.472,0.231c0.182,0.05,0.374,0.073,0.579,0.073c0.217,0,0.404-0.02,0.563-0.061
c0.156-0.038,0.296-0.097,0.419-0.171s0.234-0.167,0.335-0.278c0.101-0.112,0.198-0.239,0.296-0.382h0.018
c0,0.114,0.008,0.218,0.021,0.313c0.014,0.094,0.045,0.175,0.09,0.245c0.045,0.068,0.108,0.12,0.192,0.157
c0.084,0.038,0.192,0.056,0.33,0.056c0.074,0,0.137-0.003,0.184-0.009c0.051-0.005,0.106-0.014,0.168-0.024v-0.456
c-0.05,0.023-0.109,0.036-0.17,0.036c-0.212,0-0.319-0.109-0.319-0.328v-2.35c0-0.297-0.047-0.539-0.145-0.725
c-0.098-0.187-0.224-0.331-0.377-0.435c-0.153-0.102-0.33-0.174-0.525-0.209c-0.193-0.038-0.389-0.056-0.58-0.056
c-0.261,0-0.495,0.027-0.705,0.085c-0.212,0.057-0.396,0.146-0.552,0.267c-0.16,0.12-0.283,0.273-0.375,0.462
C668.185,567.365,668.132,567.586,668.116,567.844z"/>
<path fill="#FFFFFF" d="M672.61,566.489v4.427h0.542v-2.582c0.005-0.218,0.042-0.419,0.105-0.604
c0.067-0.186,0.16-0.347,0.276-0.481c0.114-0.135,0.261-0.239,0.428-0.317c0.171-0.077,0.363-0.116,0.578-0.116
c0.218,0,0.399,0.035,0.547,0.104c0.146,0.068,0.261,0.163,0.347,0.283c0.086,0.121,0.146,0.262,0.179,0.424
c0.037,0.164,0.054,0.341,0.054,0.528v2.763h0.538v-2.849c0-0.263-0.024-0.5-0.075-0.713c-0.053-0.21-0.14-0.39-0.265-0.535
c-0.126-0.146-0.294-0.259-0.503-0.339c-0.21-0.08-0.47-0.119-0.777-0.119c-0.314,0-0.603,0.081-0.865,0.243
c-0.266,0.164-0.447,0.379-0.55,0.648h-0.017v-0.764H672.61z"/>
<path fill="#FFFFFF" d="M680.058,568.497v0.702c0,0.219-0.042,0.413-0.126,0.584c-0.08,0.172-0.191,0.319-0.335,0.438
c-0.14,0.12-0.304,0.211-0.491,0.274c-0.189,0.062-0.385,0.095-0.592,0.095c-0.126,0-0.249-0.021-0.369-0.061
c-0.119-0.04-0.227-0.1-0.317-0.176c-0.093-0.077-0.165-0.169-0.219-0.275c-0.056-0.105-0.081-0.227-0.081-0.364
c0-0.218,0.064-0.383,0.193-0.501c0.128-0.117,0.293-0.209,0.491-0.271c0.201-0.063,0.421-0.11,0.665-0.142
c0.242-0.032,0.479-0.066,0.71-0.107c0.072-0.012,0.158-0.032,0.256-0.06c0.098-0.029,0.163-0.075,0.198-0.138H680.058z
M677.176,567.844h0.539c0.011-0.36,0.126-0.622,0.343-0.784c0.219-0.164,0.498-0.246,0.841-0.246c0.16,0,0.312,0.014,0.451,0.039
c0.142,0.025,0.265,0.072,0.368,0.141c0.106,0.07,0.19,0.163,0.251,0.279c0.059,0.118,0.089,0.269,0.089,0.452
c0,0.119-0.017,0.216-0.047,0.291c-0.03,0.074-0.075,0.133-0.135,0.176c-0.056,0.042-0.128,0.073-0.215,0.089
c-0.083,0.019-0.184,0.033-0.298,0.045c-0.293,0.033-0.579,0.069-0.863,0.106c-0.282,0.037-0.537,0.101-0.763,0.193
c-0.227,0.091-0.408,0.225-0.545,0.398c-0.138,0.174-0.207,0.415-0.207,0.727c0,0.221,0.039,0.415,0.115,0.577
c0.078,0.164,0.185,0.297,0.317,0.403c0.135,0.105,0.294,0.184,0.473,0.231c0.182,0.05,0.374,0.073,0.579,0.073
c0.217,0,0.404-0.02,0.563-0.061c0.156-0.038,0.296-0.097,0.419-0.171s0.234-0.167,0.335-0.278
c0.101-0.112,0.198-0.239,0.296-0.382h0.017c0,0.114,0.009,0.218,0.022,0.313c0.015,0.094,0.045,0.175,0.089,0.245
c0.046,0.068,0.109,0.12,0.193,0.157c0.084,0.038,0.192,0.056,0.33,0.056c0.075,0,0.137-0.003,0.187-0.009
c0.048-0.005,0.104-0.014,0.165-0.024v-0.456c-0.051,0.023-0.109,0.036-0.17,0.036c-0.212,0-0.319-0.109-0.319-0.328v-2.35
c0-0.297-0.047-0.539-0.145-0.725c-0.098-0.187-0.224-0.331-0.377-0.435c-0.153-0.102-0.33-0.174-0.522-0.209
c-0.196-0.038-0.392-0.056-0.584-0.056c-0.26,0-0.494,0.027-0.704,0.085c-0.212,0.057-0.396,0.146-0.553,0.267
c-0.159,0.12-0.282,0.273-0.374,0.462C677.246,567.365,677.193,567.586,677.176,567.844z"/>
<path fill="#FFFFFF" d="M681.67,566.489v4.427h0.542v-2.582c0.005-0.218,0.042-0.419,0.105-0.604
c0.067-0.186,0.16-0.347,0.276-0.481c0.117-0.135,0.261-0.239,0.428-0.317c0.171-0.077,0.363-0.116,0.581-0.116
c0.216,0,0.396,0.035,0.544,0.104c0.146,0.068,0.261,0.163,0.347,0.283c0.087,0.121,0.145,0.262,0.182,0.424
c0.033,0.164,0.051,0.341,0.051,0.528v2.763h0.538v-2.849c0-0.263-0.025-0.5-0.074-0.713c-0.054-0.21-0.14-0.39-0.266-0.535
c-0.126-0.146-0.293-0.259-0.503-0.339c-0.209-0.08-0.47-0.119-0.777-0.119c-0.314,0-0.603,0.081-0.865,0.243
c-0.266,0.164-0.447,0.379-0.55,0.648h-0.017v-0.764H681.67z"/>
<polygon fill="#FFFFFF" points="688.776,564.789 688.776,570.916 692.792,570.916 692.792,570.417 689.359,570.417
689.359,564.789 "/>
<path fill="#FFFFFF" d="M693.29,566.489v4.427h0.538v-4.427H693.29z M693.29,564.789v0.867h0.538v-0.867H693.29z"/>
<polygon fill="#FFFFFF" points="694.25,571.036 694.705,571.036 697.323,564.661 696.883,564.661 "/>
<path fill="#FFFFFF" d="M697.723,566.489v4.427h0.543v-2.471c0-0.234,0.025-0.45,0.076-0.647c0.052-0.196,0.13-0.369,0.237-0.516
c0.105-0.146,0.242-0.26,0.41-0.344c0.17-0.082,0.368-0.124,0.598-0.124c0.17,0,0.314,0.029,0.436,0.087
c0.12,0.058,0.218,0.135,0.288,0.235c0.072,0.101,0.122,0.217,0.156,0.353c0.03,0.134,0.044,0.274,0.044,0.424v3.003h0.542v-2.505
c0-0.206,0.02-0.405,0.057-0.597c0.035-0.191,0.1-0.361,0.192-0.511c0.092-0.148,0.215-0.268,0.368-0.355
c0.153-0.089,0.35-0.134,0.584-0.134c0.389,0,0.661,0.096,0.815,0.288c0.153,0.191,0.232,0.471,0.232,0.836v2.978h0.538v-3.003
c0-1.035-0.489-1.552-1.467-1.552c-0.289,0-0.565,0.07-0.823,0.213c-0.257,0.144-0.452,0.36-0.583,0.652
c-0.079-0.292-0.237-0.509-0.475-0.652c-0.237-0.143-0.503-0.213-0.794-0.213c-0.36,0-0.656,0.076-0.889,0.23
c-0.23,0.153-0.419,0.369-0.563,0.643h-0.025v-0.745H697.723z"/>
<path fill="#FFFFFF" d="M705.19,568.849h3.404c0.012-0.311-0.017-0.615-0.083-0.914c-0.07-0.302-0.185-0.566-0.342-0.799
c-0.156-0.231-0.359-0.419-0.61-0.563c-0.256-0.143-0.557-0.213-0.912-0.213c-0.349,0-0.649,0.066-0.903,0.2
c-0.256,0.135-0.465,0.313-0.624,0.532c-0.158,0.221-0.279,0.471-0.354,0.751c-0.079,0.279-0.118,0.566-0.118,0.858
c0,0.313,0.039,0.612,0.118,0.896c0.074,0.283,0.195,0.532,0.354,0.746c0.159,0.215,0.368,0.384,0.624,0.506
c0.254,0.123,0.555,0.185,0.903,0.185c0.563,0,0.995-0.137,1.299-0.412c0.308-0.273,0.517-0.663,0.632-1.165h-0.539
c-0.086,0.343-0.24,0.616-0.464,0.823c-0.223,0.205-0.533,0.309-0.928,0.309c-0.256,0-0.478-0.055-0.661-0.164
c-0.182-0.108-0.333-0.246-0.452-0.411c-0.12-0.166-0.209-0.354-0.263-0.563C705.215,569.244,705.19,569.042,705.19,568.849z
M708.055,568.393h-2.865c0.022-0.199,0.069-0.396,0.14-0.588c0.072-0.19,0.167-0.359,0.288-0.505
c0.119-0.146,0.266-0.265,0.438-0.353c0.171-0.089,0.368-0.134,0.592-0.134c0.219,0,0.411,0.045,0.584,0.134
c0.17,0.088,0.316,0.206,0.436,0.353c0.121,0.146,0.216,0.313,0.279,0.501C708.013,567.991,708.05,568.187,708.055,568.393z"/>
<path fill="#FFFFFF" d="M709.675,568.703c0-0.229,0.027-0.457,0.081-0.682c0.056-0.226,0.142-0.429,0.26-0.606
c0.117-0.176,0.266-0.321,0.45-0.434c0.181-0.111,0.402-0.167,0.658-0.167c0.288,0,0.528,0.056,0.727,0.167
c0.198,0.112,0.357,0.258,0.479,0.434c0.123,0.178,0.214,0.381,0.266,0.606c0.056,0.225,0.084,0.452,0.084,0.682
c0,0.229-0.028,0.456-0.084,0.682c-0.052,0.227-0.143,0.428-0.266,0.605c-0.122,0.177-0.281,0.321-0.479,0.434
c-0.198,0.111-0.438,0.167-0.727,0.167c-0.256,0-0.478-0.056-0.658-0.167c-0.185-0.112-0.333-0.257-0.45-0.434
c-0.118-0.178-0.204-0.379-0.26-0.605C709.703,569.159,709.675,568.931,709.675,568.703z M713.219,570.916v-6.127h-0.539v2.54
h-0.02c-0.056-0.159-0.137-0.299-0.242-0.42c-0.106-0.119-0.229-0.222-0.367-0.305c-0.136-0.084-0.284-0.145-0.44-0.184
c-0.156-0.041-0.318-0.06-0.486-0.06c-0.335,0-0.628,0.062-0.877,0.188s-0.458,0.297-0.622,0.512
c-0.168,0.213-0.291,0.462-0.369,0.745c-0.082,0.284-0.12,0.583-0.12,0.897c0,0.313,0.038,0.612,0.12,0.896
c0.078,0.283,0.201,0.532,0.369,0.746c0.164,0.215,0.373,0.384,0.622,0.506c0.249,0.123,0.542,0.185,0.877,0.185
c0.163,0,0.324-0.022,0.486-0.068c0.161-0.047,0.315-0.11,0.458-0.192c0.146-0.084,0.271-0.185,0.383-0.305
c0.111-0.12,0.195-0.252,0.254-0.396h0.017v0.841H713.219z"/>
<path fill="#FFFFFF" d="M714.258,566.489v4.427h0.539v-4.427H714.258z M714.258,564.789v0.867h0.539v-0.867H714.258z"/>
<path fill="#FFFFFF" d="M718.668,568.497v0.702c0,0.219-0.042,0.413-0.126,0.584c-0.081,0.172-0.192,0.319-0.335,0.438
c-0.14,0.12-0.305,0.211-0.491,0.274c-0.19,0.062-0.386,0.095-0.592,0.095c-0.126,0-0.249-0.021-0.369-0.061
c-0.119-0.04-0.227-0.1-0.319-0.176c-0.091-0.077-0.163-0.169-0.217-0.275c-0.056-0.105-0.081-0.227-0.081-0.364
c0-0.218,0.064-0.383,0.193-0.501c0.128-0.117,0.293-0.209,0.491-0.271c0.2-0.063,0.421-0.11,0.665-0.142
c0.242-0.032,0.479-0.066,0.708-0.107c0.074-0.012,0.16-0.032,0.258-0.06c0.098-0.029,0.161-0.075,0.198-0.138H718.668z
M715.787,567.844h0.538c0.012-0.36,0.126-0.622,0.344-0.784c0.219-0.164,0.498-0.246,0.841-0.246c0.16,0,0.31,0.014,0.449,0.039
c0.144,0.025,0.266,0.072,0.369,0.141c0.105,0.07,0.189,0.163,0.252,0.279c0.059,0.118,0.089,0.269,0.089,0.452
c0,0.119-0.017,0.216-0.047,0.291c-0.03,0.074-0.076,0.133-0.135,0.176c-0.056,0.042-0.128,0.073-0.215,0.089
c-0.084,0.019-0.184,0.033-0.3,0.045c-0.293,0.033-0.577,0.069-0.861,0.106c-0.283,0.037-0.537,0.101-0.764,0.193
c-0.226,0.091-0.407,0.225-0.544,0.398c-0.138,0.174-0.207,0.415-0.207,0.727c0,0.221,0.039,0.415,0.114,0.577
c0.078,0.164,0.186,0.297,0.318,0.403c0.135,0.105,0.293,0.184,0.473,0.231c0.182,0.05,0.374,0.073,0.577,0.073
c0.219,0,0.405-0.02,0.565-0.061c0.156-0.038,0.296-0.097,0.419-0.171s0.234-0.167,0.335-0.278
c0.101-0.112,0.198-0.239,0.296-0.382h0.017c0,0.114,0.009,0.218,0.022,0.313c0.014,0.094,0.045,0.175,0.089,0.245
c0.045,0.068,0.109,0.12,0.193,0.157c0.084,0.038,0.192,0.056,0.33,0.056c0.074,0,0.136-0.003,0.187-0.009
c0.047-0.005,0.103-0.014,0.165-0.024v-0.456c-0.051,0.023-0.109,0.036-0.17,0.036c-0.214,0-0.319-0.109-0.319-0.328v-2.35
c0-0.297-0.048-0.539-0.146-0.725c-0.098-0.187-0.224-0.331-0.377-0.435c-0.154-0.102-0.329-0.174-0.522-0.209
c-0.195-0.038-0.391-0.056-0.584-0.056c-0.259,0-0.493,0.027-0.703,0.085c-0.212,0.057-0.397,0.146-0.554,0.267
c-0.158,0.12-0.281,0.273-0.374,0.462C715.856,567.365,715.803,567.586,715.787,567.844z"/>
<path fill="#FFFFFF" d="M723.833,568.703c0,0.229-0.027,0.456-0.081,0.682c-0.056,0.227-0.14,0.428-0.257,0.605
c-0.117,0.177-0.268,0.321-0.453,0.434c-0.182,0.111-0.4,0.167-0.658,0.167c-0.285,0-0.528-0.056-0.727-0.167
c-0.195-0.112-0.356-0.257-0.479-0.434c-0.123-0.178-0.21-0.379-0.266-0.605c-0.054-0.226-0.081-0.453-0.081-0.682
c0-0.229,0.027-0.457,0.081-0.682c0.056-0.226,0.143-0.429,0.266-0.606c0.123-0.176,0.284-0.321,0.479-0.434
c0.198-0.111,0.441-0.167,0.727-0.167c0.258,0,0.477,0.056,0.658,0.167c0.186,0.112,0.336,0.258,0.453,0.434
c0.117,0.178,0.201,0.381,0.257,0.606C723.805,568.246,723.833,568.473,723.833,568.703z M720.289,564.789v6.127h0.5v-0.841h0.017
c0.125,0.309,0.33,0.546,0.611,0.713c0.285,0.166,0.605,0.248,0.967,0.248c0.337,0,0.631-0.062,0.88-0.185
c0.248-0.122,0.455-0.291,0.623-0.506c0.165-0.214,0.287-0.463,0.368-0.746c0.078-0.284,0.12-0.583,0.12-0.896
c0-0.314-0.042-0.613-0.12-0.897c-0.081-0.283-0.203-0.532-0.368-0.745c-0.168-0.215-0.375-0.386-0.623-0.512
c-0.249-0.126-0.543-0.188-0.88-0.188c-0.159,0-0.321,0.019-0.48,0.06c-0.16,0.039-0.31,0.1-0.446,0.184
c-0.138,0.083-0.261,0.186-0.363,0.305c-0.106,0.121-0.188,0.261-0.246,0.42h-0.017v-2.54H720.289z"/>
<path fill="#FFFFFF" d="M728.045,568.497v0.702c0,0.219-0.04,0.413-0.123,0.584c-0.084,0.172-0.196,0.319-0.336,0.438
c-0.14,0.12-0.305,0.211-0.495,0.274c-0.187,0.062-0.385,0.095-0.589,0.095c-0.128,0-0.251-0.021-0.37-0.061
c-0.121-0.04-0.224-0.1-0.316-0.176c-0.093-0.077-0.165-0.169-0.221-0.275c-0.054-0.105-0.082-0.227-0.082-0.364
c0-0.218,0.065-0.383,0.193-0.501c0.128-0.117,0.293-0.209,0.495-0.271c0.2-0.063,0.422-0.11,0.664-0.142
c0.244-0.032,0.481-0.066,0.71-0.107c0.072-0.012,0.158-0.032,0.256-0.06c0.096-0.029,0.163-0.075,0.195-0.138H728.045z
M725.162,567.844h0.542c0.012-0.36,0.126-0.622,0.345-0.784c0.217-0.164,0.496-0.246,0.84-0.246c0.159,0,0.311,0.014,0.45,0.039
s0.263,0.072,0.368,0.141c0.106,0.07,0.19,0.163,0.248,0.279c0.063,0.118,0.091,0.269,0.091,0.452c0,0.119-0.015,0.216-0.046,0.291
c-0.033,0.074-0.077,0.133-0.134,0.176c-0.058,0.042-0.128,0.073-0.216,0.089c-0.086,0.019-0.184,0.033-0.298,0.045
c-0.293,0.033-0.582,0.069-0.863,0.106c-0.282,0.037-0.538,0.101-0.766,0.193c-0.224,0.091-0.405,0.225-0.545,0.398
c-0.136,0.174-0.203,0.415-0.203,0.727c0,0.221,0.04,0.415,0.114,0.577c0.079,0.164,0.185,0.297,0.319,0.403
c0.133,0.105,0.29,0.184,0.472,0.231c0.178,0.05,0.371,0.073,0.578,0.073c0.218,0,0.404-0.02,0.561-0.061
c0.16-0.038,0.3-0.097,0.422-0.171c0.123-0.074,0.234-0.167,0.335-0.278c0.102-0.112,0.199-0.239,0.297-0.382h0.017
c0,0.114,0.005,0.218,0.021,0.313c0.015,0.094,0.042,0.175,0.091,0.245c0.044,0.068,0.108,0.12,0.192,0.157
c0.084,0.038,0.191,0.056,0.329,0.056c0.076,0,0.137-0.003,0.184-0.009c0.051-0.005,0.107-0.014,0.168-0.024v-0.456
c-0.05,0.023-0.108,0.036-0.17,0.036c-0.212,0-0.319-0.109-0.319-0.328v-2.35c0-0.297-0.047-0.539-0.145-0.725
c-0.098-0.187-0.224-0.331-0.377-0.435c-0.156-0.102-0.33-0.174-0.525-0.209c-0.191-0.038-0.388-0.056-0.583-0.056
c-0.258,0-0.492,0.027-0.704,0.085c-0.21,0.057-0.394,0.146-0.553,0.267c-0.157,0.12-0.283,0.273-0.372,0.462
C725.234,567.365,725.178,567.586,725.162,567.844z"/>
<path fill="#FFFFFF" d="M729.659,566.489v4.427h0.542v-2.582c0.005-0.218,0.039-0.419,0.106-0.604
c0.063-0.186,0.156-0.347,0.273-0.481c0.118-0.135,0.26-0.239,0.43-0.317c0.168-0.077,0.361-0.116,0.578-0.116
c0.219,0,0.399,0.035,0.545,0.104s0.263,0.163,0.349,0.283c0.084,0.121,0.146,0.262,0.179,0.424
c0.035,0.164,0.054,0.341,0.054,0.528v2.763h0.54v-2.849c0-0.263-0.025-0.5-0.076-0.713c-0.054-0.21-0.143-0.39-0.269-0.535
c-0.125-0.146-0.293-0.259-0.5-0.339c-0.209-0.08-0.469-0.119-0.775-0.119c-0.316,0-0.607,0.081-0.87,0.243
c-0.262,0.164-0.442,0.379-0.547,0.648h-0.017v-0.764H729.659z"/>
<polygon fill="#FFFFFF" points="734.276,564.789 734.276,570.916 734.814,570.916 734.814,569.235 735.692,568.504 737.44,570.916
738.119,570.916 736.103,568.126 737.991,566.489 737.269,566.489 734.814,568.634 734.814,564.789 "/>
<rect x="738.881" y="570.006" fill="#FFFFFF" width="0.721" height="0.909"/>
<polygon fill="#FFFFFF" points="740.454,566.489 742.16,570.916 742.736,570.916 744.391,566.489 743.833,566.489 742.459,570.375
742.443,570.375 741.054,566.489 "/>
<path fill="#FFFFFF" d="M744.931,566.489v4.427h0.542v-4.427H744.931z M744.931,564.789v0.867h0.542v-0.867H744.931z"/>
<path fill="#FFFFFF" d="M749.163,567.785h0.538c-0.01-0.24-0.059-0.449-0.145-0.627c-0.087-0.178-0.201-0.325-0.347-0.445
c-0.147-0.121-0.314-0.21-0.508-0.267c-0.19-0.058-0.4-0.085-0.621-0.085c-0.195,0-0.391,0.021-0.589,0.068
c-0.198,0.046-0.377,0.116-0.536,0.214c-0.159,0.097-0.29,0.226-0.391,0.387c-0.098,0.159-0.148,0.352-0.148,0.575
c0,0.188,0.03,0.346,0.093,0.476c0.063,0.128,0.151,0.237,0.263,0.325c0.111,0.09,0.242,0.164,0.391,0.224
c0.147,0.061,0.311,0.112,0.489,0.159l0.694,0.154c0.12,0.029,0.237,0.062,0.355,0.103c0.117,0.041,0.224,0.09,0.313,0.149
c0.093,0.061,0.165,0.137,0.221,0.224c0.054,0.089,0.081,0.198,0.081,0.332c0,0.153-0.039,0.282-0.118,0.388
c-0.074,0.107-0.173,0.193-0.296,0.263c-0.119,0.069-0.248,0.118-0.388,0.146c-0.14,0.029-0.276,0.043-0.408,0.043
c-0.36,0-0.661-0.094-0.905-0.279c-0.242-0.187-0.377-0.462-0.398-0.829h-0.542c0.047,0.544,0.232,0.939,0.554,1.185
c0.323,0.246,0.745,0.369,1.267,0.369c0.205,0,0.414-0.022,0.626-0.068c0.212-0.047,0.402-0.122,0.57-0.229
c0.17-0.104,0.307-0.242,0.416-0.411c0.109-0.168,0.161-0.373,0.161-0.613c0-0.194-0.035-0.363-0.107-0.505
c-0.076-0.144-0.172-0.265-0.288-0.36c-0.117-0.099-0.252-0.177-0.405-0.236c-0.151-0.06-0.305-0.102-0.458-0.126l-0.72-0.161
c-0.093-0.024-0.193-0.054-0.303-0.092c-0.109-0.036-0.207-0.083-0.298-0.141c-0.093-0.057-0.168-0.127-0.23-0.211
c-0.058-0.082-0.089-0.184-0.089-0.305c0-0.142,0.031-0.263,0.096-0.36c0.061-0.097,0.145-0.175,0.247-0.236
c0.104-0.06,0.216-0.103,0.34-0.128c0.121-0.025,0.244-0.039,0.365-0.039c0.153,0,0.298,0.02,0.438,0.056
c0.138,0.038,0.258,0.097,0.363,0.177c0.106,0.081,0.19,0.181,0.254,0.305C749.121,567.474,749.156,567.619,749.163,567.785z"/>
<path fill="#FFFFFF" d="M750.645,566.489v4.427h0.542v-4.427H750.645z M750.645,564.789v0.867h0.542v-0.867H750.645z"/>
<path fill="#FFFFFF" d="M753.084,566.489v-1.329h-0.543v1.329h-0.771v0.456h0.771v3.026c-0.006,0.379,0.063,0.637,0.207,0.776
c0.143,0.142,0.394,0.211,0.755,0.211c0.079,0,0.158-0.002,0.239-0.008s0.159-0.009,0.24-0.009v-0.455
c-0.153,0.018-0.308,0.025-0.464,0.025c-0.192-0.012-0.315-0.067-0.362-0.167c-0.051-0.1-0.072-0.238-0.072-0.416v-2.984h0.898
v-0.456H753.084z"/>
<path fill="#FFFFFF" d="M757.416,567.785h0.538c-0.01-0.24-0.058-0.449-0.145-0.627c-0.084-0.178-0.2-0.325-0.347-0.445
c-0.145-0.121-0.314-0.21-0.505-0.267c-0.193-0.058-0.4-0.085-0.623-0.085c-0.196,0-0.392,0.021-0.59,0.068
c-0.195,0.046-0.374,0.116-0.535,0.214c-0.16,0.097-0.291,0.226-0.389,0.387c-0.101,0.159-0.151,0.352-0.151,0.575
c0,0.188,0.03,0.346,0.096,0.476c0.061,0.128,0.148,0.237,0.26,0.325c0.112,0.09,0.243,0.164,0.392,0.224
c0.146,0.061,0.312,0.112,0.488,0.159l0.694,0.154c0.121,0.029,0.241,0.062,0.358,0.103c0.116,0.041,0.221,0.09,0.313,0.149
c0.09,0.061,0.165,0.137,0.218,0.224c0.054,0.089,0.081,0.198,0.081,0.332c0,0.153-0.039,0.282-0.114,0.388
c-0.078,0.107-0.176,0.193-0.297,0.263c-0.119,0.069-0.251,0.118-0.391,0.146c-0.14,0.029-0.275,0.043-0.407,0.043
c-0.361,0-0.662-0.094-0.905-0.279c-0.243-0.187-0.377-0.462-0.399-0.829h-0.539c0.044,0.544,0.229,0.939,0.554,1.185
c0.321,0.246,0.742,0.369,1.265,0.369c0.206,0,0.413-0.022,0.625-0.068c0.213-0.047,0.403-0.122,0.57-0.229
c0.17-0.104,0.308-0.242,0.417-0.411c0.108-0.168,0.165-0.373,0.165-0.613c0-0.194-0.04-0.363-0.112-0.505
c-0.076-0.144-0.171-0.265-0.288-0.36c-0.117-0.099-0.251-0.177-0.402-0.236c-0.153-0.06-0.308-0.102-0.461-0.126l-0.72-0.161
c-0.093-0.024-0.19-0.054-0.3-0.092c-0.109-0.036-0.209-0.083-0.301-0.141c-0.093-0.057-0.168-0.127-0.228-0.211
c-0.061-0.082-0.091-0.184-0.091-0.305c0-0.142,0.033-0.263,0.095-0.36c0.063-0.097,0.145-0.175,0.248-0.236
c0.104-0.06,0.218-0.103,0.341-0.128c0.124-0.025,0.243-0.039,0.363-0.039c0.154,0,0.301,0.02,0.438,0.056
c0.137,0.038,0.26,0.097,0.363,0.177c0.106,0.081,0.192,0.181,0.254,0.305C757.376,567.474,757.41,567.619,757.416,567.785z"/>
<path fill="#FFFFFF" d="M759.75,566.489v-1.329h-0.542v1.329h-0.773v0.456h0.773v3.026c-0.005,0.379,0.062,0.637,0.207,0.776
c0.142,0.142,0.394,0.211,0.754,0.211c0.081,0,0.158-0.002,0.24-0.008c0.081-0.006,0.159-0.009,0.239-0.009v-0.455
c-0.153,0.018-0.31,0.025-0.463,0.025c-0.193-0.012-0.315-0.067-0.363-0.167c-0.05-0.1-0.072-0.238-0.072-0.416v-2.984h0.898
v-0.456H759.75z"/>
<path fill="#FFFFFF" d="M763.3,566.361c-0.338,0-0.637,0.062-0.896,0.188c-0.261,0.126-0.478,0.295-0.656,0.507
c-0.177,0.212-0.31,0.458-0.402,0.742c-0.092,0.283-0.138,0.585-0.138,0.905c0,0.318,0.046,0.622,0.138,0.904
c0.093,0.284,0.226,0.53,0.402,0.743c0.179,0.211,0.396,0.379,0.656,0.501c0.26,0.123,0.559,0.185,0.896,0.185
c0.337,0,0.637-0.062,0.896-0.185c0.264-0.122,0.481-0.29,0.657-0.501c0.179-0.213,0.313-0.459,0.404-0.743
c0.091-0.282,0.138-0.586,0.138-0.904c0-0.32-0.047-0.622-0.138-0.905c-0.091-0.284-0.226-0.53-0.404-0.742
c-0.176-0.212-0.394-0.381-0.657-0.507C763.937,566.422,763.637,566.361,763.3,566.361z M763.3,566.814
c0.258,0,0.483,0.055,0.679,0.163c0.195,0.109,0.354,0.252,0.486,0.43c0.128,0.176,0.224,0.379,0.289,0.604
c0.067,0.226,0.099,0.457,0.099,0.691c0,0.233-0.031,0.465-0.099,0.69c-0.065,0.225-0.161,0.428-0.289,0.604
c-0.132,0.178-0.291,0.32-0.486,0.429c-0.195,0.109-0.421,0.164-0.679,0.164c-0.257,0-0.482-0.055-0.676-0.164
c-0.195-0.108-0.358-0.251-0.486-0.429c-0.128-0.177-0.226-0.38-0.291-0.604c-0.066-0.226-0.1-0.457-0.1-0.69
c0-0.234,0.033-0.466,0.1-0.691c0.065-0.226,0.163-0.429,0.291-0.604c0.128-0.178,0.291-0.32,0.486-0.43
C762.818,566.869,763.043,566.814,763.3,566.814z"/>
<path fill="#FFFFFF" d="M769.325,567.878h0.539c-0.062-0.509-0.257-0.89-0.587-1.141c-0.329-0.252-0.731-0.377-1.206-0.377
c-0.335,0-0.635,0.062-0.896,0.188c-0.26,0.126-0.479,0.295-0.656,0.507c-0.177,0.212-0.311,0.458-0.402,0.742
c-0.093,0.283-0.137,0.585-0.137,0.905c0,0.318,0.044,0.622,0.137,0.904c0.092,0.284,0.226,0.53,0.402,0.743
c0.178,0.211,0.396,0.379,0.656,0.501c0.262,0.123,0.562,0.185,0.896,0.185c0.505,0,0.915-0.149,1.236-0.45
c0.319-0.3,0.517-0.719,0.593-1.257h-0.542c-0.017,0.184-0.061,0.353-0.137,0.507c-0.075,0.154-0.168,0.286-0.282,0.398
c-0.114,0.111-0.246,0.198-0.396,0.262c-0.148,0.062-0.305,0.095-0.473,0.095c-0.258,0-0.483-0.055-0.677-0.164
c-0.195-0.108-0.356-0.251-0.484-0.429c-0.13-0.177-0.228-0.38-0.291-0.604c-0.067-0.226-0.101-0.457-0.101-0.69
c0-0.234,0.033-0.466,0.101-0.691c0.063-0.226,0.161-0.429,0.291-0.604c0.128-0.178,0.289-0.32,0.484-0.43
c0.193-0.108,0.419-0.163,0.677-0.163c0.36,0,0.643,0.095,0.841,0.284C769.112,567.286,769.249,567.547,769.325,567.878z"/>
<polygon fill="#FFFFFF" points="770.655,564.789 770.655,570.916 771.197,570.916 771.197,569.235 772.07,568.504 773.822,570.916
774.498,570.916 772.481,568.126 774.369,566.489 773.648,566.489 771.197,568.634 771.197,564.789 "/>
<path fill="#FFFFFF" d="M774.935,564.789v6.127h0.54v-2.582c0.009-0.218,0.042-0.419,0.108-0.604
c0.063-0.186,0.156-0.347,0.273-0.481c0.118-0.135,0.26-0.239,0.431-0.317c0.167-0.077,0.36-0.116,0.578-0.116
s0.399,0.035,0.545,0.104c0.145,0.068,0.262,0.163,0.35,0.283c0.083,0.121,0.145,0.262,0.178,0.424
c0.034,0.164,0.053,0.341,0.053,0.528v2.763h0.54v-2.849c0-0.263-0.025-0.5-0.079-0.713c-0.05-0.21-0.14-0.39-0.266-0.535
c-0.125-0.146-0.293-0.259-0.5-0.339c-0.209-0.08-0.468-0.119-0.775-0.119c-0.316,0-0.606,0.081-0.868,0.243
c-0.263,0.164-0.447,0.379-0.549,0.648h-0.019v-2.464H774.935z"/>
<path fill="#FFFFFF" d="M781.421,566.361c-0.335,0-0.635,0.062-0.896,0.188c-0.26,0.126-0.479,0.295-0.656,0.507
c-0.177,0.212-0.312,0.458-0.402,0.742c-0.093,0.283-0.138,0.585-0.138,0.905c0,0.318,0.045,0.622,0.138,0.904
c0.091,0.284,0.226,0.53,0.402,0.743c0.178,0.211,0.396,0.379,0.656,0.501c0.262,0.123,0.562,0.185,0.896,0.185
c0.338,0,0.636-0.062,0.896-0.185c0.263-0.122,0.479-0.29,0.656-0.501c0.179-0.213,0.313-0.459,0.404-0.743
c0.093-0.282,0.138-0.586,0.138-0.904c0-0.32-0.045-0.622-0.138-0.905c-0.092-0.284-0.226-0.53-0.404-0.742
c-0.177-0.212-0.394-0.381-0.656-0.507C782.057,566.422,781.759,566.361,781.421,566.361z M781.421,566.814
c0.259,0,0.482,0.055,0.678,0.163c0.195,0.109,0.358,0.252,0.486,0.43c0.128,0.176,0.226,0.379,0.291,0.604
c0.067,0.226,0.098,0.457,0.098,0.691c0,0.233-0.03,0.465-0.098,0.69c-0.065,0.225-0.163,0.428-0.291,0.604
c-0.128,0.178-0.291,0.32-0.486,0.429c-0.195,0.109-0.419,0.164-0.678,0.164c-0.258,0-0.483-0.055-0.677-0.164
c-0.195-0.108-0.356-0.251-0.485-0.429c-0.129-0.177-0.227-0.38-0.29-0.604c-0.067-0.226-0.101-0.457-0.101-0.69
c0-0.234,0.033-0.466,0.101-0.691c0.063-0.226,0.161-0.429,0.29-0.604c0.129-0.178,0.29-0.32,0.485-0.43
C780.938,566.869,781.164,566.814,781.421,566.814z"/>
<rect x="784.331" y="564.789" fill="#FFFFFF" width="0.54" height="6.127"/>
<path fill="#FFFFFF" d="M785.909,566.489v4.427h0.542v-2.471c0-0.234,0.025-0.45,0.075-0.647c0.054-0.196,0.132-0.369,0.237-0.516
c0.106-0.146,0.244-0.26,0.411-0.344c0.171-0.082,0.368-0.124,0.599-0.124c0.17,0,0.314,0.029,0.436,0.087
c0.119,0.058,0.217,0.135,0.286,0.235c0.074,0.101,0.124,0.217,0.158,0.353c0.03,0.134,0.044,0.274,0.044,0.424v3.003h0.542v-2.505
c0-0.206,0.02-0.405,0.056-0.597s0.101-0.361,0.192-0.511c0.093-0.148,0.216-0.268,0.369-0.355c0.153-0.089,0.35-0.134,0.584-0.134
c0.388,0,0.661,0.096,0.815,0.288c0.153,0.191,0.232,0.471,0.232,0.836v2.978h0.538v-3.003c0-1.035-0.489-1.552-1.467-1.552
c-0.29,0-0.566,0.07-0.823,0.213c-0.257,0.144-0.452,0.36-0.584,0.652c-0.078-0.292-0.238-0.509-0.476-0.652
c-0.237-0.143-0.502-0.213-0.792-0.213c-0.36,0-0.656,0.076-0.889,0.23c-0.232,0.153-0.419,0.369-0.563,0.643h-0.025v-0.745
H785.909z"/>
<rect x="793.375" y="570.006" fill="#FFFFFF" width="0.72" height="0.909"/>
<path fill="#FFFFFF" d="M798.564,567.878h0.542c-0.063-0.509-0.259-0.89-0.589-1.141c-0.326-0.252-0.729-0.377-1.204-0.377
c-0.338,0-0.637,0.062-0.896,0.188s-0.48,0.295-0.656,0.507c-0.179,0.212-0.313,0.458-0.404,0.742
c-0.091,0.283-0.138,0.585-0.138,0.905c0,0.318,0.047,0.622,0.138,0.904c0.091,0.284,0.226,0.53,0.404,0.743
c0.176,0.211,0.396,0.379,0.656,0.501c0.26,0.123,0.559,0.185,0.896,0.185c0.503,0,0.913-0.149,1.234-0.45
c0.321-0.3,0.517-0.719,0.593-1.257h-0.54c-0.019,0.184-0.064,0.353-0.14,0.507c-0.072,0.154-0.167,0.286-0.281,0.398
c-0.114,0.111-0.246,0.198-0.394,0.262c-0.151,0.062-0.308,0.095-0.473,0.095c-0.258,0-0.483-0.055-0.679-0.164
c-0.195-0.108-0.354-0.251-0.486-0.429c-0.128-0.177-0.224-0.38-0.29-0.604c-0.066-0.226-0.098-0.457-0.098-0.69
c0-0.234,0.031-0.466,0.098-0.691s0.162-0.429,0.29-0.604c0.132-0.178,0.291-0.32,0.486-0.43c0.195-0.108,0.421-0.163,0.679-0.163
c0.36,0,0.64,0.095,0.841,0.284C798.352,567.286,798.492,567.547,798.564,567.878z"/>
<path fill="#FFFFFF" d="M801.765,566.361c-0.339,0-0.637,0.062-0.897,0.188c-0.259,0.126-0.478,0.295-0.656,0.507
c-0.175,0.212-0.313,0.458-0.401,0.742c-0.093,0.283-0.137,0.585-0.137,0.905c0,0.318,0.044,0.622,0.137,0.904
c0.089,0.284,0.227,0.53,0.401,0.743c0.179,0.211,0.397,0.379,0.656,0.501c0.261,0.123,0.559,0.185,0.897,0.185
c0.338,0,0.637-0.062,0.896-0.185c0.259-0.122,0.48-0.29,0.656-0.501c0.18-0.213,0.313-0.459,0.402-0.743
c0.092-0.282,0.14-0.586,0.14-0.904c0-0.32-0.048-0.622-0.14-0.905c-0.09-0.284-0.223-0.53-0.402-0.742
c-0.176-0.212-0.397-0.381-0.656-0.507C802.402,566.422,802.103,566.361,801.765,566.361z M801.765,566.814
c0.257,0,0.482,0.055,0.679,0.163c0.192,0.109,0.355,0.252,0.483,0.43c0.128,0.176,0.226,0.379,0.293,0.604
c0.063,0.226,0.098,0.457,0.098,0.691c0,0.233-0.034,0.465-0.098,0.69c-0.067,0.225-0.165,0.428-0.293,0.604
c-0.128,0.178-0.291,0.32-0.483,0.429c-0.196,0.109-0.422,0.164-0.679,0.164c-0.258,0-0.483-0.055-0.679-0.164
c-0.193-0.108-0.354-0.251-0.483-0.429c-0.129-0.177-0.227-0.38-0.294-0.604c-0.063-0.226-0.098-0.457-0.098-0.69
c0-0.234,0.034-0.466,0.098-0.691c0.067-0.226,0.165-0.429,0.294-0.604c0.129-0.178,0.29-0.32,0.483-0.43
C801.282,566.869,801.507,566.814,801.765,566.814z"/>
<path fill="#FFFFFF" d="M804.664,566.489v4.427h0.542v-2.471c0-0.234,0.025-0.45,0.079-0.647c0.05-0.196,0.128-0.369,0.233-0.516
c0.106-0.146,0.244-0.26,0.414-0.344c0.168-0.082,0.365-0.124,0.596-0.124c0.17,0,0.317,0.029,0.438,0.087
c0.121,0.058,0.215,0.135,0.288,0.235c0.07,0.101,0.122,0.217,0.154,0.353c0.03,0.134,0.047,0.274,0.047,0.424v3.003h0.539v-2.505
c0-0.206,0.02-0.405,0.056-0.597c0.038-0.191,0.104-0.361,0.192-0.511c0.093-0.148,0.216-0.268,0.372-0.355
c0.153-0.089,0.346-0.134,0.581-0.134c0.391,0,0.661,0.096,0.815,0.288c0.155,0.191,0.231,0.471,0.231,0.836v2.978h0.541v-3.003
c0-1.035-0.488-1.552-1.469-1.552c-0.291,0-0.563,0.07-0.821,0.213c-0.259,0.144-0.452,0.36-0.583,0.652
c-0.081-0.292-0.241-0.509-0.479-0.652c-0.237-0.143-0.503-0.213-0.792-0.213c-0.36,0-0.656,0.076-0.889,0.23
c-0.232,0.153-0.419,0.369-0.562,0.643h-0.025v-0.745H804.664z"/>
</g>
</svg>
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
width="100%" height="100%" viewBox="0 0 52.866 51.739" enable-background="new 0 0 52.866 51.739" xml:space="preserve">
<polygon fill-rule="evenodd" clip-rule="evenodd" fill="#B2B1B1" points="44.415,3.704 14.633,3.704 14.633,51.739 52.866,51.739
52.866,11.997 44.415,3.704 44.415,3.704 "/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#E20917" d="M42.481,1.85c0,2.811,0,5.655,0,6.226c0.576,0,3.471,0,6.308,0
L42.481,1.85L42.481,1.85L42.481,1.85z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#F5F5F5" d="M41.175,1.307c-10.689,0-27.428,0-28.284,0
c0,1.255,0,46.237,0,47.492c1.24,0,35.794,0,37.034,0c0-0.935,0-26.096,0-39.417h-8.75V1.307L41.175,1.307L41.175,1.307z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#1D1D1B" d="M42.481,1.85l6.308,6.226c-2.837,0-5.731,0-6.308,0
C42.481,7.505,42.481,4.66,42.481,1.85L42.481,1.85L42.481,1.85z M49.925,48.799c-1.24,0-35.794,0-37.034,0
c0-1.255,0-46.236,0-47.492c0.856,0,17.595,0,28.284,0v8.075h8.75C49.925,22.703,49.925,47.864,49.925,48.799L49.925,48.799
L49.925,48.799L49.925,48.799z M11.583,0v50.105h39.649V8.65L42.467,0H11.583L11.583,0L11.583,0L11.583,0z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#B2B1B1" d="M39.015,19.902V5.337H12.891c0,3.47,0,8.805,0,14.565H39.015
L39.015,19.902L39.015,19.902z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#E20917" d="M1.307,16.936c1.238,0,33.62,0,34.857,0c0-1.12,0-10.861,0-11.981
c-1.237,0-33.619,0-34.857,0C1.307,6.075,1.307,15.816,1.307,16.936L1.307,16.936L1.307,16.936z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#1D1D1B" d="M36.164,16.936c-1.237,0-33.619,0-34.857,0
c0-1.12,0-10.861,0-11.981c1.238,0,33.62,0,34.857,0C36.164,6.075,36.164,15.816,36.164,16.936L36.164,16.936L36.164,16.936z
M0,3.647v14.596h37.471V3.647h-0.653H0L0,3.647L0,3.647L0,3.647z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#FFFFFF" d="M9.905,8.311v2.267h0.881c0.635,0,1.059-0.042,1.272-0.125
c0.214-0.083,0.382-0.214,0.503-0.392c0.122-0.178,0.183-0.385,0.183-0.621c0-0.291-0.086-0.53-0.256-0.72
c-0.17-0.188-0.386-0.307-0.647-0.354c-0.191-0.037-0.578-0.055-1.158-0.055H9.905L9.905,8.311L9.905,8.311z M8.292,14.928V6.963
h2.583c0.979,0,1.616,0.04,1.914,0.12c0.456,0.12,0.839,0.38,1.146,0.78c0.309,0.401,0.463,0.918,0.463,1.552
c0,0.49-0.089,0.901-0.267,1.234c-0.177,0.333-0.402,0.595-0.676,0.786c-0.273,0.19-0.552,0.316-0.834,0.377
c-0.385,0.077-0.94,0.114-1.668,0.114H9.905v3.002H8.292L8.292,14.928L8.292,14.928L8.292,14.928z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#FFFFFF" d="M17.315,8.311v5.27h1.195c0.447,0,0.77-0.025,0.968-0.076
c0.26-0.065,0.475-0.175,0.646-0.331c0.171-0.156,0.311-0.412,0.419-0.769c0.107-0.356,0.162-0.842,0.162-1.457
s-0.055-1.087-0.162-1.416c-0.108-0.33-0.26-0.586-0.454-0.771c-0.195-0.185-0.441-0.31-0.741-0.375
c-0.224-0.05-0.661-0.076-1.313-0.076H17.315L17.315,8.311L17.315,8.311z M15.702,6.963h2.931c0.661,0,1.165,0.05,1.512,0.152
c0.467,0.138,0.865,0.382,1.197,0.733c0.332,0.352,0.585,0.782,0.759,1.29c0.173,0.509,0.26,1.137,0.26,1.883
c0,0.656-0.081,1.221-0.244,1.695c-0.198,0.58-0.481,1.049-0.851,1.408c-0.277,0.271-0.653,0.483-1.126,0.635
c-0.354,0.113-0.827,0.169-1.42,0.169h-3.018V6.963L15.702,6.963L15.702,6.963L15.702,6.963z"/>
<polygon fill-rule="evenodd" clip-rule="evenodd" fill="#FFFFFF" points="23.727,14.928 23.727,6.963 29.18,6.963 29.18,8.311
25.34,8.311 25.34,10.19 28.648,10.19 28.648,11.538 25.34,11.538 25.34,14.928 23.727,14.928 23.727,14.928 "/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#E30921" d="M25.983,35.519c-2.812,2.1-4.745,5.082-3.982,5.547l-0.666-0.335
C20.948,40.259,21.825,37.729,25.983,35.519L25.983,35.519L25.983,35.519L25.983,35.519L25.983,35.519z"/>
<path fill="none" stroke="#E30921" stroke-width="0.5197" stroke-miterlimit="2.6131" d="M25.983,35.519
c-2.812,2.1-4.745,5.082-3.982,5.547l-0.666-0.335C20.948,40.259,21.825,37.729,25.983,35.519L25.983,35.519L25.983,35.519
L25.983,35.519L25.983,35.519z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#E30921" d="M47.193,34.952l-0.023-0.368c-0.004-0.047,0-0.105-0.002-0.166
h-0.005c-0.015,0.057-0.033,0.122-0.052,0.175l-0.127,0.351h-0.071l-0.124-0.36c-0.015-0.049-0.029-0.108-0.044-0.166H46.74
c-0.001,0.058,0,0.11-0.003,0.166l-0.022,0.368h-0.089l0.047-0.61h0.121l0.119,0.331c0.016,0.046,0.028,0.097,0.043,0.153h0.003
c0.014-0.056,0.028-0.11,0.043-0.155l0.12-0.329h0.119l0.046,0.61H47.193L47.193,34.952L47.193,34.952L47.193,34.952L47.193,34.952z
M46.604,34.342v0.078h-0.187v0.532h-0.091V34.42h-0.186v-0.078H46.604L46.604,34.342L46.604,34.342L46.604,34.342L46.604,34.342
L46.604,34.342L46.604,34.342z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#E30921" d="M28.566,34.342c0.568-0.985,1.172-2.088,1.814-3.325
c1.3-2.505,2.067-4.538,2.526-6.316c0.834,2.165,2.059,4.225,3.702,5.639c0.511,0.44,1.075,0.845,1.667,1.215
C35.172,32.035,31.688,32.963,28.566,34.342L28.566,34.342L28.566,34.342z M47.102,33.165c0.821-1.749-2.684-2.349-7.452-1.796
c-0.838-0.472-1.652-1.007-2.389-1.593c-1.836-1.507-3.187-4.034-4.027-6.566c0.383-2.121,0.359-3.924,0.401-5.872
c-0.182,0.888-0.312,2.372-0.811,4.482c-0.643-2.466-0.783-4.757-0.394-5.904c0.086-0.251,0.293-0.545,0.385-0.61
c0.358,0.179,0.792,0.619,0.889,1.541c0.323-1.702-0.509-1.642-0.742-1.642l-0.523-0.004c-0.29,0-0.551,0.232-0.677,0.705
c-0.431,1.605-0.225,4.505,0.669,7.419c-0.556,1.942-1.416,4.301-2.806,7.101c-3.741,7.533-6.472,11.047-8.29,10.306l0.649,0.333
c1.21,0.617,3.286-1.02,6.551-6.667c3.069-1.107,7.154-1.921,10.714-2.278c3.505,1.878,7.53,2.523,7.734,1.313
c-0.907,0.436-3.514-0.17-6.149-1.445C44.442,31.758,47.17,32.083,47.102,33.165L47.102,33.165L47.102,33.165L47.102,33.165z"/>
<path fill="none" stroke="#E30921" stroke-width="0.5197" stroke-miterlimit="2.6131" d="M28.566,34.342
c0.568-0.985,1.172-2.088,1.814-3.325c1.3-2.505,2.067-4.538,2.526-6.316c0.834,2.165,2.059,4.225,3.702,5.639
c0.511,0.44,1.075,0.845,1.667,1.215C35.172,32.035,31.688,32.963,28.566,34.342L28.566,34.342L28.566,34.342z M47.102,33.165
c0.821-1.749-2.684-2.349-7.452-1.796c-0.838-0.472-1.652-1.007-2.389-1.593c-1.836-1.507-3.187-4.034-4.027-6.566
c0.383-2.121,0.359-3.924,0.401-5.872c-0.182,0.888-0.312,2.372-0.811,4.482c-0.643-2.466-0.783-4.757-0.394-5.904
c0.086-0.251,0.293-0.545,0.385-0.61c0.358,0.179,0.792,0.619,0.889,1.541c0.323-1.702-0.509-1.642-0.742-1.642l-0.523-0.004
c-0.29,0-0.551,0.232-0.677,0.705c-0.431,1.605-0.225,4.505,0.669,7.419c-0.556,1.942-1.416,4.301-2.806,7.101
c-3.741,7.533-6.472,11.047-8.29,10.306l0.649,0.333c1.21,0.617,3.286-1.02,6.551-6.667c3.069-1.107,7.154-1.921,10.714-2.278
c3.505,1.878,7.53,2.523,7.734,1.313c-0.907,0.436-3.514-0.17-6.149-1.445C44.442,31.758,47.17,32.083,47.102,33.165"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#1D1D1B" d="M19.435,42.372l-0.528-2.746c-0.148-0.775-0.302-1.79-0.431-2.613
h-0.053c-0.129,0.834-0.298,1.882-0.446,2.623l-0.542,2.736H19.435L19.435,42.372L19.435,42.372L19.435,42.372L19.435,42.372z
M17.233,43.649l-0.675,3.17h-1.566l2.582-11.478h1.856l2.442,11.478h-1.585l-0.667-3.17H17.233L17.233,43.649L17.233,43.649
L17.233,43.649L17.233,43.649L17.233,43.649L17.233,43.649z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#1D1D1B" d="M26.4,41.704c0-0.164,0-0.352-0.025-0.524
c-0.076-0.741-0.504-1.392-1.079-1.392c-0.985,0-1.331,1.391-1.331,2.936c0,1.689,0.442,2.89,1.275,2.89
c0.367,0,0.846-0.192,1.103-1.175c0.041-0.146,0.058-0.334,0.058-0.539V41.704L26.4,41.704L26.4,41.704L26.4,41.704L26.4,41.704z
M28.008,35.036v9.649c0,0.631,0.043,1.56,0.067,2.135h-1.387l-0.1-1.004h-0.053c-0.277,0.586-0.894,1.14-1.728,1.14
c-1.521,0-2.463-1.661-2.463-4.243c0-2.914,1.239-4.297,2.549-4.297c0.653,0,1.183,0.307,1.472,0.93H26.4v-4.309H28.008
L28.008,35.036L28.008,35.036L28.008,35.036L28.008,35.036L28.008,35.036L28.008,35.036z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#1D1D1B" d="M31.874,45.692c0.992,0,1.207-1.75,1.207-3.016
c0-1.225-0.215-3-1.242-3c-1.047,0-1.255,1.775-1.255,3c0,1.383,0.239,3.016,1.272,3.016H31.874L31.874,45.692L31.874,45.692
L31.874,45.692L31.874,45.692z M31.831,46.955c-1.647,0-2.849-1.423-2.849-4.255c0-2.998,1.422-4.285,2.92-4.285
c1.632,0,2.814,1.469,2.814,4.254c0,3.282-1.626,4.286-2.869,4.286H31.831L31.831,46.955L31.831,46.955L31.831,46.955L31.831,46.955
L31.831,46.955L31.831,46.955z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#1D1D1B" d="M37.293,43.887c0,0.194,0.024,0.38,0.066,0.519
c0.264,1.01,0.743,1.208,1.073,1.208c0.951,0,1.305-1.263,1.305-2.96c0-1.582-0.371-2.865-1.323-2.865
c-0.521,0-0.955,0.625-1.064,1.235c-0.032,0.165-0.057,0.376-0.057,0.548V43.887L37.293,43.887L37.293,43.887L37.293,43.887
L37.293,43.887z M35.686,35.036h1.607v4.444h0.034c0.419-0.75,1.005-1.064,1.737-1.064c1.397,0,2.291,1.59,2.291,4.135
c0,2.959-1.206,4.405-2.571,4.405c-0.815,0-1.27-0.433-1.635-1.183h-0.053l-0.101,1.047h-1.379c0.025-0.56,0.068-1.504,0.068-2.135
V35.036L35.686,35.036L35.686,35.036L35.686,35.036L35.686,35.036L35.686,35.036L35.686,35.036z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#1D1D1B" d="M45.958,41.853c0.019-1.456-0.493-2.223-1.129-2.223
c-0.819,0-1.203,1.188-1.249,2.223H45.958L45.958,41.853L45.958,41.853L45.958,41.853L45.958,41.853z M43.571,43.017
c0.016,2.119,0.928,2.635,1.887,2.635c0.591,0,1.088-0.138,1.439-0.301l0.24,1.17c-0.494,0.248-1.256,0.393-1.973,0.393
c-2.073,0-3.172-1.575-3.172-4.123c0-2.715,1.246-4.384,2.963-4.384c1.721,0,2.52,1.653,2.52,3.731c0,0.414-0.016,0.67-0.04,0.887
L43.571,43.017L43.571,43.017L43.571,43.017L43.571,43.017L43.571,43.017L43.571,43.017L43.571,43.017z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#B2B1B1" d="M49.925,10.912c0-0.524,0-1.036,0-1.529h-7.589v1.529H49.925
L49.925,10.912L49.925,10.912z"/>
</svg>
\rules except wikilink
<$button class="cpfadeable">{{$:/core/images/preview-open}} MultiMedia</$button>
<$button tooltip="View the next paper">
Next {{$:/core/images/right-arrow}}
<$action-navigate $to={{!!next_paper}}/>
</$button>
<$button tooltip="View the next session">
Next {{$:/core/images/right-arrow}}
<$action-navigate $to={{!!next_session_title}}/>
</$button>
@@.cppinktext ''NO PDF''@@
<span class="cpicon cpfadeable cpw25px">{{$:/causal/images/pdficon02}}</span>
<span class="cpicon cpfadeable cpw25px">{{$:/causal/images/pdficon02}}</span>
<span class="cpicon cpfadeable cpw25px">{{$:/causal/images/pdficon02}}</span>
<span class="cpicon cpfadeable cpw25px">{{$:/causal/images/pdficon02}}</span>
<$button tooltip="View the preceding paper">
{{$:/core/images/left-arrow}} Prev
<$action-navigate $to={{!!prev_paper}}/>
</$button>
<$button tooltip="View the preceding session">
{{$:/core/images/left-arrow}} Prev
<$action-navigate $to={{!!prev_session_title}}/>
</$button>
\rules except wikilink
Proceedings of the 18th Annual Conference of the International Speech Communication Association (INTERSPEECH 2017). ISSN 2308-457X. © 2017 International Speech Communication Association. All rights reserved. For technical support please contact Causal Productions (info@causalproductions.com).
<$button tooltip="View the Session List">
{{$:/core/images/up-arrow}} Sessions
<$action-navigate $to="Session List"/>
</$button>
/*
* CONFERENCE Paper abstract card
*/
.cpabstractcardauthorheading { font-size:1em; }
/* the following style is for the <div> that contains the author names (maybe multiline) and affiliation
names (maybe multiline). the 0.75em spaces it a bit from the button row that follows */
.cpabstractcardauthorarea { font-size:1em; line-height:1.15; margin-top:0.5em; margin-bottom:0.75em; }
/* the following style is for the <p> that contains the author names only */
p.cpabstractcardauthornames { font-style:normal; margin-top:0em; margin-bottom:0em; }
/* the following style is for the <p> that contains the affiliations only, the 0.25em separates it from the author names */
p.cpabstractcardaffiliationlist { font-style:italic; margin-top:0.25em; margin-bottom:0em; }
/* the abstract paragraph is the last thing on the tiddler so make the p bottom margin zero */
.cpabstractcardabstract { font-size:1em; line-height:1.15; }
.cpabstractcardabstract > p { margin-top:0.75em; margin-bottom:0em; }
/* the following style is for the <p> that contains the buttons in a single row. The 0.5 spaces the rows close together. */
.cpbuttonrow > p { margin-top:0.5em; margin-bottom:0.5em; }
/* the following style is for the VIEW PDF button which might have a MULTIMEDIA button next to it.
Need separate <p> style "lineheightforbuttons" to avoid extra vertical space due to line-height, and the <span>
is needed to keep the hover area confined with the buttons and not full width. The hover
tooltip is vertically sized by the line-height of the span. */
.lineheightforbuttons { line-height:1em; }
.cpabscardpdfandmediabutton { display:inline-flex;align-items:flex-start;line-height:1.5em; }
.cpaidxlinkrowstyle { width:30px;text-align:left;padding-left:0;margin-left:0; }
/* the following style is based on the normal table top margin of 1em, with margin-top
reduced to 0.5em because the link row table is borderless so it needs to be moved a bit
closer to surrounding elements. The bottom margin is zero because that is the end of the tiddler. */
.cpaidxlinkrowtable { margin-top:0.5em; margin-bottom:0em; }
.cpaidxlinkrowtable td { padding-left:0em; padding-right:1em; }
/*
* CONFERENCE Author Index List tiddler styles
*/
/* the author list is a borderless table so reduce margin-top to 0.5em to make the vertical whitespace appear
consistent with bordered tables. Bottom margin is set to zero because that is the end of the tiddler. */
.cpauthorindexlisttable { margin-top:0.5em; margin-bottom:0em; }
/* the next line ensures all td elements within a .cpauthorindexlisttable have zero left-right padding
and I include the font and line-height definition to avoid adding more structure elements */
.cpauthorindexlisttable td { padding-left:0em; padding-right:0em; font-size:1em; line-height:1.5; }
/*
* CONFERENCE Author Index Person Card
*/
/* the following style is for the author paper table. 1em at top matches our other
bordered tables, and 0em at bottom because it is the end of the tiddler */
.cpaidxauthortable { margin-top:1em; margin-bottom:0em; }
/* the following styles are used within the table */
.cpauthorindexpersoncardauthorname { font-size:1em; font-weight:bold; }
.cpauthorindexpersoncardconferencename { font-size:1em; font-weight:bold; }
.cpauthorindexpersoncardpapercode { font-size:1em; line-height:1.15; white-space:nowrap; }
.cpauthorindexpersoncardpapertitle { font-size:1em; line-height:1.15; }
/*
* Global change to TIDDLYWIKI built-in styles
*/
/* make the titlebar smaller. This affects the tiddler title, and the 3 control buttons on top right
*/
.tc-titlebar { font-size:1.2em; }
/* the margin-bottom spec in the next class allows vertical space between tiddler title and body to close
*/
.tc-titlebar h2 { font-weight: bold; margin-bottom:0.5em; }
/* the tiddler body begins with a <p> so the top margin contributes to the space between title and body.
The following selector selects the first child <p> of the tiddler-body and sets the top/bottom margin to
a minimum value, which can be extended in cases such as the abstract card author list.
*/
.tc-tiddler-body > p { margin-top:0.5em; margin-bottom:0.5em; }
/* the following makes the tags wrapper disappear, allowing the vertical space between tiddler title and
tiddler body to close.
*/
.tc-tags-wrapper { display: none; }
\rules except wikilink
.cpwelcomepagespaceaboveiconwithconferencename { padding-top:0.75em; }
.cpwelcomepagespaceaboveiconwithoutconferencename { padding-top:0.0em; }
/* the following styles force the conference logos to lose their descender padding due
to the line-height of the parent */
.cpwelcomepagespaceaboveiconwithconferencename > img { display:block; }
.cpwelcomepagespaceaboveiconwithoutconferencename > img { display:block; }
.icon_size_on_welcome_page { width:250px; }
/* the confinfo page table is borderless so reduce the top margin a bit to make it consistent
with other tiddlers. Bottom margin is set to zero because that is the end of the tiddler. */
.cpconfinfotable { margin-top:1em; margin-bottom:0em; }
.cpconfinfotable td { padding-left:0em; padding-bottom:0.5em; }
.cpconfinfotable tr:last-child td { padding-bottom:0em; }
/* the following style is used for <a> elements surrounding buttons, to ensure that
the text inside the button does not cause a mysterious underline character to appear between
buttons on the same line, and force the text color to black instead of normal link blue.
Note that the TW text colour is not black but rgb(51,51,51). */
a.externallinkbutton { color: rgb(51,51,51); text-decoration: none; }
/* the following reveals and styles allow buttons and table cells with class
"cpfadeable" to be faded when turned off. Specifically, PDF and MEDIA link
buttons can be switched off, resulting in not clickable links (can still
be tabbed and entered but ignore this), and faded appearance */
<$reveal type="match" state="$:/causal/config/hidePDFandMEDIA" text="hide">
a.externallinkbutton {
pointer-events: none;
cursor: default;
}
.cpfadeable {
opacity: 0.33;
}
.cpabscardpdfandmediabutton:hover::after, .cpaidxauthortable td:first-child:hover::after, .cpconfinfotable td:first-child:hover::after, .cpsessionviewtable td:first-child:hover::after {
display: inline;
position: absolute;
border: 1px solid #ccc;
border-radius: 4px;
box-shadow: 1px 1px 4px #000;
background-color: #fff;
margin-left: 5px;
margin-top: -25px;
padding: 3px;
opacity: 1;
}
.cpabscardpdfandmediabutton::after, .cpaidxauthortable td:first-child::after, .cpconfinfotable td:first-child::after, .cpsessionviewtable td:first-child::after {
content: "PDF+MEDIA files are only available in the final proceedings";
opacity: 1;
}
.cpabscardpdfandmediabutton::after, .cpaidxauthortable td:first-child::after, .cpconfinfotable td:first-child::after, .cpsessionviewtable td:first-child::after {
display: none;
}
</$reveal>
<$reveal type="match" state="$:/causal/config/hidePDFandMEDIA" text="show">
.cpfadeable {
opacity: 1;
}
</$reveal>
.cpconferencedisambiguator { font-size:1.12em; font-weight:bold; }
.cpprevnextanchortext { font-size:1.12em; font-weight:bold; }
.cpredtext { color:red; }
.cppinktext { color:#FFB0B0; }
.cpcenter { text-align:center; }
.cpmailingaddress { padding-left:2em; }
.cptightlineheight { line-height:1.15; }
.cpemabovezerobelow { margin-top:1em; margin-bottom:0em; }
.cpcopyrightpage { line-height:1.15; margin-top:0.75em; margin-bottom:0em; }
.cpsupportpage { line-height:1.15; margin-top:0.75em; margin-bottom:0em; }
.cpsupportpagetable { margin-top:1em; margin-bottom:0em; }
/* the following causes cpicon to have no line-height, otherwise the icons
get a descender margin below the icon caused by the font style of the parent */
.cpicon > img { display: block; }
.cpw25px > img { width:25px; }
/* the following is used in the session view to force a minimum width for the pdf icon column, using @@ ... @@ syntax */
.pdficonintable { display:block;width:30px; }
/*
* CONFERENCE Session List tiddler styles
*/
/* the session list is a borderless table so reduce the margin-top to 0.5em to make it consistent
with bordered tables. Bottom margin is set to zero because that is the end of the tiddler. */
.cpsessionlisttable { margin-top:0.5em; margin-bottom:0em; }
/* the next line ensures all td elements within a .cpsessionlisttable have zero left-right padding */
.cpsessionlisttable td { padding-left:0em; padding-right:0.5em; }
/* note that in session list table, the vertical alignment of table cells must be done
using TW5 operators and not CSS. Operators such as display:flex and align-content:flex-start do not seem to work. */
.cpsessionlistsessioncode { font-size:1em; line-height:1.15; white-space:nowrap; }
.cpsessionlistsessionname { font-size:1em; line-height:1.15; }
/*
 * CONFERENCE Session View tiddler styles
 */
/* the following style adds a bit of space above and below table row to separate cell text from rulers */
table.cpsessionviewtable { margin-top:0.75em; margin-bottom:0em; }
/* the following styles are for entries within the session view table */
.cpsessionviewpapercode { font-size:1em; line-height:1.15; white-space:nowrap; }
.cpsessionviewpapertitle { font-size:1em; line-height:1.15; }
/* author names are italicised to distinguish them from the paper title */
.cpsessionviewpaperauthor { font-size:1em;font-style:italic;line-height:1.15; }
.cpsessionviewmetadata { font-size:1em; line-height:1.15; }
.cpsessionviewmetadata table { margin-top:0.6em; margin-bottom:0.75em; }
.cpsessionviewmetadata tr:first-child td:first-child { padding-bottom:0.2em; } /* make the padding 0.2em on the bottom of top left cell, to space this row a bit more from subsequent rows */
.cpsessionviewmetadata td { padding-left:0px; padding-right:0px; }
.cpsessionviewmetadata td:first-child { width:1px; white-space: nowrap; } /* ensure that 'chairs:' column is just wide enough for the word */
/* the following class is used to make borderless tables; !important is needed to
   override the theme's default table border rules */
.cpborderless,
.cpborderless table,
.cpborderless td,
.cpborderless tr,
.cpborderless th,
.cpborderless tbody { border:0 !important; }
/* the following class essentially defines the visual appearance of H2 headers, for use
in tables where tiddler !! syntax does not work. For all header style definitions see w3schools
or t287/00_gv.txt */
.cph2 { display: block; font-size: 1.5em; margin-top: 0.83em; margin-bottom: 0.83em; margin-left: 0; margin-right: 0; font-weight: bold; }
/* same idea as .cph2 but at body font size, i.e. a bold H3-style header */
.cph3 { display: block; font-size: 1.0em; margin-top: 0.83em; margin-bottom: 0.83em; margin-left: 0; margin-right: 0; font-weight: bold; }
/* the following allows tables to have extra space between content and row divider rules */
.cptablecelltopbottomspace1 td { padding-top:0.1em; padding-bottom:0.1em; }
.cptablecelltopbottomspace2 td { padding-top:0.2em; padding-bottom:0.2em; }
.cptablecelltopbottomspace3 td { padding-top:0.3em; padding-bottom:0.3em; }
/*
 * Welcome Page tiddler styles
 */
/* width of svg logo for the whole publication */
/* NOTE(review): TODO_publication_welcomeartwork_displaywidth is not valid CSS as
   written — presumably a placeholder substituted at build/generation time; confirm
   the generator replaces it before this tiddler is rendered */
.cppublicationsvg { width:TODO_publication_welcomeartwork_displaywidth; }
.cppublicationname { font-weight:bold;font-size:1.3em; }
/* date on the left, venue on the right, pushed apart by the flex layout */
.cppublicationdatevenue {
font-size:1.1em;
display:flex;
justify-content:space-between;
}
/* each individual conference in the publication is named in the following style */
.cpwelcomepageconferencename { font-weight:bold;line-height:1.2; }
/* the following style is for the publication header which is a table with icon in left cell
and conference name and date/venue in right cell. We need to have a small top margin to separate
from the tiddler title.
*/
.cpwelcomepagepublicationtable,
.cpwelcomepagepublicationtable td { margin-top:1em; margin-bottom:0px; padding-top:0px; padding-bottom:0px; }
/* the following style is for a table which contains a per-conference row with icon in left cell, and major
headings in right cell such as preface, session list, author index. We want all margins to be zero so it
can butt up to its vertical neighbours efficiently.
*/
.cpwelcomepageconferencetable,
.cpwelcomepageconferencetable td { margin-top:0px; margin-bottom:0px; padding-top:0px; padding-bottom:0px; }
/* the copyright message is displayed in tiny font on the welcome page. To make it readable the user can click on the COPYRIGHT STATEMENT heading to see the text in a readable tiddler */
.cpwelcomepagecopyright { display: block; font-size: 0.5em; margin-top: 0.1em; margin-bottom: 0.1em; margin-left: 0; margin-right: 0; font-weight: bold; line-height:1.5em; }
/* the following style is applied to the conference information, session list, and author index links.
TW mandates that the links be blue, and not bold, so specifying these in the following style will have
no effect. We can control font size, italic, and other parameters which will work correctly. */
.cpwelcomepageconferencelinks {}
\rules except wikilink
<$button>{{$:/core/images/preview-open}} View Folder</$button>
\rules except wikilink
<$checkbox tiddler="$:/state/causal" field="view multimedia list" checked="yes" unchecked="no" default="no"> View MultiMedia list</$checkbox>
<a href={{!!pdf_file_full_name}} class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in abscard view}}</a>
<$button tooltip="View the top level Welcome Page">
{{$:/core/images/up-arrow}} Welcome
<$action-navigate $to="Welcome Page"/>
</$button>
{
"tiddlers": {
"$:/Acknowledgements": {
"title": "$:/Acknowledgements",
"type": "text/vnd.tiddlywiki",
"text": "TiddlyWiki incorporates code from these fine OpenSource projects:\n\n* [[The Stanford Javascript Crypto Library|http://bitwiseshiftleft.github.io/sjcl/]]\n* [[The Jasmine JavaScript Test Framework|http://pivotal.github.io/jasmine/]]\n* [[Normalize.css by Nicolas Gallagher|http://necolas.github.io/normalize.css/]]\n\nAnd media from these projects:\n\n* World flag icons from [[Wikipedia|http://commons.wikimedia.org/wiki/Category:SVG_flags_by_country]]\n"
},
"$:/core/copyright.txt": {
"title": "$:/core/copyright.txt",
"type": "text/plain",
"text": "TiddlyWiki created by Jeremy Ruston, (jeremy [at] jermolene [dot] com)\n\nCopyright © Jeremy Ruston 2004-2007\nCopyright © UnaMesa Association 2007-2016\n\nRedistribution and use in source and binary forms, with or without modification,\nare permitted provided that the following conditions are met:\n\nRedistributions of source code must retain the above copyright notice, this\nlist of conditions and the following disclaimer.\n\nRedistributions in binary form must reproduce the above copyright notice, this\nlist of conditions and the following disclaimer in the documentation and/or other\nmaterials provided with the distribution.\n\nNeither the name of the UnaMesa Association nor the names of its contributors may be\nused to endorse or promote products derived from this software without specific\nprior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 'AS IS' AND ANY\nEXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\nOF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT\nSHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,\nINCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED\nTO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR\nBUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\nCONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN\nANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH\nDAMAGE.\n"
},
"$:/core/icon": {
"title": "$:/core/icon",
"tags": "$:/tags/Image",
"text": "<svg width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\"><path d=\"M64 0l54.56 32v64L64 128 9.44 96V32L64 0zm21.127 95.408c-3.578-.103-5.15-.094-6.974-3.152l-1.42.042c-1.653-.075-.964-.04-2.067-.097-1.844-.07-1.548-1.86-1.873-2.8-.52-3.202.687-6.43.65-9.632-.014-1.14-1.593-5.17-2.157-6.61-1.768.34-3.546.406-5.34.497-4.134-.01-8.24-.527-12.317-1.183-.8 3.35-3.16 8.036-1.21 11.44 2.37 3.52 4.03 4.495 6.61 4.707 2.572.212 3.16 3.18 2.53 4.242-.55.73-1.52.864-2.346 1.04l-1.65.08c-1.296-.046-2.455-.404-3.61-.955-1.93-1.097-3.925-3.383-5.406-5.024.345.658.55 1.938.24 2.53-.878 1.27-4.665 1.26-6.4.47-1.97-.89-6.73-7.162-7.468-11.86 1.96-3.78 4.812-7.07 6.255-11.186-3.146-2.05-4.83-5.384-4.61-9.16l.08-.44c-3.097.59-1.49.37-4.82.628-10.608-.032-19.935-7.37-14.68-18.774.34-.673.664-1.287 1.243-.994.466.237.4 1.18.166 2.227-3.005 13.627 11.67 13.732 20.69 11.21.89-.25 2.67-1.936 3.905-2.495 2.016-.91 4.205-1.282 6.376-1.55 5.4-.63 11.893 2.276 15.19 2.37 3.3.096 7.99-.805 10.87-.615 2.09.098 4.143.483 6.16 1.03 1.306-6.49 1.4-11.27 4.492-12.38 1.814.293 3.213 2.818 4.25 4.167 2.112-.086 4.12.46 6.115 1.066 3.61-.522 6.642-2.593 9.833-4.203-3.234 2.69-3.673 7.075-3.303 11.127.138 2.103-.444 4.386-1.164 6.54-1.348 3.507-3.95 7.204-6.97 7.014-1.14-.036-1.805-.695-2.653-1.4-.164 1.427-.81 2.7-1.434 3.96-1.44 2.797-5.203 4.03-8.687 7.016-3.484 2.985 1.114 13.65 2.23 15.594 1.114 1.94 4.226 2.652 3.02 4.406-.37.58-.936.785-1.54 1.01l-.82.11zm-40.097-8.85l.553.14c.694-.27 2.09.15 2.83.353-1.363-1.31-3.417-3.24-4.897-4.46-.485-1.47-.278-2.96-.174-4.46l.02-.123c-.582 1.205-1.322 2.376-1.72 3.645-.465 1.71 2.07 3.557 3.052 4.615l.336.3z\" fill-rule=\"evenodd\"/></svg>"
},
"$:/core/images/advanced-search-button": {
"title": "$:/core/images/advanced-search-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-advanced-search-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M74.5651535,87.9848361 C66.9581537,93.0488876 57.8237115,96 48,96 C21.490332,96 0,74.509668 0,48 C0,21.490332 21.490332,0 48,0 C74.509668,0 96,21.490332 96,48 C96,57.8541369 93.0305793,67.0147285 87.9377231,74.6357895 L122.284919,108.982985 C125.978897,112.676963 125.973757,118.65366 122.284271,122.343146 C118.593975,126.033442 112.613238,126.032921 108.92411,122.343793 L74.5651535,87.9848361 Z M48,80 C65.673112,80 80,65.673112 80,48 C80,30.326888 65.673112,16 48,16 C30.326888,16 16,30.326888 16,48 C16,65.673112 30.326888,80 48,80 Z\"></path>\n <circle cx=\"48\" cy=\"48\" r=\"8\"></circle>\n <circle cx=\"28\" cy=\"48\" r=\"8\"></circle>\n <circle cx=\"68\" cy=\"48\" r=\"8\"></circle>\n </g>\n</svg>"
},
"$:/core/images/auto-height": {
"title": "$:/core/images/auto-height",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-auto-height tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <path d=\"M67.9867828,114.356363 L67.9579626,99.8785426 C67.9550688,98.4248183 67.1636987,97.087107 65.8909901,96.3845863 L49.9251455,87.5716209 L47.992126,95.0735397 L79.8995411,95.0735397 C84.1215894,95.0735397 85.4638131,89.3810359 81.686497,87.4948823 L49.7971476,71.5713518 L48.0101917,79.1500092 L79.992126,79.1500092 C84.2093753,79.1500092 85.5558421,73.4676733 81.7869993,71.5753162 L49.805065,55.517008 L48.0101916,63.0917009 L79.9921259,63.0917015 C84.2035118,63.0917016 85.5551434,57.4217887 81.7966702,55.5218807 L65.7625147,47.4166161 L67.9579705,50.9864368 L67.9579705,35.6148245 L77.1715737,44.8284272 C78.7336709,46.3905243 81.2663308,46.3905243 82.8284279,44.8284271 C84.390525,43.2663299 84.390525,40.7336699 82.8284278,39.1715728 L66.8284271,23.1715728 C65.2663299,21.6094757 62.73367,21.6094757 61.1715729,23.1715729 L45.1715729,39.1715729 C43.6094757,40.73367 43.6094757,43.26633 45.1715729,44.8284271 C46.73367,46.3905243 49.26633,46.3905243 50.8284271,44.8284271 L59.9579705,35.6988837 L59.9579705,50.9864368 C59.9579705,52.495201 60.806922,53.8755997 62.1534263,54.5562576 L78.1875818,62.6615223 L79.9921261,55.0917015 L48.0101917,55.0917009 C43.7929424,55.0917008 42.4464755,60.7740368 46.2153183,62.6663939 L78.1972526,78.7247021 L79.992126,71.1500092 L48.0101917,71.1500092 C43.7881433,71.1500092 42.4459197,76.842513 46.2232358,78.7286665 L78.1125852,94.6521971 L79.8995411,87.0735397 L47.992126,87.0735397 C43.8588276,87.0735397 42.4404876,92.5780219 46.0591064,94.5754586 L62.024951,103.388424 L59.9579785,99.8944677 L59.9867142,114.32986 L50.8284271,105.171573 C49.26633,103.609476 46.73367,103.609476 45.1715729,105.171573 C43.6094757,106.73367 43.6094757,109.26633 45.1715729,110.828427 L61.1715729,126.828427 C62.73367,128.390524 65.2663299,128.390524 66.8284271,126.828427 L82.8284278,110.828427 C84.390525,109.26633 84.390525,106.73367 
82.8284279,105.171573 C81.2663308,103.609476 78.7336709,103.609476 77.1715737,105.171573 L67.9867828,114.356363 L67.9867828,114.356363 Z M16,20 L112,20 C114.209139,20 116,18.209139 116,16 C116,13.790861 114.209139,12 112,12 L16,12 C13.790861,12 12,13.790861 12,16 C12,18.209139 13.790861,20 16,20 L16,20 Z\"></path>\n</svg>"
},
"$:/core/images/blank": {
"title": "$:/core/images/blank",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-blank tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\"></svg>"
},
"$:/core/images/bold": {
"title": "$:/core/images/bold",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-bold tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M41.1456583,51.8095238 L41.1456583,21.8711485 L67.4985994,21.8711485 C70.0084159,21.8711485 72.4285598,22.0802967 74.7591036,22.4985994 C77.0896475,22.9169022 79.1512515,23.6638602 80.9439776,24.7394958 C82.7367036,25.8151314 84.170863,27.3090474 85.2464986,29.2212885 C86.3221342,31.1335296 86.859944,33.5835518 86.859944,36.5714286 C86.859944,41.9496067 85.2465147,45.8337882 82.0196078,48.2240896 C78.792701,50.614391 74.6694929,51.8095238 69.6498599,51.8095238 L41.1456583,51.8095238 Z M13,0 L13,128 L75.0280112,128 C80.7647346,128 86.3519803,127.28292 91.789916,125.848739 C97.2278517,124.414559 102.068139,122.203563 106.310924,119.215686 C110.553709,116.22781 113.929959,112.373506 116.439776,107.652661 C118.949592,102.931816 120.204482,97.3445701 120.204482,90.8907563 C120.204482,82.8832466 118.262391,76.0411115 114.378151,70.3641457 C110.493911,64.6871798 104.607883,60.7133634 96.719888,58.442577 C102.456611,55.6937304 106.788968,52.1680887 109.717087,47.8655462 C112.645206,43.5630037 114.109244,38.1849062 114.109244,31.7310924 C114.109244,25.7553389 113.123259,20.7357813 111.151261,16.6722689 C109.179262,12.6087565 106.400578,9.35201972 102.815126,6.90196078 C99.2296739,4.45190185 94.927196,2.68908101 89.907563,1.61344538 C84.8879301,0.537809748 79.3305627,0 73.2352941,0 L13,0 Z M41.1456583,106.128852 L41.1456583,70.9915966 L71.8011204,70.9915966 C77.896389,70.9915966 82.7964334,72.3958776 86.5014006,75.2044818 C90.2063677,78.0130859 92.0588235,82.7039821 92.0588235,89.2773109 C92.0588235,92.6237329 91.4911355,95.3725383 90.3557423,97.5238095 C89.2203491,99.6750808 87.6965548,101.378145 85.7843137,102.633053 C83.8720726,103.887961 81.661077,104.784311 79.1512605,105.322129 C76.641444,105.859947 74.0121519,106.128852 71.2633053,106.128852 L41.1456583,106.128852 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/cancel-button": {
"title": "$:/core/images/cancel-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-cancel-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n\t<g fill-rule=\"evenodd\">\n\t <path d=\"M64,76.3137085 L47.0294734,93.2842351 C43.9038742,96.4098343 38.8399231,96.4084656 35.7157288,93.2842712 C32.5978915,90.166434 32.5915506,85.0947409 35.7157649,81.9705266 L52.6862915,65 L35.7157649,48.0294734 C32.5901657,44.9038742 32.5915344,39.8399231 35.7157288,36.7157288 C38.833566,33.5978915 43.9052591,33.5915506 47.0294734,36.7157649 L64,53.6862915 L80.9705266,36.7157649 C84.0961258,33.5901657 89.1600769,33.5915344 92.2842712,36.7157288 C95.4021085,39.833566 95.4084494,44.9052591 92.2842351,48.0294734 L75.3137085,65 L92.2842351,81.9705266 C95.4098343,85.0961258 95.4084656,90.1600769 92.2842712,93.2842712 C89.166434,96.4021085 84.0947409,96.4084494 80.9705266,93.2842351 L64,76.3137085 Z M64,129 C99.346224,129 128,100.346224 128,65 C128,29.653776 99.346224,1 64,1 C28.653776,1 1.13686838e-13,29.653776 1.13686838e-13,65 C1.13686838e-13,100.346224 28.653776,129 64,129 Z M64,113 C90.509668,113 112,91.509668 112,65 C112,38.490332 90.509668,17 64,17 C37.490332,17 16,38.490332 16,65 C16,91.509668 37.490332,113 64,113 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/chevron-down": {
"title": "$:/core/images/chevron-down",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-chevron-down tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n\t<g fill-rule=\"evenodd\" transform=\"translate(64.000000, 40.500000) rotate(-270.000000) translate(-64.000000, -40.500000) translate(-22.500000, -26.500000)\">\n <path d=\"M112.743107,112.12741 C111.310627,113.561013 109.331747,114.449239 107.145951,114.449239 L27.9777917,114.449239 C23.6126002,114.449239 20.0618714,110.904826 20.0618714,106.532572 C20.0618714,102.169214 23.6059497,98.6159054 27.9777917,98.6159054 L99.2285381,98.6159054 L99.2285381,27.365159 C99.2285381,22.9999675 102.77295,19.4492387 107.145205,19.4492387 C111.508562,19.4492387 115.061871,22.993317 115.061871,27.365159 L115.061871,106.533318 C115.061871,108.71579 114.175869,110.694669 112.743378,112.127981 Z\" transform=\"translate(67.561871, 66.949239) rotate(-45.000000) translate(-67.561871, -66.949239) \"></path>\n <path d=\"M151.35638,112.12741 C149.923899,113.561013 147.94502,114.449239 145.759224,114.449239 L66.5910645,114.449239 C62.225873,114.449239 58.6751442,110.904826 58.6751442,106.532572 C58.6751442,102.169214 62.2192225,98.6159054 66.5910645,98.6159054 L137.841811,98.6159054 L137.841811,27.365159 C137.841811,22.9999675 141.386223,19.4492387 145.758478,19.4492387 C150.121835,19.4492387 153.675144,22.993317 153.675144,27.365159 L153.675144,106.533318 C153.675144,108.71579 152.789142,110.694669 151.356651,112.127981 Z\" transform=\"translate(106.175144, 66.949239) rotate(-45.000000) translate(-106.175144, -66.949239) \"></path>\n\t</g>\n</svg>"
},
"$:/core/images/chevron-left": {
"title": "$:/core/images/chevron-left",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-chevron-left tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\" version=\"1.1\">\n <g fill-rule=\"evenodd\" transform=\"translate(92.500000, 64.000000) rotate(-180.000000) translate(-92.500000, -64.000000) translate(6.000000, -3.000000)\">\n <path d=\"M112.743107,112.12741 C111.310627,113.561013 109.331747,114.449239 107.145951,114.449239 L27.9777917,114.449239 C23.6126002,114.449239 20.0618714,110.904826 20.0618714,106.532572 C20.0618714,102.169214 23.6059497,98.6159054 27.9777917,98.6159054 L99.2285381,98.6159054 L99.2285381,27.365159 C99.2285381,22.9999675 102.77295,19.4492387 107.145205,19.4492387 C111.508562,19.4492387 115.061871,22.993317 115.061871,27.365159 L115.061871,106.533318 C115.061871,108.71579 114.175869,110.694669 112.743378,112.127981 Z\" transform=\"translate(67.561871, 66.949239) rotate(-45.000000) translate(-67.561871, -66.949239) \"></path>\n <path d=\"M151.35638,112.12741 C149.923899,113.561013 147.94502,114.449239 145.759224,114.449239 L66.5910645,114.449239 C62.225873,114.449239 58.6751442,110.904826 58.6751442,106.532572 C58.6751442,102.169214 62.2192225,98.6159054 66.5910645,98.6159054 L137.841811,98.6159054 L137.841811,27.365159 C137.841811,22.9999675 141.386223,19.4492387 145.758478,19.4492387 C150.121835,19.4492387 153.675144,22.993317 153.675144,27.365159 L153.675144,106.533318 C153.675144,108.71579 152.789142,110.694669 151.356651,112.127981 Z\" transform=\"translate(106.175144, 66.949239) rotate(-45.000000) translate(-106.175144, -66.949239) \"></path>\n </g>\n</svg>"
},
"$:/core/images/chevron-right": {
"title": "$:/core/images/chevron-right",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-chevron-right tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\" transform=\"translate(-48.000000, -3.000000)\">\n <path d=\"M112.743107,112.12741 C111.310627,113.561013 109.331747,114.449239 107.145951,114.449239 L27.9777917,114.449239 C23.6126002,114.449239 20.0618714,110.904826 20.0618714,106.532572 C20.0618714,102.169214 23.6059497,98.6159054 27.9777917,98.6159054 L99.2285381,98.6159054 L99.2285381,27.365159 C99.2285381,22.9999675 102.77295,19.4492387 107.145205,19.4492387 C111.508562,19.4492387 115.061871,22.993317 115.061871,27.365159 L115.061871,106.533318 C115.061871,108.71579 114.175869,110.694669 112.743378,112.127981 Z\" transform=\"translate(67.561871, 66.949239) rotate(-45.000000) translate(-67.561871, -66.949239) \"></path>\n <path d=\"M151.35638,112.12741 C149.923899,113.561013 147.94502,114.449239 145.759224,114.449239 L66.5910645,114.449239 C62.225873,114.449239 58.6751442,110.904826 58.6751442,106.532572 C58.6751442,102.169214 62.2192225,98.6159054 66.5910645,98.6159054 L137.841811,98.6159054 L137.841811,27.365159 C137.841811,22.9999675 141.386223,19.4492387 145.758478,19.4492387 C150.121835,19.4492387 153.675144,22.993317 153.675144,27.365159 L153.675144,106.533318 C153.675144,108.71579 152.789142,110.694669 151.356651,112.127981 Z\" transform=\"translate(106.175144, 66.949239) rotate(-45.000000) translate(-106.175144, -66.949239) \"></path>\n </g>\n</svg>"
},
"$:/core/images/chevron-up": {
"title": "$:/core/images/chevron-up",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-chevron-up tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n\t<g fill-rule=\"evenodd\" transform=\"translate(64.000000, 89.500000) rotate(-90.000000) translate(-64.000000, -89.500000) translate(-22.500000, 22.500000)\">\n <path d=\"M112.743107,112.12741 C111.310627,113.561013 109.331747,114.449239 107.145951,114.449239 L27.9777917,114.449239 C23.6126002,114.449239 20.0618714,110.904826 20.0618714,106.532572 C20.0618714,102.169214 23.6059497,98.6159054 27.9777917,98.6159054 L99.2285381,98.6159054 L99.2285381,27.365159 C99.2285381,22.9999675 102.77295,19.4492387 107.145205,19.4492387 C111.508562,19.4492387 115.061871,22.993317 115.061871,27.365159 L115.061871,106.533318 C115.061871,108.71579 114.175869,110.694669 112.743378,112.127981 Z\" transform=\"translate(67.561871, 66.949239) rotate(-45.000000) translate(-67.561871, -66.949239) \"></path>\n <path d=\"M151.35638,112.12741 C149.923899,113.561013 147.94502,114.449239 145.759224,114.449239 L66.5910645,114.449239 C62.225873,114.449239 58.6751442,110.904826 58.6751442,106.532572 C58.6751442,102.169214 62.2192225,98.6159054 66.5910645,98.6159054 L137.841811,98.6159054 L137.841811,27.365159 C137.841811,22.9999675 141.386223,19.4492387 145.758478,19.4492387 C150.121835,19.4492387 153.675144,22.993317 153.675144,27.365159 L153.675144,106.533318 C153.675144,108.71579 152.789142,110.694669 151.356651,112.127981 Z\" transform=\"translate(106.175144, 66.949239) rotate(-45.000000) translate(-106.175144, -66.949239) \"></path>\n\t</g>\n</svg>"
},
"$:/core/images/clone-button": {
"title": "$:/core/images/clone-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-clone-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M32.2650915,96 L32.2650915,120.002359 C32.2650915,124.419334 35.8432884,128 40.2627323,128 L120.002359,128 C124.419334,128 128,124.421803 128,120.002359 L128,40.2627323 C128,35.8457573 124.421803,32.2650915 120.002359,32.2650915 L96,32.2650915 L96,48 L108.858899,48 C110.519357,48 111.853018,49.3405131 111.853018,50.9941198 L111.853018,108.858899 C111.853018,110.519357 110.512505,111.853018 108.858899,111.853018 L50.9941198,111.853018 C49.333661,111.853018 48,110.512505 48,108.858899 L48,96 L32.2650915,96 Z\"></path>\n <path d=\"M40,56 L32.0070969,56 C27.5881712,56 24,52.418278 24,48 C24,43.5907123 27.5848994,40 32.0070969,40 L40,40 L40,32.0070969 C40,27.5881712 43.581722,24 48,24 C52.4092877,24 56,27.5848994 56,32.0070969 L56,40 L63.9929031,40 C68.4118288,40 72,43.581722 72,48 C72,52.4092877 68.4151006,56 63.9929031,56 L56,56 L56,63.9929031 C56,68.4118288 52.418278,72 48,72 C43.5907123,72 40,68.4151006 40,63.9929031 L40,56 Z M7.9992458,0 C3.58138434,0 0,3.5881049 0,7.9992458 L0,88.0007542 C0,92.4186157 3.5881049,96 7.9992458,96 L88.0007542,96 C92.4186157,96 96,92.4118951 96,88.0007542 L96,7.9992458 C96,3.58138434 92.4118951,0 88.0007542,0 L7.9992458,0 Z M19.0010118,16 C17.3435988,16 16,17.336731 16,19.0010118 L16,76.9989882 C16,78.6564012 17.336731,80 19.0010118,80 L76.9989882,80 C78.6564012,80 80,78.663269 80,76.9989882 L80,19.0010118 C80,17.3435988 78.663269,16 76.9989882,16 L19.0010118,16 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/close-all-button": {
"title": "$:/core/images/close-all-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-close-all-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\" transform=\"translate(-23.000000, -23.000000)\">\n <path d=\"M43,131 L22.9976794,131 C18.5827987,131 15,127.418278 15,123 C15,118.590712 18.5806831,115 22.9976794,115 L43,115 L43,94.9976794 C43,90.5827987 46.581722,87 51,87 C55.4092877,87 59,90.5806831 59,94.9976794 L59,115 L79.0023206,115 C83.4172013,115 87,118.581722 87,123 C87,127.409288 83.4193169,131 79.0023206,131 L59,131 L59,151.002321 C59,155.417201 55.418278,159 51,159 C46.5907123,159 43,155.419317 43,151.002321 L43,131 Z\" transform=\"translate(51.000000, 123.000000) rotate(-45.000000) translate(-51.000000, -123.000000) \"></path>\n <path d=\"M43,59 L22.9976794,59 C18.5827987,59 15,55.418278 15,51 C15,46.5907123 18.5806831,43 22.9976794,43 L43,43 L43,22.9976794 C43,18.5827987 46.581722,15 51,15 C55.4092877,15 59,18.5806831 59,22.9976794 L59,43 L79.0023206,43 C83.4172013,43 87,46.581722 87,51 C87,55.4092877 83.4193169,59 79.0023206,59 L59,59 L59,79.0023206 C59,83.4172013 55.418278,87 51,87 C46.5907123,87 43,83.4193169 43,79.0023206 L43,59 Z\" transform=\"translate(51.000000, 51.000000) rotate(-45.000000) translate(-51.000000, -51.000000) \"></path>\n <path d=\"M115,59 L94.9976794,59 C90.5827987,59 87,55.418278 87,51 C87,46.5907123 90.5806831,43 94.9976794,43 L115,43 L115,22.9976794 C115,18.5827987 118.581722,15 123,15 C127.409288,15 131,18.5806831 131,22.9976794 L131,43 L151.002321,43 C155.417201,43 159,46.581722 159,51 C159,55.4092877 155.419317,59 151.002321,59 L131,59 L131,79.0023206 C131,83.4172013 127.418278,87 123,87 C118.590712,87 115,83.4193169 115,79.0023206 L115,59 Z\" transform=\"translate(123.000000, 51.000000) rotate(-45.000000) translate(-123.000000, -51.000000) \"></path>\n <path d=\"M115,131 L94.9976794,131 C90.5827987,131 87,127.418278 87,123 C87,118.590712 90.5806831,115 94.9976794,115 L115,115 L115,94.9976794 C115,90.5827987 118.581722,87 123,87 
C127.409288,87 131,90.5806831 131,94.9976794 L131,115 L151.002321,115 C155.417201,115 159,118.581722 159,123 C159,127.409288 155.419317,131 151.002321,131 L131,131 L131,151.002321 C131,155.417201 127.418278,159 123,159 C118.590712,159 115,155.419317 115,151.002321 L115,131 Z\" transform=\"translate(123.000000, 123.000000) rotate(-45.000000) translate(-123.000000, -123.000000) \"></path>\n </g>\n</svg>"
},
"$:/core/images/close-button": {
"title": "$:/core/images/close-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-close-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <g fill-rule=\"evenodd\">\n <path d=\"M65.0864256,75.4091629 L14.9727349,125.522854 C11.8515951,128.643993 6.78104858,128.64922 3.65685425,125.525026 C0.539017023,122.407189 0.5336324,117.334539 3.65902635,114.209145 L53.7727171,64.0954544 L3.65902635,13.9817637 C0.537886594,10.8606239 0.532659916,5.79007744 3.65685425,2.6658831 C6.77469148,-0.451954124 11.8473409,-0.457338747 14.9727349,2.66805521 L65.0864256,52.7817459 L115.200116,2.66805521 C118.321256,-0.453084553 123.391803,-0.458311231 126.515997,2.6658831 C129.633834,5.78372033 129.639219,10.8563698 126.513825,13.9817637 L76.4001341,64.0954544 L126.513825,114.209145 C129.634965,117.330285 129.640191,122.400831 126.515997,125.525026 C123.39816,128.642863 118.32551,128.648248 115.200116,125.522854 L65.0864256,75.4091629 L65.0864256,75.4091629 Z\"></path>\n </g>\n</svg>\n"
},
"$:/core/images/close-others-button": {
"title": "$:/core/images/close-others-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-close-others-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M64,128 C99.346224,128 128,99.346224 128,64 C128,28.653776 99.346224,0 64,0 C28.653776,0 0,28.653776 0,64 C0,99.346224 28.653776,128 64,128 Z M64,112 C90.509668,112 112,90.509668 112,64 C112,37.490332 90.509668,16 64,16 C37.490332,16 16,37.490332 16,64 C16,90.509668 37.490332,112 64,112 Z M64,96 C81.673112,96 96,81.673112 96,64 C96,46.326888 81.673112,32 64,32 C46.326888,32 32,46.326888 32,64 C32,81.673112 46.326888,96 64,96 Z M64,80 C72.836556,80 80,72.836556 80,64 C80,55.163444 72.836556,48 64,48 C55.163444,48 48,55.163444 48,64 C48,72.836556 55.163444,80 64,80 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/delete-button": {
"title": "$:/core/images/delete-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-delete-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <g fill-rule=\"evenodd\" transform=\"translate(12.000000, 0.000000)\">\n <rect x=\"0\" y=\"11\" width=\"105\" height=\"16\" rx=\"8\"></rect>\n <rect x=\"28\" y=\"0\" width=\"48\" height=\"16\" rx=\"8\"></rect>\n <rect x=\"8\" y=\"16\" width=\"16\" height=\"112\" rx=\"8\"></rect>\n <rect x=\"8\" y=\"112\" width=\"88\" height=\"16\" rx=\"8\"></rect>\n <rect x=\"80\" y=\"16\" width=\"16\" height=\"112\" rx=\"8\"></rect>\n <rect x=\"56\" y=\"16\" width=\"16\" height=\"112\" rx=\"8\"></rect>\n <rect x=\"32\" y=\"16\" width=\"16\" height=\"112\" rx=\"8\"></rect>\n </g>\n</svg>"
},
"$:/core/images/done-button": {
"title": "$:/core/images/done-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-done-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <g fill-rule=\"evenodd\">\n <path d=\"M3.52445141,76.8322939 C2.07397484,75.3828178 1.17514421,73.3795385 1.17514421,71.1666288 L1.17514421,23.1836596 C1.17514421,18.7531992 4.75686621,15.1751442 9.17514421,15.1751442 C13.5844319,15.1751442 17.1751442,18.7606787 17.1751442,23.1836596 L17.1751442,63.1751442 L119.173716,63.1751442 C123.590457,63.1751442 127.175144,66.7568662 127.175144,71.1751442 C127.175144,75.5844319 123.592783,79.1751442 119.173716,79.1751442 L9.17657227,79.1751442 C6.96796403,79.1751442 4.9674142,78.279521 3.51911285,76.8315312 Z\" id=\"Rectangle-285\" transform=\"translate(64.175144, 47.175144) rotate(-45.000000) translate(-64.175144, -47.175144) \"></path>\n </g>\n</svg>"
},
"$:/core/images/down-arrow": {
"title": "$:/core/images/down-arrow",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-down-arrow tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <path d=\"M109.35638,81.3533152 C107.923899,82.7869182 105.94502,83.6751442 103.759224,83.6751442 L24.5910645,83.6751442 C20.225873,83.6751442 16.6751442,80.1307318 16.6751442,75.7584775 C16.6751442,71.3951199 20.2192225,67.8418109 24.5910645,67.8418109 L95.8418109,67.8418109 L95.8418109,-3.40893546 C95.8418109,-7.77412698 99.3862233,-11.3248558 103.758478,-11.3248558 C108.121835,-11.3248558 111.675144,-7.78077754 111.675144,-3.40893546 L111.675144,75.7592239 C111.675144,77.9416955 110.789142,79.9205745 109.356651,81.3538862 Z\" transform=\"translate(64.175144, 36.175144) rotate(45.000000) translate(-64.175144, -36.175144) \"></path>\n</svg>"
},
"$:/core/images/download-button": {
"title": "$:/core/images/download-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-download-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\"><g fill-rule=\"evenodd\"><path class=\"tc-image-download-button-ring\" d=\"M64,128 C99.346224,128 128,99.346224 128,64 C128,28.653776 99.346224,0 64,0 C28.653776,0 0,28.653776 0,64 C0,99.346224 28.653776,128 64,128 Z M64,112 C90.509668,112 112,90.509668 112,64 C112,37.490332 90.509668,16 64,16 C37.490332,16 16,37.490332 16,64 C16,90.509668 37.490332,112 64,112 Z\"/><path d=\"M34.3496823,66.4308767 L61.2415823,93.634668 C63.0411536,95.4551107 65.9588502,95.4551107 67.7584215,93.634668 L94.6503215,66.4308767 C96.4498928,64.610434 96.4498928,61.6588981 94.6503215,59.8384554 C93.7861334,58.9642445 92.6140473,58.4731195 91.3919019,58.4731195 L82.9324098,58.4731195 C80.3874318,58.4731195 78.3243078,56.3860674 78.3243078,53.8115729 L78.3243078,38.6615466 C78.3243078,36.0870521 76.2611837,34 73.7162058,34 L55.283798,34 C52.7388201,34 50.675696,36.0870521 50.675696,38.6615466 L50.675696,38.6615466 L50.675696,53.8115729 C50.675696,56.3860674 48.612572,58.4731195 46.0675941,58.4731195 L37.608102,58.4731195 C35.063124,58.4731195 33,60.5601716 33,63.134666 C33,64.3709859 33.4854943,65.5566658 34.3496823,66.4308767 L34.3496823,66.4308767 Z\"/></g></svg>"
},
"$:/core/images/edit-button": {
"title": "$:/core/images/edit-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-edit-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <g fill-rule=\"evenodd\">\n <path d=\"M116.870058,45.3431458 L108.870058,45.3431458 L108.870058,45.3431458 L108.870058,61.3431458 L116.870058,61.3431458 L116.870058,45.3431458 Z M124.870058,45.3431458 L127.649881,45.3431458 C132.066101,45.3431458 135.656854,48.9248678 135.656854,53.3431458 C135.656854,57.7524334 132.07201,61.3431458 127.649881,61.3431458 L124.870058,61.3431458 L124.870058,45.3431458 Z M100.870058,45.3431458 L15.6638275,45.3431458 C15.5064377,45.3431458 15.3501085,45.3476943 15.1949638,45.3566664 L15.1949638,45.3566664 C15.0628002,45.3477039 14.928279,45.3431458 14.7913977,45.3431458 C6.68160973,45.3431458 -8.34314575,53.3431458 -8.34314575,53.3431458 C-8.34314575,53.3431458 6.85614548,61.3431458 14.7913977,61.3431458 C14.9266533,61.3431458 15.0596543,61.3384973 15.190398,61.3293588 C15.3470529,61.3385075 15.5049057,61.3431458 15.6638275,61.3431458 L100.870058,61.3431458 L100.870058,45.3431458 L100.870058,45.3431458 Z\" transform=\"translate(63.656854, 53.343146) rotate(-45.000000) translate(-63.656854, -53.343146) \"></path>\n <path d=\"M35.1714596,124.189544 C41.9594858,123.613403 49.068777,121.917633 58.85987,118.842282 C60.6854386,118.268877 62.4306907,117.705515 65.1957709,116.802278 C81.1962861,111.575575 87.0734839,109.994907 93.9414474,109.655721 C102.29855,109.242993 107.795169,111.785371 111.520478,118.355045 C112.610163,120.276732 115.051363,120.951203 116.97305,119.861518 C118.894737,118.771832 119.569207,116.330633 118.479522,114.408946 C113.146151,105.003414 104.734907,101.112919 93.5468356,101.66546 C85.6716631,102.054388 79.4899908,103.716944 62.7116783,109.197722 C59.9734132,110.092199 58.2519873,110.64787 56.4625698,111.20992 C37.002649,117.322218 25.6914684,118.282267 16.8654804,112.957098 C14.9739614,111.815848 12.5154166,112.424061 11.3741667,114.31558 C10.2329168,116.207099 10.84113,118.665644 
12.7326489,119.806894 C19.0655164,123.627836 26.4866335,124.926678 35.1714596,124.189544 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/erase": {
"title": "$:/core/images/erase",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-erase tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M60.0870401,127.996166 L123.102318,64.980888 C129.636723,58.4464827 129.629513,47.8655877 123.098967,41.3350425 L99.4657866,17.7018617 C92.927448,11.1635231 82.3486358,11.1698163 75.8199411,17.698511 L4.89768189,88.6207702 C-1.63672343,95.1551755 -1.6295126,105.736071 4.90103262,112.266616 L20.6305829,127.996166 L60.0870401,127.996166 Z M25.1375576,120.682546 L10.812569,106.357558 C7.5455063,103.090495 7.54523836,97.793808 10.8048093,94.5342371 L46.2691086,59.0699377 L81.7308914,94.5317205 L55.5800654,120.682546 L25.1375576,120.682546 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/excise": {
"title": "$:/core/images/excise",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-excise tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M56,107.313709 L53.6568542,109.656854 C50.5326599,112.781049 45.4673401,112.781049 42.3431457,109.656854 C39.2189514,106.53266 39.2189514,101.46734 42.3431458,98.3431457 L58.3431458,82.3431457 C61.4673401,79.2189514 66.5326599,79.2189514 69.6568542,82.3431458 L85.6568542,98.3431458 C88.7810486,101.46734 88.7810486,106.53266 85.6568542,109.656854 C82.5326599,112.781049 77.4673401,112.781049 74.3431458,109.656854 L72,107.313708 L72,121.597798 C72,125.133636 68.418278,128 64,128 C59.581722,128 56,125.133636 56,121.597798 L56,107.313709 Z M0,40.0070969 C0,35.5848994 3.59071231,32 8,32 C12.418278,32 16,35.5881712 16,40.0070969 L16,71.9929031 C16,76.4151006 12.4092877,80 8,80 C3.581722,80 0,76.4118288 0,71.9929031 L0,40.0070969 Z M32,40.0070969 C32,35.5848994 35.5907123,32 40,32 C44.418278,32 48,35.5881712 48,40.0070969 L48,71.9929031 C48,76.4151006 44.4092877,80 40,80 C35.581722,80 32,76.4118288 32,71.9929031 L32,40.0070969 Z M80,40.0070969 C80,35.5848994 83.5907123,32 88,32 C92.418278,32 96,35.5881712 96,40.0070969 L96,71.9929031 C96,76.4151006 92.4092877,80 88,80 C83.581722,80 80,76.4118288 80,71.9929031 L80,40.0070969 Z M56,8.00709688 C56,3.58489938 59.5907123,0 64,0 C68.418278,0 72,3.58817117 72,8.00709688 L72,39.9929031 C72,44.4151006 68.4092877,48 64,48 C59.581722,48 56,44.4118288 56,39.9929031 L56,8.00709688 Z M112,40.0070969 C112,35.5848994 115.590712,32 120,32 C124.418278,32 128,35.5881712 128,40.0070969 L128,71.9929031 C128,76.4151006 124.409288,80 120,80 C115.581722,80 112,76.4118288 112,71.9929031 L112,40.0070969 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/export-button": {
"title": "$:/core/images/export-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-export-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M8.00348646,127.999999 C8.00464867,128 8.00581094,128 8.00697327,128 L119.993027,128 C122.205254,128 124.207939,127.101378 125.657096,125.651198 L125.656838,125.65759 C127.104563,124.210109 128,122.21009 128,119.999949 L128,56.0000511 C128,51.5817449 124.409288,48 120,48 C115.581722,48 112,51.5797863 112,56.0000511 L112,112 L16,112 L16,56.0000511 C16,51.5817449 12.4092877,48 8,48 C3.581722,48 7.10542736e-15,51.5797863 7.10542736e-15,56.0000511 L7.10542736e-15,119.999949 C7.10542736e-15,124.418255 3.59071231,128 8,128 C8.00116233,128 8.0023246,128 8.00348681,127.999999 Z M56.6235633,27.3113724 L47.6580188,36.2769169 C44.5333664,39.4015692 39.4634864,39.4061295 36.339292,36.2819351 C33.2214548,33.1640979 33.2173444,28.0901742 36.3443103,24.9632084 L58.9616908,2.34582788 C60.5248533,0.782665335 62.5748436,0.000361191261 64.624516,2.38225238e-14 L64.6193616,0.00151809229 C66.6695374,0.000796251595 68.7211167,0.781508799 70.2854358,2.34582788 L92.9028163,24.9632084 C96.0274686,28.0878607 96.0320289,33.1577408 92.9078345,36.2819351 C89.7899973,39.3997724 84.7160736,39.4038827 81.5891078,36.2769169 L72.6235633,27.3113724 L72.6235633,88.5669606 C72.6235633,92.9781015 69.0418413,96.5662064 64.6235633,96.5662064 C60.2142756,96.5662064 56.6235633,92.984822 56.6235633,88.5669606 L56.6235633,27.3113724 L56.6235633,27.3113724 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/file": {
"title": "$:/core/images/file",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-file tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"nonzero\">\n <path d=\"M111.96811,30.5 L112,30.5 L112,119.999079 C112,124.417866 108.419113,128 104.000754,128 L23.9992458,128 C19.5813843,128 16,124.417687 16,119.999079 L16,8.00092105 C16,3.58213437 19.5808867,0 23.9992458,0 L81,0 L81,0.0201838424 C83.1589869,-0.071534047 85.3482153,0.707077645 86.9982489,2.35711116 L109.625176,24.9840387 C111.151676,26.510538 111.932942,28.4998414 111.96811,30.5 L111.96811,30.5 Z M81,8 L24,8 L24,120 L104,120 L104,30.5 L89.0003461,30.5 C84.5818769,30.5 81,26.9216269 81,22.4996539 L81,8 Z\"></path>\n <rect x=\"32\" y=\"36\" width=\"64\" height=\"8\" rx=\"4\"></rect>\n <rect x=\"32\" y=\"52\" width=\"64\" height=\"8\" rx=\"4\"></rect>\n <rect x=\"32\" y=\"68\" width=\"64\" height=\"8\" rx=\"4\"></rect>\n <rect x=\"32\" y=\"84\" width=\"64\" height=\"8\" rx=\"4\"></rect>\n <rect x=\"32\" y=\"100\" width=\"64\" height=\"8\" rx=\"4\"></rect>\n <rect x=\"32\" y=\"20\" width=\"40\" height=\"8\" rx=\"4\"></rect>\n </g>\n</svg>"
},
"$:/core/images/fixed-height": {
"title": "$:/core/images/fixed-height",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-fixed-height tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M60,35.6568542 L50.8284271,44.8284271 C49.26633,46.3905243 46.73367,46.3905243 45.1715729,44.8284271 C43.6094757,43.26633 43.6094757,40.73367 45.1715729,39.1715729 L61.1715729,23.1715729 C62.73367,21.6094757 65.2663299,21.6094757 66.8284271,23.1715728 L82.8284278,39.1715728 C84.390525,40.7336699 84.390525,43.2663299 82.8284279,44.8284271 C81.2663308,46.3905243 78.7336709,46.3905243 77.1715737,44.8284272 L68,35.6568539 L68,93.3431461 L77.1715737,84.1715728 C78.7336709,82.6094757 81.2663308,82.6094757 82.8284279,84.1715729 C84.390525,85.7336701 84.390525,88.2663301 82.8284278,89.8284272 L66.8284271,105.828427 C65.2663299,107.390524 62.73367,107.390524 61.1715729,105.828427 L45.1715729,89.8284271 C43.6094757,88.26633 43.6094757,85.73367 45.1715729,84.1715729 C46.73367,82.6094757 49.26633,82.6094757 50.8284271,84.1715729 L60,93.3431458 L60,35.6568542 L60,35.6568542 Z M16,116 L112,116 C114.209139,116 116,114.209139 116,112 C116,109.790861 114.209139,108 112,108 L16,108 C13.790861,108 12,109.790861 12,112 C12,114.209139 13.790861,116 16,116 L16,116 Z M16,20 L112,20 C114.209139,20 116,18.209139 116,16 C116,13.790861 114.209139,12 112,12 L16,12 C13.790861,12 12,13.790861 12,16 C12,18.209139 13.790861,20 16,20 L16,20 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/fold-all-button": {
"title": "$:/core/images/fold-all-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-fold-all tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <rect x=\"0\" y=\"0\" width=\"128\" height=\"16\" rx=\"8\"></rect>\n <rect x=\"0\" y=\"64\" width=\"128\" height=\"16\" rx=\"8\"></rect>\n <path d=\"M64.0292774,58.6235628 C61.9791013,58.6242848 59.9275217,57.8435723 58.3632024,56.279253 L35.7458219,33.6618725 C32.6211696,30.5372202 32.6166093,25.4673401 35.7408036,22.3431458 C38.8586409,19.2253085 43.9325646,19.2211982 47.0595304,22.348164 L64.0250749,39.3137085 L80.9906194,22.348164 C84.1152717,19.2235117 89.1851518,19.2189514 92.3093461,22.3431458 C95.4271834,25.460983 95.4312937,30.5349067 92.3043279,33.6618725 L69.6869474,56.279253 C68.1237851,57.8424153 66.0737951,58.6247195 64.0241231,58.6250809 Z\" transform=\"translate(64.024316, 39.313708) scale(1, -1) translate(-64.024316, -39.313708) \"></path>\n <path d=\"M64.0292774,123.621227 C61.9791013,123.621949 59.9275217,122.841236 58.3632024,121.276917 L35.7458219,98.6595365 C32.6211696,95.5348842 32.6166093,90.4650041 35.7408036,87.3408098 C38.8586409,84.2229725 43.9325646,84.2188622 47.0595304,87.345828 L64.0250749,104.311373 L80.9906194,87.345828 C84.1152717,84.2211757 89.1851518,84.2166154 92.3093461,87.3408098 C95.4271834,90.458647 95.4312937,95.5325707 92.3043279,98.6595365 L69.6869474,121.276917 C68.1237851,122.840079 66.0737951,123.622383 64.0241231,123.622745 Z\" transform=\"translate(64.024316, 104.311372) scale(1, -1) translate(-64.024316, -104.311372) \"></path>\n </g>\n</svg>"
},
"$:/core/images/fold-button": {
"title": "$:/core/images/fold-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-fold tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <rect x=\"0\" y=\"0\" width=\"128\" height=\"16\" rx=\"8\"></rect>\n <path d=\"M64.0292774,63.6235628 C61.9791013,63.6242848 59.9275217,62.8435723 58.3632024,61.279253 L35.7458219,38.6618725 C32.6211696,35.5372202 32.6166093,30.4673401 35.7408036,27.3431458 C38.8586409,24.2253085 43.9325646,24.2211982 47.0595304,27.348164 L64.0250749,44.3137085 L80.9906194,27.348164 C84.1152717,24.2235117 89.1851518,24.2189514 92.3093461,27.3431458 C95.4271834,30.460983 95.4312937,35.5349067 92.3043279,38.6618725 L69.6869474,61.279253 C68.1237851,62.8424153 66.0737951,63.6247195 64.0241231,63.6250809 Z\" transform=\"translate(64.024316, 44.313708) scale(1, -1) translate(-64.024316, -44.313708) \"></path>\n <path d=\"M64.0049614,105.998482 C61.9547853,105.999204 59.9032057,105.218491 58.3388864,103.654172 L35.7215059,81.0367916 C32.5968535,77.9121393 32.5922933,72.8422592 35.7164876,69.7180649 C38.8343248,66.6002276 43.9082485,66.5961173 47.0352144,69.7230831 L64.0007589,86.6886276 L80.9663034,69.7230831 C84.0909557,66.5984308 89.1608358,66.5938705 92.2850301,69.7180649 C95.4028673,72.8359021 95.4069777,77.9098258 92.2800119,81.0367916 L69.6626314,103.654172 C68.099469,105.217334 66.0494791,105.999639 63.999807,106 Z\" transform=\"translate(64.000000, 86.688628) scale(1, -1) translate(-64.000000, -86.688628) \"></path>\n </g>\n</svg>"
},
"$:/core/images/fold-others-button": {
"title": "$:/core/images/fold-others-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-fold-others tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <rect x=\"0\" y=\"56.0314331\" width=\"128\" height=\"16\" rx=\"8\"></rect>\n <path d=\"M101.657101,104.948818 C100.207918,103.498614 98.2051847,102.599976 95.9929031,102.599976 L72,102.599976 L72,78.6070725 C72,76.3964271 71.1036108,74.3936927 69.6545293,72.9441002 L69.6571005,72.9488183 C68.2079177,71.4986143 66.2051847,70.5999756 63.9929031,70.5999756 L32.0070969,70.5999756 C27.5881712,70.5999756 24,74.1816976 24,78.5999756 C24,83.0092633 27.5848994,86.5999756 32.0070969,86.5999756 L56,86.5999756 L56,110.592879 C56,112.803524 56.8963895,114.806259 58.3454713,116.255852 L58.3429,116.251133 C59.7920828,117.701337 61.7948156,118.599976 64.0070969,118.599976 L88,118.599976 L88,142.592879 C88,147.011804 91.581722,150.599976 96,150.599976 C100.409288,150.599976 104,147.015076 104,142.592879 L104,110.607072 C104,108.396427 103.103611,106.393693 101.654529,104.9441 Z\" transform=\"translate(64.000000, 110.599976) rotate(-45.000000) translate(-64.000000, -110.599976) \"></path>\n <path d=\"M101.725643,11.7488671 C100.27646,10.2986632 98.2737272,9.40002441 96.0614456,9.40002441 L72.0685425,9.40002441 L72.0685425,-14.5928787 C72.0685425,-16.8035241 71.1721533,-18.8062584 69.7230718,-20.255851 L69.725643,-20.2511329 C68.2764602,-21.7013368 66.2737272,-22.5999756 64.0614456,-22.5999756 L32.0756394,-22.5999756 C27.6567137,-22.5999756 24.0685425,-19.0182536 24.0685425,-14.5999756 C24.0685425,-10.1906879 27.6534419,-6.59997559 32.0756394,-6.59997559 L56.0685425,-6.59997559 L56.0685425,17.3929275 C56.0685425,19.6035732 56.964932,21.6063078 58.4140138,23.0559004 L58.4114425,23.0511823 C59.8606253,24.5013859 61.8633581,25.4000244 64.0756394,25.4000244 L88.0685425,25.4000244 L88.0685425,49.3929275 C88.0685425,53.8118532 91.6502645,57.4000244 96.0685425,57.4000244 C100.47783,57.4000244 104.068542,53.815125 104.068542,49.3929275 
L104.068542,17.4071213 C104.068542,15.1964759 103.172153,13.1937416 101.723072,11.744149 Z\" transform=\"translate(64.068542, 17.400024) scale(1, -1) rotate(-45.000000) translate(-64.068542, -17.400024) \"></path>\n </g>\n</svg>"
},
"$:/core/images/folder": {
"title": "$:/core/images/folder",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-folder tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M55.6943257,128.000004 L7.99859666,128.000004 C3.5810937,128.000004 0,124.413822 0,119.996384 L0,48.0036243 C0,43.5833471 3.58387508,40.0000044 7.99859666,40.0000044 L16,40.0000044 L16,31.9999914 C16,27.5817181 19.5783731,24 24.0003461,24 L55.9996539,24 C60.4181231,24 64,27.5800761 64,31.9999914 L64,40.0000044 L104.001403,40.0000044 C108.418906,40.0000044 112,43.5861868 112,48.0036243 L112,59.8298353 L104,59.7475921 L104,51.9994189 C104,49.7887607 102.207895,48.0000044 99.9972215,48.0000044 L56,48.0000044 L56,36.0000255 C56,33.7898932 54.2072328,32 51.9957423,32 L28.0042577,32 C25.7890275,32 24,33.7908724 24,36.0000255 L24,48.0000044 L12.0027785,48.0000044 C9.78987688,48.0000044 8,49.7906032 8,51.9994189 L8,116.00059 C8,118.211248 9.79210499,120.000004 12.0027785,120.000004 L58.7630167,120.000004 L55.6943257,128.000004 L55.6943257,128.000004 Z\"></path>\n <path d=\"M23.8728955,55.5 L119.875702,55.5 C124.293205,55.5 126.87957,59.5532655 125.650111,64.5630007 L112.305967,118.936999 C111.077582,123.942356 106.497904,128 102.083183,128 L6.08037597,128 C1.66287302,128 -0.923492342,123.946735 0.305967145,118.936999 L13.650111,64.5630007 C14.878496,59.5576436 19.4581739,55.5 23.8728955,55.5 L23.8728955,55.5 L23.8728955,55.5 Z M25.6530124,64 L113.647455,64 C115.858129,64 117.151473,66.0930612 116.538306,68.6662267 L105.417772,115.333773 C104.803671,117.910859 102.515967,120 100.303066,120 L12.3086228,120 C10.0979492,120 8.8046054,117.906939 9.41777189,115.333773 L20.5383062,68.6662267 C21.1524069,66.0891409 23.4401107,64 25.6530124,64 L25.6530124,64 L25.6530124,64 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/full-screen-button": {
"title": "$:/core/images/full-screen-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-full-screen-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g>\n <g>\n <path d=\"M5.29777586e-31,8 C1.59060409e-15,3.581722 3.581722,0 8,0 L40,0 C44.418278,0 48,3.581722 48,8 C48,12.418278 44.418278,16 40,16 L16,16 L16,40 C16,44.418278 12.418278,48 8,48 C3.581722,48 -3.55271368e-15,44.418278 0,40 L3.55271368e-15,8 Z\"></path>\n </g>\n <g transform=\"translate(104.000000, 104.000000) rotate(-180.000000) translate(-104.000000, -104.000000) translate(80.000000, 80.000000)\">\n <path d=\"M5.29777586e-31,8 C1.59060409e-15,3.581722 3.581722,0 8,0 L40,0 C44.418278,0 48,3.581722 48,8 C48,12.418278 44.418278,16 40,16 L16,16 L16,40 C16,44.418278 12.418278,48 8,48 C3.581722,48 -3.55271368e-15,44.418278 0,40 L3.55271368e-15,8 Z\"></path>\n </g>\n <g transform=\"translate(24.000000, 104.000000) rotate(-90.000000) translate(-24.000000, -104.000000) translate(0.000000, 80.000000)\">\n <path d=\"M5.29777586e-31,8 C1.59060409e-15,3.581722 3.581722,0 8,0 L40,0 C44.418278,0 48,3.581722 48,8 C48,12.418278 44.418278,16 40,16 L16,16 L16,40 C16,44.418278 12.418278,48 8,48 C3.581722,48 -3.55271368e-15,44.418278 0,40 L3.55271368e-15,8 Z\"></path>\n </g>\n <g transform=\"translate(104.000000, 24.000000) rotate(90.000000) translate(-104.000000, -24.000000) translate(80.000000, 0.000000)\">\n <path d=\"M5.29777586e-31,8 C1.59060409e-15,3.581722 3.581722,0 8,0 L40,0 C44.418278,0 48,3.581722 48,8 C48,12.418278 44.418278,16 40,16 L16,16 L16,40 C16,44.418278 12.418278,48 8,48 C3.581722,48 -3.55271368e-15,44.418278 0,40 L3.55271368e-15,8 Z\"></path>\n </g>\n </g>\n</svg>"
},
"$:/core/images/github": {
"title": "$:/core/images/github",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-github tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M63.9383506,1.60695328 C28.6017227,1.60695328 -0.055756057,30.2970814 -0.055756057,65.6906208 C-0.055756057,94.003092 18.2804728,118.019715 43.7123154,126.493393 C46.9143781,127.083482 48.0812647,125.104717 48.0812647,123.405261 C48.0812647,121.886765 48.02626,117.85449 47.9948287,112.508284 C30.1929317,116.379268 26.4368926,103.916587 26.4368926,103.916587 C23.5255693,96.5129372 19.3294921,94.5420399 19.3294921,94.5420399 C13.5186324,90.5687739 19.7695302,90.6474524 19.7695302,90.6474524 C26.1933001,91.099854 29.5721638,97.2525155 29.5721638,97.2525155 C35.2808718,107.044059 44.5531024,104.215566 48.1991321,102.575118 C48.7806109,98.4366275 50.4346826,95.612068 52.2616263,94.0109598 C38.0507543,92.3941159 23.1091047,86.8944862 23.1091047,62.3389152 C23.1091047,55.3443933 25.6039634,49.6205298 29.6978889,45.1437211 C29.0378318,43.5229433 26.8415704,37.0044266 30.3265147,28.1845627 C30.3265147,28.1845627 35.6973364,26.4615028 47.9241083,34.7542205 C53.027764,33.330139 58.5046663,32.6220321 63.9462084,32.5944947 C69.3838216,32.6220321 74.856795,33.330139 79.9683085,34.7542205 C92.1872225,26.4615028 97.5501864,28.1845627 97.5501864,28.1845627 C101.042989,37.0044266 98.8467271,43.5229433 98.190599,45.1437211 C102.292382,49.6205298 104.767596,55.3443933 104.767596,62.3389152 C104.767596,86.9574291 89.8023734,92.3744463 75.5482834,93.9598188 C77.8427675,95.9385839 79.8897303,99.8489072 79.8897303,105.828476 C79.8897303,114.392635 79.8111521,121.304544 79.8111521,123.405261 C79.8111521,125.120453 80.966252,127.114954 84.2115327,126.489459 C109.623731,117.996111 127.944244,93.9952241 127.944244,65.6906208 C127.944244,30.2970814 99.2867652,1.60695328 63.9383506,1.60695328\"></path>\n </g>\n </svg>\n"
},
"$:/core/images/globe": {
"title": "$:/core/images/globe",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-globe tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M72.8111354,37.1275855 C72.8111354,37.9789875 72.8111354,38.8303894 72.8111354,39.6817913 C72.8111354,41.8784743 73.7885604,46.5631866 72.8111354,48.5143758 C71.3445471,51.4420595 68.1617327,52.0543531 66.4170946,54.3812641 C65.2352215,55.9575873 61.7987417,64.9821523 62.7262858,67.3005778 C66.6959269,77.2228204 74.26087,70.4881886 80.6887657,76.594328 C81.5527211,77.415037 83.5758191,78.8666631 83.985137,79.8899578 C87.2742852,88.1128283 76.4086873,94.8989524 87.7419325,106.189751 C88.9872885,107.430443 91.555495,102.372895 91.8205061,101.575869 C92.6726866,99.0129203 98.5458765,96.1267309 100.908882,94.5234439 C102.928056,93.1534443 105.782168,91.8557166 107.236936,89.7775886 C109.507391,86.5342557 108.717505,82.2640435 110.334606,79.0328716 C112.473794,74.7585014 114.163418,69.3979002 116.332726,65.0674086 C120.230862,57.2857361 121.054075,67.1596684 121.400359,67.5059523 C121.757734,67.8633269 122.411167,67.5059523 122.916571,67.5059523 C123.011132,67.5059523 124.364019,67.6048489 124.432783,67.5059523 C125.0832,66.5705216 123.390209,49.5852316 123.114531,48.2089091 C121.710578,41.1996597 116.17083,32.4278331 111.249523,27.7092761 C104.975994,21.6942076 104.160516,11.5121686 92.9912146,12.7547535 C92.7872931,12.7774397 87.906794,22.9027026 85.2136766,26.2672064 C81.486311,30.9237934 82.7434931,22.1144904 78.6876623,22.1144904 C78.6065806,22.1144904 77.5045497,22.0107615 77.4353971,22.1144904 C76.8488637,22.9942905 75.9952305,26.0101404 75.1288269,26.5311533 C74.8635477,26.6906793 73.4071369,26.2924966 73.2826811,26.5311533 C71.0401728,30.8313939 81.5394677,28.7427264 79.075427,34.482926 C76.7225098,39.9642538 72.747373,32.4860199 72.747373,43.0434079\"></path>\n <path d=\"M44.4668556,7.01044608 C54.151517,13.1403033 45.1489715,19.2084878 47.1611905,23.2253896 C48.8157833,26.5283781 51.4021933,28.6198851 
48.8753629,33.038878 C46.8123257,36.6467763 42.0052989,37.0050492 39.251679,39.7621111 C36.2115749,42.8060154 33.7884281,48.7028116 32.4624592,52.6732691 C30.8452419,57.5158356 47.0088721,59.5388126 44.5246867,63.6811917 C43.1386839,65.9923513 37.7785192,65.1466282 36.0880227,63.8791519 C34.9234453,63.0059918 32.4946425,63.3331166 31.6713597,62.0997342 C29.0575851,58.1839669 29.4107339,54.0758543 28.0457962,49.9707786 C27.1076833,47.1493864 21.732611,47.8501656 20.2022714,49.3776393 C19.6790362,49.8998948 19.8723378,51.1703278 19.8723378,51.8829111 C19.8723378,57.1682405 26.9914913,55.1986414 26.9914913,58.3421973 C26.9914913,72.9792302 30.9191897,64.8771867 38.1313873,69.6793121 C48.1678018,76.3618966 45.9763926,76.981595 53.0777543,84.0829567 C56.7511941,87.7563965 60.8192437,87.7689005 62.503478,93.3767069 C64.1046972,98.7081071 53.1759798,98.7157031 50.786754,100.825053 C49.663965,101.816317 47.9736094,104.970571 46.5680513,105.439676 C44.7757187,106.037867 43.334221,105.93607 41.6242359,107.219093 C39.1967302,109.040481 37.7241465,112.151588 37.6034934,112.030935 C35.4555278,109.88297 34.0848666,96.5511248 33.7147244,93.7726273 C33.1258872,89.3524817 28.1241923,88.2337027 26.7275443,84.7420826 C25.1572737,80.8164061 28.2518481,75.223612 25.599097,70.9819941 C19.0797019,60.557804 13.7775712,56.4811506 10.2493953,44.6896152 C9.3074899,41.5416683 13.5912267,38.1609942 15.1264825,35.8570308 C17.0029359,33.0410312 17.7876232,30.0028946 19.8723378,27.2224065 C22.146793,24.1888519 40.8551166,9.46076832 43.8574051,8.63490613 L44.4668556,7.01044608 Z\"></path>\n <path d=\"M64,126 C98.2416545,126 126,98.2416545 126,64 C126,29.7583455 98.2416545,2 64,2 C29.7583455,2 2,29.7583455 2,64 C2,98.2416545 29.7583455,126 64,126 Z M64,120 C94.927946,120 120,94.927946 120,64 C120,33.072054 94.927946,8 64,8 C33.072054,8 8,33.072054 8,64 C8,94.927946 33.072054,120 64,120 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/heading-1": {
"title": "$:/core/images/heading-1",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-heading-1 tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M14,30 L27.25,30 L27.25,60.104 L61.7,60.104 L61.7,30 L74.95,30 L74.95,105.684 L61.7,105.684 L61.7,71.552 L27.25,71.552 L27.25,105.684 L14,105.684 L14,30 Z M84.3350766,43.78 C86.8790893,43.78 89.3523979,43.5680021 91.7550766,43.144 C94.1577553,42.7199979 96.3307336,42.0133383 98.2740766,41.024 C100.21742,40.0346617 101.87807,38.7626744 103.256077,37.208 C104.634084,35.6533256 105.535075,33.7453446 105.959077,31.484 L115.817077,31.484 L115.817077,105.684 L102.567077,105.684 L102.567077,53.32 L84.3350766,53.32 L84.3350766,43.78 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/heading-2": {
"title": "$:/core/images/heading-2",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-heading-2 tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M6,30 L19.25,30 L19.25,60.104 L53.7,60.104 L53.7,30 L66.95,30 L66.95,105.684 L53.7,105.684 L53.7,71.552 L19.25,71.552 L19.25,105.684 L6,105.684 L6,30 Z M125.519077,105.684 L74.8510766,105.684 C74.9217436,99.5359693 76.4057288,94.1653563 79.3030766,89.572 C82.2004244,84.9786437 86.1577182,80.986017 91.1750766,77.594 C93.5777553,75.8273245 96.0863969,74.113675 98.7010766,72.453 C101.315756,70.792325 103.718399,69.0080095 105.909077,67.1 C108.099754,65.1919905 109.901736,63.1250111 111.315077,60.899 C112.728417,58.6729889 113.47041,56.1113478 113.541077,53.214 C113.541077,51.8713266 113.382078,50.4403409 113.064077,48.921 C112.746075,47.4016591 112.127748,45.9883399 111.209077,44.681 C110.290405,43.3736601 109.018418,42.2783377 107.393077,41.395 C105.767735,40.5116622 103.647756,40.07 101.033077,40.07 C98.6303979,40.07 96.6340846,40.5469952 95.0440766,41.501 C93.4540687,42.4550048 92.1820814,43.762325 91.2280766,45.423 C90.2740719,47.083675 89.5674123,49.0446554 89.1080766,51.306 C88.648741,53.5673446 88.3837436,56.0053203 88.3130766,58.62 L76.2290766,58.62 C76.2290766,54.5213128 76.7767378,50.7230175 77.8720766,47.225 C78.9674154,43.7269825 80.610399,40.7060127 82.8010766,38.162 C84.9917542,35.6179873 87.6593942,33.6216739 90.8040766,32.173 C93.948759,30.7243261 97.6057224,30 101.775077,30 C106.297766,30 110.078395,30.7419926 113.117077,32.226 C116.155758,33.7100074 118.611401,35.5826554 120.484077,37.844 C122.356753,40.1053446 123.681739,42.5609868 124.459077,45.211 C125.236414,47.8610133 125.625077,50.3873213 125.625077,52.79 C125.625077,55.7580148 125.165748,58.4433213 124.247077,60.846 C123.328405,63.2486787 122.091751,65.4569899 120.537077,67.471 C118.982402,69.4850101 117.215753,71.3399915 115.237077,73.036 C113.2584,74.7320085 111.209087,76.3219926 109.089077,77.806 C106.969066,79.2900074 
104.849087,80.7033266 102.729077,82.046 C100.609066,83.3886734 98.6480856,84.7313266 96.8460766,86.074 C95.0440676,87.4166734 93.47175,88.8123261 92.1290766,90.261 C90.7864032,91.7096739 89.8677458,93.2466585 89.3730766,94.872 L125.519077,94.872 L125.519077,105.684 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/heading-3": {
"title": "$:/core/images/heading-3",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-heading-3 tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M6,30 L19.25,30 L19.25,60.104 L53.7,60.104 L53.7,30 L66.95,30 L66.95,105.684 L53.7,105.684 L53.7,71.552 L19.25,71.552 L19.25,105.684 L6,105.684 L6,30 Z M94.8850766,62.224 C96.8637532,62.294667 98.8424001,62.1533351 100.821077,61.8 C102.799753,61.4466649 104.566402,60.8283378 106.121077,59.945 C107.675751,59.0616623 108.930072,57.8426744 109.884077,56.288 C110.838081,54.7333256 111.315077,52.8253446 111.315077,50.564 C111.315077,47.3839841 110.237421,44.8400095 108.082077,42.932 C105.926733,41.0239905 103.153094,40.07 99.7610766,40.07 C97.641066,40.07 95.8037511,40.4939958 94.2490766,41.342 C92.6944022,42.1900042 91.4047484,43.3383261 90.3800766,44.787 C89.3554048,46.2356739 88.5957458,47.860991 88.1010766,49.663 C87.6064075,51.465009 87.3944096,53.3199905 87.4650766,55.228 L75.3810766,55.228 C75.5224107,51.623982 76.1937373,48.2850154 77.3950766,45.211 C78.596416,42.1369846 80.2393995,39.4693446 82.3240766,37.208 C84.4087537,34.9466554 86.9350618,33.1800064 89.9030766,31.908 C92.8710915,30.6359936 96.2277246,30 99.9730766,30 C102.870424,30 105.714729,30.4239958 108.506077,31.272 C111.297424,32.1200042 113.806065,33.3566585 116.032077,34.982 C118.258088,36.6073415 120.042403,38.6743208 121.385077,41.183 C122.72775,43.6916792 123.399077,46.5713171 123.399077,49.822 C123.399077,53.5673521 122.551085,56.8356527 120.855077,59.627 C119.159068,62.4183473 116.509095,64.4499936 112.905077,65.722 L112.905077,65.934 C117.145098,66.7820042 120.448731,68.8843166 122.816077,72.241 C125.183422,75.5976835 126.367077,79.6786426 126.367077,84.484 C126.367077,88.017351 125.660417,91.1796527 124.247077,93.971 C122.833736,96.7623473 120.925755,99.129657 118.523077,101.073 C116.120398,103.016343 113.329093,104.517995 110.149077,105.578 C106.969061,106.638005 103.612428,107.168 100.079077,107.168 C95.7683884,107.168 
92.005426,106.549673 88.7900766,105.313 C85.5747272,104.076327 82.8894207,102.327345 80.7340766,100.066 C78.5787325,97.8046554 76.9357489,95.0840159 75.8050766,91.904 C74.6744043,88.7239841 74.0737436,85.1906861 74.0030766,81.304 L86.0870766,81.304 C85.9457426,85.8266893 87.0587315,89.5896517 89.4260766,92.593 C91.7934218,95.5963483 95.3443863,97.098 100.079077,97.098 C104.107097,97.098 107.481396,95.9496782 110.202077,93.653 C112.922757,91.3563219 114.283077,88.0880212 114.283077,83.848 C114.283077,80.9506522 113.717749,78.6540085 112.587077,76.958 C111.456404,75.2619915 109.972419,73.9723378 108.135077,73.089 C106.297734,72.2056623 104.230755,71.6580011 101.934077,71.446 C99.6373985,71.2339989 97.2877553,71.163333 94.8850766,71.234 L94.8850766,62.224 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/heading-4": {
"title": "$:/core/images/heading-4",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-heading-4 tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M8,30 L21.25,30 L21.25,60.104 L55.7,60.104 L55.7,30 L68.95,30 L68.95,105.684 L55.7,105.684 L55.7,71.552 L21.25,71.552 L21.25,105.684 L8,105.684 L8,30 Z M84.5890766,78.548 L107.061077,78.548 L107.061077,45.9 L106.849077,45.9 L84.5890766,78.548 Z M128.049077,88.088 L118.509077,88.088 L118.509077,105.684 L107.061077,105.684 L107.061077,88.088 L75.2610766,88.088 L75.2610766,76.11 L107.061077,31.484 L118.509077,31.484 L118.509077,78.548 L128.049077,78.548 L128.049077,88.088 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/heading-5": {
"title": "$:/core/images/heading-5",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-heading-5 tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M6,30 L19.25,30 L19.25,60.104 L53.7,60.104 L53.7,30 L66.95,30 L66.95,105.684 L53.7,105.684 L53.7,71.552 L19.25,71.552 L19.25,105.684 L6,105.684 L6,30 Z M83.7550766,31.484 L122.127077,31.484 L122.127077,42.296 L92.7650766,42.296 L88.9490766,61.164 L89.1610766,61.376 C90.7864181,59.5386575 92.8533974,58.1430048 95.3620766,57.189 C97.8707558,56.2349952 100.361731,55.758 102.835077,55.758 C106.509762,55.758 109.795729,56.3763272 112.693077,57.613 C115.590424,58.8496729 118.0284,60.5809889 120.007077,62.807 C121.985753,65.0330111 123.487405,67.6653181 124.512077,70.704 C125.536748,73.7426819 126.049077,77.028649 126.049077,80.562 C126.049077,83.5300148 125.572081,86.5863176 124.618077,89.731 C123.664072,92.8756824 122.144754,95.7376538 120.060077,98.317 C117.9754,100.896346 115.30776,103.016325 112.057077,104.677 C108.806394,106.337675 104.919766,107.168 100.397077,107.168 C96.7930586,107.168 93.454092,106.691005 90.3800766,105.737 C87.3060613,104.782995 84.6030883,103.35201 82.2710766,101.444 C79.939065,99.5359905 78.0840835,97.1863473 76.7060766,94.395 C75.3280697,91.6036527 74.5684107,88.3353521 74.4270766,84.59 L86.5110766,84.59 C86.8644117,88.6180201 88.2423979,91.7096559 90.6450766,93.865 C93.0477553,96.0203441 96.2277235,97.098 100.185077,97.098 C102.729089,97.098 104.884401,96.6740042 106.651077,95.826 C108.417752,94.9779958 109.848738,93.8120074 110.944077,92.328 C112.039415,90.8439926 112.816741,89.1126766 113.276077,87.134 C113.735412,85.1553234 113.965077,83.0353446 113.965077,80.774 C113.965077,78.7246564 113.682413,76.763676 113.117077,74.891 C112.55174,73.018324 111.703749,71.3753404 110.573077,69.962 C109.442404,68.5486596 107.976086,67.4180042 106.174077,66.57 C104.372068,65.7219958 102.269755,65.298 99.8670766,65.298 C97.3230639,65.298 94.9380878,65.7749952 92.7120766,66.729 C90.4860655,67.6830048 
88.8784149,69.4673203 87.8890766,72.082 L75.8050766,72.082 L83.7550766,31.484 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/heading-6": {
"title": "$:/core/images/heading-6",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-heading-6 tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M6,30 L19.25,30 L19.25,60.104 L53.7,60.104 L53.7,30 L66.95,30 L66.95,105.684 L53.7,105.684 L53.7,71.552 L19.25,71.552 L19.25,105.684 L6,105.684 L6,30 Z M112.587077,50.246 C112.304409,47.2073181 111.226753,44.751676 109.354077,42.879 C107.481401,41.006324 104.955093,40.07 101.775077,40.07 C99.584399,40.07 97.6940846,40.4763293 96.1040766,41.289 C94.5140687,42.1016707 93.1714154,43.1793266 92.0760766,44.522 C90.9807378,45.8646734 90.0974133,47.401658 89.4260766,49.133 C88.7547399,50.864342 88.2070787,52.6839905 87.7830766,54.592 C87.3590745,56.5000095 87.0587442,58.390324 86.8820766,60.263 C86.7054091,62.135676 86.5464107,63.8846585 86.4050766,65.51 L86.6170766,65.722 C88.2424181,62.7539852 90.4860623,60.5456739 93.3480766,59.097 C96.2100909,57.6483261 99.3017267,56.924 102.623077,56.924 C106.297762,56.924 109.583729,57.5599936 112.481077,58.832 C115.378424,60.1040064 117.834067,61.8529889 119.848077,64.079 C121.862087,66.3050111 123.399071,68.9373181 124.459077,71.976 C125.519082,75.0146819 126.049077,78.300649 126.049077,81.834 C126.049077,85.438018 125.466082,88.7769846 124.300077,91.851 C123.134071,94.9250154 121.455754,97.6103219 119.265077,99.907 C117.074399,102.203678 114.459758,103.987994 111.421077,105.26 C108.382395,106.532006 105.025762,107.168 101.351077,107.168 C95.9097161,107.168 91.4400941,106.16101 87.9420766,104.147 C84.4440591,102.13299 81.6880867,99.3770175 79.6740766,95.879 C77.6600666,92.3809825 76.2644138,88.2823568 75.4870766,83.583 C74.7097394,78.8836432 74.3210766,73.8133605 74.3210766,68.372 C74.3210766,63.9199777 74.7980719,59.4326893 75.7520766,54.91 C76.7060814,50.3873107 78.278399,46.2710186 80.4690766,42.561 C82.6597542,38.8509815 85.5393921,35.8300117 89.1080766,33.498 C92.6767611,31.1659883 97.0757171,30 102.305077,30 C105.273091,30 108.064397,30.4946617 110.679077,31.484 
C113.293756,32.4733383 115.608067,33.8513245 117.622077,35.618 C119.636087,37.3846755 121.27907,39.5046543 122.551077,41.978 C123.823083,44.4513457 124.529743,47.2073181 124.671077,50.246 L112.587077,50.246 Z M100.927077,97.098 C103.117754,97.098 105.025735,96.6563378 106.651077,95.773 C108.276418,94.8896623 109.636738,93.7413404 110.732077,92.328 C111.827415,90.9146596 112.640074,89.271676 113.170077,87.399 C113.700079,85.526324 113.965077,83.6006766 113.965077,81.622 C113.965077,79.6433234 113.700079,77.7353425 113.170077,75.898 C112.640074,74.0606575 111.827415,72.4530069 110.732077,71.075 C109.636738,69.6969931 108.276418,68.5840042 106.651077,67.736 C105.025735,66.8879958 103.117754,66.464 100.927077,66.464 C98.736399,66.464 96.8107516,66.8703293 95.1500766,67.683 C93.4894017,68.4956707 92.0937489,69.5909931 90.9630766,70.969 C89.8324043,72.3470069 88.9844128,73.9546575 88.4190766,75.792 C87.8537405,77.6293425 87.5710766,79.5726564 87.5710766,81.622 C87.5710766,83.6713436 87.8537405,85.6146575 88.4190766,87.452 C88.9844128,89.2893425 89.8324043,90.9323261 90.9630766,92.381 C92.0937489,93.8296739 93.4894017,94.9779958 95.1500766,95.826 C96.8107516,96.6740042 98.736399,97.098 100.927077,97.098 L100.927077,97.098 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/help": {
"title": "$:/core/images/help",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-help tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M36.0548906,111.44117 C30.8157418,115.837088 20.8865444,118.803477 9.5,118.803477 C7.86465619,118.803477 6.25937294,118.742289 4.69372699,118.624467 C12.612543,115.984876 18.7559465,110.02454 21.0611049,102.609942 C8.74739781,92.845129 1.04940554,78.9359851 1.04940554,63.5 C1.04940554,33.9527659 29.2554663,10 64.0494055,10 C98.8433448,10 127.049406,33.9527659 127.049406,63.5 C127.049406,93.0472341 98.8433448,117 64.0494055,117 C53.9936953,117 44.48824,114.999337 36.0548906,111.44117 L36.0548906,111.44117 Z M71.4042554,77.5980086 C71.406883,77.2865764 71.4095079,76.9382011 71.4119569,76.5610548 C71.4199751,75.3262169 71.4242825,74.0811293 71.422912,72.9158546 C71.4215244,71.736154 71.4143321,70.709635 71.4001396,69.8743525 C71.4078362,68.5173028 71.9951951,67.7870427 75.1273009,65.6385471 C75.2388969,65.5619968 76.2124091,64.8981068 76.5126553,64.6910879 C79.6062455,62.5580654 81.5345849,60.9050204 83.2750652,58.5038955 C85.6146327,55.2762841 86.8327108,51.426982 86.8327108,46.8554323 C86.8327108,33.5625756 76.972994,24.9029551 65.3778484,24.9029551 C54.2752771,24.9029551 42.8794554,34.5115163 41.3121702,47.1975534 C40.9043016,50.4989536 43.2499725,53.50591 46.5513726,53.9137786 C49.8527728,54.3216471 52.8597292,51.9759763 53.2675978,48.6745761 C54.0739246,42.1479456 60.2395837,36.9492759 65.3778484,36.9492759 C70.6427674,36.9492759 74.78639,40.5885487 74.78639,46.8554323 C74.78639,50.4892974 73.6853224,52.008304 69.6746221,54.7736715 C69.4052605,54.9593956 68.448509,55.6118556 68.3131127,55.7047319 C65.6309785,57.5445655 64.0858213,58.803255 62.6123358,60.6352315 C60.5044618,63.2559399 59.3714208,66.3518252 59.3547527,69.9487679 C59.3684999,70.8407274 59.3752803,71.8084521 59.3765995,72.9300232 C59.3779294,74.0607297 59.3737237,75.2764258 59.36589,76.482835 C59.3634936,76.8518793 59.3609272,77.1924914 
59.3583633,77.4963784 C59.3568319,77.6778944 59.3556368,77.8074256 59.3549845,77.8730928 C59.3219814,81.1994287 61.9917551,83.9227111 65.318091,83.9557142 C68.644427,83.9887173 71.3677093,81.3189435 71.4007124,77.9926076 C71.4014444,77.9187458 71.402672,77.7856841 71.4042554,77.5980086 Z M65.3778489,102.097045 C69.5359735,102.097045 72.9067994,98.7262189 72.9067994,94.5680944 C72.9067994,90.4099698 69.5359735,87.0391439 65.3778489,87.0391439 C61.2197243,87.0391439 57.8488984,90.4099698 57.8488984,94.5680944 C57.8488984,98.7262189 61.2197243,102.097045 65.3778489,102.097045 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/home-button": {
"title": "$:/core/images/home-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-home-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <g fill-rule=\"evenodd\">\n <path d=\"M112.9847,119.501583 C112.99485,119.336814 113,119.170705 113,119.003406 L113,67.56802 C116.137461,70.5156358 121.076014,70.4518569 124.133985,67.3938855 C127.25818,64.2696912 127.260618,59.2068102 124.131541,56.0777326 L70.3963143,2.34250601 C68.8331348,0.779326498 66.7828947,-0.000743167069 64.7337457,1.61675364e-05 C62.691312,-0.00409949529 60.6426632,0.777559815 59.077717,2.34250601 L33,28.420223 L33,28.420223 L33,8.00697327 C33,3.58484404 29.4092877,0 25,0 C20.581722,0 17,3.59075293 17,8.00697327 L17,44.420223 L5.3424904,56.0777326 C2.21694607,59.2032769 2.22220878,64.2760483 5.34004601,67.3938855 C8.46424034,70.5180798 13.5271213,70.5205187 16.6561989,67.3914411 L17,67.04764 L17,119.993027 C17,119.994189 17.0000002,119.995351 17.0000007,119.996514 C17.0000002,119.997675 17,119.998838 17,120 C17,124.418278 20.5881049,128 24.9992458,128 L105.000754,128 C109.418616,128 113,124.409288 113,120 C113,119.832611 112.99485,119.666422 112.9847,119.501583 Z M97,112 L97,51.5736087 L97,51.5736087 L64.7370156,19.3106244 L33,51.04764 L33,112 L97,112 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/import-button": {
"title": "$:/core/images/import-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-import-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M105.449437,94.2138951 C105.449437,94.2138951 110.049457,94.1897106 110.049457,99.4026111 C110.049457,104.615512 105.163246,104.615511 105.163246,104.615511 L45.0075072,105.157833 C45.0075072,105.157833 0.367531803,106.289842 0.367532368,66.6449212 C0.367532934,27.0000003 45.0428249,27.0000003 45.0428249,27.0000003 L105.532495,27.0000003 C105.532495,27.0000003 138.996741,25.6734987 138.996741,55.1771866 C138.996741,84.6808745 105.727102,82.8457535 105.727102,82.8457535 L56.1735087,82.8457535 C56.1735087,82.8457535 22.6899229,85.1500223 22.6899229,66.0913753 C22.6899229,47.0327282 56.1735087,49.3383013 56.1735087,49.3383013 L105.727102,49.3383013 C105.727102,49.3383013 111.245209,49.3383024 111.245209,54.8231115 C111.245209,60.3079206 105.727102,60.5074524 105.727102,60.5074524 L56.1735087,60.5074524 C56.1735087,60.5074524 37.48913,60.5074528 37.48913,66.6449195 C37.48913,72.7823862 56.1735087,71.6766023 56.1735087,71.6766023 L105.727102,71.6766029 C105.727102,71.6766029 127.835546,73.1411469 127.835546,55.1771866 C127.835546,35.5304025 105.727102,38.3035317 105.727102,38.3035317 L45.0428249,38.3035317 C45.0428249,38.3035317 11.5287276,38.3035313 11.5287276,66.6449208 C11.5287276,94.9863103 45.0428244,93.9579678 45.0428244,93.9579678 L105.449437,94.2138951 Z\" transform=\"translate(69.367532, 66.000000) rotate(-45.000000) translate(-69.367532, -66.000000) \"></path>\n </g>\n</svg>"
},
"$:/core/images/info-button": {
"title": "$:/core/images/info-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-info-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <g fill-rule=\"evenodd\">\n <g transform=\"translate(0.049406, 0.000000)\">\n <path d=\"M64,128 C99.346224,128 128,99.346224 128,64 C128,28.653776 99.346224,0 64,0 C28.653776,0 0,28.653776 0,64 C0,99.346224 28.653776,128 64,128 Z M64,112 C90.509668,112 112,90.509668 112,64 C112,37.490332 90.509668,16 64,16 C37.490332,16 16,37.490332 16,64 C16,90.509668 37.490332,112 64,112 Z\"></path>\n <circle cx=\"64\" cy=\"32\" r=\"8\"></circle>\n <rect x=\"56\" y=\"48\" width=\"16\" height=\"56\" rx=\"8\"></rect>\n </g>\n </g>\n</svg>"
},
"$:/core/images/italic": {
"title": "$:/core/images/italic",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-italic tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <polygon points=\"66.7114846 0 89.1204482 0 62.4089636 128 40 128\"></polygon>\n </g>\n</svg>"
},
"$:/core/images/left-arrow": {
"created": "20150315234410875",
"modified": "20150315235324760",
"tags": "$:/tags/Image",
"title": "$:/core/images/left-arrow",
"text": "<svg class=\"tc-image-left-arrow tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <path transform=\"rotate(135, 63.8945, 64.1752)\" d=\"m109.07576,109.35336c-1.43248,1.43361 -3.41136,2.32182 -5.59717,2.32182l-79.16816,0c-4.36519,0 -7.91592,-3.5444 -7.91592,-7.91666c0,-4.36337 3.54408,-7.91667 7.91592,-7.91667l71.25075,0l0,-71.25075c0,-4.3652 3.54442,-7.91592 7.91667,-7.91592c4.36336,0 7.91667,3.54408 7.91667,7.91592l0,79.16815c0,2.1825 -0.88602,4.16136 -2.3185,5.59467l-0.00027,-0.00056z\"/>\n</svg>\n"
},
"$:/core/images/line-width": {
"title": "$:/core/images/line-width",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-line-width tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M128,-97 L112.992786,-97 C112.452362,-97 112,-96.5522847 112,-96 C112,-95.4438648 112.444486,-95 112.992786,-95 L128,-95 L128,-97 Z M128,-78.6794919 L111.216185,-88.3696322 C110.748163,-88.6398444 110.132549,-88.4782926 109.856406,-88 C109.578339,-87.5183728 109.741342,-86.9117318 110.216185,-86.6375814 L128,-76.3700908 L128,-78.6794919 Z M78.6794919,-128 L88.3696322,-111.216185 C88.6437826,-110.741342 88.4816272,-110.134474 88,-109.856406 C87.5217074,-109.580264 86.9077936,-109.748163 86.6375814,-110.216185 L76.3700908,-128 L78.6794919,-128 Z M97,-128 L97,-112.992786 C97,-112.444486 96.5561352,-112 96,-112 C95.4477153,-112 95,-112.452362 95,-112.992786 L95,-128 L97,-128 Z M115.629909,-128 L105.362419,-110.216185 C105.088268,-109.741342 104.481627,-109.578339 104,-109.856406 C103.521707,-110.132549 103.360156,-110.748163 103.630368,-111.216185 L113.320508,-128 L115.629909,-128 Z M128,-113.320508 L111.216185,-103.630368 C110.741342,-103.356217 110.134474,-103.518373 109.856406,-104 C109.580264,-104.478293 109.748163,-105.092206 110.216185,-105.362419 L128,-115.629909 L128,-113.320508 Z M48,-96 C48,-96.5522847 48.4523621,-97 48.9927864,-97 L79.0072136,-97 C79.5555144,-97 80,-96.5561352 80,-96 C80,-95.4477153 79.5476379,-95 79.0072136,-95 L48.9927864,-95 C48.4444856,-95 48,-95.4438648 48,-96 Z M54.4307806,-120 C54.706923,-120.478293 55.3225377,-120.639844 55.7905589,-120.369632 L81.7838153,-105.362419 C82.2586577,-105.088268 82.4216611,-104.481627 82.1435935,-104 C81.8674512,-103.521707 81.2518365,-103.360156 80.7838153,-103.630368 L54.7905589,-118.637581 C54.3157165,-118.911732 54.152713,-119.518373 54.4307806,-120 Z M104,-82.1435935 C104.478293,-82.4197359 105.092206,-82.2518365 105.362419,-81.7838153 L120.369632,-55.7905589 C120.643783,-55.3157165 120.481627,-54.7088482 120,-54.4307806 C119.521707,-54.1546382 
118.907794,-54.3225377 118.637581,-54.7905589 L103.630368,-80.7838153 C103.356217,-81.2586577 103.518373,-81.865526 104,-82.1435935 Z M96,-80 C96.5522847,-80 97,-79.5476379 97,-79.0072136 L97,-48.9927864 C97,-48.4444856 96.5561352,-48 96,-48 C95.4477153,-48 95,-48.4523621 95,-48.9927864 L95,-79.0072136 C95,-79.5555144 95.4438648,-80 96,-80 Z M88,-82.1435935 C88.4782926,-81.8674512 88.6398444,-81.2518365 88.3696322,-80.7838153 L73.3624186,-54.7905589 C73.0882682,-54.3157165 72.4816272,-54.152713 72,-54.4307806 C71.5217074,-54.706923 71.3601556,-55.3225377 71.6303678,-55.7905589 L86.6375814,-81.7838153 C86.9117318,-82.2586577 87.5183728,-82.4216611 88,-82.1435935 Z M82.1435935,-88 C82.4197359,-87.5217074 82.2518365,-86.9077936 81.7838153,-86.6375814 L55.7905589,-71.6303678 C55.3157165,-71.3562174 54.7088482,-71.5183728 54.4307806,-72 C54.1546382,-72.4782926 54.3225377,-73.0922064 54.7905589,-73.3624186 L80.7838153,-88.3696322 C81.2586577,-88.6437826 81.865526,-88.4816272 82.1435935,-88 Z M1.30626177e-08,-41.9868843 L15.0170091,-57.9923909 L20.7983821,-52.9749272 L44.7207091,-81.2095939 L73.4260467,-42.1002685 L85.984793,-56.6159488 L104.48741,-34.0310661 L127.969109,-47.4978019 L127.969109,7.99473128e-07 L1.30626177e-08,7.99473128e-07 L1.30626177e-08,-41.9868843 Z M96,-84 C102.627417,-84 108,-89.372583 108,-96 C108,-102.627417 102.627417,-108 96,-108 C89.372583,-108 84,-102.627417 84,-96 C84,-89.372583 89.372583,-84 96,-84 Z\"></path>\n <path d=\"M16,18 L112,18 C113.104569,18 114,17.1045695 114,16 C114,14.8954305 113.104569,14 112,14 L16,14 C14.8954305,14 14,14.8954305 14,16 C14,17.1045695 14.8954305,18 16,18 L16,18 Z M16,35 L112,35 C114.209139,35 116,33.209139 116,31 C116,28.790861 114.209139,27 112,27 L16,27 C13.790861,27 12,28.790861 12,31 C12,33.209139 13.790861,35 16,35 L16,35 Z M16,56 L112,56 C115.313708,56 118,53.3137085 118,50 C118,46.6862915 115.313708,44 112,44 L16,44 C12.6862915,44 10,46.6862915 10,50 C10,53.3137085 12.6862915,56 16,56 L16,56 Z M16,85 
L112,85 C117.522847,85 122,80.5228475 122,75 C122,69.4771525 117.522847,65 112,65 L16,65 C10.4771525,65 6,69.4771525 6,75 C6,80.5228475 10.4771525,85 16,85 L16,85 Z M16,128 L112,128 C120.836556,128 128,120.836556 128,112 C128,103.163444 120.836556,96 112,96 L16,96 C7.163444,96 0,103.163444 0,112 C0,120.836556 7.163444,128 16,128 L16,128 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/link": {
"title": "$:/core/images/link",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-link tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M128.719999,57.568543 C130.219553,53.8628171 131.045202,49.8121445 131.045202,45.5685425 C131.045202,27.8915447 116.718329,13.5685425 99.0452364,13.5685425 L67.0451674,13.5685425 C49.3655063,13.5685425 35.0452019,27.8954305 35.0452019,45.5685425 C35.0452019,63.2455403 49.3720745,77.5685425 67.0451674,77.5685425 L99.0452364,77.5685425 C100.406772,77.5685425 101.748384,77.4835732 103.065066,77.3186499 C96.4792444,73.7895096 91.1190212,68.272192 87.7873041,61.5685425 L67.0506214,61.5685425 C58.2110723,61.5685425 51.0452019,54.4070414 51.0452019,45.5685425 C51.0452019,36.7319865 58.2005234,29.5685425 67.0506214,29.5685425 L99.0397824,29.5685425 C107.879331,29.5685425 115.045202,36.7300436 115.045202,45.5685425 C115.045202,48.9465282 113.99957,52.0800164 112.21335,54.6623005 C114.314383,56.4735917 117.050039,57.5685425 120.041423,57.5685425 L128.720003,57.5685425 Z\" transform=\"translate(83.045202, 45.568542) rotate(-225.000000) translate(-83.045202, -45.568542)\"></path>\n <path d=\"M-0.106255113,71.0452019 C-1.60580855,74.7509276 -2.43145751,78.8016001 -2.43145751,83.0452019 C-2.43145751,100.7222 11.8954151,115.045202 29.568508,115.045202 L61.568577,115.045202 C79.2482381,115.045202 93.5685425,100.718314 93.5685425,83.0452019 C93.5685425,65.3682041 79.2416699,51.0452019 61.568577,51.0452019 L29.568508,51.0452019 C28.206973,51.0452019 26.8653616,51.1301711 25.5486799,51.2950943 C32.1345,54.8242347 37.4947231,60.3415524 40.8264403,67.0452019 L61.563123,67.0452019 C70.4026721,67.0452019 77.5685425,74.206703 77.5685425,83.0452019 C77.5685425,91.8817579 70.413221,99.0452019 61.563123,99.0452019 L29.573962,99.0452019 C20.7344129,99.0452019 13.5685425,91.8837008 13.5685425,83.0452019 C13.5685425,79.6672162 14.6141741,76.533728 16.4003949,73.9514439 C14.2993609,72.1401527 11.5637054,71.0452019 8.5723215,71.0452019 
L-0.106255113,71.0452019 Z\" transform=\"translate(45.568542, 83.045202) rotate(-225.000000) translate(-45.568542, -83.045202)\"></path>\n </g>\n</svg>"
},
"$:/core/images/list-bullet": {
"title": "$:/core/images/list-bullet",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-list-bullet tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M11.6363636,40.2727273 C18.0629498,40.2727273 23.2727273,35.0629498 23.2727273,28.6363636 C23.2727273,22.2097775 18.0629498,17 11.6363636,17 C5.20977746,17 0,22.2097775 0,28.6363636 C0,35.0629498 5.20977746,40.2727273 11.6363636,40.2727273 Z M11.6363636,75.1818182 C18.0629498,75.1818182 23.2727273,69.9720407 23.2727273,63.5454545 C23.2727273,57.1188684 18.0629498,51.9090909 11.6363636,51.9090909 C5.20977746,51.9090909 0,57.1188684 0,63.5454545 C0,69.9720407 5.20977746,75.1818182 11.6363636,75.1818182 Z M11.6363636,110.090909 C18.0629498,110.090909 23.2727273,104.881132 23.2727273,98.4545455 C23.2727273,92.0279593 18.0629498,86.8181818 11.6363636,86.8181818 C5.20977746,86.8181818 0,92.0279593 0,98.4545455 C0,104.881132 5.20977746,110.090909 11.6363636,110.090909 Z M34.9090909,22.8181818 L128,22.8181818 L128,34.4545455 L34.9090909,34.4545455 L34.9090909,22.8181818 Z M34.9090909,57.7272727 L128,57.7272727 L128,69.3636364 L34.9090909,69.3636364 L34.9090909,57.7272727 Z M34.9090909,92.6363636 L128,92.6363636 L128,104.272727 L34.9090909,104.272727 L34.9090909,92.6363636 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/list-number": {
"title": "$:/core/images/list-number",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-list-number tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M33.8390805,22.3563218 L128,22.3563218 L128,34.1264368 L33.8390805,34.1264368 L33.8390805,22.3563218 Z M33.8390805,57.6666667 L128,57.6666667 L128,69.4367816 L33.8390805,69.4367816 L33.8390805,57.6666667 Z M33.8390805,92.9770115 L128,92.9770115 L128,104.747126 L33.8390805,104.747126 L33.8390805,92.9770115 Z M0.379509711,42.6307008 L0.379509711,40.4082314 L1.37821948,40.4082314 C2.20382368,40.4082314 2.82301754,40.268077 3.23581964,39.9877642 C3.64862174,39.7074513 3.85501969,39.0400498 3.85501969,37.9855395 L3.85501969,22.7686318 C3.85501969,21.3270228 3.66193774,20.4327047 3.27576803,20.0856507 C2.88959832,19.7385967 1.79768657,19.5650723 0,19.5650723 L0,17.4226919 C3.50215975,17.2758613 6.25191314,16.4683055 8.24934266,15 L10.3666074,15 L10.3666074,37.865406 C10.3666074,38.786434 10.5164123,39.4404875 10.8160268,39.8275862 C11.1156412,40.2146849 11.764796,40.4082314 12.7635108,40.4082314 L13.7622206,40.4082314 L13.7622206,42.6307008 L0.379509711,42.6307008 Z M0.0798967812,77.9873934 L0.0798967812,76.0852799 C7.27064304,69.5312983 10.8659622,63.5046623 10.8659622,58.005191 C10.8659622,56.4434479 10.5397203,55.195407 9.88722667,54.2610308 C9.23473303,53.3266546 8.36253522,52.8594735 7.27060709,52.8594735 C6.3784219,52.8594735 5.61608107,53.1764892 4.98356173,53.8105302 C4.35104238,54.4445712 4.03478745,55.1753759 4.03478745,56.0029663 C4.03478745,56.9773871 4.28113339,57.8316611 4.77383268,58.5658139 C4.88036225,58.7259926 4.93362624,58.8461249 4.93362624,58.9262143 C4.93362624,59.0730449 4.77383427,59.2065252 4.45424555,59.3266593 C4.2411864,59.4067486 3.70188852,59.6336652 2.83633573,60.0074156 C1.99741533,60.3811661 1.47809145,60.5680386 1.2783485,60.5680386 C1.03865696,60.5680386 0.765679018,60.1976307 0.459406492,59.4568039 C0.153133966,58.715977 0,57.9184322 0,57.0641453 C0,55.1153036 
0.848894811,53.5202138 2.5467099,52.2788283 C4.24452499,51.0374428 6.34512352,50.4167594 8.84856852,50.4167594 C11.3120649,50.4167594 13.3793735,51.0874979 15.0505562,52.4289952 C16.7217389,53.7704924 17.5573177,55.5224215 17.5573177,57.684835 C17.5573177,58.9662652 17.2743527,60.2076321 16.7084144,61.4089729 C16.142476,62.6103138 14.7875733,64.4623531 12.6436656,66.9651465 C10.4997579,69.4679398 8.40914641,71.7804862 6.3717683,73.902855 L17.8169822,73.902855 L16.7982982,79.6292176 L14.6810335,79.6292176 C14.7609307,79.3489048 14.8008787,79.0952922 14.8008787,78.8683723 C14.8008787,78.4812736 14.7010087,78.237672 14.5012658,78.1375603 C14.3015228,78.0374485 13.9020429,77.9873934 13.3028141,77.9873934 L0.0798967812,77.9873934 Z M12.2042333,97.1935484 C13.9486551,97.2335931 15.4400468,97.8309175 16.6784531,98.9855395 C17.9168594,100.140162 18.5360532,101.75861 18.5360532,103.840934 C18.5360532,106.830938 17.4041935,109.233584 15.14044,111.048943 C12.8766866,112.864303 10.1402492,113.771969 6.93104577,113.771969 C4.92030005,113.771969 3.26245842,113.388213 1.95747114,112.62069 C0.652483855,111.853166 0,110.848727 0,109.607341 C0,108.833144 0.26964894,108.209124 0.808954909,107.735261 C1.34826088,107.261399 1.93749375,107.024472 2.57667119,107.024472 C3.21584864,107.024472 3.73850152,107.224692 4.14464552,107.625139 C4.55078953,108.025586 4.92696644,108.67964 5.27318756,109.587319 C5.73925445,110.855401 6.51158227,111.489433 7.59019421,111.489433 C8.85523291,111.489433 9.87723568,111.012241 10.6562332,110.057842 C11.4352307,109.103444 11.8247236,107.371536 11.8247236,104.862069 C11.8247236,103.153495 11.7048796,101.838714 11.4651881,100.917686 C11.2254966,99.9966584 10.6728827,99.5361513 9.80732989,99.5361513 C9.22141723,99.5361513 8.62219737,99.843156 8.00965231,100.457175 C7.51695303,100.951059 7.07752513,101.197998 6.69135542,101.197998 C6.3584505,101.197998 6.08880156,101.051169 5.88240051,100.757508 C5.67599946,100.463847 5.57280049,100.183539 5.57280049,99.916574 
C5.57280049,99.5962164 5.67599946,99.3225818 5.88240051,99.0956618 C6.08880156,98.8687419 6.57150646,98.5016711 7.33052967,97.9944383 C10.2068282,96.0722929 11.6449559,93.9766521 11.6449559,91.7074527 C11.6449559,90.5194601 11.3386879,89.615131 10.7261429,88.9944383 C10.1135978,88.3737455 9.37455999,88.0634038 8.5090072,88.0634038 C7.71003539,88.0634038 6.98431355,88.3270274 6.33181991,88.8542825 C5.67932627,89.3815377 5.35308434,90.0122321 5.35308434,90.7463849 C5.35308434,91.3871 5.60608828,91.9810874 6.11210376,92.5283648 C6.28521432,92.7285883 6.3717683,92.8954387 6.3717683,93.028921 C6.3717683,93.1490551 5.80250943,93.4560598 4.6639746,93.9499444 C3.52543978,94.4438289 2.80970494,94.6907675 2.51674861,94.6907675 C2.10394651,94.6907675 1.76771758,94.3570667 1.50805174,93.6896552 C1.24838591,93.0222436 1.11855494,92.4082342 1.11855494,91.8476085 C1.11855494,90.0989901 2.04734573,88.6240327 3.90495518,87.4226919 C5.76256463,86.2213511 7.86982116,85.6206897 10.226788,85.6206897 C12.2907985,85.6206897 14.0784711,86.0678487 15.5898594,86.9621802 C17.1012478,87.8565117 17.8569306,89.0778566 17.8569306,90.6262514 C17.8569306,91.987771 17.2876717,93.2491599 16.1491369,94.4104561 C15.0106021,95.5717522 13.6956474,96.4994404 12.2042333,97.1935484 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/locked-padlock": {
"title": "$:/core/images/locked-padlock",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-locked-padlock tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M96.4723753,64 L105,64 L105,96.0097716 C105,113.673909 90.6736461,128 73.001193,128 L55.998807,128 C38.3179793,128 24,113.677487 24,96.0097716 L24,64 L32.0000269,64 C32.0028554,48.2766389 32.3030338,16.2688026 64.1594984,16.2688041 C95.9543927,16.2688056 96.4648869,48.325931 96.4723753,64 Z M80.5749059,64 L48.4413579,64 C48.4426205,47.71306 48.5829272,31.9999996 64.1595001,31.9999996 C79.8437473,31.9999996 81.1369461,48.1359182 80.5749059,64 Z M67.7315279,92.3641717 C70.8232551,91.0923621 73,88.0503841 73,84.5 C73,79.8055796 69.1944204,76 64.5,76 C59.8055796,76 56,79.8055796 56,84.5 C56,87.947435 58.0523387,90.9155206 61.0018621,92.2491029 L55.9067479,115.020857 L72.8008958,115.020857 L67.7315279,92.3641717 L67.7315279,92.3641717 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/mail": {
"title": "$:/core/images/mail",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-mail tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M122.826782,104.894066 C121.945525,105.22777 120.990324,105.41043 119.993027,105.41043 L8.00697327,105.41043 C7.19458381,105.41043 6.41045219,105.289614 5.67161357,105.064967 L5.67161357,105.064967 L39.8346483,70.9019325 L60.6765759,91.7438601 C61.6118278,92.679112 62.8865166,93.0560851 64.0946097,92.8783815 C65.2975108,93.0473238 66.5641085,92.6696979 67.4899463,91.7438601 L88.5941459,70.6396605 C88.6693095,70.7292352 88.7490098,70.8162939 88.8332479,70.9005321 L122.826782,104.894066 Z M127.903244,98.6568194 C127.966933,98.2506602 128,97.8343714 128,97.4103789 L128,33.410481 C128,32.7414504 127.917877,32.0916738 127.763157,31.4706493 L94.2292399,65.0045665 C94.3188145,65.0797417 94.4058701,65.1594458 94.4901021,65.2436778 L127.903244,98.6568194 Z M0.205060636,99.2178117 C0.0709009529,98.6370366 0,98.0320192 0,97.4103789 L0,33.410481 C0,32.694007 0.0944223363,31.9995312 0.27147538,31.3387595 L0.27147538,31.3387595 L34.1777941,65.2450783 L0.205060636,99.2178117 L0.205060636,99.2178117 Z M5.92934613,25.6829218 C6.59211333,25.5051988 7.28862283,25.4104299 8.00697327,25.4104299 L119.993027,25.4104299 C120.759109,25.4104299 121.500064,25.5178649 122.201605,25.7184927 L122.201605,25.7184927 L64.0832611,83.8368368 L5.92934613,25.6829218 L5.92934613,25.6829218 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/menu-button": {
"title": "$:/core/images/menu-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-menu-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <rect x=\"0\" y=\"16\" width=\"128\" height=\"16\" rx=\"8\"></rect>\n <rect x=\"0\" y=\"56\" width=\"128\" height=\"16\" rx=\"8\"></rect>\n <rect x=\"0\" y=\"96\" width=\"128\" height=\"16\" rx=\"8\"></rect>\n</svg>"
},
"$:/core/images/mono-block": {
"title": "$:/core/images/mono-block",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-mono-block tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M23.9653488,32.9670593 L24.3217888,32.9670593 C25.0766067,32.9670593 25.6497006,33.1592554 26.0410876,33.5436534 C26.4324747,33.9280514 26.6281653,34.4906619 26.6281653,35.2315017 C26.6281653,36.0562101 26.4219913,36.6502709 26.009637,37.0137017 C25.5972828,37.3771326 24.9158602,37.5588453 23.9653488,37.5588453 L17.6542639,37.5588453 C16.6897744,37.5588453 16.0048573,37.380627 15.5994921,37.0241852 C15.1941269,36.6677435 14.9914474,36.0701882 14.9914474,35.2315017 C14.9914474,34.4207713 15.1941269,33.8406885 15.5994921,33.4912358 C16.0048573,33.141783 16.6897744,32.9670593 17.6542639,32.9670593 L18.388111,32.9670593 L17.5284616,30.5139133 L8.47069195,30.5139133 L7.5691084,32.9670593 L8.30295547,32.9670593 C9.25346691,32.9670593 9.93488953,33.1452775 10.3472438,33.5017193 C10.759598,33.8581611 10.965772,34.4347494 10.965772,35.2315017 C10.965772,36.0562101 10.759598,36.6502709 10.3472438,37.0137017 C9.93488953,37.3771326 9.25346691,37.5588453 8.30295547,37.5588453 L2.89345418,37.5588453 C1.92896463,37.5588453 1.24404754,37.3771326 0.838682371,37.0137017 C0.433317198,36.6502709 0.230637652,36.0562101 0.230637652,35.2315017 C0.230637652,34.4906619 0.426328248,33.9280514 0.817715312,33.5436534 C1.20910238,33.1592554 1.78219626,32.9670593 2.53701417,32.9670593 L2.89345418,32.9670593 L8.51262607,17.3256331 L6.83526132,17.3256331 C5.88474988,17.3256331 5.20332727,17.1439204 4.79097304,16.7804895 C4.37861882,16.4170587 4.1724448,15.8299869 4.1724448,15.0192565 C4.1724448,14.1945481 4.37861882,13.6004873 4.79097304,13.2370565 C5.20332727,12.8736257 5.88474988,12.691913 6.83526132,12.691913 L14.6979086,12.691913 C15.9419603,12.691913 16.815579,13.3628521 17.318791,14.7047506 L17.318791,14.7676518 L23.9653488,32.9670593 Z M12.9786097,17.3256331 L9.9383861,26.1737321 L16.0188333,26.1737321 L12.9786097,17.3256331 Z 
M35.3809383,26.6979086 L35.3809383,33.0928616 L38.5259972,33.0928616 C40.7485166,33.0928616 42.3140414,32.8482484 43.2226185,32.3590146 C44.1311956,31.8697807 44.5854773,31.0520736 44.5854773,29.9058686 C44.5854773,28.7456855 44.1521624,27.9209895 43.2855197,27.4317556 C42.4188769,26.9425218 40.9022748,26.6979086 38.7356678,26.6979086 L35.3809383,26.6979086 Z M46.0741385,24.370565 C47.5977525,24.9296893 48.7159844,25.6949794 49.428868,26.666458 C50.1417516,27.6379366 50.498188,28.8784752 50.498188,30.388111 C50.498188,31.6601189 50.1906743,32.8202846 49.5756374,33.8686428 C48.9606006,34.917001 48.0799929,35.7766419 46.933788,36.4475911 C46.2628387,36.8389782 45.5115266,37.1220307 44.6798291,37.296757 C43.8481316,37.4714834 42.6704935,37.5588453 41.1468796,37.5588453 L39.3856466,37.5588453 L30.2020747,37.5588453 C29.2795194,37.5588453 28.6190637,37.3771326 28.2206876,37.0137017 C27.8223114,36.6502709 27.6231264,36.0562101 27.6231264,35.2315017 C27.6231264,34.4906619 27.811828,33.9280514 28.189237,33.5436534 C28.5666459,33.1592554 29.118773,32.9670593 29.8456347,32.9670593 L30.2020747,32.9670593 L30.2020747,17.3256331 L29.8456347,17.3256331 C29.118773,17.3256331 28.5666459,17.1299425 28.189237,16.7385554 C27.811828,16.3471683 27.6231264,15.7740744 27.6231264,15.0192565 C27.6231264,14.2085262 27.8258059,13.6179599 28.2311711,13.24754 C28.6365363,12.8771201 29.2934976,12.691913 30.2020747,12.691913 L39.8469219,12.691913 C42.796303,12.691913 45.0362615,13.2650068 46.5668644,14.4112118 C48.0974674,15.5574168 48.8627574,17.2347648 48.8627574,19.443306 C48.8627574,20.5335986 48.6286276,21.4945792 48.1603609,22.3262767 C47.6920943,23.1579742 46.9966938,23.8393968 46.0741385,24.370565 L46.0741385,24.370565 Z M35.3809383,17.1998307 L35.3809383,22.4835296 L38.2114913,22.4835296 C39.9307988,22.4835296 41.1433816,22.2808501 41.8492761,21.8754849 C42.5551706,21.4701197 42.9081126,20.7852027 42.9081126,19.8207131 C42.9081126,18.912136 42.5901154,18.2481858 41.9541114,17.8288425 
C41.3181074,17.4094992 40.2872373,17.1998307 38.8614701,17.1998307 L35.3809383,17.1998307 Z M71.244119,13.3838259 C71.5236812,12.880614 71.8102281,12.5241775 72.1037684,12.3145059 C72.3973087,12.1048342 72.7677231,12 73.2150226,12 C73.8999499,12 74.3856819,12.1817127 74.6722332,12.5451435 C74.9587844,12.9085744 75.1020579,13.5305909 75.1020579,14.4112118 L75.143992,19.8626472 C75.143992,20.8271368 74.9867406,21.4771091 74.6722332,21.8125837 C74.3577257,22.1480584 73.7881263,22.3157932 72.9634178,22.3157932 C72.3763372,22.3157932 71.92555,22.1760142 71.6110425,21.896452 C71.2965351,21.6168898 71.0274605,21.0997075 70.8038107,20.3448896 C70.4403799,19.0169692 69.8602971,18.0629775 69.0635448,17.482886 C68.2667926,16.9027945 67.1625385,16.612753 65.7507494,16.612753 C63.5981206,16.612753 61.9487284,17.3396038 60.8025235,18.7933272 C59.6563185,20.2470506 59.0832246,22.3507245 59.0832246,25.104412 C59.0832246,27.8441215 59.6633074,29.9477954 60.8234905,31.4154969 C61.9836736,32.8831984 63.6400547,33.6170381 65.7926836,33.6170381 C67.2603851,33.6170381 68.878327,33.1278116 70.6465578,32.149344 C72.4147886,31.1708763 73.5295261,30.6816498 73.9908037,30.6816498 C74.53595,30.6816498 74.9937262,30.9122852 75.3641461,31.3735628 C75.734566,31.8348404 75.9197732,32.4079343 75.9197732,33.0928616 C75.9197732,34.3229353 74.836486,35.4831009 72.669879,36.5733935 C70.5032721,37.663686 68.0641285,38.2088241 65.3523753,38.2088241 C61.6901107,38.2088241 58.7267959,36.9997358 56.4623422,34.5815228 C54.1978885,32.1633099 53.0656786,29.0043046 53.0656786,25.104412 C53.0656786,21.3443006 54.2118664,18.22024 56.5042763,15.7321366 C58.7966863,13.2440331 61.7040894,12 65.226573,12 C66.2190187,12 67.1974717,12.1118232 68.1619613,12.3354729 C69.1264508,12.5591227 70.1538264,12.9085702 71.244119,13.3838259 L71.244119,13.3838259 Z M81.4645862,32.9670593 L81.4645862,17.3256331 L81.1081461,17.3256331 C80.3533282,17.3256331 79.7802344,17.1299425 79.3888473,16.7385554 C78.9974602,16.3471683 
78.8017696,15.7740744 78.8017696,15.0192565 C78.8017696,14.2085262 79.0114381,13.6179599 79.4307814,13.24754 C79.8501247,12.8771201 80.5280528,12.691913 81.4645862,12.691913 L85.4063933,12.691913 L86.6434498,12.691913 C89.5648747,12.691913 91.7034933,12.8177141 93.0593699,13.06932 C94.4152465,13.320926 95.5684233,13.740263 96.5189347,14.3273436 C98.210286,15.3337675 99.5067362,16.7699967 100.408324,18.6360743 C101.309912,20.5021519 101.7607,22.6582429 101.7607,25.104412 C101.7607,27.6903623 101.247012,29.9512876 100.219621,31.8872557 C99.1922296,33.8232239 97.7350336,35.2874089 95.8479888,36.2798546 C94.9953241,36.7271541 93.9959043,37.0521403 92.8496993,37.2548229 C91.7034944,37.4575055 89.9981906,37.5588453 87.7337369,37.5588453 L85.4063933,37.5588453 L81.4645862,37.5588453 C80.5000966,37.5588453 79.8151795,37.380627 79.4098143,37.0241852 C79.0044492,36.6677435 78.8017696,36.0701882 78.8017696,35.2315017 C78.8017696,34.4906619 78.9974602,33.9280514 79.3888473,33.5436534 C79.7802344,33.1592554 80.3533282,32.9670593 81.1081461,32.9670593 L81.4645862,32.9670593 Z M86.8740874,17.2417648 L86.8740874,32.9670593 L88.0692098,32.9670593 C90.7110725,32.9670593 92.6609895,32.3205814 93.9190194,31.0276063 C95.1770492,29.7346312 95.8060547,27.7462749 95.8060547,25.0624779 C95.8060547,22.4206153 95.1665658,20.4497314 93.8875688,19.1497672 C92.6085718,17.849803 90.6831161,17.1998307 88.1111439,17.1998307 C87.7756693,17.1998307 87.5205727,17.2033252 87.3458463,17.2103142 C87.1711199,17.2173033 87.0138685,17.2277867 86.8740874,17.2417648 L86.8740874,17.2417648 Z M121.94052,17.1159625 L112.190837,17.1159625 L112.190837,22.4835296 L115.88104,22.4835296 L115.88104,22.2319249 C115.88104,21.4351727 116.055763,20.841112 116.405216,20.4497249 C116.754669,20.0583378 117.285829,19.8626472 117.998713,19.8626472 C118.627728,19.8626472 119.141415,20.0408655 119.539792,20.3973072 C119.938168,20.753749 120.137353,21.2045363 120.137353,21.7496826 C120.137353,21.7776388 120.144342,21.8684951 
120.15832,22.0222543 C120.172298,22.1760135 120.179287,22.3297704 120.179287,22.4835296 L120.179287,26.8237109 C120.179287,27.7602442 120.011552,28.4311834 119.676077,28.8365486 C119.340603,29.2419138 118.795465,29.4445933 118.040647,29.4445933 C117.327763,29.4445933 116.789614,29.2558917 116.426183,28.8784827 C116.062752,28.5010738 115.88104,27.9419578 115.88104,27.201118 L115.88104,26.8237109 L112.190837,26.8237109 L112.190837,33.0928616 L121.94052,33.0928616 L121.94052,30.5977816 C121.94052,29.6612482 122.118738,28.9903091 122.47518,28.5849439 C122.831622,28.1795787 123.415199,27.9768992 124.225929,27.9768992 C125.022682,27.9768992 125.592281,28.1760842 125.934745,28.5744604 C126.277208,28.9728365 126.448438,29.6472701 126.448438,30.5977816 L126.448438,35.6718099 C126.448438,36.4266278 126.30167,36.9298322 126.008129,37.1814382 C125.714589,37.4330442 125.134506,37.5588453 124.267863,37.5588453 L107.095842,37.5588453 C106.173287,37.5588453 105.512831,37.3771326 105.114455,37.0137017 C104.716079,36.6502709 104.516894,36.0562101 104.516894,35.2315017 C104.516894,34.4906619 104.705595,33.9280514 105.083004,33.5436534 C105.460413,33.1592554 106.01254,32.9670593 106.739402,32.9670593 L107.095842,32.9670593 L107.095842,17.3256331 L106.739402,17.3256331 C106.026518,17.3256331 105.477886,17.126448 105.093488,16.7280719 C104.70909,16.3296957 104.516894,15.7600963 104.516894,15.0192565 C104.516894,14.2085262 104.719573,13.6179599 105.124938,13.24754 C105.530304,12.8771201 106.187265,12.691913 107.095842,12.691913 L124.267863,12.691913 C125.120528,12.691913 125.697116,12.8212085 125.997646,13.0798036 C126.298175,13.3383986 126.448438,13.8520864 126.448438,14.6208824 L126.448438,19.3175037 C126.448438,20.2680151 126.273714,20.9494377 125.924261,21.361792 C125.574808,21.7741462 125.008703,21.9803202 124.225929,21.9803202 C123.415199,21.9803202 122.831622,21.7706517 122.47518,21.3513084 C122.118738,20.9319652 121.94052,20.254037 121.94052,19.3175037 L121.94052,17.1159625 Z 
M19.7719369,47.6405477 C20.037521,47.1373358 20.3205734,46.7808993 20.6211028,46.5712277 C20.9216322,46.361556 21.295541,46.2567218 21.7428405,46.2567218 C22.4277678,46.2567218 22.9134998,46.4384345 23.2000511,46.8018653 C23.4866023,47.1652962 23.6298758,47.7873127 23.6298758,48.6679336 L23.6718099,54.119369 C23.6718099,55.0838586 23.5145586,55.7338309 23.2000511,56.0693055 C22.8855436,56.4047802 22.3089553,56.572515 21.4702687,56.572515 C20.8831881,56.572515 20.4254119,56.4292415 20.0969263,56.1426902 C19.7684407,55.856139 19.4993662,55.3424512 19.2896945,54.6016114 C18.9122856,53.2597129 18.3322027,52.3022267 17.5494286,51.7291243 C16.7666545,51.1560218 15.6693894,50.8694748 14.2576003,50.8694748 C12.1049715,50.8694748 10.4590738,51.5963256 9.31985785,53.050049 C8.18064193,54.5037724 7.61104252,56.6074463 7.61104252,59.3611338 C7.61104252,62.1148214 8.20859773,64.2429566 9.40372609,65.7456034 C10.5988544,67.2482501 12.2936748,67.9995623 14.488238,67.9995623 C14.9914499,67.9995623 15.5645438,67.9401562 16.2075368,67.8213423 C16.8505299,67.7025283 17.6053364,67.5173212 18.4719792,67.2657152 L18.4719792,63.9529198 L16.1027015,63.9529198 C15.1521901,63.9529198 14.4777564,63.7781961 14.0793803,63.4287433 C13.6810042,63.0792906 13.4818191,62.4992078 13.4818191,61.6884774 C13.4818191,60.8497908 13.6810042,60.2522356 14.0793803,59.8957938 C14.4777564,59.5393521 15.1521901,59.3611338 16.1027015,59.3611338 L23.6718099,59.3611338 C24.6502776,59.3611338 25.3386891,59.5358576 25.7370653,59.8853103 C26.1354414,60.2347631 26.3346265,60.8218348 26.3346265,61.6465433 C26.3346265,62.3873831 26.1354414,62.9569825 25.7370653,63.3553586 C25.3386891,63.7537347 24.7621008,63.9529198 24.0072829,63.9529198 L23.6718099,63.9529198 L23.6718099,68.9430799 L23.6718099,69.1946846 C23.6718099,69.6419841 23.6228873,69.9529924 23.5250405,70.1277188 C23.4271937,70.3024451 23.2315031,70.4806634 22.9379628,70.6623788 C22.1412106,71.1376345 20.8762107,71.5569715 19.1429251,71.9204023 
C17.4096396,72.2838332 15.6554131,72.4655459 13.8801932,72.4655459 C10.2179286,72.4655459 7.25461383,71.2564576 4.99016011,68.8382446 C2.72570638,66.4200317 1.59349651,63.2610264 1.59349651,59.3611338 C1.59349651,55.6010224 2.73968428,52.4769618 5.03209423,49.9888583 C7.32450417,47.5007549 10.2319073,46.2567218 13.7543909,46.2567218 C14.7328585,46.2567218 15.7078171,46.368545 16.6792957,46.5921947 C17.6507743,46.8158445 18.6816444,47.165292 19.7719369,47.6405477 L19.7719369,47.6405477 Z M35.611576,51.5823548 L35.611576,56.4047785 L42.4678043,56.4047785 L42.4678043,51.5823548 L42.1323314,51.5823548 C41.3775135,51.5823548 40.8009251,51.3866642 40.402549,50.9952772 C40.0041729,50.6038901 39.8049878,50.0307962 39.8049878,49.2759783 C39.8049878,48.4512699 40.0111618,47.8572091 40.4235161,47.4937783 C40.8358703,47.1303474 41.5172929,46.9486347 42.4678043,46.9486347 L47.8773056,46.9486347 C48.8278171,46.9486347 49.5022507,47.1303474 49.9006269,47.4937783 C50.299003,47.8572091 50.498188,48.4512699 50.498188,49.2759783 C50.498188,50.0307962 50.3059919,50.6038901 49.9215939,50.9952772 C49.5371959,51.3866642 48.9745854,51.5823548 48.2337456,51.5823548 L47.8773056,51.5823548 L47.8773056,67.2237811 L48.2337456,67.2237811 C48.9885636,67.2237811 49.5616574,67.4159772 49.9530445,67.8003752 C50.3444316,68.1847732 50.5401222,68.7473837 50.5401222,69.4882235 C50.5401222,70.3129319 50.3374426,70.9069927 49.9320774,71.2704235 C49.5267123,71.6338543 48.8417952,71.815567 47.8773056,71.815567 L42.4678043,71.815567 C41.5033148,71.815567 40.8183977,71.6373488 40.4130325,71.280907 C40.0076674,70.9244652 39.8049878,70.32691 39.8049878,69.4882235 C39.8049878,68.7473837 40.0041729,68.1847732 40.402549,67.8003752 C40.8009251,67.4159772 41.3775135,67.2237811 42.1323314,67.2237811 L42.4678043,67.2237811 L42.4678043,61.0384986 L35.611576,61.0384986 L35.611576,67.2237811 L35.9470489,67.2237811 C36.7018668,67.2237811 37.2784552,67.4159772 37.6768313,67.8003752 C38.0752074,68.1847732 
38.2743925,68.7473837 38.2743925,69.4882235 C38.2743925,70.3129319 38.0682185,70.9069927 37.6558642,71.2704235 C37.24351,71.6338543 36.5620874,71.815567 35.611576,71.815567 L30.2020747,71.815567 C29.2375851,71.815567 28.552668,71.6373488 28.1473029,71.280907 C27.7419377,70.9244652 27.5392581,70.32691 27.5392581,69.4882235 C27.5392581,68.7473837 27.7349487,68.1847732 28.1263358,67.8003752 C28.5177229,67.4159772 29.0908168,67.2237811 29.8456347,67.2237811 L30.2020747,67.2237811 L30.2020747,51.5823548 L29.8456347,51.5823548 C29.1047949,51.5823548 28.5421844,51.3866642 28.1577864,50.9952772 C27.7733884,50.6038901 27.5811923,50.0307962 27.5811923,49.2759783 C27.5811923,48.4512699 27.7803773,47.8572091 28.1787534,47.4937783 C28.5771296,47.1303474 29.2515632,46.9486347 30.2020747,46.9486347 L35.611576,46.9486347 C36.5481093,46.9486347 37.2260374,47.1303474 37.6453807,47.4937783 C38.064724,47.8572091 38.2743925,48.4512699 38.2743925,49.2759783 C38.2743925,50.0307962 38.0752074,50.6038901 37.6768313,50.9952772 C37.2784552,51.3866642 36.7018668,51.5823548 35.9470489,51.5823548 L35.611576,51.5823548 Z M67.365213,51.5823548 L67.365213,67.2237811 L70.887679,67.2237811 C71.8381904,67.2237811 72.519613,67.4019993 72.9319673,67.7584411 C73.3443215,68.1148829 73.5504955,68.6914712 73.5504955,69.4882235 C73.5504955,70.2989538 73.340827,70.8895201 72.9214837,71.25994 C72.5021404,71.6303599 71.8242123,71.815567 70.887679,71.815567 L58.4332458,71.815567 C57.4827343,71.815567 56.8013117,71.6338543 56.3889575,71.2704235 C55.9766033,70.9069927 55.7704292,70.3129319 55.7704292,69.4882235 C55.7704292,68.6774931 55.9731088,68.0974103 56.378474,67.7479575 C56.7838391,67.3985048 57.4687562,67.2237811 58.4332458,67.2237811 L61.9557117,67.2237811 L61.9557117,51.5823548 L58.4332458,51.5823548 C57.4827343,51.5823548 56.8013117,51.4006421 56.3889575,51.0372113 C55.9766033,50.6737805 55.7704292,50.0867087 55.7704292,49.2759783 C55.7704292,48.4512699 55.9731088,47.8641981 56.378474,47.5147453 
C56.7838391,47.1652926 57.4687562,46.9905689 58.4332458,46.9905689 L70.887679,46.9905689 C71.8801247,46.9905689 72.5720308,47.1652926 72.9634178,47.5147453 C73.3548049,47.8641981 73.5504955,48.4512699 73.5504955,49.2759783 C73.5504955,50.0867087 73.347816,50.6737805 72.9424508,51.0372113 C72.5370856,51.4006421 71.8521685,51.5823548 70.887679,51.5823548 L67.365213,51.5823548 Z M97.8608265,51.5823548 L97.8608265,63.1771386 L97.8608265,63.5755127 C97.8608265,65.4485794 97.7385199,66.8044357 97.493903,67.6431222 C97.2492861,68.4818088 96.8404325,69.2296264 96.26733,69.8865976 C95.5264902,70.7392623 94.4991146,71.3822457 93.1851723,71.815567 C91.87123,72.2488884 90.2917273,72.4655459 88.4466169,72.4655459 C87.1466527,72.4655459 85.8921362,72.3397448 84.6830298,72.0881388 C83.4739233,71.8365328 82.3102631,71.4591296 81.1920144,70.9559176 C80.5769776,70.6763554 80.175113,70.31293 79.9864085,69.8656305 C79.797704,69.418331 79.7033532,68.6914802 79.7033532,67.6850564 L79.7033532,63.3658422 C79.7033532,62.1637247 79.8780769,61.3250508 80.2275297,60.849795 C80.5769824,60.3745393 81.185021,60.136915 82.0516638,60.136915 C83.2957156,60.136915 83.9806326,61.0524675 84.1064356,62.8835998 C84.1204137,63.2050963 84.1413806,63.4497096 84.1693368,63.6174469 C84.3370741,65.2389076 84.7144774,66.3466561 85.301558,66.9407258 C85.8886386,67.5347954 86.8251579,67.8318258 88.1111439,67.8318258 C89.7046484,67.8318258 90.8263749,67.4089943 91.476357,66.5633187 C92.126339,65.7176431 92.4513252,64.1765796 92.4513252,61.9400821 L92.4513252,51.5823548 L88.9288593,51.5823548 C87.9783478,51.5823548 87.2969252,51.4006421 86.884571,51.0372113 C86.4722168,50.6737805 86.2660427,50.0867087 86.2660427,49.2759783 C86.2660427,48.4512699 86.4652278,47.8641981 86.8636039,47.5147453 C87.26198,47.1652926 87.9503916,46.9905689 88.9288593,46.9905689 L99.6220595,46.9905689 C100.600527,46.9905689 101.288939,47.1652926 101.687315,47.5147453 C102.085691,47.8641981 102.284876,48.4512699 102.284876,49.2759783 
C102.284876,50.0867087 102.078702,50.6737805 101.666348,51.0372113 C101.253994,51.4006421 100.572571,51.5823548 99.6220595,51.5823548 L97.8608265,51.5823548 Z M112.505343,51.5823548 L112.505343,57.9353738 L118.984165,51.4565525 C118.257303,51.3726838 117.747109,51.1665098 117.453569,50.8380242 C117.160029,50.5095387 117.013261,49.9888619 117.013261,49.2759783 C117.013261,48.4512699 117.212446,47.8572091 117.610822,47.4937783 C118.009198,47.1303474 118.683632,46.9486347 119.634143,46.9486347 L124.771073,46.9486347 C125.721584,46.9486347 126.396018,47.1303474 126.794394,47.4937783 C127.19277,47.8572091 127.391955,48.4512699 127.391955,49.2759783 C127.391955,50.0447743 127.19277,50.6213627 126.794394,51.0057607 C126.396018,51.3901587 125.812441,51.5823548 125.043645,51.5823548 L124.561402,51.5823548 L118.459988,57.641835 C119.592215,58.4805215 120.626579,59.5812811 121.563113,60.9441468 C122.499646,62.3070125 123.596911,64.400203 124.854941,67.2237811 L125.127513,67.2237811 L125.546854,67.2237811 C126.371563,67.2237811 126.98659,67.4124827 127.391955,67.7898917 C127.79732,68.1673006 128,68.7334056 128,69.4882235 C128,70.3129319 127.793826,70.9069927 127.381472,71.2704235 C126.969118,71.6338543 126.287695,71.815567 125.337183,71.815567 L122.758235,71.815567 C121.626008,71.815567 120.710456,71.0537715 120.01155,69.5301576 C119.885747,69.2505954 119.787902,69.026949 119.718012,68.8592117 C118.795456,66.9022764 117.949793,65.3926632 117.180997,64.3303269 C116.412201,63.2679906 115.510627,62.2965265 114.476247,61.4159056 L112.505343,63.302941 L112.505343,67.2237811 L112.840816,67.2237811 C113.595634,67.2237811 114.172222,67.4159772 114.570599,67.8003752 C114.968975,68.1847732 115.16816,68.7473837 115.16816,69.4882235 C115.16816,70.3129319 114.961986,70.9069927 114.549631,71.2704235 C114.137277,71.6338543 113.455855,71.815567 112.505343,71.815567 L107.095842,71.815567 C106.131352,71.815567 105.446435,71.6373488 105.04107,71.280907 C104.635705,70.9244652 104.433025,70.32691 
104.433025,69.4882235 C104.433025,68.7473837 104.628716,68.1847732 105.020103,67.8003752 C105.41149,67.4159772 105.984584,67.2237811 106.739402,67.2237811 L107.095842,67.2237811 L107.095842,51.5823548 L106.739402,51.5823548 C105.998562,51.5823548 105.435952,51.3866642 105.051554,50.9952772 C104.667156,50.6038901 104.474959,50.0307962 104.474959,49.2759783 C104.474959,48.4512699 104.674145,47.8572091 105.072521,47.4937783 C105.470897,47.1303474 106.14533,46.9486347 107.095842,46.9486347 L112.505343,46.9486347 C113.441877,46.9486347 114.119805,47.1303474 114.539148,47.4937783 C114.958491,47.8572091 115.16816,48.4512699 115.16816,49.2759783 C115.16816,50.0307962 114.968975,50.6038901 114.570599,50.9952772 C114.172222,51.3866642 113.595634,51.5823548 112.840816,51.5823548 L112.505343,51.5823548 Z M13.439885,96.325622 L17.4445933,84.4372993 C17.6961993,83.6545252 18.0456468,83.0849258 18.4929463,82.728484 C18.9402458,82.3720422 19.5343065,82.193824 20.2751463,82.193824 L23.5460076,82.193824 C24.496519,82.193824 25.1779416,82.3755367 25.5902958,82.7389675 C26.0026501,83.1023984 26.2088241,83.6964591 26.2088241,84.5211676 C26.2088241,85.2759855 26.009639,85.8490794 25.6112629,86.2404664 C25.2128868,86.6318535 24.6362984,86.8275441 23.8814805,86.8275441 L23.5460076,86.8275441 L24.1330852,102.46897 L24.4895252,102.46897 C25.2443431,102.46897 25.8104481,102.661166 26.187857,103.045564 C26.565266,103.429962 26.7539676,103.992573 26.7539676,104.733413 C26.7539676,105.558121 26.5547826,106.152182 26.1564064,106.515613 C25.7580303,106.879044 25.0835967,107.060756 24.1330852,107.060756 L19.4154969,107.060756 C18.4649855,107.060756 17.7905518,106.882538 17.3921757,106.526096 C16.9937996,106.169654 16.7946145,105.572099 16.7946145,104.733413 C16.7946145,103.992573 16.9868106,103.429962 17.3712086,103.045564 C17.7556066,102.661166 18.325206,102.46897 19.0800239,102.46897 L19.4154969,102.46897 L19.1219581,89.6790642 L16.0607674,99.1981091 C15.8371177,99.9109927 15.5191204,100.42468 
15.1067662,100.739188 C14.694412,101.053695 14.1248126,101.210947 13.3979509,101.210947 C12.6710892,101.210947 12.0945008,101.053695 11.6681685,100.739188 C11.2418362,100.42468 10.91685,99.9109927 10.6932002,99.1981091 L7.65297664,89.6790642 L7.35943781,102.46897 L7.69491075,102.46897 C8.44972866,102.46897 9.01932808,102.661166 9.40372609,103.045564 C9.78812409,103.429962 9.98032022,103.992573 9.98032022,104.733413 C9.98032022,105.558121 9.77764067,106.152182 9.3722755,106.515613 C8.96691032,106.879044 8.29597114,107.060756 7.35943781,107.060756 L2.62088241,107.060756 C1.68434908,107.060756 1.01340989,106.879044 0.608044719,106.515613 C0.202679546,106.152182 0,105.558121 0,104.733413 C0,103.992573 0.192196121,103.429962 0.57659413,103.045564 C0.960992139,102.661166 1.53059155,102.46897 2.28540946,102.46897 L2.62088241,102.46897 L3.22892713,86.8275441 L2.89345418,86.8275441 C2.13863627,86.8275441 1.56204791,86.6318535 1.16367179,86.2404664 C0.765295672,85.8490794 0.5661106,85.2759855 0.5661106,84.5211676 C0.5661106,83.6964591 0.772284622,83.1023984 1.18463885,82.7389675 C1.59699308,82.3755367 2.27841569,82.193824 3.22892713,82.193824 L6.49978838,82.193824 C7.22665007,82.193824 7.81022738,82.3685477 8.25053783,82.7180005 C8.69084827,83.0674532 9.05077919,83.6405471 9.33034138,84.4372993 L13.439885,96.325622 Z M43.8935644,98.3803938 L43.8935644,86.8275441 L42.7403761,86.8275441 C41.8178209,86.8275441 41.1573651,86.6458314 40.758989,86.2824006 C40.3606129,85.9189697 40.1614278,85.3318979 40.1614278,84.5211676 C40.1614278,83.7104372 40.3606129,83.119871 40.758989,82.7494511 C41.1573651,82.3790312 41.8178209,82.193824 42.7403761,82.193824 L48.6950209,82.193824 C49.6035981,82.193824 50.2605593,82.3790312 50.6659245,82.7494511 C51.0712897,83.119871 51.2739692,83.7104372 51.2739692,84.5211676 C51.2739692,85.2620074 51.0817731,85.8316068 50.6973751,86.2299829 C50.3129771,86.628359 49.7643445,86.8275441 49.051461,86.8275441 L48.6950209,86.8275441 L48.6950209,105.865634 
C48.6950209,106.522605 48.6251315,106.934953 48.4853504,107.10269 C48.3455693,107.270428 48.0310665,107.354295 47.5418327,107.354295 L45.4451268,107.354295 C44.7741775,107.354295 44.3024234,107.284406 44.0298503,107.144625 C43.7572771,107.004843 43.5231473,106.76023 43.3274538,106.410777 L34.6051571,91.0838571 L34.6051571,102.46897 L35.8212466,102.46897 C36.7298237,102.46897 37.379796,102.643694 37.7711831,102.993147 C38.1625701,103.3426 38.3582607,103.922682 38.3582607,104.733413 C38.3582607,105.558121 38.1590757,106.152182 37.7606995,106.515613 C37.3623234,106.879044 36.7158456,107.060756 35.8212466,107.060756 L29.8037005,107.060756 C28.8951234,107.060756 28.2381621,106.879044 27.832797,106.515613 C27.4274318,106.152182 27.2247522,105.558121 27.2247522,104.733413 C27.2247522,103.992573 27.4134539,103.429962 27.7908629,103.045564 C28.1682718,102.661166 28.7273878,102.46897 29.4682276,102.46897 L29.8037005,102.46897 L29.8037005,86.8275441 L29.4682276,86.8275441 C28.755344,86.8275441 28.203217,86.628359 27.8118299,86.2299829 C27.4204428,85.8316068 27.2247522,85.2620074 27.2247522,84.5211676 C27.2247522,83.7104372 27.4309263,83.119871 27.8432805,82.7494511 C28.2556347,82.3790312 28.9091015,82.193824 29.8037005,82.193824 L33.2422983,82.193824 C34.0670067,82.193824 34.6261227,82.3021527 34.919663,82.5188134 C35.2132033,82.7354741 35.5416839,83.1722835 35.9051148,83.8292546 L43.8935644,98.3803938 Z M64.6604624,86.3662688 C62.8572863,86.3662688 61.4420239,87.0931196 60.4146329,88.546843 C59.3872418,90.0005663 58.873554,92.0203728 58.873554,94.6063231 C58.873554,97.1922733 59.3907363,99.2190688 60.4251164,100.68677 C61.4594965,102.154472 62.8712644,102.888312 64.6604624,102.888312 C66.4636385,102.888312 67.8823953,102.157966 68.9167754,100.697254 C69.9511555,99.2365414 70.4683378,97.2062514 70.4683378,94.6063231 C70.4683378,92.0203728 69.95465,90.0005663 68.9272589,88.546843 C67.8998679,87.0931196 66.4776166,86.3662688 64.6604624,86.3662688 L64.6604624,86.3662688 Z 
M64.6604624,81.501911 C68.0990773,81.501911 70.929602,82.7319662 73.1521214,85.1921135 C75.3746408,87.6522607 76.4858838,90.7902992 76.4858838,94.6063231 C76.4858838,98.4503032 75.3816297,101.595331 73.1730884,104.0415 C70.9645471,106.487669 68.1270335,107.710735 64.6604624,107.710735 C61.2358256,107.710735 58.4053009,106.477185 56.1688034,104.010049 C53.9323059,101.542913 52.8140739,98.4083688 52.8140739,94.6063231 C52.8140739,90.7763211 53.9218224,87.6347881 56.1373528,85.1816299 C58.3528831,82.7284717 61.1938912,81.501911 64.6604624,81.501911 L64.6604624,81.501911 Z M87.4611651,98.1707232 L87.4611651,102.46897 L89.6207722,102.46897 C90.5293493,102.46897 91.1758272,102.643694 91.5602252,102.993147 C91.9446232,103.3426 92.1368193,103.922682 92.1368193,104.733413 C92.1368193,105.558121 91.9411287,106.152182 91.5497417,106.515613 C91.1583546,106.879044 90.5153712,107.060756 89.6207722,107.060756 L82.3661697,107.060756 C81.4436145,107.060756 80.7831587,106.879044 80.3847826,106.515613 C79.9864065,106.152182 79.7872214,105.558121 79.7872214,104.733413 C79.7872214,103.992573 79.9759231,103.429962 80.353332,103.045564 C80.730741,102.661166 81.282868,102.46897 82.0097297,102.46897 L82.3661697,102.46897 L82.3661697,86.8275441 L82.0097297,86.8275441 C81.2968461,86.8275441 80.7482136,86.628359 80.3638155,86.2299829 C79.9794175,85.8316068 79.7872214,85.2620074 79.7872214,84.5211676 C79.7872214,83.7104372 79.989901,83.119871 80.3952661,82.7494511 C80.8006313,82.3790312 81.4575926,82.193824 82.3661697,82.193824 L91.0255652,82.193824 C94.450202,82.193824 97.0396079,82.8507853 98.7938606,84.1647276 C100.548113,85.4786699 101.425227,87.414609 101.425227,89.972603 C101.425227,92.6703781 100.551608,94.7111515 98.8043442,96.0949843 C97.0570805,97.4788171 94.4641801,98.1707232 91.0255652,98.1707232 L87.4611651,98.1707232 Z M87.4611651,86.8275441 L87.4611651,93.4531348 L90.4384875,93.4531348 C92.0879044,93.4531348 93.328443,93.1735768 94.1601405,92.6144525 C94.9918381,92.0553281 
95.4076806,91.2166541 95.4076806,90.0984053 C95.4076806,89.0500471 94.9778602,88.2428234 94.1182064,87.67671 C93.2585527,87.1105966 92.031992,86.8275441 90.4384875,86.8275441 L87.4611651,86.8275441 Z M114.727851,107.396229 L113.092421,109.03166 C113.69348,108.835966 114.284046,108.689198 114.864137,108.591352 C115.444229,108.493505 116.013828,108.444582 116.572953,108.444582 C117.677223,108.444582 118.840883,108.608823 120.063968,108.937308 C121.287053,109.265794 122.031376,109.430034 122.29696,109.430034 C122.744259,109.430034 123.327837,109.279772 124.047709,108.979242 C124.767582,108.678713 125.253314,108.52845 125.50492,108.52845 C126.02211,108.52845 126.45193,108.727636 126.794394,109.126012 C127.136858,109.524388 127.308087,110.024098 127.308087,110.625156 C127.308087,111.421909 126.836333,112.099837 125.892811,112.658961 C124.949288,113.218086 123.792617,113.497643 122.422762,113.497643 C121.486229,113.497643 120.28413,113.277492 118.816428,112.837181 C117.348727,112.396871 116.286406,112.176719 115.629435,112.176719 C114.636989,112.176719 113.518757,112.449288 112.274706,112.994434 C111.030654,113.53958 110.261869,113.812149 109.968329,113.812149 C109.36727,113.812149 108.857077,113.612964 108.437734,113.214588 C108.01839,112.816212 107.808722,112.337469 107.808722,111.778345 C107.808722,111.386958 107.941512,110.971115 108.207096,110.530805 C108.47268,110.090494 108.94094,109.520895 109.611889,108.821989 L111.729562,106.683349 C109.395218,105.830685 107.536157,104.29661 106.152324,102.08108 C104.768491,99.8655494 104.076585,97.3180772 104.076585,94.4385866 C104.076585,90.6365409 105.180839,87.5299526 107.389381,85.1187288 C109.597922,82.7075049 112.442425,81.501911 115.922974,81.501911 C119.389545,81.501911 122.227059,82.7109994 124.4356,85.1292123 C126.644141,87.5474252 127.748395,90.650519 127.748395,94.4385866 C127.748395,98.2126762 126.65113,101.322759 124.456567,103.768928 C122.262004,106.215097 119.480402,107.438163 116.111677,107.438163 
C115.888028,107.438163 115.660887,107.434669 115.430248,107.42768 C115.199609,107.420691 114.965479,107.410207 114.727851,107.396229 L114.727851,107.396229 Z M115.922974,86.3662688 C114.119798,86.3662688 112.704535,87.0931196 111.677144,88.546843 C110.649753,90.0005663 110.136065,92.0203728 110.136065,94.6063231 C110.136065,97.1922733 110.653248,99.2190688 111.687628,100.68677 C112.722008,102.154472 114.133776,102.888312 115.922974,102.888312 C117.72615,102.888312 119.144907,102.157966 120.179287,100.697254 C121.213667,99.2365414 121.730849,97.2062514 121.730849,94.6063231 C121.730849,92.0203728 121.217161,90.0005663 120.18977,88.546843 C119.162379,87.0931196 117.740128,86.3662688 115.922974,86.3662688 L115.922974,86.3662688 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/mono-line": {
"title": "$:/core/images/mono-line",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-mono-line tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M60.4374591,84.522627 L61.3450888,84.522627 C63.2671377,84.522627 64.7264493,85.0120303 65.7230673,85.9908515 C66.7196852,86.9696727 67.2179868,88.4022896 67.2179868,90.288745 C67.2179868,92.3887615 66.6929905,93.9014625 65.6429823,94.8268935 C64.5929741,95.7523244 62.857817,96.215033 60.4374591,96.215033 L44.3670747,96.215033 C41.9111232,96.215033 40.1670679,95.7612227 39.1348565,94.8535884 C38.102645,93.9459542 37.586547,92.424355 37.586547,90.288745 C37.586547,88.2243221 38.102645,86.747214 39.1348565,85.8573766 C40.1670679,84.9675391 41.9111232,84.522627 44.3670747,84.522627 L46.235724,84.522627 L44.0467348,78.2759992 L20.9822627,78.2759992 L18.6864935,84.522627 L20.5551429,84.522627 C22.9755008,84.522627 24.7106579,84.9764373 25.7606661,85.8840716 C26.8106743,86.7917058 27.3356705,88.2599156 27.3356705,90.288745 C27.3356705,92.3887615 26.8106743,93.9014625 25.7606661,94.8268935 C24.7106579,95.7523244 22.9755008,96.215033 20.5551429,96.215033 L6.78052766,96.215033 C4.32457622,96.215033 2.58052094,95.7523244 1.54830946,94.8268935 C0.516097994,93.9014625 0,92.3887615 0,90.288745 C0,88.4022896 0.498301511,86.9696727 1.49491948,85.9908515 C2.49153745,85.0120303 3.95084902,84.522627 5.87289797,84.522627 L6.78052766,84.522627 L21.0890427,44.6937008 L16.8178442,44.6937008 C14.3974863,44.6937008 12.6623292,44.2309922 11.612321,43.3055613 C10.5623128,42.3801303 10.0373165,40.8852258 10.0373165,38.8208028 C10.0373165,36.7207864 10.5623128,35.2080854 11.612321,34.2826544 C12.6623292,33.3572234 14.3974863,32.8945149 16.8178442,32.8945149 L36.8390873,32.8945149 C40.0069087,32.8945149 42.231469,34.6029772 43.512835,38.0199531 L43.512835,38.180123 L60.4374591,84.522627 Z M32.4611088,44.6937008 L24.7195615,67.224273 L40.2026561,67.224273 L32.4611088,44.6937008 Z M89.5058233,68.5590225 L89.5058233,84.8429669 
L97.5143205,84.8429669 C103.173687,84.8429669 107.160099,84.22009 109.473676,82.9743176 C111.787254,81.7285451 112.944025,79.6463566 112.944025,76.7276897 C112.944025,73.7734293 111.840643,71.6734444 109.633846,70.4276719 C107.427049,69.1818994 103.565213,68.5590225 98.0482204,68.5590225 L89.5058233,68.5590225 Z M116.734714,62.6327346 C120.614405,64.0564746 123.461842,66.0051894 125.277111,68.4789376 C127.092379,70.9526857 128,74.1115614 128,77.9556593 C128,81.1946677 127.216955,84.1488838 125.650841,86.8183962 C124.084727,89.4879087 121.84237,91.676876 118.923703,93.385364 C117.215215,94.3819819 115.302093,95.1027395 113.18428,95.5476582 C111.066467,95.9925769 108.06776,96.215033 104.188068,96.215033 L99.7033098,96.215033 L76.3184979,96.215033 C73.9693269,96.215033 72.2875593,95.7523244 71.2731446,94.8268935 C70.2587299,93.9014625 69.7515301,92.3887615 69.7515301,90.288745 C69.7515301,88.4022896 70.2320352,86.9696727 71.1930596,85.9908515 C72.1540841,85.0120303 73.5600062,84.522627 75.4108682,84.522627 L76.3184979,84.522627 L76.3184979,44.6937008 L75.4108682,44.6937008 C73.5600062,44.6937008 72.1540841,44.1953993 71.1930596,43.1987813 C70.2320352,42.2021633 69.7515301,40.7428518 69.7515301,38.8208028 C69.7515301,36.7563799 70.2676281,35.2525771 71.2998396,34.3093494 C72.3320511,33.3661217 74.0049204,32.8945149 76.3184979,32.8945149 L100.877889,32.8945149 C108.388118,32.8945149 114.09189,34.3538264 117.989378,37.2724934 C121.886867,40.1911603 123.835581,44.4623161 123.835581,50.0860889 C123.835581,52.8623819 123.239399,55.3093982 122.047017,57.4272114 C120.854635,59.5450246 119.083885,61.2801816 116.734714,62.6327346 L116.734714,62.6327346 Z M89.5058233,44.3733609 L89.5058233,57.8276363 L96.7134708,57.8276363 C101.091471,57.8276363 104.179161,57.3115383 105.976633,56.2793268 C107.774104,55.2471153 108.672827,53.50306 108.672827,51.0471086 C108.672827,48.7335312 107.863087,47.0428653 106.243583,45.9750604 C104.624078,44.9072554 101.999097,44.3733609 
98.3685602,44.3733609 L89.5058233,44.3733609 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/new-button": {
"title": "$:/core/images/new-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-new-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <g fill-rule=\"evenodd\">\n <path d=\"M56,72 L8.00697327,72 C3.59075293,72 0,68.418278 0,64 C0,59.5907123 3.58484404,56 8.00697327,56 L56,56 L56,8.00697327 C56,3.59075293 59.581722,0 64,0 C68.4092877,0 72,3.58484404 72,8.00697327 L72,56 L119.993027,56 C124.409247,56 128,59.581722 128,64 C128,68.4092877 124.415156,72 119.993027,72 L72,72 L72,119.993027 C72,124.409247 68.418278,128 64,128 C59.5907123,128 56,124.415156 56,119.993027 L56,72 L56,72 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/new-here-button": {
"title": "$:/core/images/new-here-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-new-here-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n \t<g transform=\"translate(52.233611, 64.389922) rotate(75.000000) translate(-52.233611, -64.389922) translate(-7.734417, 3.702450)\">\n\t <path d=\"M18.9270186,45.959338 L18.9080585,49.6521741 C18.8884833,53.4648378 21.0574548,58.7482162 23.7526408,61.4434022 L78.5671839,116.257945 C81.2617332,118.952495 85.6348701,118.950391 88.3334363,116.251825 L115.863237,88.7220241 C118.555265,86.0299959 118.564544,81.6509578 115.869358,78.9557717 L61.0548144,24.1412286 C58.3602652,21.4466794 53.0787224,19.2788426 49.2595808,19.3006519 L25.9781737,19.4336012 C22.1633003,19.4553862 19.0471195,22.5673232 19.0275223,26.3842526 L18.9871663,34.2443819 C19.0818862,34.255617 19.1779758,34.2665345 19.2754441,34.2771502 C22.6891275,34.6489512 27.0485594,34.2348566 31.513244,33.2285542 C31.7789418,32.8671684 32.075337,32.5211298 32.4024112,32.1940556 C34.8567584,29.7397084 38.3789778,29.0128681 41.4406288,30.0213822 C41.5958829,29.9543375 41.7503946,29.8866669 41.9041198,29.8183808 L42.1110981,30.2733467 C43.1114373,30.6972371 44.0473796,31.3160521 44.8614145,32.1300869 C48.2842088,35.5528813 48.2555691,41.130967 44.7974459,44.5890903 C41.4339531,47.952583 36.0649346,48.0717177 32.6241879,44.9262969 C27.8170558,45.8919233 23.0726921,46.2881596 18.9270186,45.959338 Z\"></path>\n\t <path d=\"M45.4903462,38.8768094 C36.7300141,42.6833154 26.099618,44.7997354 18.1909048,43.9383587 C7.2512621,42.7468685 1.50150083,35.8404432 4.66865776,24.7010202 C7.51507386,14.6896965 15.4908218,6.92103848 24.3842626,4.38423012 C34.1310219,1.60401701 42.4070208,6.15882777 42.4070209,16.3101169 L34.5379395,16.310117 C34.5379394,11.9285862 31.728784,10.3825286 26.5666962,11.8549876 C20.2597508,13.6540114 14.3453742,19.4148216 12.2444303,26.8041943 C10.4963869,32.9523565 12.6250796,35.5092726 19.0530263,36.2093718 C25.5557042,36.9176104 35.0513021,34.9907189 
42.7038419,31.5913902 L42.7421786,31.6756595 C44.3874154,31.5384763 47.8846101,37.3706354 45.9274416,38.6772897 L45.9302799,38.6835285 C45.9166992,38.6895612 45.9031139,38.6955897 45.8895238,38.7016142 C45.8389288,38.7327898 45.7849056,38.7611034 45.7273406,38.7863919 C45.6506459,38.8200841 45.571574,38.8501593 45.4903462,38.8768094 Z\"></path>\n </g>\n <rect x=\"96\" y=\"80\" width=\"16\" height=\"48\" rx=\"8\"></rect>\n <rect x=\"80\" y=\"96\" width=\"48\" height=\"16\" rx=\"8\"></rect>\n </g>\n </g>\n</svg>"
},
"$:/core/images/new-image-button": {
"title": "$:/core/images/new-image-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-new-image-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <g fill-rule=\"evenodd\">\n <path d=\"M81.3619177,73.6270062 L97.1875317,46.2162388 C97.91364,44.9585822 97.4824378,43.3533085 96.2260476,42.6279312 L46.2162388,13.7547547 C44.9585822,13.0286463 43.3533085,13.4598485 42.6279312,14.7162388 L30.0575956,36.4886988 L40.0978909,31.2276186 C43.1404959,29.6333041 46.8692155,31.3421319 47.6479264,34.6877101 L51.2545483,52.3903732 L61.1353556,53.2399953 C63.2899974,53.4346096 65.1046382,54.9309951 65.706105,57.0091178 C65.7395572,57.1246982 65.8069154,57.3539875 65.9047035,57.6813669 C66.0696435,58.2335608 66.2581528,58.852952 66.4667073,59.5238092 C67.0618822,61.4383079 67.6960725,63.3742727 68.3393254,65.2021174 C68.5462918,65.7902259 68.7511789,66.3583016 68.953259,66.9034738 C69.5777086,68.5881157 70.1617856,70.0172008 70.6783305,71.110045 C70.9334784,71.6498566 71.1627732,72.0871602 71.4035746,72.5373068 C71.6178999,72.7492946 71.9508843,72.9623307 72.4151452,73.1586945 C73.5561502,73.6412938 75.1990755,73.899146 77.0720271,73.9171651 C77.9355886,73.9254732 78.7819239,73.8832103 79.5638842,73.8072782 C80.0123946,73.7637257 80.3172916,73.7224469 80.4352582,73.7027375 C80.7503629,73.6500912 81.0598053,73.6256267 81.3619177,73.6270062 L81.3619177,73.6270062 L81.3619177,73.6270062 L81.3619177,73.6270062 Z M37.4707881,2.64867269 C38.9217993,0.135447653 42.1388058,-0.723707984 44.6486727,0.725364314 L108.293614,37.4707881 C110.806839,38.9217993 111.665994,42.1388058 110.216922,44.6486727 L73.4714982,108.293614 C72.0204871,110.806839 68.8034805,111.665994 66.2936136,110.216922 L2.64867269,73.4714982 C0.135447653,72.0204871 -0.723707984,68.8034805 0.725364314,66.2936136 L37.4707881,2.64867269 L37.4707881,2.64867269 L37.4707881,2.64867269 L37.4707881,2.64867269 Z M80.3080975,53.1397764 C82.8191338,54.5895239 86.0299834,53.7291793 87.4797308,51.218143 C88.9294783,48.7071068 88.0691338,45.4962571 
85.5580975,44.0465097 C83.0470612,42.5967622 79.8362116,43.4571068 78.3864641,45.968143 C76.9367166,48.4791793 77.7970612,51.6900289 80.3080975,53.1397764 L80.3080975,53.1397764 L80.3080975,53.1397764 L80.3080975,53.1397764 Z M96,112 L88.0070969,112 C83.5881712,112 80,108.418278 80,104 C80,99.5907123 83.5848994,96 88.0070969,96 L96,96 L96,88.0070969 C96,83.5881712 99.581722,80 104,80 C108.409288,80 112,83.5848994 112,88.0070969 L112,96 L119.992903,96 C124.411829,96 128,99.581722 128,104 C128,108.409288 124.415101,112 119.992903,112 L112,112 L112,119.992903 C112,124.411829 108.418278,128 104,128 C99.5907123,128 96,124.415101 96,119.992903 L96,112 L96,112 Z M33.3471097,51.7910932 C40.7754579,59.7394511 42.3564368,62.4818351 40.7958321,65.1848818 C39.2352273,67.8879286 26.9581062,62.8571718 24.7019652,66.7649227 C22.4458242,70.6726735 23.7947046,70.0228006 22.2648667,72.6725575 L41.9944593,84.0634431 C41.9944593,84.0634431 36.3904568,75.8079231 37.7602356,73.4353966 C40.2754811,69.0788636 46.5298923,72.1787882 48.1248275,69.4162793 C50.538989,65.234829 43.0222016,59.7770885 33.3471097,51.7910932 L33.3471097,51.7910932 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/new-journal-button": {
"title": "$:/core/images/new-journal-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-new-journal-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M102.545455,112.818182 L102.545455,124.636364 L102.545455,124.636364 L102.545455,124.636364 C102.545455,125.941761 103.630828,127 104.969697,127 L111.030303,127 C112.369172,127 113.454545,125.941761 113.454545,124.636364 L113.454545,112.818182 L125.575758,112.818182 C126.914626,112.818182 128,111.759982 128,110.454545 L128,104.545455 C128,103.240018 126.914626,102.181818 125.575758,102.181818 L113.454545,102.181818 L113.454545,90.3636364 C113.454545,89.0582 112.369172,88 111.030303,88 L104.969697,88 L104.969697,88 C103.630828,88 102.545455,89.0582 102.545455,90.3636364 L102.545455,102.181818 L90.4242424,102.181818 L90.4242424,102.181818 C89.0853705,102.181818 88,103.240018 88,104.545455 L88,110.454545 L88,110.454545 L88,110.454545 C88,111.759982 89.0853705,112.818182 90.4242424,112.818182 L102.545455,112.818182 Z\"></path>\n <g transform=\"translate(59.816987, 64.316987) rotate(30.000000) translate(-59.816987, -64.316987) translate(20.316987, 12.816987)\">\n <g transform=\"translate(0.000000, 0.000000)\">\n <path d=\"M9.99631148,0 C4.4755011,0 -2.27373675e-13,4.48070044 -2.27373675e-13,9.99759461 L-2.27373675e-13,91.6128884 C-2.27373675e-13,97.1344074 4.46966773,101.610483 9.99631148,101.610483 L68.9318917,101.610483 C74.4527021,101.610483 78.9282032,97.1297826 78.9282032,91.6128884 L78.9282032,9.99759461 C78.9282032,4.47607557 74.4585355,0 68.9318917,0 L9.99631148,0 Z M20.8885263,26 C24.2022348,26 26.8885263,23.3137085 26.8885263,20 C26.8885263,16.6862915 24.2022348,14 20.8885263,14 C17.5748178,14 14.8885263,16.6862915 14.8885263,20 C14.8885263,23.3137085 17.5748178,26 20.8885263,26 Z M57.3033321,25.6783342 C60.6170406,25.6783342 63.3033321,22.9920427 63.3033321,19.6783342 C63.3033321,16.3646258 60.6170406,13.6783342 57.3033321,13.6783342 C53.9896236,13.6783342 51.3033321,16.3646258 
51.3033321,19.6783342 C51.3033321,22.9920427 53.9896236,25.6783342 57.3033321,25.6783342 Z\"></path>\n <text font-family=\"Helvetica\" font-size=\"47.1724138\" font-weight=\"bold\" fill=\"#FFFFFF\">\n <tspan x=\"42\" y=\"77.4847912\" text-anchor=\"middle\"><<now \"DD\">></tspan>\n </text>\n </g>\n </g>\n </g>\n</svg>"
},
"$:/core/images/opacity": {
"title": "$:/core/images/opacity",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-opacity tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M102.361773,65 C101.833691,67.051742 101.183534,69.0544767 100.419508,71 L82.5835324,71 C83.7602504,69.1098924 84.7666304,67.1027366 85.581205,65 L102.361773,65 Z M102.834311,63 C103.256674,61.0388326 103.568427,59.0365486 103.762717,57 L87.6555706,57 C87.3692052,59.0609452 86.9083652,61.0660782 86.2884493,63 L102.834311,63 Z M99.5852583,73 C98.6682925,75.0747721 97.6196148,77.0783056 96.4498253,79 L75.8124196,79 C77.8387053,77.2115633 79.6621163,75.1985844 81.2437158,73 L99.5852583,73 Z M95.1689122,81 C93.7449202,83.1155572 92.1695234,85.1207336 90.458251,87 L60.4614747,87 C65.1836162,85.86248 69.5430327,83.794147 73.3347255,81 L95.1689122,81 Z M87.6555706,47 L103.762717,47 C101.246684,20.6269305 79.0321807,0 52,0 C23.281193,0 0,23.281193 0,52 C0,77.2277755 17.9651296,98.2595701 41.8000051,103 L62.1999949,103 C67.8794003,101.870444 73.2255333,99.8158975 78.074754,97 L39,97 L39,95 L81.2493857,95 C83.8589242,93.2215015 86.2981855,91.2116653 88.5376609,89 L39,89 L39,87 L43.5385253,87 C27.7389671,83.1940333 16,68.967908 16,52 C16,32.117749 32.117749,16 52,16 C70.1856127,16 85.2217929,29.4843233 87.6555706,47 Z M87.8767787,49 L103.914907,49 C103.971379,49.9928025 104,50.9930589 104,52 C104,53.0069411 103.971379,54.0071975 103.914907,55 L87.8767787,55 C87.958386,54.0107999 88,53.0102597 88,52 C88,50.9897403 87.958386,49.9892001 87.8767787,49 Z\"></path>\n <path d=\"M76,128 C104.718807,128 128,104.718807 128,76 C128,47.281193 104.718807,24 76,24 C47.281193,24 24,47.281193 24,76 C24,104.718807 47.281193,128 76,128 L76,128 Z M76,112 C95.882251,112 112,95.882251 112,76 C112,56.117749 95.882251,40 76,40 C56.117749,40 40,56.117749 40,76 C40,95.882251 56.117749,112 76,112 L76,112 Z\"></path>\n <path d=\"M37,58 L90,58 L90,62 L37,62 L37,58 L37,58 Z M40,50 L93,50 L93,54 L40,54 L40,50 L40,50 Z M40,42 L93,42 L93,46 L40,46 L40,42 
L40,42 Z M32,66 L85,66 L85,70 L32,70 L32,66 L32,66 Z M30,74 L83,74 L83,78 L30,78 L30,74 L30,74 Z M27,82 L80,82 L80,86 L27,86 L27,82 L27,82 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/open-window": {
"title": "$:/core/images/open-window",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-open-window tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M16,112 L104.993898,112 C108.863261,112 112,115.590712 112,120 C112,124.418278 108.858091,128 104.993898,128 L7.00610161,128 C3.13673853,128 0,124.409288 0,120 C0,119.998364 4.30952878e-07,119.996727 1.29273572e-06,119.995091 C4.89579306e-07,119.993456 0,119.99182 0,119.990183 L0,24.0098166 C0,19.586117 3.59071231,16 8,16 C12.418278,16 16,19.5838751 16,24.0098166 L16,112 Z\"></path>\n <path d=\"M96,43.1959595 L96,56 C96,60.418278 99.581722,64 104,64 C108.418278,64 112,60.418278 112,56 L112,24 C112,19.5907123 108.415101,16 103.992903,16 L72.0070969,16 C67.5881712,16 64,19.581722 64,24 C64,28.4092877 67.5848994,32 72.0070969,32 L84.5685425,32 L48.2698369,68.2987056 C45.1421332,71.4264093 45.1434327,76.4904296 48.267627,79.614624 C51.3854642,82.7324612 56.4581306,82.7378289 59.5835454,79.6124141 L96,43.1959595 Z M32,7.9992458 C32,3.58138434 35.5881049,0 39.9992458,0 L120.000754,0 C124.418616,0 128,3.5881049 128,7.9992458 L128,88.0007542 C128,92.4186157 124.411895,96 120.000754,96 L39.9992458,96 C35.5813843,96 32,92.4118951 32,88.0007542 L32,7.9992458 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/options-button": {
"title": "$:/core/images/options-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-options-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M110.48779,76.0002544 C109.354214,80.4045063 107.611262,84.5641217 105.354171,88.3838625 L105.354171,88.3838625 L112.07833,95.1080219 C115.20107,98.2307613 115.210098,103.299824 112.089164,106.420759 L106.420504,112.089418 C103.301049,115.208874 98.2346851,115.205502 95.1077675,112.078585 L88.3836082,105.354425 C84.5638673,107.611516 80.4042519,109.354468 76,110.488045 L76,110.488045 L76,119.993281 C76,124.409501 72.4220153,128.000254 68.0083475,128.000254 L59.9916525,128.000254 C55.5800761,128.000254 52,124.41541 52,119.993281 L52,110.488045 C47.5957481,109.354468 43.4361327,107.611516 39.6163918,105.354425 L32.8922325,112.078585 C29.7694931,115.201324 24.7004301,115.210353 21.5794957,112.089418 L15.9108363,106.420759 C12.7913807,103.301303 12.7947522,98.2349395 15.9216697,95.1080219 L22.6458291,88.3838625 C20.3887383,84.5641217 18.6457859,80.4045063 17.5122098,76.0002544 L8.00697327,76.0002544 C3.59075293,76.0002544 2.19088375e-16,72.4222697 4.89347582e-16,68.0086019 L9.80228577e-16,59.9919069 C1.25035972e-15,55.5803305 3.58484404,52.0002544 8.00697327,52.0002544 L17.5122098,52.0002544 C18.6457859,47.5960025 20.3887383,43.4363871 22.6458291,39.6166462 L15.9216697,32.8924868 C12.7989304,29.7697475 12.7899019,24.7006845 15.9108363,21.5797501 L21.5794957,15.9110907 C24.6989513,12.7916351 29.7653149,12.7950065 32.8922325,15.9219241 L39.6163918,22.6460835 C43.4361327,20.3889927 47.5957481,18.6460403 52,17.5124642 L52,8.00722764 C52,3.5910073 55.5779847,0.000254375069 59.9916525,0.000254375069 L68.0083475,0.000254375069 C72.4199239,0.000254375069 76,3.58509841 76,8.00722764 L76,17.5124642 C80.4042519,18.6460403 84.5638673,20.3889927 88.3836082,22.6460835 L95.1077675,15.9219241 C98.2305069,12.7991848 103.29957,12.7901562 106.420504,15.9110907 L112.089164,21.5797501 C115.208619,24.6992057 115.205248,29.7655693 
112.07833,32.8924868 L105.354171,39.6166462 L105.354171,39.6166462 C107.611262,43.4363871 109.354214,47.5960025 110.48779,52.0002544 L119.993027,52.0002544 C124.409247,52.0002544 128,55.5782391 128,59.9919069 L128,68.0086019 C128,72.4201783 124.415156,76.0002544 119.993027,76.0002544 L110.48779,76.0002544 L110.48779,76.0002544 Z M64,96.0002544 C81.673112,96.0002544 96,81.6733664 96,64.0002544 C96,46.3271424 81.673112,32.0002544 64,32.0002544 C46.326888,32.0002544 32,46.3271424 32,64.0002544 C32,81.6733664 46.326888,96.0002544 64,96.0002544 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/paint": {
"title": "$:/core/images/paint",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-paint tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M83.5265806,76.1907935 C90.430962,69.2864121 91.8921169,59.0000433 87.9100453,50.6642209 L125.812763,12.7615036 C128.732035,9.84223095 128.72611,5.10322984 125.812796,2.18991592 C122.893542,-0.729338085 118.161775,-0.730617045 115.241209,2.18994966 L77.3384914,40.092667 C69.002669,36.1105954 58.7163002,37.5717503 51.8119188,44.4761317 L83.5265806,76.1907935 L83.5265806,76.1907935 L83.5265806,76.1907935 L83.5265806,76.1907935 Z M80.8836921,78.8336819 L49.1690303,47.1190201 C49.1690303,47.1190201 8.50573364,81.242543 0,80.2820711 C0,80.2820711 3.78222974,85.8744423 6.82737483,88.320684 C20.8514801,82.630792 44.1526049,63.720771 44.1526049,63.720771 L44.8144806,64.3803375 C44.8144806,64.3803375 19.450356,90.2231043 9.18040433,92.0477601 C10.4017154,93.4877138 13.5343883,96.1014812 15.4269991,97.8235871 C20.8439164,96.3356979 50.1595367,69.253789 50.1595367,69.253789 L50.8214124,69.9133555 L18.4136144,100.936036 L23.6993903,106.221812 L56.1060358,75.2002881 L56.7679115,75.8598546 C56.7679115,75.8598546 28.9040131,106.396168 28.0841366,108.291555 C28.0841366,108.291555 34.1159238,115.144621 35.6529617,116.115796 C36.3545333,113.280171 63.5365402,82.6307925 63.5365402,82.6307925 L64.1984159,83.290359 C64.1984159,83.290359 43.6013016,107.04575 39.2343772,120.022559 C42.443736,123.571575 46.7339155,125.159692 50.1595362,126.321151 C47.9699978,114.504469 80.8836921,78.8336819 80.8836921,78.8336819 L80.8836921,78.8336819 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/palette": {
"title": "$:/core/images/palette",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-palette tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M80.2470434,39.1821571 C75.0645698,38.2680897 69.6261555,37.7814854 64.0193999,37.7814854 C28.6624616,37.7814854 0,57.1324214 0,81.0030106 C0,90.644534 4.67604329,99.5487133 12.5805659,106.738252 C23.5031767,91.1899067 26.3405471,72.3946229 36.8885698,63.5622337 C52.0716764,50.8486559 63.4268694,55.7343343 63.4268694,55.7343343 L80.2470434,39.1821571 Z M106.781666,48.8370714 C119.830962,56.749628 128.0388,68.229191 128.0388,81.0030106 C128.0388,90.3534932 128.557501,98.4142085 116.165191,106.082518 C105.367708,112.763955 112.341384,99.546808 104.321443,95.1851533 C96.3015017,90.8234987 84.3749007,96.492742 86.1084305,103.091059 C89.3087234,115.272303 105.529892,114.54645 92.4224435,119.748569 C79.3149955,124.950687 74.2201582,124.224536 64.0193999,124.224536 C56.1979176,124.224536 48.7040365,123.277578 41.7755684,121.544216 C51.620343,117.347916 69.6563669,109.006202 75.129737,102.088562 C82.7876655,92.4099199 87.3713218,80.0000002 83.3235694,72.4837191 C83.1303943,72.1250117 94.5392656,60.81569 106.781666,48.8370714 Z M1.13430476,123.866563 C0.914084026,123.867944 0.693884185,123.868637 0.473712455,123.868637 C33.9526848,108.928928 22.6351223,59.642592 59.2924543,59.6425917 C59.6085574,61.0606542 59.9358353,62.5865065 60.3541977,64.1372318 C34.4465025,59.9707319 36.7873124,112.168427 1.13429588,123.866563 L1.13430476,123.866563 Z M1.84669213,123.859694 C40.7185279,123.354338 79.9985412,101.513051 79.9985401,79.0466836 C70.7284906,79.0466835 65.9257264,75.5670082 63.1833375,71.1051511 C46.585768,64.1019718 32.81846,116.819636 1.84665952,123.859695 L1.84669213,123.859694 Z M67.1980193,59.8524981 C62.748213,63.9666823 72.0838429,76.2846822 78.5155805,71.1700593 C89.8331416,59.8524993 112.468264,37.2173758 123.785825,25.8998146 C135.103386,14.5822535 123.785825,3.26469247 112.468264,14.5822535 C101.150703,25.8998144 
78.9500931,48.9868127 67.1980193,59.8524981 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/permalink-button": {
"title": "$:/core/images/permalink-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-permalink-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M80.4834582,48 L73.0956761,80 L73.0956761,80 L47.5165418,80 L54.9043239,48 L80.4834582,48 Z M84.1773493,32 L89.8007299,7.64246248 C90.7941633,3.33942958 95.0918297,0.64641956 99.3968675,1.64031585 C103.693145,2.63218977 106.385414,6.93288901 105.390651,11.2416793 L100.598215,32 L104.000754,32 C108.411895,32 112,35.581722 112,40 C112,44.4092877 108.418616,48 104.000754,48 L96.9043239,48 L89.5165418,80 L104.000754,80 C108.411895,80 112,83.581722 112,88 C112,92.4092877 108.418616,96 104.000754,96 L85.8226507,96 L80.1992701,120.357538 C79.2058367,124.66057 74.9081703,127.35358 70.6031325,126.359684 C66.3068546,125.36781 63.6145865,121.067111 64.6093491,116.758321 L69.401785,96 L43.8226507,96 L38.1992701,120.357538 C37.2058367,124.66057 32.9081703,127.35358 28.6031325,126.359684 C24.3068546,125.36781 21.6145865,121.067111 22.6093491,116.758321 L27.401785,96 L23.9992458,96 C19.5881049,96 16,92.418278 16,88 C16,83.5907123 19.5813843,80 23.9992458,80 L31.0956761,80 L38.4834582,48 L23.9992458,48 C19.5881049,48 16,44.418278 16,40 C16,35.5907123 19.5813843,32 23.9992458,32 L42.1773493,32 L47.8007299,7.64246248 C48.7941633,3.33942958 53.0918297,0.64641956 57.3968675,1.64031585 C61.6931454,2.63218977 64.3854135,6.93288901 63.3906509,11.2416793 L58.598215,32 L84.1773493,32 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/permaview-button": {
"title": "$:/core/images/permaview-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-permaview-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M81.4834582,48 L79.6365127,56 L79.6365127,56 L74.0573784,56 L75.9043239,48 L81.4834582,48 Z M85.1773493,32 L90.8007299,7.64246248 C91.7941633,3.33942958 96.0918297,0.64641956 100.396867,1.64031585 C104.693145,2.63218977 107.385414,6.93288901 106.390651,11.2416793 L101.598215,32 L104.000754,32 C108.411895,32 112,35.581722 112,40 C112,44.4092877 108.418616,48 104.000754,48 L97.9043239,48 L96.0573784,56 L104.000754,56 C108.411895,56 112,59.581722 112,64 C112,68.4092877 108.418616,72 104.000754,72 L92.3634873,72 L90.5165418,80 L104.000754,80 C108.411895,80 112,83.581722 112,88 C112,92.4092877 108.418616,96 104.000754,96 L86.8226507,96 L81.1992701,120.357538 C80.2058367,124.66057 75.9081703,127.35358 71.6031325,126.359684 C67.3068546,125.36781 64.6145865,121.067111 65.6093491,116.758321 L70.401785,96 L64.8226507,96 L59.1992701,120.357538 C58.2058367,124.66057 53.9081703,127.35358 49.6031325,126.359684 C45.3068546,125.36781 42.6145865,121.067111 43.6093491,116.758321 L48.401785,96 L42.8226507,96 L37.1992701,120.357538 C36.2058367,124.66057 31.9081703,127.35358 27.6031325,126.359684 C23.3068546,125.36781 20.6145865,121.067111 21.6093491,116.758321 L26.401785,96 L23.9992458,96 C19.5881049,96 16,92.418278 16,88 C16,83.5907123 19.5813843,80 23.9992458,80 L30.0956761,80 L31.9426216,72 L23.9992458,72 C19.5881049,72 16,68.418278 16,64 C16,59.5907123 19.5813843,56 23.9992458,56 L35.6365127,56 L37.4834582,48 L23.9992458,48 C19.5881049,48 16,44.418278 16,40 C16,35.5907123 19.5813843,32 23.9992458,32 L41.1773493,32 L46.8007299,7.64246248 C47.7941633,3.33942958 52.0918297,0.64641956 56.3968675,1.64031585 C60.6931454,2.63218977 63.3854135,6.93288901 62.3906509,11.2416793 L57.598215,32 L63.1773493,32 L68.8007299,7.64246248 C69.7941633,3.33942958 74.0918297,0.64641956 78.3968675,1.64031585 C82.6931454,2.63218977 
85.3854135,6.93288901 84.3906509,11.2416793 L79.598215,32 L85.1773493,32 Z M53.9043239,48 L52.0573784,56 L57.6365127,56 L59.4834582,48 L53.9043239,48 Z M75.9426216,72 L74.0956761,80 L74.0956761,80 L68.5165418,80 L70.3634873,72 L75.9426216,72 L75.9426216,72 Z M48.3634873,72 L46.5165418,80 L52.0956761,80 L53.9426216,72 L48.3634873,72 L48.3634873,72 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/picture": {
"title": "$:/core/images/picture",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-picture tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M112,68.2332211 L112,20.0027785 C112,17.7898769 110.207895,16 107.997221,16 L20.0027785,16 C17.7898769,16 16,17.792105 16,20.0027785 L16,58.312373 L25.2413115,43.7197989 C28.041793,39.297674 34.2643908,38.7118128 37.8410347,42.5335275 L56.0882845,63.1470817 L69.7748997,56.7400579 C72.766567,55.3552503 76.3013751,55.9473836 78.678437,58.2315339 C78.8106437,58.3585731 79.0742301,58.609836 79.4527088,58.9673596 C80.0910923,59.570398 80.8117772,60.2441563 81.598127,60.9705595 C83.8422198,63.043576 86.1541548,65.1151944 88.3956721,67.0372264 C89.1168795,67.6556396 89.8200801,68.2492007 90.5021258,68.8146755 C92.6097224,70.5620551 94.4693308,72.0029474 95.9836366,73.0515697 C96.7316295,73.5695379 97.3674038,73.9719282 98.0281481,74.3824999 C98.4724987,74.4989557 99.0742374,74.5263881 99.8365134,74.4317984 C101.709944,74.1993272 104.074502,73.2878514 106.559886,71.8846196 C107.705822,71.2376318 108.790494,70.5370325 109.764561,69.8410487 C110.323259,69.4418522 110.694168,69.1550757 110.834827,69.0391868 C111.210545,68.7296319 111.600264,68.4615815 112,68.2332211 L112,68.2332211 Z M0,8.00697327 C0,3.58484404 3.59075293,0 8.00697327,0 L119.993027,0 C124.415156,0 128,3.59075293 128,8.00697327 L128,119.993027 C128,124.415156 124.409247,128 119.993027,128 L8.00697327,128 C3.58484404,128 0,124.409247 0,119.993027 L0,8.00697327 L0,8.00697327 Z M95,42 C99.418278,42 103,38.418278 103,34 C103,29.581722 99.418278,26 95,26 C90.581722,26 87,29.581722 87,34 C87,38.418278 90.581722,42 95,42 L95,42 Z M32,76 C47.8587691,80.8294182 52.0345556,83.2438712 52.0345556,88 C52.0345556,92.7561288 32,95.4712486 32,102.347107 C32,109.222965 33.2849191,107.337637 33.2849191,112 L67.999999,112 C67.999999,112 54.3147136,105.375255 54.3147136,101.200691 C54.3147136,93.535181 64.9302432,92.860755 64.9302432,88 C64.9302432,80.6425555 50.8523779,79.167282 
32,76 L32,76 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/plugin-generic-language": {
"title": "$:/core/images/plugin-generic-language",
"tags": "$:/tags/Image",
"text": "<svg width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M61.2072232,68.1369825 C56.8829239,70.9319564 54.2082892,74.793177 54.2082892,79.0581634 C54.2082892,86.9638335 63.3980995,93.4821994 75.2498076,94.3940006 C77.412197,98.2964184 83.8475284,101.178858 91.5684735,101.403106 C86.4420125,100.27851 82.4506393,97.6624107 80.9477167,94.3948272 C92.8046245,93.4861461 102,86.9662269 102,79.0581634 C102,70.5281905 91.3014611,63.6132813 78.1041446,63.6132813 C71.5054863,63.6132813 65.5315225,65.3420086 61.2072232,68.1369825 Z M74.001066,53.9793443 C69.6767667,56.7743182 63.7028029,58.5030456 57.1041446,58.5030456 C54.4851745,58.5030456 51.9646095,58.2307276 49.6065315,57.7275105 C46.2945155,59.9778212 41.2235699,61.4171743 35.5395922,61.4171743 C35.4545771,61.4171743 35.3696991,61.4168523 35.2849622,61.4162104 C39.404008,60.5235193 42.7961717,58.6691298 44.7630507,56.286533 C37.8379411,53.5817651 33.2082892,48.669413 33.2082892,43.0581634 C33.2082892,34.5281905 43.9068281,27.6132812 57.1041446,27.6132812 C70.3014611,27.6132812 81,34.5281905 81,43.0581634 C81,47.3231498 78.3253653,51.1843704 74.001066,53.9793443 Z M64,0 L118.5596,32 L118.5596,96 L64,128 L9.44039956,96 L9.44039956,32 L64,0 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/plugin-generic-plugin": {
"title": "$:/core/images/plugin-generic-plugin",
"tags": "$:/tags/Image",
"text": "<svg width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M40.3972881,76.4456988 L40.3972881,95.3404069 L54.5170166,95.3404069 L54.5170166,95.3404069 C54.5165526,95.3385183 54.516089,95.3366295 54.515626,95.3347404 C54.6093153,95.3385061 54.7034848,95.3404069 54.7980982,95.3404069 C58.6157051,95.3404069 61.710487,92.245625 61.710487,88.4280181 C61.710487,86.6197822 61.01617,84.9737128 59.8795929,83.7418666 L59.8795929,83.7418666 C59.8949905,83.7341665 59.9104102,83.7265043 59.925852,83.7188798 C58.8840576,82.5086663 58.2542926,80.9336277 58.2542926,79.2114996 C58.2542926,75.3938927 61.3490745,72.2991108 65.1666814,72.2991108 C68.9842884,72.2991108 72.0790703,75.3938927 72.0790703,79.2114996 C72.0790703,81.1954221 71.2432806,82.9841354 69.9045961,84.2447446 L69.9045961,84.2447446 C69.9333407,84.2629251 69.9619885,84.281245 69.9905383,84.2997032 L69.9905383,84.2997032 C69.1314315,85.4516923 68.6228758,86.8804654 68.6228758,88.4280181 C68.6228758,91.8584969 71.1218232,94.7053153 74.3986526,95.2474079 C74.3913315,95.2784624 74.3838688,95.3094624 74.3762652,95.3404069 L95.6963988,95.3404069 L95.6963988,75.5678578 L95.6963988,75.5678578 C95.6466539,75.5808558 95.5967614,75.5934886 95.5467242,75.6057531 C95.5504899,75.5120637 95.5523907,75.4178943 95.5523907,75.3232809 C95.5523907,71.505674 92.4576088,68.4108921 88.6400019,68.4108921 C86.831766,68.4108921 85.1856966,69.105209 83.9538504,70.2417862 L83.9538504,70.2417862 C83.9461503,70.2263886 83.938488,70.2109688 83.9308636,70.1955271 C82.7206501,71.2373215 81.1456115,71.8670865 79.4234834,71.8670865 C75.6058765,71.8670865 72.5110946,68.7723046 72.5110946,64.9546976 C72.5110946,61.1370907 75.6058765,58.0423088 79.4234834,58.0423088 C81.4074059,58.0423088 83.1961192,58.8780985 84.4567284,60.2167829 L84.4567284,60.2167829 C84.4749089,60.1880383 84.4932288,60.1593906 84.511687,60.1308407 L84.511687,60.1308407 C85.6636761,60.9899475 87.0924492,61.4985032 
88.6400019,61.4985032 C92.0704807,61.4985032 94.9172991,58.9995558 95.4593917,55.7227265 C95.538755,55.7414363 95.6177614,55.761071 95.6963988,55.7816184 L95.6963988,40.0412962 L74.3762652,40.0412962 L74.3762652,40.0412962 C74.3838688,40.0103516 74.3913315,39.9793517 74.3986526,39.9482971 L74.3986526,39.9482971 C71.1218232,39.4062046 68.6228758,36.5593862 68.6228758,33.1289073 C68.6228758,31.5813547 69.1314315,30.1525815 69.9905383,29.0005925 C69.9619885,28.9821342 69.9333407,28.9638143 69.9045961,28.9456339 C71.2432806,27.6850247 72.0790703,25.8963113 72.0790703,23.9123888 C72.0790703,20.0947819 68.9842884,17 65.1666814,17 C61.3490745,17 58.2542926,20.0947819 58.2542926,23.9123888 C58.2542926,25.6345169 58.8840576,27.2095556 59.925852,28.419769 L59.925852,28.419769 C59.9104102,28.4273935 59.8949905,28.4350558 59.8795929,28.4427558 C61.01617,29.674602 61.710487,31.3206715 61.710487,33.1289073 C61.710487,36.9465143 58.6157051,40.0412962 54.7980982,40.0412962 C54.7034848,40.0412962 54.6093153,40.0393953 54.515626,40.0356296 L54.515626,40.0356296 C54.516089,40.0375187 54.5165526,40.0394075 54.5170166,40.0412962 L40.3972881,40.0412962 L40.3972881,52.887664 L40.3972881,52.887664 C40.4916889,53.3430132 40.5412962,53.8147625 40.5412962,54.2980982 C40.5412962,58.1157051 37.4465143,61.210487 33.6289073,61.210487 C32.0813547,61.210487 30.6525815,60.7019313 29.5005925,59.8428245 C29.4821342,59.8713744 29.4638143,59.9000221 29.4456339,59.9287667 C28.1850247,58.5900823 26.3963113,57.7542926 24.4123888,57.7542926 C20.5947819,57.7542926 17.5,60.8490745 17.5,64.6666814 C17.5,68.4842884 20.5947819,71.5790703 24.4123888,71.5790703 C26.134517,71.5790703 27.7095556,70.9493053 28.919769,69.9075109 L28.919769,69.9075109 C28.9273935,69.9229526 28.9350558,69.9383724 28.9427558,69.95377 C30.174602,68.8171928 31.8206715,68.1228758 33.6289073,68.1228758 C37.4465143,68.1228758 40.5412962,71.2176578 40.5412962,75.0352647 C40.5412962,75.5186004 40.4916889,75.9903496 40.3972881,76.4456988 Z 
M64,0 L118.5596,32 L118.5596,96 L64,128 L9.44039956,96 L9.44039956,32 L64,0 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/plugin-generic-theme": {
"title": "$:/core/images/plugin-generic-theme",
"tags": "$:/tags/Image",
"text": "<svg width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M29.4078519,91.4716406 L51.4693474,69.4101451 L51.4646675,69.4054652 C50.5969502,68.5377479 50.5929779,67.1348725 51.4693474,66.2585029 C52.3396494,65.3882009 53.7499654,65.3874786 54.6163097,66.2538229 L64.0805963,75.7181095 C64.9483136,76.5858268 64.9522859,77.9887022 64.0759163,78.8650718 C63.2056143,79.7353737 61.7952984,79.736096 60.9289541,78.8697517 L60.9242741,78.8650718 L60.9242741,78.8650718 L38.8627786,100.926567 C36.2518727,103.537473 32.0187578,103.537473 29.4078519,100.926567 C26.796946,98.3156614 26.796946,94.0825465 29.4078519,91.4716406 Z M60.8017407,66.3810363 C58.3659178,63.6765806 56.3370667,61.2899536 54.9851735,59.5123615 C48.1295381,50.4979488 44.671561,55.2444054 40.7586738,59.5123614 C36.8457866,63.7803174 41.789473,67.2384487 38.0759896,70.2532832 C34.3625062,73.2681177 34.5917646,74.3131575 28.3243876,68.7977024 C22.0570105,63.2822473 21.6235306,61.7636888 24.5005999,58.6166112 C27.3776691,55.4695337 29.7823103,60.4247912 35.6595047,54.8320442 C41.5366991,49.2392972 36.5996215,44.2825646 36.5996215,44.2825646 C36.5996215,44.2825646 48.8365511,19.267683 65.1880231,21.1152173 C81.5394952,22.9627517 59.0022276,18.7228947 53.3962199,38.3410355 C50.9960082,46.7405407 53.8429162,44.7613399 58.3941742,48.3090467 C59.7875202,49.3951602 64.4244828,52.7100463 70.1884353,56.9943417 L90.8648751,36.3179019 L92.4795866,31.5515482 L100.319802,26.8629752 L103.471444,30.0146174 L98.782871,37.8548326 L94.0165173,39.4695441 L73.7934912,59.6925702 C86.4558549,69.2403631 102.104532,81.8392557 102.104532,86.4016913 C102.104533,93.6189834 99.0337832,97.9277545 92.5695848,95.5655717 C87.8765989,93.8506351 73.8015497,80.3744087 63.8173444,69.668717 L60.9242741,72.5617873 L57.7726319,69.4101451 L60.8017407,66.3810363 L60.8017407,66.3810363 Z M63.9533761,1.42108547e-13 L118.512977,32 L118.512977,96 L63.9533761,128 L9.39377563,96 L9.39377563,32 
L63.9533761,1.42108547e-13 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/preview-closed": {
"title": "$:/core/images/preview-closed",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-preview-closed tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M0.0881363238,64 C-0.210292223,65.8846266 0.249135869,67.8634737 1.4664206,69.4579969 C16.2465319,88.8184886 39.1692554,100.414336 64,100.414336 C88.8307446,100.414336 111.753468,88.8184886 126.533579,69.4579969 C127.750864,67.8634737 128.210292,65.8846266 127.911864,64 C110.582357,78.4158332 88.3036732,87.0858436 64,87.0858436 C39.6963268,87.0858436 17.4176431,78.4158332 0.0881363238,64 Z\"></path>\n <rect x=\"62\" y=\"96\" width=\"4\" height=\"16\" rx=\"4\"></rect>\n <rect transform=\"translate(80.000000, 101.000000) rotate(-5.000000) translate(-80.000000, -101.000000) \" x=\"78\" y=\"93\" width=\"4\" height=\"16\" rx=\"4\"></rect>\n <rect transform=\"translate(48.000000, 101.000000) rotate(-355.000000) translate(-48.000000, -101.000000) \" x=\"46\" y=\"93\" width=\"4\" height=\"16\" rx=\"4\"></rect>\n <rect transform=\"translate(32.000000, 96.000000) rotate(-350.000000) translate(-32.000000, -96.000000) \" x=\"30\" y=\"88\" width=\"4\" height=\"16\" rx=\"4\"></rect>\n <rect transform=\"translate(96.000000, 96.000000) rotate(-10.000000) translate(-96.000000, -96.000000) \" x=\"94\" y=\"88\" width=\"4\" height=\"16\" rx=\"4\"></rect>\n <rect transform=\"translate(112.000000, 88.000000) rotate(-20.000000) translate(-112.000000, -88.000000) \" x=\"110\" y=\"80\" width=\"4\" height=\"16\" rx=\"4\"></rect>\n <rect transform=\"translate(16.000000, 88.000000) rotate(-340.000000) translate(-16.000000, -88.000000) \" x=\"14\" y=\"80\" width=\"4\" height=\"16\" rx=\"4\"></rect>\n </g>\n</svg>"
},
"$:/core/images/preview-open": {
"title": "$:/core/images/preview-open",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-preview-open tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M64.1099282,99.5876785 C39.2791836,99.5876785 16.3564602,87.9918313 1.57634884,68.6313396 C-0.378878622,66.070184 -0.378878622,62.5174945 1.57634884,59.9563389 C16.3564602,40.5958472 39.2791836,29 64.1099282,29 C88.9406729,29 111.863396,40.5958472 126.643508,59.9563389 C128.598735,62.5174945 128.598735,66.070184 126.643508,68.6313396 C111.863396,87.9918313 88.9406729,99.5876785 64.1099282,99.5876785 Z M110.213805,67.5808331 C111.654168,66.0569335 111.654168,63.9430665 110.213805,62.4191669 C99.3257042,50.8995835 82.4391647,44 64.1470385,44 C45.8549124,44 28.9683729,50.8995835 18.0802717,62.4191669 C16.6399094,63.9430665 16.6399094,66.0569335 18.0802717,67.5808331 C28.9683729,79.1004165 45.8549124,86 64.1470385,86 C82.4391647,86 99.3257042,79.1004165 110.213805,67.5808331 Z\"></path>\n <path d=\"M63.5,88 C76.4786916,88 87,77.4786916 87,64.5 C87,51.5213084 76.4786916,41 63.5,41 C50.5213084,41 40,51.5213084 40,64.5 C40,77.4786916 50.5213084,88 63.5,88 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/quote": {
"title": "$:/core/images/quote",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-quote tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M51.2188077,117.712501 L51.2188077,62.1993386 L27.4274524,62.1993386 C27.4274524,53.3075754 29.1096526,45.797753 32.4741035,39.669646 C35.8385544,33.541539 42.0867267,28.9154883 51.2188077,25.7913554 L51.2188077,2 C43.7689521,2.96127169 36.8599155,5.18417913 30.4914905,8.668789 C24.1230656,12.1533989 18.6559149,16.5391352 14.0898743,21.8261295 C9.52383382,27.1131238 5.97919764,33.2411389 3.45585945,40.2103586 C0.932521268,47.1795784 -0.208971741,54.6293222 0.0313461819,62.5598136 L0.0313461819,117.712501 L51.2188077,117.712501 Z M128,117.712501 L128,62.1993386 L104.208645,62.1993386 C104.208645,53.3075754 105.890845,45.797753 109.255296,39.669646 C112.619747,33.541539 118.867919,28.9154883 128,25.7913554 L128,2 C120.550144,2.96127169 113.641108,5.18417913 107.272683,8.668789 C100.904258,12.1533989 95.4371072,16.5391352 90.8710666,21.8261295 C86.3050261,27.1131238 82.7603899,33.2411389 80.2370517,40.2103586 C77.7137136,47.1795784 76.5722206,54.6293222 76.8125385,62.5598136 L76.8125385,117.712501 L128,117.712501 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/refresh-button": {
"title": "$:/core/images/refresh-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-refresh-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M106.369002,39.4325143 C116.529932,60.3119371 112.939592,86.1974934 95.5979797,103.539105 C73.7286194,125.408466 38.2713806,125.408466 16.4020203,103.539105 C-5.46734008,81.6697449 -5.46734008,46.2125061 16.4020203,24.3431458 C19.5262146,21.2189514 24.5915344,21.2189514 27.7157288,24.3431458 C30.8399231,27.4673401 30.8399231,32.5326599 27.7157288,35.6568542 C12.0947571,51.2778259 12.0947571,76.6044251 27.7157288,92.2253967 C43.3367004,107.846368 68.6632996,107.846368 84.2842712,92.2253967 C97.71993,78.7897379 99.5995262,58.1740623 89.9230597,42.729491 L83.4844861,54.9932839 C81.4307001,58.9052072 76.5945372,60.4115251 72.682614,58.3577391 C68.7706907,56.3039532 67.2643728,51.4677903 69.3181587,47.555867 L84.4354914,18.7613158 C86.4966389,14.8353707 91.3577499,13.3347805 95.273202,15.415792 L124.145886,30.7612457 C128.047354,32.8348248 129.52915,37.6785572 127.455571,41.5800249 C125.381992,45.4814927 120.53826,46.9632892 116.636792,44.8897102 L106.369002,39.4325143 Z M98.1470904,27.0648707 C97.9798954,26.8741582 97.811187,26.6843098 97.6409651,26.4953413 L98.6018187,26.1987327 L98.1470904,27.0648707 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/right-arrow": {
"title": "$:/core/images/right-arrow",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-right-arrow tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <path d=\"M80.3563798,109.353315 C78.9238993,110.786918 76.9450203,111.675144 74.7592239,111.675144 L-4.40893546,111.675144 C-8.77412698,111.675144 -12.3248558,108.130732 -12.3248558,103.758478 C-12.3248558,99.3951199 -8.78077754,95.8418109 -4.40893546,95.8418109 L66.8418109,95.8418109 L66.8418109,24.5910645 C66.8418109,20.225873 70.3862233,16.6751442 74.7584775,16.6751442 C79.1218352,16.6751442 82.6751442,20.2192225 82.6751442,24.5910645 L82.6751442,103.759224 C82.6751442,105.941695 81.7891419,107.920575 80.3566508,109.353886 Z\" transform=\"translate(35.175144, 64.175144) rotate(-45.000000) translate(-35.175144, -64.175144) \"></path>\n</svg>"
},
"$:/core/images/save-button": {
"title": "$:/core/images/save-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-save-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <g fill-rule=\"evenodd\">\n <path d=\"M120.78304,34.329058 C125.424287,43.1924006 128.049406,53.2778608 128.049406,63.9764502 C128.049406,99.3226742 99.3956295,127.97645 64.0494055,127.97645 C28.7031816,127.97645 0.0494055385,99.3226742 0.0494055385,63.9764502 C0.0494055385,28.6302262 28.7031816,-0.0235498012 64.0494055,-0.0235498012 C82.8568763,-0.0235498012 99.769563,8.08898558 111.479045,21.0056358 L114.159581,18.3250998 C117.289194,15.1954866 122.356036,15.1939641 125.480231,18.3181584 C128.598068,21.4359957 128.601317,26.5107804 125.473289,29.6388083 L120.78304,34.329058 Z M108.72451,46.3875877 C110.870571,51.8341374 112.049406,57.767628 112.049406,63.9764502 C112.049406,90.4861182 90.5590735,111.97645 64.0494055,111.97645 C37.5397375,111.97645 16.0494055,90.4861182 16.0494055,63.9764502 C16.0494055,37.4667822 37.5397375,15.9764502 64.0494055,15.9764502 C78.438886,15.9764502 91.3495036,22.308215 100.147097,32.3375836 L58.9411255,73.5435552 L41.975581,56.5780107 C38.8486152,53.4510448 33.7746915,53.4551552 30.6568542,56.5729924 C27.5326599,59.6971868 27.5372202,64.7670668 30.6618725,67.8917192 L53.279253,90.5090997 C54.8435723,92.073419 56.8951519,92.8541315 58.9380216,92.8558261 C60.987971,92.8559239 63.0389578,92.0731398 64.6049211,90.5071765 L108.72451,46.3875877 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/size": {
"title": "$:/core/images/size",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-size tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <path d=\"M92.3431458,26 L83.1715729,35.1715729 C81.6094757,36.73367 81.6094757,39.26633 83.1715729,40.8284271 C84.73367,42.3905243 87.26633,42.3905243 88.8284271,40.8284271 L104.828427,24.8284271 C106.390524,23.26633 106.390524,20.73367 104.828427,19.1715729 L88.8284271,3.17157288 C87.26633,1.60947571 84.73367,1.60947571 83.1715729,3.17157288 C81.6094757,4.73367004 81.6094757,7.26632996 83.1715729,8.82842712 L92.3431457,18 L22,18 C19.790861,18 18,19.790861 18,22 L18,92.3431458 L8.82842712,83.1715729 C7.26632996,81.6094757 4.73367004,81.6094757 3.17157288,83.1715729 C1.60947571,84.73367 1.60947571,87.26633 3.17157288,88.8284271 L19.1715729,104.828427 C20.73367,106.390524 23.26633,106.390524 24.8284271,104.828427 L40.8284271,88.8284271 C42.3905243,87.26633 42.3905243,84.73367 40.8284271,83.1715729 C39.26633,81.6094757 36.73367,81.6094757 35.1715729,83.1715729 L26,92.3431458 L26,22 L22,26 L92.3431458,26 L92.3431458,26 Z M112,52 L112,116 L116,112 L52,112 C49.790861,112 48,113.790861 48,116 C48,118.209139 49.790861,120 52,120 L116,120 C118.209139,120 120,118.209139 120,116 L120,52 C120,49.790861 118.209139,48 116,48 C113.790861,48 112,49.790861 112,52 L112,52 Z\"></path>\n</svg>"
},
"$:/core/images/spiral": {
"title": "$:/core/images/spiral",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-spiral tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"nonzero\">\n <path d=\"M64.534 68.348c3.39 0 6.097-2.62 6.476-5.968l-4.755-.538 4.75.583c.377-3.07-1.194-6.054-3.89-7.78-2.757-1.773-6.34-2.01-9.566-.7-3.46 1.403-6.14 4.392-7.35 8.148l-.01.026c-1.3 4.08-.72 8.64 1.58 12.52 2.5 4.2 6.77 7.2 11.76 8.27 5.37 1.15 11.11-.05 15.83-3.31 5.04-3.51 8.46-9.02 9.45-15.3 1.05-6.7-.72-13.63-4.92-19.19l.02.02c-4.42-5.93-11.2-9.82-18.78-10.78-7.96-1.01-16.13 1.31-22.59 6.43-6.81 5.39-11.18 13.41-12.11 22.26-.98 9.27 1.87 18.65 7.93 26.02 6.32 7.69 15.6 12.56 25.74 13.48 10.54.96 21.15-2.42 29.45-9.4l.01-.01c8.58-7.25 13.94-17.78 14.86-29.21.94-11.84-2.96-23.69-10.86-32.9-8.19-9.5-19.95-15.36-32.69-16.27-13.16-.94-26.24 3.49-36.34 12.34l.01-.01c-10.41 9.08-16.78 22.1-17.68 36.15-.93 14.44 4.03 28.77 13.79 39.78 10.03 11.32 24.28 18.2 39.6 19.09 15.73.92 31.31-4.56 43.24-15.234 12.23-10.954 19.61-26.44 20.5-43.074.14-2.64-1.89-4.89-4.52-5.03-2.64-.14-4.89 1.88-5.03 4.52-.75 14.1-7 27.2-17.33 36.45-10.03 8.98-23.11 13.58-36.3 12.81-12.79-.75-24.67-6.48-33-15.89-8.07-9.11-12.17-20.94-11.41-32.827.74-11.52 5.942-22.15 14.43-29.54l.01-.01c8.18-7.17 18.74-10.75 29.35-9.998 10.21.726 19.6 5.41 26.11 12.96 6.24 7.273 9.32 16.61 8.573 25.894-.718 8.9-4.88 17.064-11.504 22.66l.01-.007c-6.36 5.342-14.44 7.92-22.425 7.19-7.604-.68-14.52-4.314-19.21-10.027-4.44-5.4-6.517-12.23-5.806-18.94.67-6.3 3.76-11.977 8.54-15.766 4.46-3.54 10.05-5.128 15.44-4.44 5.03.63 9.46 3.18 12.32 7.01l.02.024c2.65 3.5 3.75 7.814 3.1 11.92-.59 3.71-2.58 6.925-5.45 8.924-2.56 1.767-5.61 2.403-8.38 1.81-2.42-.516-4.42-1.92-5.53-3.79-.93-1.56-1.15-3.3-.69-4.75l-4.56-1.446L59.325 65c.36-1.12 1.068-1.905 1.84-2.22.25-.103.48-.14.668-.13.06.006.11.015.14.025.01 0 .01 0-.01-.01-.02-.015-.054-.045-.094-.088-.06-.064-.12-.145-.17-.244-.15-.29-.23-.678-.18-1.11l-.005.04c.15-1.332 1.38-2.523 3.035-2.523-2.65 0-4.79 2.144-4.79 4.787s2.14 4.785 4.78 
4.785z\"></path>\n </g>\n</svg>"
},
"$:/core/images/stamp": {
"title": "$:/core/images/stamp",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-stamp tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M49.7334301,64 L16.0098166,64 C11.5838751,64 8,67.5829053 8,72.002643 L8,74.4986785 L8,97 L120,97 L120,74.4986785 L120,72.002643 C120,67.5737547 116.413883,64 111.990183,64 L78.2665699,64 C76.502049,60.7519149 75.5,57.0311962 75.5,53.0769231 C75.5,46.6017951 78.1869052,40.7529228 82.5087769,36.5800577 C85.3313113,32.7688808 87,28.0549983 87,22.952183 C87,10.2760423 76.7025492,0 64,0 C51.2974508,0 41,10.2760423 41,22.952183 C41,28.0549983 42.6686887,32.7688808 45.4912231,36.5800577 C49.8130948,40.7529228 52.5,46.6017951 52.5,53.0769231 C52.5,57.0311962 51.497951,60.7519149 49.7334301,64 Z M8,104 L120,104 L120,112 L8,112 L8,104 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/star-filled": {
"title": "$:/core/images/star-filled",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-star-filled tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"nonzero\">\n <path d=\"M61.8361286,96.8228569 L99.1627704,124.110219 C101.883827,126.099427 105.541968,123.420868 104.505636,120.198072 L90.2895569,75.9887263 L89.0292911,79.8977279 L126.314504,52.5528988 C129.032541,50.5595011 127.635256,46.2255025 124.273711,46.2229134 L78.1610486,46.1873965 L81.4604673,48.6032923 L67.1773543,4.41589688 C66.1361365,1.19470104 61.6144265,1.19470104 60.5732087,4.41589688 L46.2900957,48.6032923 L49.5895144,46.1873965 L3.47685231,46.2229134 C0.115307373,46.2255025 -1.28197785,50.5595011 1.43605908,52.5528988 L38.7212719,79.8977279 L37.4610061,75.9887263 L23.2449266,120.198072 C22.2085954,123.420868 25.8667356,126.099427 28.5877926,124.110219 L65.9144344,96.8228569 L61.8361286,96.8228569 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/storyview-classic": {
"title": "$:/core/images/storyview-classic",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-storyview-classic tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M8.00697327,0 C3.58484404,0 0,3.59075293 0,8.00697327 L0,119.993027 C0,124.415156 3.59075293,128 8.00697327,128 L119.993027,128 C124.415156,128 128,124.409247 128,119.993027 L128,8.00697327 C128,3.58484404 124.409247,0 119.993027,0 L8.00697327,0 L8.00697327,0 Z M23.9992458,16 C19.5813843,16 16,19.5776607 16,23.9924054 L16,40.0075946 C16,44.4216782 19.5881049,48 23.9992458,48 L104.000754,48 C108.418616,48 112,44.4223393 112,40.0075946 L112,23.9924054 C112,19.5783218 108.411895,16 104.000754,16 L23.9992458,16 L23.9992458,16 Z M23.9992458,64 C19.5813843,64 16,67.5907123 16,72 C16,76.418278 19.5881049,80 23.9992458,80 L104.000754,80 C108.418616,80 112,76.4092877 112,72 C112,67.581722 108.411895,64 104.000754,64 L23.9992458,64 L23.9992458,64 Z M23.9992458,96 C19.5813843,96 16,99.5907123 16,104 C16,108.418278 19.5881049,112 23.9992458,112 L104.000754,112 C108.418616,112 112,108.409288 112,104 C112,99.581722 108.411895,96 104.000754,96 L23.9992458,96 L23.9992458,96 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/storyview-pop": {
"title": "$:/core/images/storyview-pop",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-storyview-pop tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M8.00697327,0 C3.58484404,0 0,3.59075293 0,8.00697327 L0,119.993027 C0,124.415156 3.59075293,128 8.00697327,128 L119.993027,128 C124.415156,128 128,124.409247 128,119.993027 L128,8.00697327 C128,3.58484404 124.409247,0 119.993027,0 L8.00697327,0 L8.00697327,0 Z M23.9992458,16 C19.5813843,16 16,19.5776607 16,23.9924054 L16,40.0075946 C16,44.4216782 19.5881049,48 23.9992458,48 L104.000754,48 C108.418616,48 112,44.4223393 112,40.0075946 L112,23.9924054 C112,19.5783218 108.411895,16 104.000754,16 L23.9992458,16 L23.9992458,16 Z M16.0098166,56 C11.586117,56 8,59.5776607 8,63.9924054 L8,80.0075946 C8,84.4216782 11.5838751,88 16.0098166,88 L111.990183,88 C116.413883,88 120,84.4223393 120,80.0075946 L120,63.9924054 C120,59.5783218 116.416125,56 111.990183,56 L16.0098166,56 L16.0098166,56 Z M23.9992458,96 C19.5813843,96 16,99.5907123 16,104 C16,108.418278 19.5881049,112 23.9992458,112 L104.000754,112 C108.418616,112 112,108.409288 112,104 C112,99.581722 108.411895,96 104.000754,96 L23.9992458,96 L23.9992458,96 Z M23.9992458,64 C19.5813843,64 16,67.5907123 16,72 C16,76.418278 19.5881049,80 23.9992458,80 L104.000754,80 C108.418616,80 112,76.4092877 112,72 C112,67.581722 108.411895,64 104.000754,64 L23.9992458,64 L23.9992458,64 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/storyview-zoomin": {
"title": "$:/core/images/storyview-zoomin",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-storyview-zoomin tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M8.00697327,0 C3.58484404,0 0,3.59075293 0,8.00697327 L0,119.993027 C0,124.415156 3.59075293,128 8.00697327,128 L119.993027,128 C124.415156,128 128,124.409247 128,119.993027 L128,8.00697327 C128,3.58484404 124.409247,0 119.993027,0 L8.00697327,0 L8.00697327,0 Z M23.9992458,16 C19.5813843,16 16,19.578055 16,24.0085154 L16,71.9914846 C16,76.4144655 19.5881049,80 23.9992458,80 L104.000754,80 C108.418616,80 112,76.421945 112,71.9914846 L112,24.0085154 C112,19.5855345 108.411895,16 104.000754,16 L23.9992458,16 L23.9992458,16 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/strikethrough": {
"title": "$:/core/images/strikethrough",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-strikethrough tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M92.793842,38.7255689 L108.215529,38.7255689 C107.987058,31.985687 106.70193,26.1883331 104.360107,21.3333333 C102.018284,16.4783336 98.8197436,12.4516001 94.7643909,9.25301205 C90.7090382,6.05442399 85.9969032,3.71263572 80.6278447,2.22757697 C75.2587862,0.742518233 69.4328739,0 63.1499331,0 C57.552404,0 52.0977508,0.713959839 46.7858099,2.14190094 C41.473869,3.56984203 36.7331757,5.74027995 32.5635877,8.65327979 C28.3939997,11.5662796 25.0526676,15.2788708 22.5394913,19.7911647 C20.026315,24.3034585 18.7697456,29.6438781 18.7697456,35.8125837 C18.7697456,41.4101128 19.883523,46.0651309 22.1111111,49.7777778 C24.3386992,53.4904246 27.3087722,56.5176144 31.021419,58.8594378 C34.7340659,61.2012612 38.9321497,63.0861151 43.6157965,64.5140562 C48.2994433,65.9419973 53.068695,67.1985666 57.9236948,68.2838019 C62.7786945,69.3690371 67.5479462,70.4256977 72.231593,71.4538153 C76.9152398,72.4819329 81.1133237,73.8241773 84.8259705,75.480589 C88.5386174,77.1370007 91.5086903,79.2788802 93.7362784,81.9062918 C95.9638666,84.5337035 97.0776439,87.9607107 97.0776439,92.1874163 C97.0776439,96.6425926 96.1637753,100.298067 94.3360107,103.153949 C92.5082461,106.009831 90.109341,108.265944 87.1392236,109.922356 C84.1691061,111.578768 80.827774,112.749662 77.1151272,113.435074 C73.4024803,114.120485 69.7184476,114.463186 66.0629183,114.463186 C61.4935068,114.463186 57.0383974,113.892018 52.6974565,112.749665 C48.3565156,111.607312 44.5582492,109.836692 41.3025435,107.437751 C38.0468378,105.03881 35.4194656,101.983062 33.4203481,98.270415 C31.4212305,94.5577681 30.4216867,90.1312171 30.4216867,84.9906292 L15,84.9906292 C15,92.4159229 16.3422445,98.8415614 19.0267738,104.267738 C21.711303,109.693914 25.3667774,114.149023 29.9933066,117.633199 C34.6198357,121.117376 39.9888137,123.71619 46.1004016,125.429719 
C52.2119895,127.143248 58.6947448,128 65.5488621,128 C71.1463912,128 76.7723948,127.343157 82.4270415,126.029451 C88.0816882,124.715745 93.1936407,122.602424 97.7630522,119.689424 C102.332464,116.776425 106.073613,113.006717 108.986613,108.380187 C111.899613,103.753658 113.356091,98.1847715 113.356091,91.6733601 C113.356091,85.6188899 112.242314,80.5926126 110.014726,76.5943775 C107.787137,72.5961424 104.817065,69.2833688 101.104418,66.6559572 C97.3917708,64.0285455 93.193687,61.9437828 88.5100402,60.4016064 C83.8263934,58.85943 79.0571416,57.5171855 74.2021419,56.3748327 C69.3471422,55.2324798 64.5778904,54.1758192 59.8942436,53.2048193 C55.2105968,52.2338193 51.012513,51.0058084 47.2998661,49.5207497 C43.5872193,48.0356909 40.6171463,46.1222786 38.3895582,43.7804552 C36.1619701,41.4386318 35.0481928,38.3828836 35.0481928,34.6131191 C35.0481928,30.6148841 35.8192694,27.273552 37.3614458,24.5890228 C38.9036222,21.9044935 40.9598265,19.762614 43.5301205,18.1633199 C46.1004145,16.5640259 49.041929,15.4216902 52.3547523,14.7362784 C55.6675757,14.0508667 59.0374661,13.708166 62.4645248,13.708166 C70.9179361,13.708166 77.8576257,15.6786952 83.2838019,19.6198126 C88.709978,23.56093 91.8799597,29.9294518 92.793842,38.7255689 L92.793842,38.7255689 Z\"></path>\n <rect x=\"5\" y=\"54\" width=\"118\" height=\"16\"></rect>\n </g>\n</svg>"
},
"$:/core/images/subscript": {
"title": "$:/core/images/subscript",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-subscript tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M2.27170276,16 L22.1825093,16 L43.8305003,49.6746527 L66.4138983,16 L85.1220387,16 L53.5854592,61.9685735 L87.3937414,111.411516 L67.0820462,111.411516 L43.295982,74.9306422 L19.1090291,111.411516 L0,111.411516 L33.8082822,61.9685735 L2.27170276,16 Z M127.910914,128.411516 L85.3276227,128.411516 C85.3870139,123.24448 86.6342108,118.730815 89.0692508,114.870386 C91.5042907,111.009956 94.8301491,107.654403 99.0469256,104.803624 C101.066227,103.318844 103.174584,101.878629 105.372059,100.482935 C107.569534,99.0872413 109.588805,97.5876355 111.429933,95.9840726 C113.271061,94.3805097 114.785514,92.6433426 115.973338,90.7725192 C117.161163,88.9016958 117.784761,86.7487964 117.844152,84.3137564 C117.844152,83.1853233 117.710524,81.9826691 117.443264,80.7057579 C117.176003,79.4288467 116.656338,78.2410402 115.884252,77.1423026 C115.112166,76.0435651 114.04314,75.123015 112.677142,74.3806248 C111.311144,73.6382345 109.529434,73.267045 107.331959,73.267045 C105.312658,73.267045 103.634881,73.6679297 102.298579,74.4697112 C100.962276,75.2714926 99.8932503,76.3702137 99.0914688,77.7659073 C98.2896874,79.161601 97.6957841,80.8096826 97.3097412,82.7102016 C96.9236982,84.6107206 96.7009845,86.6596869 96.6415933,88.857162 L86.4857457,88.857162 C86.4857457,85.4124713 86.9460207,82.2202411 87.8665846,79.2803758 C88.7871485,76.3405105 90.1679736,73.801574 92.0091014,71.6634901 C93.8502292,69.5254062 96.092214,67.8476295 98.7351233,66.6301095 C101.378033,65.4125895 104.451482,64.8038386 107.955564,64.8038386 C111.756602,64.8038386 114.933984,65.4274371 117.487807,66.6746527 C120.041629,67.9218683 122.105443,69.4957119 123.67931,71.3962309 C125.253178,73.2967499 126.366746,75.3605638 127.02005,77.5877345 C127.673353,79.8149053 128,81.9381095 128,83.9574109 C128,86.4518421 127.613963,88.7086746 126.841877,90.727976 
C126.069791,92.7472774 125.03046,94.6032252 123.723854,96.2958749 C122.417247,97.9885247 120.932489,99.5475208 119.269534,100.97291 C117.60658,102.398299 115.884261,103.734582 114.102524,104.981797 C112.320788,106.229013 110.539078,107.416819 108.757341,108.545253 C106.975605,109.673686 105.327523,110.802102 103.813047,111.930535 C102.298571,113.058968 100.977136,114.231927 99.8487031,115.449447 C98.7202699,116.666967 97.9481956,117.958707 97.5324571,119.324705 L127.910914,119.324705 L127.910914,128.411516 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/superscript": {
"title": "$:/core/images/superscript",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-superscript tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M2.27170276,16 L22.1825093,16 L43.8305003,49.6746527 L66.4138983,16 L85.1220387,16 L53.5854592,61.9685735 L87.3937414,111.411516 L67.0820462,111.411516 L43.295982,74.9306422 L19.1090291,111.411516 L0,111.411516 L33.8082822,61.9685735 L2.27170276,16 Z M127.910914,63.4115159 L85.3276227,63.4115159 C85.3870139,58.2444799 86.6342108,53.7308149 89.0692508,49.8703857 C91.5042907,46.0099565 94.8301491,42.654403 99.0469256,39.8036245 C101.066227,38.318844 103.174584,36.8786285 105.372059,35.4829349 C107.569534,34.0872413 109.588805,32.5876355 111.429933,30.9840726 C113.271061,29.3805097 114.785514,27.6433426 115.973338,25.7725192 C117.161163,23.9016958 117.784761,21.7487964 117.844152,19.3137564 C117.844152,18.1853233 117.710524,16.9826691 117.443264,15.7057579 C117.176003,14.4288467 116.656338,13.2410402 115.884252,12.1423026 C115.112166,11.0435651 114.04314,10.123015 112.677142,9.38062477 C111.311144,8.63823453 109.529434,8.26704499 107.331959,8.26704499 C105.312658,8.26704499 103.634881,8.6679297 102.298579,9.46971115 C100.962276,10.2714926 99.8932503,11.3702137 99.0914688,12.7659073 C98.2896874,14.161601 97.6957841,15.8096826 97.3097412,17.7102016 C96.9236982,19.6107206 96.7009845,21.6596869 96.6415933,23.857162 L86.4857457,23.857162 C86.4857457,20.4124713 86.9460207,17.2202411 87.8665846,14.2803758 C88.7871485,11.3405105 90.1679736,8.80157397 92.0091014,6.6634901 C93.8502292,4.52540622 96.092214,2.84762946 98.7351233,1.63010947 C101.378033,0.412589489 104.451482,-0.196161372 107.955564,-0.196161372 C111.756602,-0.196161372 114.933984,0.427437071 117.487807,1.67465266 C120.041629,2.92186826 122.105443,4.49571195 123.67931,6.39623095 C125.253178,8.29674995 126.366746,10.3605638 127.02005,12.5877345 C127.673353,14.8149053 128,16.9381095 128,18.9574109 C128,21.4518421 127.613963,23.7086746 126.841877,25.727976 
C126.069791,27.7472774 125.03046,29.6032252 123.723854,31.2958749 C122.417247,32.9885247 120.932489,34.5475208 119.269534,35.97291 C117.60658,37.3982993 115.884261,38.7345816 114.102524,39.9817972 C112.320788,41.2290128 110.539078,42.4168194 108.757341,43.5452525 C106.975605,44.6736857 105.327523,45.8021019 103.813047,46.9305351 C102.298571,48.0589682 100.977136,49.2319272 99.8487031,50.4494472 C98.7202699,51.6669672 97.9481956,52.9587068 97.5324571,54.3247048 L127.910914,54.3247048 L127.910914,63.4115159 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/tag-button": {
"title": "$:/core/images/tag-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-tag-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M18.1643182,47.6600756 L18.1677196,51.7651887 C18.1708869,55.5878829 20.3581578,60.8623899 23.0531352,63.5573673 L84.9021823,125.406414 C87.5996731,128.103905 91.971139,128.096834 94.6717387,125.396234 L125.766905,94.3010679 C128.473612,91.5943612 128.472063,87.2264889 125.777085,84.5315115 L63.9280381,22.6824644 C61.2305472,19.9849735 55.9517395,17.801995 52.1318769,17.8010313 L25.0560441,17.7942007 C21.2311475,17.7932358 18.1421354,20.8872832 18.1452985,24.7049463 L18.1535504,34.6641936 C18.2481119,34.6754562 18.3439134,34.6864294 18.4409623,34.6971263 C22.1702157,35.1081705 26.9295004,34.6530132 31.806204,33.5444844 C32.1342781,33.0700515 32.5094815,32.6184036 32.9318197,32.1960654 C35.6385117,29.4893734 39.5490441,28.718649 42.94592,29.8824694 C43.0432142,29.8394357 43.1402334,29.7961748 43.2369683,29.7526887 L43.3646982,30.0368244 C44.566601,30.5115916 45.6933052,31.2351533 46.6655958,32.2074439 C50.4612154,36.0030635 50.4663097,42.1518845 46.6769742,45.94122 C43.0594074,49.5587868 37.2914155,49.7181264 33.4734256,46.422636 C28.1082519,47.5454734 22.7987486,48.0186448 18.1643182,47.6600756 Z\"></path>\n <path d=\"M47.6333528,39.5324628 L47.6562932,39.5834939 C37.9670934,43.9391617 26.0718874,46.3819521 17.260095,45.4107025 C5.27267473,44.0894301 -1.02778744,36.4307276 2.44271359,24.0779512 C5.56175386,12.9761516 14.3014034,4.36129832 24.0466405,1.54817001 C34.7269254,-1.53487574 43.7955833,3.51606438 43.7955834,14.7730751 L35.1728168,14.7730752 C35.1728167,9.91428944 32.0946059,8.19982862 26.4381034,9.83267419 C19.5270911,11.8276553 13.046247,18.2159574 10.7440788,26.4102121 C8.82861123,33.2280582 11.161186,36.0634845 18.2047888,36.8398415 C25.3302805,37.6252244 35.7353482,35.4884477 44.1208333,31.7188498 L44.1475077,31.7781871 C44.159701,31.7725635 44.1718402,31.7671479 44.1839238,31.7619434 
C45.9448098,31.0035157 50.4503245,38.3109156 47.7081571,39.5012767 C47.6834429,39.512005 47.6585061,39.5223987 47.6333528,39.5324628 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/theme-button": {
"title": "$:/core/images/theme-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-theme-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M55.854113,66.9453198 C54.3299482,65.1432292 53.0133883,63.518995 51.9542746,62.1263761 C40.8899947,47.578055 35.3091807,55.2383404 28.9941893,62.1263758 C22.6791979,69.0144112 30.6577916,74.5954741 24.6646171,79.4611023 C18.6714426,84.3267304 19.0414417,86.0133155 8.92654943,77.1119468 C-1.18834284,68.2105781 -1.88793412,65.7597832 2.7553553,60.6807286 C7.39864472,55.601674 11.2794845,63.5989423 20.7646627,54.5728325 C30.2498409,45.5467226 22.2819131,37.5470737 22.2819131,37.5470737 C22.2819131,37.5470737 42.0310399,-2.82433362 68.4206088,0.157393922 C94.8101776,3.13912147 58.4373806,-3.70356506 49.3898693,27.958066 C45.5161782,41.5139906 50.1107906,38.3197672 57.4560458,44.0453955 C59.1625767,45.3756367 63.8839488,48.777453 70.127165,53.3625321 C63.9980513,59.2416709 58.9704753,64.0315459 55.854113,66.9453198 Z M67.4952439,79.8919946 C83.5082212,96.9282402 105.237121,117.617674 112.611591,120.312493 C123.044132,124.12481 128.000001,117.170903 128,105.522947 C127.999999,98.3705516 104.170675,78.980486 84.0760493,63.7529565 C76.6683337,70.9090328 70.7000957,76.7055226 67.4952439,79.8919946 Z\"></path>\n <path d=\"M58.2852966,138.232794 L58.2852966,88.3943645 C56.318874,88.3923153 54.7254089,86.7952906 54.7254089,84.8344788 C54.7254089,82.8684071 56.3175932,81.2745911 58.2890859,81.2745911 L79.6408336,81.2745911 C81.608998,81.2745911 83.2045105,82.8724076 83.2045105,84.8344788 C83.2045105,86.7992907 81.614366,88.3923238 79.6446228,88.3943645 L79.6446228,88.3943646 L79.6446228,138.232794 C79.6446228,144.131009 74.8631748,148.912457 68.9649597,148.912457 C63.0667446,148.912457 58.2852966,144.131009 58.2852966,138.232794 Z M65.405072,-14.8423767 L72.5248474,-14.8423767 L76.0847351,-0.690681892 L72.5248474,6.51694947 L72.5248474,81.2745911 L65.405072,81.2745911 L65.405072,6.51694947 L61.8451843,-0.690681892 
L65.405072,-14.8423767 Z\" transform=\"translate(68.964960, 67.035040) rotate(45.000000) translate(-68.964960, -67.035040) \"></path>\n </g>\n</svg>"
},
"$:/core/images/tip": {
"title": "$:/core/images/tip",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-tip tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M64,128.241818 C99.346224,128.241818 128,99.5880417 128,64.2418177 C128,28.8955937 99.346224,0.241817675 64,0.241817675 C28.653776,0.241817675 0,28.8955937 0,64.2418177 C0,99.5880417 28.653776,128.241818 64,128.241818 Z M75.9358659,91.4531941 C75.3115438,95.581915 70.2059206,98.8016748 64,98.8016748 C57.7940794,98.8016748 52.6884562,95.581915 52.0641341,91.4531941 C54.3299053,94.0502127 58.8248941,95.8192805 64,95.8192805 C69.1751059,95.8192805 73.6700947,94.0502127 75.9358659,91.4531941 L75.9358659,91.4531941 Z M75.9358659,95.9453413 C75.3115438,100.074062 70.2059206,103.293822 64,103.293822 C57.7940794,103.293822 52.6884562,100.074062 52.0641341,95.9453413 C54.3299053,98.5423599 58.8248941,100.311428 64,100.311428 C69.1751059,100.311428 73.6700947,98.5423599 75.9358659,95.9453413 L75.9358659,95.9453413 Z M75.9358659,100.40119 C75.3115438,104.529911 70.2059206,107.74967 64,107.74967 C57.7940794,107.74967 52.6884562,104.529911 52.0641341,100.40119 C54.3299053,102.998208 58.8248941,104.767276 64,104.767276 C69.1751059,104.767276 73.6700947,102.998208 75.9358659,100.40119 L75.9358659,100.40119 Z M75.9358659,104.893337 C75.3115438,109.022058 70.2059206,112.241818 64,112.241818 C57.7940794,112.241818 52.6884562,109.022058 52.0641341,104.893337 C54.3299053,107.490356 58.8248941,109.259423 64,109.259423 C69.1751059,109.259423 73.6700947,107.490356 75.9358659,104.893337 L75.9358659,104.893337 Z M64.3010456,24.2418177 C75.9193117,24.2418188 88.0000013,32.0619847 88,48.4419659 C87.9999987,64.8219472 75.9193018,71.7540963 75.9193021,83.5755932 C75.9193022,89.4486648 70.0521957,92.8368862 63.9999994,92.8368862 C57.947803,92.8368862 51.9731007,89.8295115 51.9731007,83.5755932 C51.9731007,71.1469799 39.9999998,65.4700602 40,48.4419647 C40.0000002,31.4138691 52.6827796,24.2418166 64.3010456,24.2418177 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/twitter": {
"title": "$:/core/images/twitter",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-twitter tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M41.6263422,115.803477 C27.0279663,115.803477 13.4398394,111.540813 1.99987456,104.234833 C4.02221627,104.472643 6.08004574,104.594302 8.16644978,104.594302 C20.277456,104.594302 31.4238403,100.47763 40.270894,93.5715185 C28.9590538,93.3635501 19.4123842,85.9189246 16.1230832,75.6885328 C17.7011365,75.9892376 19.320669,76.1503787 20.9862896,76.1503787 C23.344152,76.1503787 25.6278127,75.8359011 27.7971751,75.247346 C15.9709927,72.8821073 7.06079851,62.4745062 7.06079851,49.9982394 C7.06079851,49.8898938 7.06079851,49.7820074 7.06264203,49.67458 C10.5482779,51.6032228 14.5339687,52.7615103 18.7717609,52.8951059 C11.8355159,48.277565 7.2714207,40.3958845 7.2714207,31.4624258 C7.2714207,26.7434257 8.54621495,22.3200804 10.7713439,18.5169676 C23.5211299,34.0957738 42.568842,44.3472839 64.0532269,45.4210985 C63.6126256,43.5365285 63.3835682,41.5711584 63.3835682,39.5529928 C63.3835682,25.3326379 74.95811,13.8034766 89.2347917,13.8034766 C96.6697089,13.8034766 103.387958,16.930807 108.103682,21.9353619 C113.991886,20.780288 119.52429,18.6372496 124.518847,15.6866694 C122.588682,21.6993889 118.490075,26.7457211 113.152623,29.9327334 C118.381769,29.3102055 123.363882,27.926045 127.999875,25.8780385 C124.534056,31.0418981 120.151087,35.5772616 115.100763,39.2077561 C115.150538,40.3118708 115.175426,41.4224128 115.175426,42.538923 C115.175426,76.5663154 89.1744164,115.803477 41.6263422,115.803477\"></path>\n </g>\n</svg>\n"
},
"$:/core/images/underline": {
"title": "$:/core/images/underline",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-underline tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M7,117.421488 L121.247934,117.421488 L121.247934,128 L7,128 L7,117.421488 Z M104.871212,98.8958333 L104.871212,0 L88.6117424,0 L88.6117424,55.8560606 C88.6117424,60.3194668 88.0060035,64.432115 86.7945076,68.1941288 C85.5830116,71.9561425 83.7657949,75.239885 81.342803,78.0454545 C78.9198111,80.8510241 75.8911167,83.0189317 72.2566288,84.5492424 C68.6221409,86.0795531 64.3182067,86.844697 59.344697,86.844697 C53.0959284,86.844697 48.1862552,85.0593613 44.6155303,81.4886364 C41.0448054,77.9179114 39.2594697,73.0720003 39.2594697,66.9507576 L39.2594697,0 L23,0 L23,65.0378788 C23,70.3939662 23.5419769,75.2717583 24.625947,79.6714015 C25.709917,84.0710447 27.5908957,87.864883 30.2689394,91.0530303 C32.9469831,94.2411776 36.4538925,96.6960141 40.7897727,98.4176136 C45.125653,100.139213 50.545422,101 57.0492424,101 C64.3182182,101 70.630655,99.5653553 75.9867424,96.6960227 C81.3428298,93.8266902 85.742407,89.33147 89.1856061,83.2102273 L89.5681818,83.2102273 L89.5681818,98.8958333 L104.871212,98.8958333 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/unfold-all-button": {
"title": "$:/core/images/unfold-all-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-unfold-all tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <rect x=\"0\" y=\"0\" width=\"128\" height=\"16\" rx=\"8\"></rect>\n <rect x=\"0\" y=\"64\" width=\"128\" height=\"16\" rx=\"8\"></rect>\n <path d=\"M85.598226,8.34884273 C84.1490432,6.89863875 82.1463102,6 79.9340286,6 L47.9482224,6 C43.5292967,6 39.9411255,9.581722 39.9411255,14 C39.9411255,18.4092877 43.5260249,22 47.9482224,22 L71.9411255,22 L71.9411255,45.9929031 C71.9411255,50.4118288 75.5228475,54 79.9411255,54 C84.3504132,54 87.9411255,50.4151006 87.9411255,45.9929031 L87.9411255,14.0070969 C87.9411255,11.7964515 87.0447363,9.79371715 85.5956548,8.34412458 Z\" transform=\"translate(63.941125, 30.000000) scale(1, -1) rotate(-45.000000) translate(-63.941125, -30.000000) \"></path>\n <path d=\"M85.6571005,72.2899682 C84.2079177,70.8397642 82.2051847,69.9411255 79.9929031,69.9411255 L48.0070969,69.9411255 C43.5881712,69.9411255 40,73.5228475 40,77.9411255 C40,82.3504132 43.5848994,85.9411255 48.0070969,85.9411255 L72,85.9411255 L72,109.934029 C72,114.352954 75.581722,117.941125 80,117.941125 C84.4092877,117.941125 88,114.356226 88,109.934029 L88,77.9482224 C88,75.737577 87.1036108,73.7348426 85.6545293,72.2852501 Z\" transform=\"translate(64.000000, 93.941125) scale(1, -1) rotate(-45.000000) translate(-64.000000, -93.941125) \"></path>\n </g>\n</svg>"
},
"$:/core/images/unfold-button": {
"title": "$:/core/images/unfold-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-unfold tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <rect x=\"0\" y=\"0\" width=\"128\" height=\"16\" rx=\"8\"></rect>\n <path d=\"M85.598226,11.3488427 C84.1490432,9.89863875 82.1463102,9 79.9340286,9 L47.9482224,9 C43.5292967,9 39.9411255,12.581722 39.9411255,17 C39.9411255,21.4092877 43.5260249,25 47.9482224,25 L71.9411255,25 L71.9411255,48.9929031 C71.9411255,53.4118288 75.5228475,57 79.9411255,57 C84.3504132,57 87.9411255,53.4151006 87.9411255,48.9929031 L87.9411255,17.0070969 C87.9411255,14.7964515 87.0447363,12.7937171 85.5956548,11.3441246 Z\" transform=\"translate(63.941125, 33.000000) scale(1, -1) rotate(-45.000000) translate(-63.941125, -33.000000) \"></path>\n <path d=\"M85.6571005,53.4077172 C84.2079177,51.9575133 82.2051847,51.0588745 79.9929031,51.0588745 L48.0070969,51.0588745 C43.5881712,51.0588745 40,54.6405965 40,59.0588745 C40,63.4681622 43.5848994,67.0588745 48.0070969,67.0588745 L72,67.0588745 L72,91.0517776 C72,95.4707033 75.581722,99.0588745 80,99.0588745 C84.4092877,99.0588745 88,95.4739751 88,91.0517776 L88,59.0659714 C88,56.855326 87.1036108,54.8525917 85.6545293,53.4029991 Z\" transform=\"translate(64.000000, 75.058875) scale(1, -1) rotate(-45.000000) translate(-64.000000, -75.058875) \"></path>\n </g>\n</svg>"
},
"$:/core/images/unlocked-padlock": {
"title": "$:/core/images/unlocked-padlock",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-unlocked-padlock tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M48.6266053,64 L105,64 L105,96.0097716 C105,113.673909 90.6736461,128 73.001193,128 L55.998807,128 C38.3179793,128 24,113.677487 24,96.0097716 L24,64 L30.136303,64 C19.6806213,51.3490406 2.77158986,28.2115132 25.8366966,8.85759246 C50.4723026,-11.8141335 71.6711028,13.2108337 81.613302,25.0594855 C91.5555012,36.9081373 78.9368488,47.4964439 69.1559674,34.9513593 C59.375086,22.4062748 47.9893192,10.8049522 35.9485154,20.9083862 C23.9077117,31.0118202 34.192312,43.2685325 44.7624679,55.8655518 C47.229397,58.805523 48.403443,61.5979188 48.6266053,64 Z M67.7315279,92.3641717 C70.8232551,91.0923621 73,88.0503841 73,84.5 C73,79.8055796 69.1944204,76 64.5,76 C59.8055796,76 56,79.8055796 56,84.5 C56,87.947435 58.0523387,90.9155206 61.0018621,92.2491029 L55.9067479,115.020857 L72.8008958,115.020857 L67.7315279,92.3641717 L67.7315279,92.3641717 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/up-arrow": {
"created": "20150316000544368",
"modified": "20150316000831867",
"tags": "$:/tags/Image",
"title": "$:/core/images/up-arrow",
"text": "<svg class=\"tc-image-up-arrow tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n<path transform=\"rotate(-135, 63.8945, 64.1752)\" d=\"m109.07576,109.35336c-1.43248,1.43361 -3.41136,2.32182 -5.59717,2.32182l-79.16816,0c-4.36519,0 -7.91592,-3.5444 -7.91592,-7.91666c0,-4.36337 3.54408,-7.91667 7.91592,-7.91667l71.25075,0l0,-71.25074c0,-4.3652 3.54442,-7.91592 7.91667,-7.91592c4.36336,0 7.91667,3.54408 7.91667,7.91592l0,79.16815c0,2.1825 -0.88602,4.16136 -2.3185,5.59467l-0.00027,-0.00056l0.00001,-0.00001z\" />\n</svg>\n \n"
},
"$:/core/images/video": {
"title": "$:/core/images/video",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-video tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M64,12 C29.0909091,12 8.72727273,14.9166667 5.81818182,17.8333333 C2.90909091,20.75 1.93784382e-15,41.1666667 0,64.5 C1.93784382e-15,87.8333333 2.90909091,108.25 5.81818182,111.166667 C8.72727273,114.083333 29.0909091,117 64,117 C98.9090909,117 119.272727,114.083333 122.181818,111.166667 C125.090909,108.25 128,87.8333333 128,64.5 C128,41.1666667 125.090909,20.75 122.181818,17.8333333 C119.272727,14.9166667 98.9090909,12 64,12 Z M54.9161194,44.6182253 C51.102648,42.0759111 48.0112186,43.7391738 48.0112186,48.3159447 L48.0112186,79.6840553 C48.0112186,84.2685636 51.109784,85.9193316 54.9161194,83.3817747 L77.0838806,68.6032672 C80.897352,66.0609529 80.890216,61.9342897 77.0838806,59.3967328 L54.9161194,44.6182253 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/warning": {
"title": "$:/core/images/warning",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-warning tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M57.0717968,11 C60.1509982,5.66666667 67.8490018,5.66666667 70.9282032,11 L126.353829,107 C129.433031,112.333333 125.584029,119 119.425626,119 L8.57437416,119 C2.41597129,119 -1.43303051,112.333333 1.64617093,107 L57.0717968,11 Z M64,37 C59.581722,37 56,40.5820489 56,44.9935776 L56,73.0064224 C56,77.4211534 59.5907123,81 64,81 C68.418278,81 72,77.4179511 72,73.0064224 L72,44.9935776 C72,40.5788466 68.4092877,37 64,37 Z M64,104 C68.418278,104 72,100.418278 72,96 C72,91.581722 68.418278,88 64,88 C59.581722,88 56,91.581722 56,96 C56,100.418278 59.581722,104 64,104 Z\"></path>\n </g>\n</svg>"
},
"$:/language/Buttons/AdvancedSearch/Caption": {
"title": "$:/language/Buttons/AdvancedSearch/Caption",
"text": "advanced search"
},
"$:/language/Buttons/AdvancedSearch/Hint": {
"title": "$:/language/Buttons/AdvancedSearch/Hint",
"text": "Advanced search"
},
"$:/language/Buttons/Cancel/Caption": {
"title": "$:/language/Buttons/Cancel/Caption",
"text": "cancel"
},
"$:/language/Buttons/Cancel/Hint": {
"title": "$:/language/Buttons/Cancel/Hint",
"text": "Discard changes to this tiddler"
},
"$:/language/Buttons/Clone/Caption": {
"title": "$:/language/Buttons/Clone/Caption",
"text": "clone"
},
"$:/language/Buttons/Clone/Hint": {
"title": "$:/language/Buttons/Clone/Hint",
"text": "Clone this tiddler"
},
"$:/language/Buttons/Close/Caption": {
"title": "$:/language/Buttons/Close/Caption",
"text": "close"
},
"$:/language/Buttons/Close/Hint": {
"title": "$:/language/Buttons/Close/Hint",
"text": "Close this tiddler"
},
"$:/language/Buttons/CloseAll/Caption": {
"title": "$:/language/Buttons/CloseAll/Caption",
"text": "close all"
},
"$:/language/Buttons/CloseAll/Hint": {
"title": "$:/language/Buttons/CloseAll/Hint",
"text": "Close all tiddlers"
},
"$:/language/Buttons/CloseOthers/Caption": {
"title": "$:/language/Buttons/CloseOthers/Caption",
"text": "close others"
},
"$:/language/Buttons/CloseOthers/Hint": {
"title": "$:/language/Buttons/CloseOthers/Hint",
"text": "Close other tiddlers"
},
"$:/language/Buttons/ControlPanel/Caption": {
"title": "$:/language/Buttons/ControlPanel/Caption",
"text": "control panel"
},
"$:/language/Buttons/ControlPanel/Hint": {
"title": "$:/language/Buttons/ControlPanel/Hint",
"text": "Open control panel"
},
"$:/language/Buttons/Delete/Caption": {
"title": "$:/language/Buttons/Delete/Caption",
"text": "delete"
},
"$:/language/Buttons/Delete/Hint": {
"title": "$:/language/Buttons/Delete/Hint",
"text": "Delete this tiddler"
},
"$:/language/Buttons/Edit/Caption": {
"title": "$:/language/Buttons/Edit/Caption",
"text": "edit"
},
"$:/language/Buttons/Edit/Hint": {
"title": "$:/language/Buttons/Edit/Hint",
"text": "Edit this tiddler"
},
"$:/language/Buttons/Encryption/Caption": {
"title": "$:/language/Buttons/Encryption/Caption",
"text": "encryption"
},
"$:/language/Buttons/Encryption/Hint": {
"title": "$:/language/Buttons/Encryption/Hint",
"text": "Set or clear a password for saving this wiki"
},
"$:/language/Buttons/Encryption/ClearPassword/Caption": {
"title": "$:/language/Buttons/Encryption/ClearPassword/Caption",
"text": "clear password"
},
"$:/language/Buttons/Encryption/ClearPassword/Hint": {
"title": "$:/language/Buttons/Encryption/ClearPassword/Hint",
"text": "Clear the password and save this wiki without encryption"
},
"$:/language/Buttons/Encryption/SetPassword/Caption": {
"title": "$:/language/Buttons/Encryption/SetPassword/Caption",
"text": "set password"
},
"$:/language/Buttons/Encryption/SetPassword/Hint": {
"title": "$:/language/Buttons/Encryption/SetPassword/Hint",
"text": "Set a password for saving this wiki with encryption"
},
"$:/language/Buttons/ExportPage/Caption": {
"title": "$:/language/Buttons/ExportPage/Caption",
"text": "export all"
},
"$:/language/Buttons/ExportPage/Hint": {
"title": "$:/language/Buttons/ExportPage/Hint",
"text": "Export all tiddlers"
},
"$:/language/Buttons/ExportTiddler/Caption": {
"title": "$:/language/Buttons/ExportTiddler/Caption",
"text": "export tiddler"
},
"$:/language/Buttons/ExportTiddler/Hint": {
"title": "$:/language/Buttons/ExportTiddler/Hint",
"text": "Export tiddler"
},
"$:/language/Buttons/ExportTiddlers/Caption": {
"title": "$:/language/Buttons/ExportTiddlers/Caption",
"text": "export tiddlers"
},
"$:/language/Buttons/ExportTiddlers/Hint": {
"title": "$:/language/Buttons/ExportTiddlers/Hint",
"text": "Export tiddlers"
},
"$:/language/Buttons/Fold/Caption": {
"title": "$:/language/Buttons/Fold/Caption",
"text": "fold tiddler"
},
"$:/language/Buttons/Fold/Hint": {
"title": "$:/language/Buttons/Fold/Hint",
"text": "Fold the body of this tiddler"
},
"$:/language/Buttons/Fold/FoldBar/Caption": {
"title": "$:/language/Buttons/Fold/FoldBar/Caption",
"text": "fold-bar"
},
"$:/language/Buttons/Fold/FoldBar/Hint": {
"title": "$:/language/Buttons/Fold/FoldBar/Hint",
"text": "Optional bars to fold and unfold tiddlers"
},
"$:/language/Buttons/Unfold/Caption": {
"title": "$:/language/Buttons/Unfold/Caption",
"text": "unfold tiddler"
},
"$:/language/Buttons/Unfold/Hint": {
"title": "$:/language/Buttons/Unfold/Hint",
"text": "Unfold the body of this tiddler"
},
"$:/language/Buttons/FoldOthers/Caption": {
"title": "$:/language/Buttons/FoldOthers/Caption",
"text": "fold other tiddlers"
},
"$:/language/Buttons/FoldOthers/Hint": {
"title": "$:/language/Buttons/FoldOthers/Hint",
"text": "Fold the bodies of other opened tiddlers"
},
"$:/language/Buttons/FoldAll/Caption": {
"title": "$:/language/Buttons/FoldAll/Caption",
"text": "fold all tiddlers"
},
"$:/language/Buttons/FoldAll/Hint": {
"title": "$:/language/Buttons/FoldAll/Hint",
"text": "Fold the bodies of all opened tiddlers"
},
"$:/language/Buttons/UnfoldAll/Caption": {
"title": "$:/language/Buttons/UnfoldAll/Caption",
"text": "unfold all tiddlers"
},
"$:/language/Buttons/UnfoldAll/Hint": {
"title": "$:/language/Buttons/UnfoldAll/Hint",
"text": "Unfold the bodies of all opened tiddlers"
},
"$:/language/Buttons/FullScreen/Caption": {
"title": "$:/language/Buttons/FullScreen/Caption",
"text": "full-screen"
},
"$:/language/Buttons/FullScreen/Hint": {
"title": "$:/language/Buttons/FullScreen/Hint",
"text": "Enter or leave full-screen mode"
},
"$:/language/Buttons/Help/Caption": {
"title": "$:/language/Buttons/Help/Caption",
"text": "help"
},
"$:/language/Buttons/Help/Hint": {
"title": "$:/language/Buttons/Help/Hint",
"text": "Show help panel"
},
"$:/language/Buttons/Import/Caption": {
"title": "$:/language/Buttons/Import/Caption",
"text": "import"
},
"$:/language/Buttons/Import/Hint": {
"title": "$:/language/Buttons/Import/Hint",
"text": "Import many types of file including text, image, TiddlyWiki or JSON"
},
"$:/language/Buttons/Info/Caption": {
"title": "$:/language/Buttons/Info/Caption",
"text": "info"
},
"$:/language/Buttons/Info/Hint": {
"title": "$:/language/Buttons/Info/Hint",
"text": "Show information for this tiddler"
},
"$:/language/Buttons/Home/Caption": {
"title": "$:/language/Buttons/Home/Caption",
"text": "home"
},
"$:/language/Buttons/Home/Hint": {
"title": "$:/language/Buttons/Home/Hint",
"text": "Open the default tiddlers"
},
"$:/language/Buttons/Language/Caption": {
"title": "$:/language/Buttons/Language/Caption",
"text": "language"
},
"$:/language/Buttons/Language/Hint": {
"title": "$:/language/Buttons/Language/Hint",
"text": "Choose the user interface language"
},
"$:/language/Buttons/More/Caption": {
"title": "$:/language/Buttons/More/Caption",
"text": "more"
},
"$:/language/Buttons/More/Hint": {
"title": "$:/language/Buttons/More/Hint",
"text": "More actions"
},
"$:/language/Buttons/NewHere/Caption": {
"title": "$:/language/Buttons/NewHere/Caption",
"text": "new here"
},
"$:/language/Buttons/NewHere/Hint": {
"title": "$:/language/Buttons/NewHere/Hint",
"text": "Create a new tiddler tagged with this one"
},
"$:/language/Buttons/NewJournal/Caption": {
"title": "$:/language/Buttons/NewJournal/Caption",
"text": "new journal"
},
"$:/language/Buttons/NewJournal/Hint": {
"title": "$:/language/Buttons/NewJournal/Hint",
"text": "Create a new journal tiddler"
},
"$:/language/Buttons/NewJournalHere/Caption": {
"title": "$:/language/Buttons/NewJournalHere/Caption",
"text": "new journal here"
},
"$:/language/Buttons/NewJournalHere/Hint": {
"title": "$:/language/Buttons/NewJournalHere/Hint",
"text": "Create a new journal tiddler tagged with this one"
},
"$:/language/Buttons/NewImage/Caption": {
"title": "$:/language/Buttons/NewImage/Caption",
"text": "new image"
},
"$:/language/Buttons/NewImage/Hint": {
"title": "$:/language/Buttons/NewImage/Hint",
"text": "Create a new image tiddler"
},
"$:/language/Buttons/NewMarkdown/Caption": {
"title": "$:/language/Buttons/NewMarkdown/Caption",
"text": "new Markdown tiddler"
},
"$:/language/Buttons/NewMarkdown/Hint": {
"title": "$:/language/Buttons/NewMarkdown/Hint",
"text": "Create a new Markdown tiddler"
},
"$:/language/Buttons/NewTiddler/Caption": {
"title": "$:/language/Buttons/NewTiddler/Caption",
"text": "new tiddler"
},
"$:/language/Buttons/NewTiddler/Hint": {
"title": "$:/language/Buttons/NewTiddler/Hint",
"text": "Create a new tiddler"
},
"$:/language/Buttons/OpenWindow/Caption": {
"title": "$:/language/Buttons/OpenWindow/Caption",
"text": "open in new window"
},
"$:/language/Buttons/OpenWindow/Hint": {
"title": "$:/language/Buttons/OpenWindow/Hint",
"text": "Open tiddler in new window"
},
"$:/language/Buttons/Palette/Caption": {
"title": "$:/language/Buttons/Palette/Caption",
"text": "palette"
},
"$:/language/Buttons/Palette/Hint": {
"title": "$:/language/Buttons/Palette/Hint",
"text": "Choose the colour palette"
},
"$:/language/Buttons/Permalink/Caption": {
"title": "$:/language/Buttons/Permalink/Caption",
"text": "permalink"
},
"$:/language/Buttons/Permalink/Hint": {
"title": "$:/language/Buttons/Permalink/Hint",
"text": "Set browser address bar to a direct link to this tiddler"
},
"$:/language/Buttons/Permaview/Caption": {
"title": "$:/language/Buttons/Permaview/Caption",
"text": "permaview"
},
"$:/language/Buttons/Permaview/Hint": {
"title": "$:/language/Buttons/Permaview/Hint",
"text": "Set browser address bar to a direct link to all the tiddlers in this story"
},
"$:/language/Buttons/Refresh/Caption": {
"title": "$:/language/Buttons/Refresh/Caption",
"text": "refresh"
},
"$:/language/Buttons/Refresh/Hint": {
"title": "$:/language/Buttons/Refresh/Hint",
"text": "Perform a full refresh of the wiki"
},
"$:/language/Buttons/Save/Caption": {
"title": "$:/language/Buttons/Save/Caption",
"text": "ok"
},
"$:/language/Buttons/Save/Hint": {
"title": "$:/language/Buttons/Save/Hint",
"text": "Confirm changes to this tiddler"
},
"$:/language/Buttons/SaveWiki/Caption": {
"title": "$:/language/Buttons/SaveWiki/Caption",
"text": "save changes"
},
"$:/language/Buttons/SaveWiki/Hint": {
"title": "$:/language/Buttons/SaveWiki/Hint",
"text": "Save changes"
},
"$:/language/Buttons/StoryView/Caption": {
"title": "$:/language/Buttons/StoryView/Caption",
"text": "storyview"
},
"$:/language/Buttons/StoryView/Hint": {
"title": "$:/language/Buttons/StoryView/Hint",
"text": "Choose the story visualisation"
},
"$:/language/Buttons/HideSideBar/Caption": {
"title": "$:/language/Buttons/HideSideBar/Caption",
"text": "hide sidebar"
},
"$:/language/Buttons/HideSideBar/Hint": {
"title": "$:/language/Buttons/HideSideBar/Hint",
"text": "Hide sidebar"
},
"$:/language/Buttons/ShowSideBar/Caption": {
"title": "$:/language/Buttons/ShowSideBar/Caption",
"text": "show sidebar"
},
"$:/language/Buttons/ShowSideBar/Hint": {
"title": "$:/language/Buttons/ShowSideBar/Hint",
"text": "Show sidebar"
},
"$:/language/Buttons/TagManager/Caption": {
"title": "$:/language/Buttons/TagManager/Caption",
"text": "tag manager"
},
"$:/language/Buttons/TagManager/Hint": {
"title": "$:/language/Buttons/TagManager/Hint",
"text": "Open tag manager"
},
"$:/language/Buttons/Theme/Caption": {
"title": "$:/language/Buttons/Theme/Caption",
"text": "theme"
},
"$:/language/Buttons/Theme/Hint": {
"title": "$:/language/Buttons/Theme/Hint",
"text": "Choose the display theme"
},
"$:/language/Buttons/Bold/Caption": {
"title": "$:/language/Buttons/Bold/Caption",
"text": "bold"
},
"$:/language/Buttons/Bold/Hint": {
"title": "$:/language/Buttons/Bold/Hint",
"text": "Apply bold formatting to selection"
},
"$:/language/Buttons/Clear/Caption": {
"title": "$:/language/Buttons/Clear/Caption",
"text": "clear"
},
"$:/language/Buttons/Clear/Hint": {
"title": "$:/language/Buttons/Clear/Hint",
"text": "Clear image to solid colour"
},
"$:/language/Buttons/EditorHeight/Caption": {
"title": "$:/language/Buttons/EditorHeight/Caption",
"text": "editor height"
},
"$:/language/Buttons/EditorHeight/Caption/Auto": {
"title": "$:/language/Buttons/EditorHeight/Caption/Auto",
"text": "Automatically adjust height to fit content"
},
"$:/language/Buttons/EditorHeight/Caption/Fixed": {
"title": "$:/language/Buttons/EditorHeight/Caption/Fixed",
"text": "Fixed height:"
},
"$:/language/Buttons/EditorHeight/Hint": {
"title": "$:/language/Buttons/EditorHeight/Hint",
"text": "Choose the height of the text editor"
},
"$:/language/Buttons/Excise/Caption": {
"title": "$:/language/Buttons/Excise/Caption",
"text": "excise"
},
"$:/language/Buttons/Excise/Caption/Excise": {
"title": "$:/language/Buttons/Excise/Caption/Excise",
"text": "Perform excision"
},
"$:/language/Buttons/Excise/Caption/MacroName": {
"title": "$:/language/Buttons/Excise/Caption/MacroName",
"text": "Macro name:"
},
"$:/language/Buttons/Excise/Caption/NewTitle": {
"title": "$:/language/Buttons/Excise/Caption/NewTitle",
"text": "Title of new tiddler:"
},
"$:/language/Buttons/Excise/Caption/Replace": {
"title": "$:/language/Buttons/Excise/Caption/Replace",
"text": "Replace excised text with:"
},
"$:/language/Buttons/Excise/Caption/Replace/Macro": {
"title": "$:/language/Buttons/Excise/Caption/Replace/Macro",
"text": "macro"
},
"$:/language/Buttons/Excise/Caption/Replace/Link": {
"title": "$:/language/Buttons/Excise/Caption/Replace/Link",
"text": "link"
},
"$:/language/Buttons/Excise/Caption/Replace/Transclusion": {
"title": "$:/language/Buttons/Excise/Caption/Replace/Transclusion",
"text": "transclusion"
},
"$:/language/Buttons/Excise/Caption/Tag": {
"title": "$:/language/Buttons/Excise/Caption/Tag",
"text": "Tag new tiddler with the title of this tiddler"
},
"$:/language/Buttons/Excise/Caption/TiddlerExists": {
"title": "$:/language/Buttons/Excise/Caption/TiddlerExists",
"text": "Warning: tiddler already exists"
},
"$:/language/Buttons/Excise/Hint": {
"title": "$:/language/Buttons/Excise/Hint",
"text": "Excise the selected text into a new tiddler"
},
"$:/language/Buttons/Heading1/Caption": {
"title": "$:/language/Buttons/Heading1/Caption",
"text": "heading 1"
},
"$:/language/Buttons/Heading1/Hint": {
"title": "$:/language/Buttons/Heading1/Hint",
"text": "Apply heading level 1 formatting to lines containing selection"
},
"$:/language/Buttons/Heading2/Caption": {
"title": "$:/language/Buttons/Heading2/Caption",
"text": "heading 2"
},
"$:/language/Buttons/Heading2/Hint": {
"title": "$:/language/Buttons/Heading2/Hint",
"text": "Apply heading level 2 formatting to lines containing selection"
},
"$:/language/Buttons/Heading3/Caption": {
"title": "$:/language/Buttons/Heading3/Caption",
"text": "heading 3"
},
"$:/language/Buttons/Heading3/Hint": {
"title": "$:/language/Buttons/Heading3/Hint",
"text": "Apply heading level 3 formatting to lines containing selection"
},
"$:/language/Buttons/Heading4/Caption": {
"title": "$:/language/Buttons/Heading4/Caption",
"text": "heading 4"
},
"$:/language/Buttons/Heading4/Hint": {
"title": "$:/language/Buttons/Heading4/Hint",
"text": "Apply heading level 4 formatting to lines containing selection"
},
"$:/language/Buttons/Heading5/Caption": {
"title": "$:/language/Buttons/Heading5/Caption",
"text": "heading 5"
},
"$:/language/Buttons/Heading5/Hint": {
"title": "$:/language/Buttons/Heading5/Hint",
"text": "Apply heading level 5 formatting to lines containing selection"
},
"$:/language/Buttons/Heading6/Caption": {
"title": "$:/language/Buttons/Heading6/Caption",
"text": "heading 6"
},
"$:/language/Buttons/Heading6/Hint": {
"title": "$:/language/Buttons/Heading6/Hint",
"text": "Apply heading level 6 formatting to lines containing selection"
},
"$:/language/Buttons/Italic/Caption": {
"title": "$:/language/Buttons/Italic/Caption",
"text": "italic"
},
"$:/language/Buttons/Italic/Hint": {
"title": "$:/language/Buttons/Italic/Hint",
"text": "Apply italic formatting to selection"
},
"$:/language/Buttons/LineWidth/Caption": {
"title": "$:/language/Buttons/LineWidth/Caption",
"text": "line width"
},
"$:/language/Buttons/LineWidth/Hint": {
"title": "$:/language/Buttons/LineWidth/Hint",
"text": "Set line width for painting"
},
"$:/language/Buttons/Link/Caption": {
"title": "$:/language/Buttons/Link/Caption",
"text": "link"
},
"$:/language/Buttons/Link/Hint": {
"title": "$:/language/Buttons/Link/Hint",
"text": "Create wikitext link"
},
"$:/language/Buttons/ListBullet/Caption": {
"title": "$:/language/Buttons/ListBullet/Caption",
"text": "bulleted list"
},
"$:/language/Buttons/ListBullet/Hint": {
"title": "$:/language/Buttons/ListBullet/Hint",
"text": "Apply bulleted list formatting to lines containing selection"
},
"$:/language/Buttons/ListNumber/Caption": {
"title": "$:/language/Buttons/ListNumber/Caption",
"text": "numbered list"
},
"$:/language/Buttons/ListNumber/Hint": {
"title": "$:/language/Buttons/ListNumber/Hint",
"text": "Apply numbered list formatting to lines containing selection"
},
"$:/language/Buttons/MonoBlock/Caption": {
"title": "$:/language/Buttons/MonoBlock/Caption",
"text": "monospaced block"
},
"$:/language/Buttons/MonoBlock/Hint": {
"title": "$:/language/Buttons/MonoBlock/Hint",
"text": "Apply monospaced block formatting to lines containing selection"
},
"$:/language/Buttons/MonoLine/Caption": {
"title": "$:/language/Buttons/MonoLine/Caption",
"text": "monospaced"
},
"$:/language/Buttons/MonoLine/Hint": {
"title": "$:/language/Buttons/MonoLine/Hint",
"text": "Apply monospaced character formatting to selection"
},
"$:/language/Buttons/Opacity/Caption": {
"title": "$:/language/Buttons/Opacity/Caption",
"text": "opacity"
},
"$:/language/Buttons/Opacity/Hint": {
"title": "$:/language/Buttons/Opacity/Hint",
"text": "Set painting opacity"
},
"$:/language/Buttons/Paint/Caption": {
"title": "$:/language/Buttons/Paint/Caption",
"text": "paint colour"
},
"$:/language/Buttons/Paint/Hint": {
"title": "$:/language/Buttons/Paint/Hint",
"text": "Set painting colour"
},
"$:/language/Buttons/Picture/Caption": {
"title": "$:/language/Buttons/Picture/Caption",
"text": "picture"
},
"$:/language/Buttons/Picture/Hint": {
"title": "$:/language/Buttons/Picture/Hint",
"text": "Insert picture"
},
"$:/language/Buttons/Preview/Caption": {
"title": "$:/language/Buttons/Preview/Caption",
"text": "preview"
},
"$:/language/Buttons/Preview/Hint": {
"title": "$:/language/Buttons/Preview/Hint",
"text": "Show preview pane"
},
"$:/language/Buttons/PreviewType/Caption": {
"title": "$:/language/Buttons/PreviewType/Caption",
"text": "preview type"
},
"$:/language/Buttons/PreviewType/Hint": {
"title": "$:/language/Buttons/PreviewType/Hint",
"text": "Choose preview type"
},
"$:/language/Buttons/Quote/Caption": {
"title": "$:/language/Buttons/Quote/Caption",
"text": "quote"
},
"$:/language/Buttons/Quote/Hint": {
"title": "$:/language/Buttons/Quote/Hint",
"text": "Apply quoted text formatting to lines containing selection"
},
"$:/language/Buttons/Size/Caption": {
"title": "$:/language/Buttons/Size/Caption",
"text": "image size"
},
"$:/language/Buttons/Size/Caption/Height": {
"title": "$:/language/Buttons/Size/Caption/Height",
"text": "Height:"
},
"$:/language/Buttons/Size/Caption/Resize": {
"title": "$:/language/Buttons/Size/Caption/Resize",
"text": "Resize image"
},
"$:/language/Buttons/Size/Caption/Width": {
"title": "$:/language/Buttons/Size/Caption/Width",
"text": "Width:"
},
"$:/language/Buttons/Size/Hint": {
"title": "$:/language/Buttons/Size/Hint",
"text": "Set image size"
},
"$:/language/Buttons/Stamp/Caption": {
"title": "$:/language/Buttons/Stamp/Caption",
"text": "stamp"
},
"$:/language/Buttons/Stamp/Caption/New": {
"title": "$:/language/Buttons/Stamp/Caption/New",
"text": "Add your own"
},
"$:/language/Buttons/Stamp/Hint": {
"title": "$:/language/Buttons/Stamp/Hint",
"text": "Insert a preconfigured snippet of text"
},
"$:/language/Buttons/Stamp/New/Title": {
"title": "$:/language/Buttons/Stamp/New/Title",
"text": "Name as shown in menu"
},
"$:/language/Buttons/Stamp/New/Text": {
"title": "$:/language/Buttons/Stamp/New/Text",
"text": "Text of snippet. (Remember to add a descriptive title in the caption field)."
},
"$:/language/Buttons/Strikethrough/Caption": {
"title": "$:/language/Buttons/Strikethrough/Caption",
"text": "strikethrough"
},
"$:/language/Buttons/Strikethrough/Hint": {
"title": "$:/language/Buttons/Strikethrough/Hint",
"text": "Apply strikethrough formatting to selection"
},
"$:/language/Buttons/Subscript/Caption": {
"title": "$:/language/Buttons/Subscript/Caption",
"text": "subscript"
},
"$:/language/Buttons/Subscript/Hint": {
"title": "$:/language/Buttons/Subscript/Hint",
"text": "Apply subscript formatting to selection"
},
"$:/language/Buttons/Superscript/Caption": {
"title": "$:/language/Buttons/Superscript/Caption",
"text": "superscript"
},
"$:/language/Buttons/Superscript/Hint": {
"title": "$:/language/Buttons/Superscript/Hint",
"text": "Apply superscript formatting to selection"
},
"$:/language/Buttons/Underline/Caption": {
"title": "$:/language/Buttons/Underline/Caption",
"text": "underline"
},
"$:/language/Buttons/Underline/Hint": {
"title": "$:/language/Buttons/Underline/Hint",
"text": "Apply underline formatting to selection"
},
"$:/language/ControlPanel/Advanced/Caption": {
"title": "$:/language/ControlPanel/Advanced/Caption",
"text": "Advanced"
},
"$:/language/ControlPanel/Advanced/Hint": {
"title": "$:/language/ControlPanel/Advanced/Hint",
"text": "Internal information about this TiddlyWiki"
},
"$:/language/ControlPanel/Appearance/Caption": {
"title": "$:/language/ControlPanel/Appearance/Caption",
"text": "Appearance"
},
"$:/language/ControlPanel/Appearance/Hint": {
"title": "$:/language/ControlPanel/Appearance/Hint",
"text": "Ways to customise the appearance of your TiddlyWiki."
},
"$:/language/ControlPanel/Basics/AnimDuration/Prompt": {
"title": "$:/language/ControlPanel/Basics/AnimDuration/Prompt",
"text": "Animation duration:"
},
"$:/language/ControlPanel/Basics/Caption": {
"title": "$:/language/ControlPanel/Basics/Caption",
"text": "Basics"
},
"$:/language/ControlPanel/Basics/DefaultTiddlers/BottomHint": {
"title": "$:/language/ControlPanel/Basics/DefaultTiddlers/BottomHint",
"text": "Use [[double square brackets]] for titles with spaces. Or you can choose to <$button set=\"$:/DefaultTiddlers\" setTo=\"[list[$:/StoryList]]\">retain story ordering</$button>"
},
"$:/language/ControlPanel/Basics/DefaultTiddlers/Prompt": {
"title": "$:/language/ControlPanel/Basics/DefaultTiddlers/Prompt",
"text": "Default tiddlers:"
},
"$:/language/ControlPanel/Basics/DefaultTiddlers/TopHint": {
"title": "$:/language/ControlPanel/Basics/DefaultTiddlers/TopHint",
"text": "Choose which tiddlers are displayed at startup:"
},
"$:/language/ControlPanel/Basics/Language/Prompt": {
"title": "$:/language/ControlPanel/Basics/Language/Prompt",
"text": "Hello! Current language:"
},
"$:/language/ControlPanel/Basics/NewJournal/Title/Prompt": {
"title": "$:/language/ControlPanel/Basics/NewJournal/Title/Prompt",
"text": "Title of new journal tiddlers"
},
"$:/language/ControlPanel/Basics/NewJournal/Tags/Prompt": {
"title": "$:/language/ControlPanel/Basics/NewJournal/Tags/Prompt",
"text": "Tags for new journal tiddlers"
},
"$:/language/ControlPanel/Basics/OverriddenShadowTiddlers/Prompt": {
"title": "$:/language/ControlPanel/Basics/OverriddenShadowTiddlers/Prompt",
"text": "Number of overridden shadow tiddlers:"
},
"$:/language/ControlPanel/Basics/ShadowTiddlers/Prompt": {
"title": "$:/language/ControlPanel/Basics/ShadowTiddlers/Prompt",
"text": "Number of shadow tiddlers:"
},
"$:/language/ControlPanel/Basics/Subtitle/Prompt": {
"title": "$:/language/ControlPanel/Basics/Subtitle/Prompt",
"text": "Subtitle:"
},
"$:/language/ControlPanel/Basics/SystemTiddlers/Prompt": {
"title": "$:/language/ControlPanel/Basics/SystemTiddlers/Prompt",
"text": "Number of system tiddlers:"
},
"$:/language/ControlPanel/Basics/Tags/Prompt": {
"title": "$:/language/ControlPanel/Basics/Tags/Prompt",
"text": "Number of tags:"
},
"$:/language/ControlPanel/Basics/Tiddlers/Prompt": {
"title": "$:/language/ControlPanel/Basics/Tiddlers/Prompt",
"text": "Number of tiddlers:"
},
"$:/language/ControlPanel/Basics/Title/Prompt": {
"title": "$:/language/ControlPanel/Basics/Title/Prompt",
"text": "Title of this ~TiddlyWiki:"
},
"$:/language/ControlPanel/Basics/Username/Prompt": {
"title": "$:/language/ControlPanel/Basics/Username/Prompt",
"text": "Username for signing edits:"
},
"$:/language/ControlPanel/Basics/Version/Prompt": {
"title": "$:/language/ControlPanel/Basics/Version/Prompt",
"text": "~TiddlyWiki version:"
},
"$:/language/ControlPanel/EditorTypes/Caption": {
"title": "$:/language/ControlPanel/EditorTypes/Caption",
"text": "Editor Types"
},
"$:/language/ControlPanel/EditorTypes/Editor/Caption": {
"title": "$:/language/ControlPanel/EditorTypes/Editor/Caption",
"text": "Editor"
},
"$:/language/ControlPanel/EditorTypes/Hint": {
"title": "$:/language/ControlPanel/EditorTypes/Hint",
"text": "These tiddlers determine which editor is used to edit specific tiddler types."
},
"$:/language/ControlPanel/EditorTypes/Type/Caption": {
"title": "$:/language/ControlPanel/EditorTypes/Type/Caption",
"text": "Type"
},
"$:/language/ControlPanel/Info/Caption": {
"title": "$:/language/ControlPanel/Info/Caption",
"text": "Info"
},
"$:/language/ControlPanel/Info/Hint": {
"title": "$:/language/ControlPanel/Info/Hint",
"text": "Information about this TiddlyWiki"
},
"$:/language/ControlPanel/KeyboardShortcuts/Add/Prompt": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Add/Prompt",
"text": "Type shortcut here"
},
"$:/language/ControlPanel/KeyboardShortcuts/Add/Caption": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Add/Caption",
"text": "add shortcut"
},
"$:/language/ControlPanel/KeyboardShortcuts/Caption": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Caption",
"text": "Keyboard Shortcuts"
},
"$:/language/ControlPanel/KeyboardShortcuts/Hint": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Hint",
"text": "Manage keyboard shortcut assignments"
},
"$:/language/ControlPanel/KeyboardShortcuts/NoShortcuts/Caption": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/NoShortcuts/Caption",
"text": "No keyboard shortcuts assigned"
},
"$:/language/ControlPanel/KeyboardShortcuts/Remove/Hint": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Remove/Hint",
"text": "remove keyboard shortcut"
},
"$:/language/ControlPanel/KeyboardShortcuts/Platform/All": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Platform/All",
"text": "All platforms"
},
"$:/language/ControlPanel/KeyboardShortcuts/Platform/Mac": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Platform/Mac",
"text": "Macintosh platform only"
},
"$:/language/ControlPanel/KeyboardShortcuts/Platform/NonMac": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Platform/NonMac",
"text": "Non-Macintosh platforms only"
},
"$:/language/ControlPanel/KeyboardShortcuts/Platform/Linux": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Platform/Linux",
"text": "Linux platform only"
},
"$:/language/ControlPanel/KeyboardShortcuts/Platform/NonLinux": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Platform/NonLinux",
"text": "Non-Linux platforms only"
},
"$:/language/ControlPanel/KeyboardShortcuts/Platform/Windows": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Platform/Windows",
"text": "Windows platform only"
},
"$:/language/ControlPanel/KeyboardShortcuts/Platform/NonWindows": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Platform/NonWindows",
"text": "Non-Windows platforms only"
},
"$:/language/ControlPanel/LoadedModules/Caption": {
"title": "$:/language/ControlPanel/LoadedModules/Caption",
"text": "Loaded Modules"
},
"$:/language/ControlPanel/LoadedModules/Hint": {
"title": "$:/language/ControlPanel/LoadedModules/Hint",
"text": "These are the currently loaded tiddler modules linked to their source tiddlers. Any italicised modules lack a source tiddler, typically because they were setup during the boot process."
},
"$:/language/ControlPanel/Palette/Caption": {
"title": "$:/language/ControlPanel/Palette/Caption",
"text": "Palette"
},
"$:/language/ControlPanel/Palette/Editor/Clone/Caption": {
"title": "$:/language/ControlPanel/Palette/Editor/Clone/Caption",
"text": "clone"
},
"$:/language/ControlPanel/Palette/Editor/Clone/Prompt": {
"title": "$:/language/ControlPanel/Palette/Editor/Clone/Prompt",
"text": "It is recommended that you clone this shadow palette before editing it"
},
"$:/language/ControlPanel/Palette/Editor/Prompt/Modified": {
"title": "$:/language/ControlPanel/Palette/Editor/Prompt/Modified",
"text": "This shadow palette has been modified"
},
"$:/language/ControlPanel/Palette/Editor/Prompt": {
"title": "$:/language/ControlPanel/Palette/Editor/Prompt",
"text": "Editing"
},
"$:/language/ControlPanel/Palette/Editor/Reset/Caption": {
"title": "$:/language/ControlPanel/Palette/Editor/Reset/Caption",
"text": "reset"
},
"$:/language/ControlPanel/Palette/HideEditor/Caption": {
"title": "$:/language/ControlPanel/Palette/HideEditor/Caption",
"text": "hide editor"
},
"$:/language/ControlPanel/Palette/Prompt": {
"title": "$:/language/ControlPanel/Palette/Prompt",
"text": "Current palette:"
},
"$:/language/ControlPanel/Palette/ShowEditor/Caption": {
"title": "$:/language/ControlPanel/Palette/ShowEditor/Caption",
"text": "show editor"
},
"$:/language/ControlPanel/Parsing/Caption": {
"title": "$:/language/ControlPanel/Parsing/Caption",
"text": "Parsing"
},
"$:/language/ControlPanel/Parsing/Hint": {
"title": "$:/language/ControlPanel/Parsing/Hint",
"text": "Here you can globally disable individual wiki parser rules. Take care as disabling some parser rules can prevent ~TiddlyWiki functioning correctly (you can restore normal operation with [[safe mode|http://tiddlywiki.com/#SafeMode]] )"
},
"$:/language/ControlPanel/Parsing/Block/Caption": {
"title": "$:/language/ControlPanel/Parsing/Block/Caption",
"text": "Block Parse Rules"
},
"$:/language/ControlPanel/Parsing/Inline/Caption": {
"title": "$:/language/ControlPanel/Parsing/Inline/Caption",
"text": "Inline Parse Rules"
},
"$:/language/ControlPanel/Parsing/Pragma/Caption": {
"title": "$:/language/ControlPanel/Parsing/Pragma/Caption",
"text": "Pragma Parse Rules"
},
"$:/language/ControlPanel/Plugins/Add/Caption": {
"title": "$:/language/ControlPanel/Plugins/Add/Caption",
"text": "Get more plugins"
},
"$:/language/ControlPanel/Plugins/Add/Hint": {
"title": "$:/language/ControlPanel/Plugins/Add/Hint",
"text": "Install plugins from the official library"
},
"$:/language/ControlPanel/Plugins/AlreadyInstalled/Hint": {
"title": "$:/language/ControlPanel/Plugins/AlreadyInstalled/Hint",
"text": "This plugin is already installed at version <$text text=<<installedVersion>>/>"
},
"$:/language/ControlPanel/Plugins/Caption": {
"title": "$:/language/ControlPanel/Plugins/Caption",
"text": "Plugins"
},
"$:/language/ControlPanel/Plugins/Disable/Caption": {
"title": "$:/language/ControlPanel/Plugins/Disable/Caption",
"text": "disable"
},
"$:/language/ControlPanel/Plugins/Disable/Hint": {
"title": "$:/language/ControlPanel/Plugins/Disable/Hint",
"text": "Disable this plugin when reloading page"
},
"$:/language/ControlPanel/Plugins/Disabled/Status": {
"title": "$:/language/ControlPanel/Plugins/Disabled/Status",
"text": "(disabled)"
},
"$:/language/ControlPanel/Plugins/Empty/Hint": {
"title": "$:/language/ControlPanel/Plugins/Empty/Hint",
"text": "None"
},
"$:/language/ControlPanel/Plugins/Enable/Caption": {
"title": "$:/language/ControlPanel/Plugins/Enable/Caption",
"text": "enable"
},
"$:/language/ControlPanel/Plugins/Enable/Hint": {
"title": "$:/language/ControlPanel/Plugins/Enable/Hint",
"text": "Enable this plugin when reloading page"
},
"$:/language/ControlPanel/Plugins/Install/Caption": {
"title": "$:/language/ControlPanel/Plugins/Install/Caption",
"text": "install"
},
"$:/language/ControlPanel/Plugins/Installed/Hint": {
"title": "$:/language/ControlPanel/Plugins/Installed/Hint",
"text": "Currently installed plugins:"
},
"$:/language/ControlPanel/Plugins/Languages/Caption": {
"title": "$:/language/ControlPanel/Plugins/Languages/Caption",
"text": "Languages"
},
"$:/language/ControlPanel/Plugins/Languages/Hint": {
"title": "$:/language/ControlPanel/Plugins/Languages/Hint",
"text": "Language pack plugins"
},
"$:/language/ControlPanel/Plugins/NoInfoFound/Hint": {
"title": "$:/language/ControlPanel/Plugins/NoInfoFound/Hint",
"text": "No ''\"<$text text=<<currentTab>>/>\"'' found"
},
"$:/language/ControlPanel/Plugins/NoInformation/Hint": {
"title": "$:/language/ControlPanel/Plugins/NoInformation/Hint",
"text": "No information provided"
},
"$:/language/ControlPanel/Plugins/NotInstalled/Hint": {
"title": "$:/language/ControlPanel/Plugins/NotInstalled/Hint",
"text": "This plugin is not currently installed"
},
"$:/language/ControlPanel/Plugins/OpenPluginLibrary": {
"title": "$:/language/ControlPanel/Plugins/OpenPluginLibrary",
"text": "open plugin library"
},
"$:/language/ControlPanel/Plugins/Plugins/Caption": {
"title": "$:/language/ControlPanel/Plugins/Plugins/Caption",
"text": "Plugins"
},
"$:/language/ControlPanel/Plugins/Plugins/Hint": {
"title": "$:/language/ControlPanel/Plugins/Plugins/Hint",
"text": "Plugins"
},
"$:/language/ControlPanel/Plugins/Reinstall/Caption": {
"title": "$:/language/ControlPanel/Plugins/Reinstall/Caption",
"text": "reinstall"
},
"$:/language/ControlPanel/Plugins/Themes/Caption": {
"title": "$:/language/ControlPanel/Plugins/Themes/Caption",
"text": "Themes"
},
"$:/language/ControlPanel/Plugins/Themes/Hint": {
"title": "$:/language/ControlPanel/Plugins/Themes/Hint",
"text": "Theme plugins"
},
"$:/language/ControlPanel/Saving/Caption": {
"title": "$:/language/ControlPanel/Saving/Caption",
"text": "Saving"
},
"$:/language/ControlPanel/Saving/Heading": {
"title": "$:/language/ControlPanel/Saving/Heading",
"text": "Saving"
},
"$:/language/ControlPanel/Saving/TiddlySpot/Advanced/Heading": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/Advanced/Heading",
"text": "Advanced Settings"
},
"$:/language/ControlPanel/Saving/TiddlySpot/BackupDir": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/BackupDir",
"text": "Backup Directory"
},
"$:/language/ControlPanel/Saving/TiddlySpot/Backups": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/Backups",
"text": "Backups"
},
"$:/language/ControlPanel/Saving/TiddlySpot/Description": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/Description",
"text": "These settings are only used when saving to http://tiddlyspot.com or a compatible remote server"
},
"$:/language/ControlPanel/Saving/TiddlySpot/Filename": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/Filename",
"text": "Upload Filename"
},
"$:/language/ControlPanel/Saving/TiddlySpot/Heading": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/Heading",
"text": "~TiddlySpot"
},
"$:/language/ControlPanel/Saving/TiddlySpot/Hint": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/Hint",
"text": "//The server URL defaults to `http://<wikiname>.tiddlyspot.com/store.cgi` and can be changed to use a custom server address, e.g. `http://example.com/store.php`.//"
},
"$:/language/ControlPanel/Saving/TiddlySpot/Password": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/Password",
"text": "Password"
},
"$:/language/ControlPanel/Saving/TiddlySpot/ServerURL": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/ServerURL",
"text": "Server URL"
},
"$:/language/ControlPanel/Saving/TiddlySpot/UploadDir": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/UploadDir",
"text": "Upload Directory"
},
"$:/language/ControlPanel/Saving/TiddlySpot/UserName": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/UserName",
"text": "Wiki Name"
},
"$:/language/ControlPanel/Settings/AutoSave/Caption": {
"title": "$:/language/ControlPanel/Settings/AutoSave/Caption",
"text": "Autosave"
},
"$:/language/ControlPanel/Settings/AutoSave/Disabled/Description": {
"title": "$:/language/ControlPanel/Settings/AutoSave/Disabled/Description",
"text": "Do not save changes automatically"
},
"$:/language/ControlPanel/Settings/AutoSave/Enabled/Description": {
"title": "$:/language/ControlPanel/Settings/AutoSave/Enabled/Description",
"text": "Save changes automatically"
},
"$:/language/ControlPanel/Settings/AutoSave/Hint": {
"title": "$:/language/ControlPanel/Settings/AutoSave/Hint",
"text": "Automatically save changes during editing"
},
"$:/language/ControlPanel/Settings/CamelCase/Caption": {
"title": "$:/language/ControlPanel/Settings/CamelCase/Caption",
"text": "Camel Case Wiki Links"
},
"$:/language/ControlPanel/Settings/CamelCase/Hint": {
"title": "$:/language/ControlPanel/Settings/CamelCase/Hint",
"text": "You can globally disable automatic linking of ~CamelCase phrases. Requires reload to take effect"
},
"$:/language/ControlPanel/Settings/CamelCase/Description": {
"title": "$:/language/ControlPanel/Settings/CamelCase/Description",
"text": "Enable automatic ~CamelCase linking"
},
"$:/language/ControlPanel/Settings/Caption": {
"title": "$:/language/ControlPanel/Settings/Caption",
"text": "Settings"
},
"$:/language/ControlPanel/Settings/EditorToolbar/Caption": {
"title": "$:/language/ControlPanel/Settings/EditorToolbar/Caption",
"text": "Editor Toolbar"
},
"$:/language/ControlPanel/Settings/EditorToolbar/Hint": {
"title": "$:/language/ControlPanel/Settings/EditorToolbar/Hint",
"text": "Enable or disable the editor toolbar:"
},
"$:/language/ControlPanel/Settings/EditorToolbar/Description": {
"title": "$:/language/ControlPanel/Settings/EditorToolbar/Description",
"text": "Show editor toolbar"
},
"$:/language/ControlPanel/Settings/Hint": {
"title": "$:/language/ControlPanel/Settings/Hint",
"text": "These settings let you customise the behaviour of TiddlyWiki."
},
"$:/language/ControlPanel/Settings/NavigationAddressBar/Caption": {
"title": "$:/language/ControlPanel/Settings/NavigationAddressBar/Caption",
"text": "Navigation Address Bar"
},
"$:/language/ControlPanel/Settings/NavigationAddressBar/Hint": {
"title": "$:/language/ControlPanel/Settings/NavigationAddressBar/Hint",
"text": "Behaviour of the browser address bar when navigating to a tiddler:"
},
"$:/language/ControlPanel/Settings/NavigationAddressBar/No/Description": {
"title": "$:/language/ControlPanel/Settings/NavigationAddressBar/No/Description",
"text": "Do not update the address bar"
},
"$:/language/ControlPanel/Settings/NavigationAddressBar/Permalink/Description": {
"title": "$:/language/ControlPanel/Settings/NavigationAddressBar/Permalink/Description",
"text": "Include the target tiddler"
},
"$:/language/ControlPanel/Settings/NavigationAddressBar/Permaview/Description": {
"title": "$:/language/ControlPanel/Settings/NavigationAddressBar/Permaview/Description",
"text": "Include the target tiddler and the current story sequence"
},
"$:/language/ControlPanel/Settings/NavigationHistory/Caption": {
"title": "$:/language/ControlPanel/Settings/NavigationHistory/Caption",
"text": "Navigation History"
},
"$:/language/ControlPanel/Settings/NavigationHistory/Hint": {
"title": "$:/language/ControlPanel/Settings/NavigationHistory/Hint",
"text": "Update browser history when navigating to a tiddler:"
},
"$:/language/ControlPanel/Settings/NavigationHistory/No/Description": {
"title": "$:/language/ControlPanel/Settings/NavigationHistory/No/Description",
"text": "Do not update history"
},
"$:/language/ControlPanel/Settings/NavigationHistory/Yes/Description": {
"title": "$:/language/ControlPanel/Settings/NavigationHistory/Yes/Description",
"text": "Update history"
},
"$:/language/ControlPanel/Settings/PerformanceInstrumentation/Caption": {
"title": "$:/language/ControlPanel/Settings/PerformanceInstrumentation/Caption",
"text": "Performance Instrumentation"
},
"$:/language/ControlPanel/Settings/PerformanceInstrumentation/Hint": {
"title": "$:/language/ControlPanel/Settings/PerformanceInstrumentation/Hint",
"text": "Displays performance statistics in the browser developer console. Requires reload to take effect"
},
"$:/language/ControlPanel/Settings/PerformanceInstrumentation/Description": {
"title": "$:/language/ControlPanel/Settings/PerformanceInstrumentation/Description",
"text": "Enable performance instrumentation"
},
"$:/language/ControlPanel/Settings/ToolbarButtonStyle/Caption": {
"title": "$:/language/ControlPanel/Settings/ToolbarButtonStyle/Caption",
"text": "Toolbar Button Style"
},
"$:/language/ControlPanel/Settings/ToolbarButtonStyle/Hint": {
"title": "$:/language/ControlPanel/Settings/ToolbarButtonStyle/Hint",
"text": "Choose the style for toolbar buttons:"
},
"$:/language/ControlPanel/Settings/ToolbarButtonStyle/Styles/Borderless": {
"title": "$:/language/ControlPanel/Settings/ToolbarButtonStyle/Styles/Borderless",
"text": "Borderless"
},
"$:/language/ControlPanel/Settings/ToolbarButtonStyle/Styles/Boxed": {
"title": "$:/language/ControlPanel/Settings/ToolbarButtonStyle/Styles/Boxed",
"text": "Boxed"
},
"$:/language/ControlPanel/Settings/ToolbarButtonStyle/Styles/Rounded": {
"title": "$:/language/ControlPanel/Settings/ToolbarButtonStyle/Styles/Rounded",
"text": "Rounded"
},
"$:/language/ControlPanel/Settings/ToolbarButtons/Caption": {
"title": "$:/language/ControlPanel/Settings/ToolbarButtons/Caption",
"text": "Toolbar Buttons"
},
"$:/language/ControlPanel/Settings/ToolbarButtons/Hint": {
"title": "$:/language/ControlPanel/Settings/ToolbarButtons/Hint",
"text": "Default toolbar button appearance:"
},
"$:/language/ControlPanel/Settings/ToolbarButtons/Icons/Description": {
"title": "$:/language/ControlPanel/Settings/ToolbarButtons/Icons/Description",
"text": "Include icon"
},
"$:/language/ControlPanel/Settings/ToolbarButtons/Text/Description": {
"title": "$:/language/ControlPanel/Settings/ToolbarButtons/Text/Description",
"text": "Include text"
},
"$:/language/ControlPanel/Settings/DefaultSidebarTab/Caption": {
"title": "$:/language/ControlPanel/Settings/DefaultSidebarTab/Caption",
"text": "Default Sidebar Tab"
},
"$:/language/ControlPanel/Settings/DefaultSidebarTab/Hint": {
"title": "$:/language/ControlPanel/Settings/DefaultSidebarTab/Hint",
"text": "Specify which sidebar tab is displayed by default"
},
"$:/language/ControlPanel/Settings/LinkToBehaviour/Caption": {
"title": "$:/language/ControlPanel/Settings/LinkToBehaviour/Caption",
"text": "Tiddler Opening Behaviour"
},
"$:/language/ControlPanel/Settings/LinkToBehaviour/InsideRiver/Hint": {
"title": "$:/language/ControlPanel/Settings/LinkToBehaviour/InsideRiver/Hint",
"text": "Navigation from //within// the story river"
},
"$:/language/ControlPanel/Settings/LinkToBehaviour/OutsideRiver/Hint": {
"title": "$:/language/ControlPanel/Settings/LinkToBehaviour/OutsideRiver/Hint",
"text": "Navigation from //outside// the story river"
},
"$:/language/ControlPanel/Settings/LinkToBehaviour/OpenAbove": {
"title": "$:/language/ControlPanel/Settings/LinkToBehaviour/OpenAbove",
"text": "Open above the current tiddler"
},
"$:/language/ControlPanel/Settings/LinkToBehaviour/OpenBelow": {
"title": "$:/language/ControlPanel/Settings/LinkToBehaviour/OpenBelow",
"text": "Open below the current tiddler"
},
"$:/language/ControlPanel/Settings/LinkToBehaviour/OpenAtTop": {
"title": "$:/language/ControlPanel/Settings/LinkToBehaviour/OpenAtTop",
"text": "Open at the top of the story river"
},
"$:/language/ControlPanel/Settings/LinkToBehaviour/OpenAtBottom": {
"title": "$:/language/ControlPanel/Settings/LinkToBehaviour/OpenAtBottom",
"text": "Open at the bottom of the story river"
},
"$:/language/ControlPanel/Settings/TitleLinks/Caption": {
"title": "$:/language/ControlPanel/Settings/TitleLinks/Caption",
"text": "Tiddler Titles"
},
"$:/language/ControlPanel/Settings/TitleLinks/Hint": {
"title": "$:/language/ControlPanel/Settings/TitleLinks/Hint",
"text": "Optionally display tiddler titles as links"
},
"$:/language/ControlPanel/Settings/TitleLinks/No/Description": {
"title": "$:/language/ControlPanel/Settings/TitleLinks/No/Description",
"text": "Do not display tiddler titles as links"
},
"$:/language/ControlPanel/Settings/TitleLinks/Yes/Description": {
"title": "$:/language/ControlPanel/Settings/TitleLinks/Yes/Description",
"text": "Display tiddler titles as links"
},
"$:/language/ControlPanel/Settings/MissingLinks/Caption": {
"title": "$:/language/ControlPanel/Settings/MissingLinks/Caption",
"text": "Wiki Links"
},
"$:/language/ControlPanel/Settings/MissingLinks/Hint": {
"title": "$:/language/ControlPanel/Settings/MissingLinks/Hint",
"text": "Choose whether to link to tiddlers that do not exist yet"
},
"$:/language/ControlPanel/Settings/MissingLinks/Description": {
"title": "$:/language/ControlPanel/Settings/MissingLinks/Description",
"text": "Enable links to missing tiddlers"
},
"$:/language/ControlPanel/StoryView/Caption": {
"title": "$:/language/ControlPanel/StoryView/Caption",
"text": "Story View"
},
"$:/language/ControlPanel/StoryView/Prompt": {
"title": "$:/language/ControlPanel/StoryView/Prompt",
"text": "Current view:"
},
"$:/language/ControlPanel/Theme/Caption": {
"title": "$:/language/ControlPanel/Theme/Caption",
"text": "Theme"
},
"$:/language/ControlPanel/Theme/Prompt": {
"title": "$:/language/ControlPanel/Theme/Prompt",
"text": "Current theme:"
},
"$:/language/ControlPanel/TiddlerFields/Caption": {
"title": "$:/language/ControlPanel/TiddlerFields/Caption",
"text": "Tiddler Fields"
},
"$:/language/ControlPanel/TiddlerFields/Hint": {
"title": "$:/language/ControlPanel/TiddlerFields/Hint",
"text": "This is the full set of TiddlerFields in use in this wiki (including system tiddlers but excluding shadow tiddlers)."
},
"$:/language/ControlPanel/Toolbars/Caption": {
"title": "$:/language/ControlPanel/Toolbars/Caption",
"text": "Toolbars"
},
"$:/language/ControlPanel/Toolbars/EditToolbar/Caption": {
"title": "$:/language/ControlPanel/Toolbars/EditToolbar/Caption",
"text": "Edit Toolbar"
},
"$:/language/ControlPanel/Toolbars/EditToolbar/Hint": {
"title": "$:/language/ControlPanel/Toolbars/EditToolbar/Hint",
"text": "Choose which buttons are displayed for tiddlers in edit mode"
},
"$:/language/ControlPanel/Toolbars/Hint": {
"title": "$:/language/ControlPanel/Toolbars/Hint",
"text": "Select which toolbar buttons are displayed"
},
"$:/language/ControlPanel/Toolbars/PageControls/Caption": {
"title": "$:/language/ControlPanel/Toolbars/PageControls/Caption",
"text": "Page Toolbar"
},
"$:/language/ControlPanel/Toolbars/PageControls/Hint": {
"title": "$:/language/ControlPanel/Toolbars/PageControls/Hint",
"text": "Choose which buttons are displayed on the main page toolbar"
},
"$:/language/ControlPanel/Toolbars/EditorToolbar/Caption": {
"title": "$:/language/ControlPanel/Toolbars/EditorToolbar/Caption",
"text": "Editor Toolbar"
},
"$:/language/ControlPanel/Toolbars/EditorToolbar/Hint": {
"title": "$:/language/ControlPanel/Toolbars/EditorToolbar/Hint",
"text": "Choose which buttons are displayed in the editor toolbar. Note that some buttons will only appear when editing tiddlers of a certain type"
},
"$:/language/ControlPanel/Toolbars/ViewToolbar/Caption": {
"title": "$:/language/ControlPanel/Toolbars/ViewToolbar/Caption",
"text": "View Toolbar"
},
"$:/language/ControlPanel/Toolbars/ViewToolbar/Hint": {
"title": "$:/language/ControlPanel/Toolbars/ViewToolbar/Hint",
"text": "Choose which buttons are displayed for tiddlers in view mode"
},
"$:/language/ControlPanel/Tools/Download/Full/Caption": {
"title": "$:/language/ControlPanel/Tools/Download/Full/Caption",
"text": "Download full wiki"
},
"$:/language/Date/DaySuffix/1": {
"title": "$:/language/Date/DaySuffix/1",
"text": "st"
},
"$:/language/Date/DaySuffix/2": {
"title": "$:/language/Date/DaySuffix/2",
"text": "nd"
},
"$:/language/Date/DaySuffix/3": {
"title": "$:/language/Date/DaySuffix/3",
"text": "rd"
},
"$:/language/Date/DaySuffix/4": {
"title": "$:/language/Date/DaySuffix/4",
"text": "th"
},
"$:/language/Date/DaySuffix/5": {
"title": "$:/language/Date/DaySuffix/5",
"text": "th"
},
"$:/language/Date/DaySuffix/6": {
"title": "$:/language/Date/DaySuffix/6",
"text": "th"
},
"$:/language/Date/DaySuffix/7": {
"title": "$:/language/Date/DaySuffix/7",
"text": "th"
},
"$:/language/Date/DaySuffix/8": {
"title": "$:/language/Date/DaySuffix/8",
"text": "th"
},
"$:/language/Date/DaySuffix/9": {
"title": "$:/language/Date/DaySuffix/9",
"text": "th"
},
"$:/language/Date/DaySuffix/10": {
"title": "$:/language/Date/DaySuffix/10",
"text": "th"
},
"$:/language/Date/DaySuffix/11": {
"title": "$:/language/Date/DaySuffix/11",
"text": "th"
},
"$:/language/Date/DaySuffix/12": {
"title": "$:/language/Date/DaySuffix/12",
"text": "th"
},
"$:/language/Date/DaySuffix/13": {
"title": "$:/language/Date/DaySuffix/13",
"text": "th"
},
"$:/language/Date/DaySuffix/14": {
"title": "$:/language/Date/DaySuffix/14",
"text": "th"
},
"$:/language/Date/DaySuffix/15": {
"title": "$:/language/Date/DaySuffix/15",
"text": "th"
},
"$:/language/Date/DaySuffix/16": {
"title": "$:/language/Date/DaySuffix/16",
"text": "th"
},
"$:/language/Date/DaySuffix/17": {
"title": "$:/language/Date/DaySuffix/17",
"text": "th"
},
"$:/language/Date/DaySuffix/18": {
"title": "$:/language/Date/DaySuffix/18",
"text": "th"
},
"$:/language/Date/DaySuffix/19": {
"title": "$:/language/Date/DaySuffix/19",
"text": "th"
},
"$:/language/Date/DaySuffix/20": {
"title": "$:/language/Date/DaySuffix/20",
"text": "th"
},
"$:/language/Date/DaySuffix/21": {
"title": "$:/language/Date/DaySuffix/21",
"text": "st"
},
"$:/language/Date/DaySuffix/22": {
"title": "$:/language/Date/DaySuffix/22",
"text": "nd"
},
"$:/language/Date/DaySuffix/23": {
"title": "$:/language/Date/DaySuffix/23",
"text": "rd"
},
"$:/language/Date/DaySuffix/24": {
"title": "$:/language/Date/DaySuffix/24",
"text": "th"
},
"$:/language/Date/DaySuffix/25": {
"title": "$:/language/Date/DaySuffix/25",
"text": "th"
},
"$:/language/Date/DaySuffix/26": {
"title": "$:/language/Date/DaySuffix/26",
"text": "th"
},
"$:/language/Date/DaySuffix/27": {
"title": "$:/language/Date/DaySuffix/27",
"text": "th"
},
"$:/language/Date/DaySuffix/28": {
"title": "$:/language/Date/DaySuffix/28",
"text": "th"
},
"$:/language/Date/DaySuffix/29": {
"title": "$:/language/Date/DaySuffix/29",
"text": "th"
},
"$:/language/Date/DaySuffix/30": {
"title": "$:/language/Date/DaySuffix/30",
"text": "th"
},
"$:/language/Date/DaySuffix/31": {
"title": "$:/language/Date/DaySuffix/31",
"text": "st"
},
"$:/language/Date/Long/Day/0": {
"title": "$:/language/Date/Long/Day/0",
"text": "Sunday"
},
"$:/language/Date/Long/Day/1": {
"title": "$:/language/Date/Long/Day/1",
"text": "Monday"
},
"$:/language/Date/Long/Day/2": {
"title": "$:/language/Date/Long/Day/2",
"text": "Tuesday"
},
"$:/language/Date/Long/Day/3": {
"title": "$:/language/Date/Long/Day/3",
"text": "Wednesday"
},
"$:/language/Date/Long/Day/4": {
"title": "$:/language/Date/Long/Day/4",
"text": "Thursday"
},
"$:/language/Date/Long/Day/5": {
"title": "$:/language/Date/Long/Day/5",
"text": "Friday"
},
"$:/language/Date/Long/Day/6": {
"title": "$:/language/Date/Long/Day/6",
"text": "Saturday"
},
"$:/language/Date/Long/Month/1": {
"title": "$:/language/Date/Long/Month/1",
"text": "January"
},
"$:/language/Date/Long/Month/2": {
"title": "$:/language/Date/Long/Month/2",
"text": "February"
},
"$:/language/Date/Long/Month/3": {
"title": "$:/language/Date/Long/Month/3",
"text": "March"
},
"$:/language/Date/Long/Month/4": {
"title": "$:/language/Date/Long/Month/4",
"text": "April"
},
"$:/language/Date/Long/Month/5": {
"title": "$:/language/Date/Long/Month/5",
"text": "May"
},
"$:/language/Date/Long/Month/6": {
"title": "$:/language/Date/Long/Month/6",
"text": "June"
},
"$:/language/Date/Long/Month/7": {
"title": "$:/language/Date/Long/Month/7",
"text": "July"
},
"$:/language/Date/Long/Month/8": {
"title": "$:/language/Date/Long/Month/8",
"text": "August"
},
"$:/language/Date/Long/Month/9": {
"title": "$:/language/Date/Long/Month/9",
"text": "September"
},
"$:/language/Date/Long/Month/10": {
"title": "$:/language/Date/Long/Month/10",
"text": "October"
},
"$:/language/Date/Long/Month/11": {
"title": "$:/language/Date/Long/Month/11",
"text": "November"
},
"$:/language/Date/Long/Month/12": {
"title": "$:/language/Date/Long/Month/12",
"text": "December"
},
"$:/language/Date/Period/am": {
"title": "$:/language/Date/Period/am",
"text": "am"
},
"$:/language/Date/Period/pm": {
"title": "$:/language/Date/Period/pm",
"text": "pm"
},
"$:/language/Date/Short/Day/0": {
"title": "$:/language/Date/Short/Day/0",
"text": "Sun"
},
"$:/language/Date/Short/Day/1": {
"title": "$:/language/Date/Short/Day/1",
"text": "Mon"
},
"$:/language/Date/Short/Day/2": {
"title": "$:/language/Date/Short/Day/2",
"text": "Tue"
},
"$:/language/Date/Short/Day/3": {
"title": "$:/language/Date/Short/Day/3",
"text": "Wed"
},
"$:/language/Date/Short/Day/4": {
"title": "$:/language/Date/Short/Day/4",
"text": "Thu"
},
"$:/language/Date/Short/Day/5": {
"title": "$:/language/Date/Short/Day/5",
"text": "Fri"
},
"$:/language/Date/Short/Day/6": {
"title": "$:/language/Date/Short/Day/6",
"text": "Sat"
},
"$:/language/Date/Short/Month/1": {
"title": "$:/language/Date/Short/Month/1",
"text": "Jan"
},
"$:/language/Date/Short/Month/2": {
"title": "$:/language/Date/Short/Month/2",
"text": "Feb"
},
"$:/language/Date/Short/Month/3": {
"title": "$:/language/Date/Short/Month/3",
"text": "Mar"
},
"$:/language/Date/Short/Month/4": {
"title": "$:/language/Date/Short/Month/4",
"text": "Apr"
},
"$:/language/Date/Short/Month/5": {
"title": "$:/language/Date/Short/Month/5",
"text": "May"
},
"$:/language/Date/Short/Month/6": {
"title": "$:/language/Date/Short/Month/6",
"text": "Jun"
},
"$:/language/Date/Short/Month/7": {
"title": "$:/language/Date/Short/Month/7",
"text": "Jul"
},
"$:/language/Date/Short/Month/8": {
"title": "$:/language/Date/Short/Month/8",
"text": "Aug"
},
"$:/language/Date/Short/Month/9": {
"title": "$:/language/Date/Short/Month/9",
"text": "Sep"
},
"$:/language/Date/Short/Month/10": {
"title": "$:/language/Date/Short/Month/10",
"text": "Oct"
},
"$:/language/Date/Short/Month/11": {
"title": "$:/language/Date/Short/Month/11",
"text": "Nov"
},
"$:/language/Date/Short/Month/12": {
"title": "$:/language/Date/Short/Month/12",
"text": "Dec"
},
"$:/language/RelativeDate/Future/Days": {
"title": "$:/language/RelativeDate/Future/Days",
"text": "<<period>> days from now"
},
"$:/language/RelativeDate/Future/Hours": {
"title": "$:/language/RelativeDate/Future/Hours",
"text": "<<period>> hours from now"
},
"$:/language/RelativeDate/Future/Minutes": {
"title": "$:/language/RelativeDate/Future/Minutes",
"text": "<<period>> minutes from now"
},
"$:/language/RelativeDate/Future/Months": {
"title": "$:/language/RelativeDate/Future/Months",
"text": "<<period>> months from now"
},
"$:/language/RelativeDate/Future/Second": {
"title": "$:/language/RelativeDate/Future/Second",
"text": "1 second from now"
},
"$:/language/RelativeDate/Future/Seconds": {
"title": "$:/language/RelativeDate/Future/Seconds",
"text": "<<period>> seconds from now"
},
"$:/language/RelativeDate/Future/Years": {
"title": "$:/language/RelativeDate/Future/Years",
"text": "<<period>> years from now"
},
"$:/language/RelativeDate/Past/Days": {
"title": "$:/language/RelativeDate/Past/Days",
"text": "<<period>> days ago"
},
"$:/language/RelativeDate/Past/Hours": {
"title": "$:/language/RelativeDate/Past/Hours",
"text": "<<period>> hours ago"
},
"$:/language/RelativeDate/Past/Minutes": {
"title": "$:/language/RelativeDate/Past/Minutes",
"text": "<<period>> minutes ago"
},
"$:/language/RelativeDate/Past/Months": {
"title": "$:/language/RelativeDate/Past/Months",
"text": "<<period>> months ago"
},
"$:/language/RelativeDate/Past/Second": {
"title": "$:/language/RelativeDate/Past/Second",
"text": "1 second ago"
},
"$:/language/RelativeDate/Past/Seconds": {
"title": "$:/language/RelativeDate/Past/Seconds",
"text": "<<period>> seconds ago"
},
"$:/language/RelativeDate/Past/Years": {
"title": "$:/language/RelativeDate/Past/Years",
"text": "<<period>> years ago"
},
"$:/language/Docs/ModuleTypes/animation": {
"title": "$:/language/Docs/ModuleTypes/animation",
"text": "Animations that may be used with the RevealWidget."
},
"$:/language/Docs/ModuleTypes/command": {
"title": "$:/language/Docs/ModuleTypes/command",
"text": "Commands that can be executed under Node.js."
},
"$:/language/Docs/ModuleTypes/config": {
"title": "$:/language/Docs/ModuleTypes/config",
"text": "Data to be inserted into `$tw.config`."
},
"$:/language/Docs/ModuleTypes/filteroperator": {
"title": "$:/language/Docs/ModuleTypes/filteroperator",
"text": "Individual filter operator methods."
},
"$:/language/Docs/ModuleTypes/global": {
"title": "$:/language/Docs/ModuleTypes/global",
"text": "Global data to be inserted into `$tw`."
},
"$:/language/Docs/ModuleTypes/isfilteroperator": {
"title": "$:/language/Docs/ModuleTypes/isfilteroperator",
"text": "Operands for the ''is'' filter operator."
},
"$:/language/Docs/ModuleTypes/macro": {
"title": "$:/language/Docs/ModuleTypes/macro",
"text": "JavaScript macro definitions."
},
"$:/language/Docs/ModuleTypes/parser": {
"title": "$:/language/Docs/ModuleTypes/parser",
"text": "Parsers for different content types."
},
"$:/language/Docs/ModuleTypes/saver": {
"title": "$:/language/Docs/ModuleTypes/saver",
"text": "Savers handle different methods for saving files from the browser."
},
"$:/language/Docs/ModuleTypes/startup": {
"title": "$:/language/Docs/ModuleTypes/startup",
"text": "Startup functions."
},
"$:/language/Docs/ModuleTypes/storyview": {
"title": "$:/language/Docs/ModuleTypes/storyview",
"text": "Story views customise the animation and behaviour of list widgets."
},
"$:/language/Docs/ModuleTypes/tiddlerdeserializer": {
"title": "$:/language/Docs/ModuleTypes/tiddlerdeserializer",
"text": "Converts different content types into tiddlers."
},
"$:/language/Docs/ModuleTypes/tiddlerfield": {
"title": "$:/language/Docs/ModuleTypes/tiddlerfield",
"text": "Defines the behaviour of an individual tiddler field."
},
"$:/language/Docs/ModuleTypes/tiddlermethod": {
"title": "$:/language/Docs/ModuleTypes/tiddlermethod",
"text": "Adds methods to the `$tw.Tiddler` prototype."
},
"$:/language/Docs/ModuleTypes/upgrader": {
"title": "$:/language/Docs/ModuleTypes/upgrader",
"text": "Applies upgrade processing to tiddlers during an upgrade/import."
},
"$:/language/Docs/ModuleTypes/utils": {
"title": "$:/language/Docs/ModuleTypes/utils",
"text": "Adds methods to `$tw.utils`."
},
"$:/language/Docs/ModuleTypes/utils-node": {
"title": "$:/language/Docs/ModuleTypes/utils-node",
"text": "Adds Node.js-specific methods to `$tw.utils`."
},
"$:/language/Docs/ModuleTypes/widget": {
"title": "$:/language/Docs/ModuleTypes/widget",
"text": "Widgets encapsulate DOM rendering and refreshing."
},
"$:/language/Docs/ModuleTypes/wikimethod": {
"title": "$:/language/Docs/ModuleTypes/wikimethod",
"text": "Adds methods to `$tw.Wiki`."
},
"$:/language/Docs/ModuleTypes/wikirule": {
"title": "$:/language/Docs/ModuleTypes/wikirule",
"text": "Individual parser rules for the main WikiText parser."
},
"$:/language/Docs/PaletteColours/alert-background": {
"title": "$:/language/Docs/PaletteColours/alert-background",
"text": "Alert background"
},
"$:/language/Docs/PaletteColours/alert-border": {
"title": "$:/language/Docs/PaletteColours/alert-border",
"text": "Alert border"
},
"$:/language/Docs/PaletteColours/alert-highlight": {
"title": "$:/language/Docs/PaletteColours/alert-highlight",
"text": "Alert highlight"
},
"$:/language/Docs/PaletteColours/alert-muted-foreground": {
"title": "$:/language/Docs/PaletteColours/alert-muted-foreground",
"text": "Alert muted foreground"
},
"$:/language/Docs/PaletteColours/background": {
"title": "$:/language/Docs/PaletteColours/background",
"text": "General background"
},
"$:/language/Docs/PaletteColours/blockquote-bar": {
"title": "$:/language/Docs/PaletteColours/blockquote-bar",
"text": "Blockquote bar"
},
"$:/language/Docs/PaletteColours/button-background": {
"title": "$:/language/Docs/PaletteColours/button-background",
"text": "Default button background"
},
"$:/language/Docs/PaletteColours/button-border": {
"title": "$:/language/Docs/PaletteColours/button-border",
"text": "Default button border"
},
"$:/language/Docs/PaletteColours/button-foreground": {
"title": "$:/language/Docs/PaletteColours/button-foreground",
"text": "Default button foreground"
},
"$:/language/Docs/PaletteColours/dirty-indicator": {
"title": "$:/language/Docs/PaletteColours/dirty-indicator",
"text": "Unsaved changes indicator"
},
"$:/language/Docs/PaletteColours/code-background": {
"title": "$:/language/Docs/PaletteColours/code-background",
"text": "Code background"
},
"$:/language/Docs/PaletteColours/code-border": {
"title": "$:/language/Docs/PaletteColours/code-border",
"text": "Code border"
},
"$:/language/Docs/PaletteColours/code-foreground": {
"title": "$:/language/Docs/PaletteColours/code-foreground",
"text": "Code foreground"
},
"$:/language/Docs/PaletteColours/download-background": {
"title": "$:/language/Docs/PaletteColours/download-background",
"text": "Download button background"
},
"$:/language/Docs/PaletteColours/download-foreground": {
"title": "$:/language/Docs/PaletteColours/download-foreground",
"text": "Download button foreground"
},
"$:/language/Docs/PaletteColours/dragger-background": {
"title": "$:/language/Docs/PaletteColours/dragger-background",
"text": "Dragger background"
},
"$:/language/Docs/PaletteColours/dragger-foreground": {
"title": "$:/language/Docs/PaletteColours/dragger-foreground",
"text": "Dragger foreground"
},
"$:/language/Docs/PaletteColours/dropdown-background": {
"title": "$:/language/Docs/PaletteColours/dropdown-background",
"text": "Dropdown background"
},
"$:/language/Docs/PaletteColours/dropdown-border": {
"title": "$:/language/Docs/PaletteColours/dropdown-border",
"text": "Dropdown border"
},
"$:/language/Docs/PaletteColours/dropdown-tab-background-selected": {
"title": "$:/language/Docs/PaletteColours/dropdown-tab-background-selected",
"text": "Dropdown tab background for selected tabs"
},
"$:/language/Docs/PaletteColours/dropdown-tab-background": {
"title": "$:/language/Docs/PaletteColours/dropdown-tab-background",
"text": "Dropdown tab background"
},
"$:/language/Docs/PaletteColours/dropzone-background": {
"title": "$:/language/Docs/PaletteColours/dropzone-background",
"text": "Dropzone background"
},
"$:/language/Docs/PaletteColours/external-link-background-hover": {
"title": "$:/language/Docs/PaletteColours/external-link-background-hover",
"text": "External link background hover"
},
"$:/language/Docs/PaletteColours/external-link-background-visited": {
"title": "$:/language/Docs/PaletteColours/external-link-background-visited",
"text": "External link background visited"
},
"$:/language/Docs/PaletteColours/external-link-background": {
"title": "$:/language/Docs/PaletteColours/external-link-background",
"text": "External link background"
},
"$:/language/Docs/PaletteColours/external-link-foreground-hover": {
"title": "$:/language/Docs/PaletteColours/external-link-foreground-hover",
"text": "External link foreground hover"
},
"$:/language/Docs/PaletteColours/external-link-foreground-visited": {
"title": "$:/language/Docs/PaletteColours/external-link-foreground-visited",
"text": "External link foreground visited"
},
"$:/language/Docs/PaletteColours/external-link-foreground": {
"title": "$:/language/Docs/PaletteColours/external-link-foreground",
"text": "External link foreground"
},
"$:/language/Docs/PaletteColours/foreground": {
"title": "$:/language/Docs/PaletteColours/foreground",
"text": "General foreground"
},
"$:/language/Docs/PaletteColours/message-background": {
"title": "$:/language/Docs/PaletteColours/message-background",
"text": "Message box background"
},
"$:/language/Docs/PaletteColours/message-border": {
"title": "$:/language/Docs/PaletteColours/message-border",
"text": "Message box border"
},
"$:/language/Docs/PaletteColours/message-foreground": {
"title": "$:/language/Docs/PaletteColours/message-foreground",
"text": "Message box foreground"
},
"$:/language/Docs/PaletteColours/modal-backdrop": {
"title": "$:/language/Docs/PaletteColours/modal-backdrop",
"text": "Modal backdrop"
},
"$:/language/Docs/PaletteColours/modal-background": {
"title": "$:/language/Docs/PaletteColours/modal-background",
"text": "Modal background"
},
"$:/language/Docs/PaletteColours/modal-border": {
"title": "$:/language/Docs/PaletteColours/modal-border",
"text": "Modal border"
},
"$:/language/Docs/PaletteColours/modal-footer-background": {
"title": "$:/language/Docs/PaletteColours/modal-footer-background",
"text": "Modal footer background"
},
"$:/language/Docs/PaletteColours/modal-footer-border": {
"title": "$:/language/Docs/PaletteColours/modal-footer-border",
"text": "Modal footer border"
},
"$:/language/Docs/PaletteColours/modal-header-border": {
"title": "$:/language/Docs/PaletteColours/modal-header-border",
"text": "Modal header border"
},
"$:/language/Docs/PaletteColours/muted-foreground": {
"title": "$:/language/Docs/PaletteColours/muted-foreground",
"text": "General muted foreground"
},
"$:/language/Docs/PaletteColours/notification-background": {
"title": "$:/language/Docs/PaletteColours/notification-background",
"text": "Notification background"
},
"$:/language/Docs/PaletteColours/notification-border": {
"title": "$:/language/Docs/PaletteColours/notification-border",
"text": "Notification border"
},
"$:/language/Docs/PaletteColours/page-background": {
"title": "$:/language/Docs/PaletteColours/page-background",
"text": "Page background"
},
"$:/language/Docs/PaletteColours/pre-background": {
"title": "$:/language/Docs/PaletteColours/pre-background",
"text": "Preformatted code background"
},
"$:/language/Docs/PaletteColours/pre-border": {
"title": "$:/language/Docs/PaletteColours/pre-border",
"text": "Preformatted code border"
},
"$:/language/Docs/PaletteColours/primary": {
"title": "$:/language/Docs/PaletteColours/primary",
"text": "General primary"
},
"$:/language/Docs/PaletteColours/sidebar-button-foreground": {
"title": "$:/language/Docs/PaletteColours/sidebar-button-foreground",
"text": "Sidebar button foreground"
},
"$:/language/Docs/PaletteColours/sidebar-controls-foreground-hover": {
"title": "$:/language/Docs/PaletteColours/sidebar-controls-foreground-hover",
"text": "Sidebar controls foreground hover"
},
"$:/language/Docs/PaletteColours/sidebar-controls-foreground": {
"title": "$:/language/Docs/PaletteColours/sidebar-controls-foreground",
"text": "Sidebar controls foreground"
},
"$:/language/Docs/PaletteColours/sidebar-foreground-shadow": {
"title": "$:/language/Docs/PaletteColours/sidebar-foreground-shadow",
"text": "Sidebar foreground shadow"
},
"$:/language/Docs/PaletteColours/sidebar-foreground": {
"title": "$:/language/Docs/PaletteColours/sidebar-foreground",
"text": "Sidebar foreground"
},
"$:/language/Docs/PaletteColours/sidebar-muted-foreground-hover": {
"title": "$:/language/Docs/PaletteColours/sidebar-muted-foreground-hover",
"text": "Sidebar muted foreground hover"
},
"$:/language/Docs/PaletteColours/sidebar-muted-foreground": {
"title": "$:/language/Docs/PaletteColours/sidebar-muted-foreground",
"text": "Sidebar muted foreground"
},
"$:/language/Docs/PaletteColours/sidebar-tab-background-selected": {
"title": "$:/language/Docs/PaletteColours/sidebar-tab-background-selected",
"text": "Sidebar tab background for selected tabs"
},
"$:/language/Docs/PaletteColours/sidebar-tab-background": {
"title": "$:/language/Docs/PaletteColours/sidebar-tab-background",
"text": "Sidebar tab background"
},
"$:/language/Docs/PaletteColours/sidebar-tab-border-selected": {
"title": "$:/language/Docs/PaletteColours/sidebar-tab-border-selected",
"text": "Sidebar tab border for selected tabs"
},
"$:/language/Docs/PaletteColours/sidebar-tab-border": {
"title": "$:/language/Docs/PaletteColours/sidebar-tab-border",
"text": "Sidebar tab border"
},
"$:/language/Docs/PaletteColours/sidebar-tab-divider": {
"title": "$:/language/Docs/PaletteColours/sidebar-tab-divider",
"text": "Sidebar tab divider"
},
"$:/language/Docs/PaletteColours/sidebar-tab-foreground-selected": {
"title": "$:/language/Docs/PaletteColours/sidebar-tab-foreground-selected",
"text": "Sidebar tab foreground for selected tabs"
},
"$:/language/Docs/PaletteColours/sidebar-tab-foreground": {
"title": "$:/language/Docs/PaletteColours/sidebar-tab-foreground",
"text": "Sidebar tab foreground"
},
"$:/language/Docs/PaletteColours/sidebar-tiddler-link-foreground-hover": {
"title": "$:/language/Docs/PaletteColours/sidebar-tiddler-link-foreground-hover",
"text": "Sidebar tiddler link foreground hover"
},
"$:/language/Docs/PaletteColours/sidebar-tiddler-link-foreground": {
"title": "$:/language/Docs/PaletteColours/sidebar-tiddler-link-foreground",
"text": "Sidebar tiddler link foreground"
},
"$:/language/Docs/PaletteColours/site-title-foreground": {
"title": "$:/language/Docs/PaletteColours/site-title-foreground",
"text": "Site title foreground"
},
"$:/language/Docs/PaletteColours/static-alert-foreground": {
"title": "$:/language/Docs/PaletteColours/static-alert-foreground",
"text": "Static alert foreground"
},
"$:/language/Docs/PaletteColours/tab-background-selected": {
"title": "$:/language/Docs/PaletteColours/tab-background-selected",
"text": "Tab background for selected tabs"
},
"$:/language/Docs/PaletteColours/tab-background": {
"title": "$:/language/Docs/PaletteColours/tab-background",
"text": "Tab background"
},
"$:/language/Docs/PaletteColours/tab-border-selected": {
"title": "$:/language/Docs/PaletteColours/tab-border-selected",
"text": "Tab border for selected tabs"
},
"$:/language/Docs/PaletteColours/tab-border": {
"title": "$:/language/Docs/PaletteColours/tab-border",
"text": "Tab border"
},
"$:/language/Docs/PaletteColours/tab-divider": {
"title": "$:/language/Docs/PaletteColours/tab-divider",
"text": "Tab divider"
},
"$:/language/Docs/PaletteColours/tab-foreground-selected": {
"title": "$:/language/Docs/PaletteColours/tab-foreground-selected",
"text": "Tab foreground for selected tabs"
},
"$:/language/Docs/PaletteColours/tab-foreground": {
"title": "$:/language/Docs/PaletteColours/tab-foreground",
"text": "Tab foreground"
},
"$:/language/Docs/PaletteColours/table-border": {
"title": "$:/language/Docs/PaletteColours/table-border",
"text": "Table border"
},
"$:/language/Docs/PaletteColours/table-footer-background": {
"title": "$:/language/Docs/PaletteColours/table-footer-background",
"text": "Table footer background"
},
"$:/language/Docs/PaletteColours/table-header-background": {
"title": "$:/language/Docs/PaletteColours/table-header-background",
"text": "Table header background"
},
"$:/language/Docs/PaletteColours/tag-background": {
"title": "$:/language/Docs/PaletteColours/tag-background",
"text": "Tag background"
},
"$:/language/Docs/PaletteColours/tag-foreground": {
"title": "$:/language/Docs/PaletteColours/tag-foreground",
"text": "Tag foreground"
},
"$:/language/Docs/PaletteColours/tiddler-background": {
"title": "$:/language/Docs/PaletteColours/tiddler-background",
"text": "Tiddler background"
},
"$:/language/Docs/PaletteColours/tiddler-border": {
"title": "$:/language/Docs/PaletteColours/tiddler-border",
"text": "Tiddler border"
},
"$:/language/Docs/PaletteColours/tiddler-controls-foreground-hover": {
"title": "$:/language/Docs/PaletteColours/tiddler-controls-foreground-hover",
"text": "Tiddler controls foreground hover"
},
"$:/language/Docs/PaletteColours/tiddler-controls-foreground-selected": {
"title": "$:/language/Docs/PaletteColours/tiddler-controls-foreground-selected",
"text": "Tiddler controls foreground for selected controls"
},
"$:/language/Docs/PaletteColours/tiddler-controls-foreground": {
"title": "$:/language/Docs/PaletteColours/tiddler-controls-foreground",
"text": "Tiddler controls foreground"
},
"$:/language/Docs/PaletteColours/tiddler-editor-background": {
"title": "$:/language/Docs/PaletteColours/tiddler-editor-background",
"text": "Tiddler editor background"
},
"$:/language/Docs/PaletteColours/tiddler-editor-border-image": {
"title": "$:/language/Docs/PaletteColours/tiddler-editor-border-image",
"text": "Tiddler editor border image"
},
"$:/language/Docs/PaletteColours/tiddler-editor-border": {
"title": "$:/language/Docs/PaletteColours/tiddler-editor-border",
"text": "Tiddler editor border"
},
"$:/language/Docs/PaletteColours/tiddler-editor-fields-even": {
"title": "$:/language/Docs/PaletteColours/tiddler-editor-fields-even",
"text": "Tiddler editor background for even fields"
},
"$:/language/Docs/PaletteColours/tiddler-editor-fields-odd": {
"title": "$:/language/Docs/PaletteColours/tiddler-editor-fields-odd",
"text": "Tiddler editor background for odd fields"
},
"$:/language/Docs/PaletteColours/tiddler-info-background": {
"title": "$:/language/Docs/PaletteColours/tiddler-info-background",
"text": "Tiddler info panel background"
},
"$:/language/Docs/PaletteColours/tiddler-info-border": {
"title": "$:/language/Docs/PaletteColours/tiddler-info-border",
"text": "Tiddler info panel border"
},
"$:/language/Docs/PaletteColours/tiddler-info-tab-background": {
"title": "$:/language/Docs/PaletteColours/tiddler-info-tab-background",
"text": "Tiddler info panel tab background"
},
"$:/language/Docs/PaletteColours/tiddler-link-background": {
"title": "$:/language/Docs/PaletteColours/tiddler-link-background",
"text": "Tiddler link background"
},
"$:/language/Docs/PaletteColours/tiddler-link-foreground": {
"title": "$:/language/Docs/PaletteColours/tiddler-link-foreground",
"text": "Tiddler link foreground"
},
"$:/language/Docs/PaletteColours/tiddler-subtitle-foreground": {
"title": "$:/language/Docs/PaletteColours/tiddler-subtitle-foreground",
"text": "Tiddler subtitle foreground"
},
"$:/language/Docs/PaletteColours/tiddler-title-foreground": {
"title": "$:/language/Docs/PaletteColours/tiddler-title-foreground",
"text": "Tiddler title foreground"
},
"$:/language/Docs/PaletteColours/toolbar-new-button": {
"title": "$:/language/Docs/PaletteColours/toolbar-new-button",
"text": "Toolbar 'new tiddler' button foreground"
},
"$:/language/Docs/PaletteColours/toolbar-options-button": {
"title": "$:/language/Docs/PaletteColours/toolbar-options-button",
"text": "Toolbar 'options' button foreground"
},
"$:/language/Docs/PaletteColours/toolbar-save-button": {
"title": "$:/language/Docs/PaletteColours/toolbar-save-button",
"text": "Toolbar 'save' button foreground"
},
"$:/language/Docs/PaletteColours/toolbar-info-button": {
"title": "$:/language/Docs/PaletteColours/toolbar-info-button",
"text": "Toolbar 'info' button foreground"
},
"$:/language/Docs/PaletteColours/toolbar-edit-button": {
"title": "$:/language/Docs/PaletteColours/toolbar-edit-button",
"text": "Toolbar 'edit' button foreground"
},
"$:/language/Docs/PaletteColours/toolbar-close-button": {
"title": "$:/language/Docs/PaletteColours/toolbar-close-button",
"text": "Toolbar 'close' button foreground"
},
"$:/language/Docs/PaletteColours/toolbar-delete-button": {
"title": "$:/language/Docs/PaletteColours/toolbar-delete-button",
"text": "Toolbar 'delete' button foreground"
},
"$:/language/Docs/PaletteColours/toolbar-cancel-button": {
"title": "$:/language/Docs/PaletteColours/toolbar-cancel-button",
"text": "Toolbar 'cancel' button foreground"
},
"$:/language/Docs/PaletteColours/toolbar-done-button": {
"title": "$:/language/Docs/PaletteColours/toolbar-done-button",
"text": "Toolbar 'done' button foreground"
},
"$:/language/Docs/PaletteColours/untagged-background": {
"title": "$:/language/Docs/PaletteColours/untagged-background",
"text": "Untagged pill background"
},
"$:/language/Docs/PaletteColours/very-muted-foreground": {
"title": "$:/language/Docs/PaletteColours/very-muted-foreground",
"text": "Very muted foreground"
},
"$:/language/EditTemplate/Body/External/Hint": {
"title": "$:/language/EditTemplate/Body/External/Hint",
"text": "This is an external tiddler stored outside of the main TiddlyWiki file. You can edit the tags and fields but cannot directly edit the content itself"
},
"$:/language/EditTemplate/Body/Placeholder": {
"title": "$:/language/EditTemplate/Body/Placeholder",
"text": "Type the text for this tiddler"
},
"$:/language/EditTemplate/Body/Preview/Type/Output": {
"title": "$:/language/EditTemplate/Body/Preview/Type/Output",
"text": "output"
},
"$:/language/EditTemplate/Field/Remove/Caption": {
"title": "$:/language/EditTemplate/Field/Remove/Caption",
"text": "remove field"
},
"$:/language/EditTemplate/Field/Remove/Hint": {
"title": "$:/language/EditTemplate/Field/Remove/Hint",
"text": "Remove field"
},
"$:/language/EditTemplate/Fields/Add/Button": {
"title": "$:/language/EditTemplate/Fields/Add/Button",
"text": "add"
},
"$:/language/EditTemplate/Fields/Add/Name/Placeholder": {
"title": "$:/language/EditTemplate/Fields/Add/Name/Placeholder",
"text": "field name"
},
"$:/language/EditTemplate/Fields/Add/Prompt": {
"title": "$:/language/EditTemplate/Fields/Add/Prompt",
"text": "Add a new field:"
},
"$:/language/EditTemplate/Fields/Add/Value/Placeholder": {
"title": "$:/language/EditTemplate/Fields/Add/Value/Placeholder",
"text": "field value"
},
"$:/language/EditTemplate/Fields/Add/Dropdown/System": {
"title": "$:/language/EditTemplate/Fields/Add/Dropdown/System",
"text": "System fields"
},
"$:/language/EditTemplate/Fields/Add/Dropdown/User": {
"title": "$:/language/EditTemplate/Fields/Add/Dropdown/User",
"text": "User fields"
},
"$:/language/EditTemplate/Shadow/Warning": {
"title": "$:/language/EditTemplate/Shadow/Warning",
"text": "This is a shadow tiddler. Any changes you make will override the default version from the plugin <<pluginLink>>"
},
"$:/language/EditTemplate/Shadow/OverriddenWarning": {
"title": "$:/language/EditTemplate/Shadow/OverriddenWarning",
"text": "This is a modified shadow tiddler. You can revert to the default version in the plugin <<pluginLink>> by deleting this tiddler"
},
"$:/language/EditTemplate/Tags/Add/Button": {
"title": "$:/language/EditTemplate/Tags/Add/Button",
"text": "add"
},
"$:/language/EditTemplate/Tags/Add/Placeholder": {
"title": "$:/language/EditTemplate/Tags/Add/Placeholder",
"text": "tag name"
},
"$:/language/EditTemplate/Tags/Dropdown/Caption": {
"title": "$:/language/EditTemplate/Tags/Dropdown/Caption",
"text": "tag list"
},
"$:/language/EditTemplate/Tags/Dropdown/Hint": {
"title": "$:/language/EditTemplate/Tags/Dropdown/Hint",
"text": "Show tag list"
},
"$:/language/EditTemplate/Title/BadCharacterWarning": {
"title": "$:/language/EditTemplate/Title/BadCharacterWarning",
"text": "Warning: avoid using any of the characters <<bad-chars>> in tiddler titles"
},
"$:/language/EditTemplate/Type/Dropdown/Caption": {
"title": "$:/language/EditTemplate/Type/Dropdown/Caption",
"text": "content type list"
},
"$:/language/EditTemplate/Type/Dropdown/Hint": {
"title": "$:/language/EditTemplate/Type/Dropdown/Hint",
"text": "Show content type list"
},
"$:/language/EditTemplate/Type/Delete/Caption": {
"title": "$:/language/EditTemplate/Type/Delete/Caption",
"text": "delete content type"
},
"$:/language/EditTemplate/Type/Delete/Hint": {
"title": "$:/language/EditTemplate/Type/Delete/Hint",
"text": "Delete content type"
},
"$:/language/EditTemplate/Type/Placeholder": {
"title": "$:/language/EditTemplate/Type/Placeholder",
"text": "content type"
},
"$:/language/EditTemplate/Type/Prompt": {
"title": "$:/language/EditTemplate/Type/Prompt",
"text": "Type:"
},
"$:/language/Exporters/StaticRiver": {
"title": "$:/language/Exporters/StaticRiver",
"text": "Static HTML"
},
"$:/language/Exporters/JsonFile": {
"title": "$:/language/Exporters/JsonFile",
"text": "JSON file"
},
"$:/language/Exporters/CsvFile": {
"title": "$:/language/Exporters/CsvFile",
"text": "CSV file"
},
"$:/language/Exporters/TidFile": {
"title": "$:/language/Exporters/TidFile",
"text": "\".tid\" file"
},
"$:/language/Docs/Fields/_canonical_uri": {
"title": "$:/language/Docs/Fields/_canonical_uri",
"text": "The full URI of an external image tiddler"
},
"$:/language/Docs/Fields/bag": {
"title": "$:/language/Docs/Fields/bag",
"text": "The name of the bag from which a tiddler came"
},
"$:/language/Docs/Fields/caption": {
"title": "$:/language/Docs/Fields/caption",
"text": "The text to be displayed on a tab or button"
},
"$:/language/Docs/Fields/color": {
"title": "$:/language/Docs/Fields/color",
"text": "The CSS color value associated with a tiddler"
},
"$:/language/Docs/Fields/component": {
"title": "$:/language/Docs/Fields/component",
"text": "The name of the component responsible for an [[alert tiddler|AlertMechanism]]"
},
"$:/language/Docs/Fields/current-tiddler": {
"title": "$:/language/Docs/Fields/current-tiddler",
"text": "Used to cache the top tiddler in a [[history list|HistoryMechanism]]"
},
"$:/language/Docs/Fields/created": {
"title": "$:/language/Docs/Fields/created",
"text": "The date a tiddler was created"
},
"$:/language/Docs/Fields/creator": {
"title": "$:/language/Docs/Fields/creator",
"text": "The name of the person who created a tiddler"
},
"$:/language/Docs/Fields/dependents": {
"title": "$:/language/Docs/Fields/dependents",
"text": "For a plugin, lists the dependent plugin titles"
},
"$:/language/Docs/Fields/description": {
"title": "$:/language/Docs/Fields/description",
"text": "The descriptive text for a plugin, or a modal dialogue"
},
"$:/language/Docs/Fields/draft.of": {
"title": "$:/language/Docs/Fields/draft.of",
"text": "For draft tiddlers, contains the title of the tiddler of which this is a draft"
},
"$:/language/Docs/Fields/draft.title": {
"title": "$:/language/Docs/Fields/draft.title",
"text": "For draft tiddlers, contains the proposed new title of the tiddler"
},
"$:/language/Docs/Fields/footer": {
"title": "$:/language/Docs/Fields/footer",
"text": "The footer text for a wizard"
},
"$:/language/Docs/Fields/hack-to-give-us-something-to-compare-against": {
"title": "$:/language/Docs/Fields/hack-to-give-us-something-to-compare-against",
"text": "A temporary storage field used in [[$:/core/templates/static.content]]"
},
"$:/language/Docs/Fields/icon": {
"title": "$:/language/Docs/Fields/icon",
"text": "The title of the tiddler containing the icon associated with a tiddler"
},
"$:/language/Docs/Fields/library": {
"title": "$:/language/Docs/Fields/library",
"text": "If set to \"yes\" indicates that a tiddler should be saved as a JavaScript library"
},
"$:/language/Docs/Fields/list": {
"title": "$:/language/Docs/Fields/list",
"text": "An ordered list of tiddler titles associated with a tiddler"
},
"$:/language/Docs/Fields/list-before": {
"title": "$:/language/Docs/Fields/list-before",
"text": "If set, the title of a tiddler before which this tiddler should be added to the ordered list of tiddler titles, or at the start of the list if this field is present but empty"
},
"$:/language/Docs/Fields/list-after": {
"title": "$:/language/Docs/Fields/list-after",
"text": "If set, the title of the tiddler after which this tiddler should be added to the ordered list of tiddler titles"
},
"$:/language/Docs/Fields/modified": {
"title": "$:/language/Docs/Fields/modified",
"text": "The date and time at which a tiddler was last modified"
},
"$:/language/Docs/Fields/modifier": {
"title": "$:/language/Docs/Fields/modifier",
"text": "The tiddler title associated with the person who last modified a tiddler"
},
"$:/language/Docs/Fields/name": {
"title": "$:/language/Docs/Fields/name",
"text": "The human readable name associated with a plugin tiddler"
},
"$:/language/Docs/Fields/plugin-priority": {
"title": "$:/language/Docs/Fields/plugin-priority",
"text": "A numerical value indicating the priority of a plugin tiddler"
},
"$:/language/Docs/Fields/plugin-type": {
"title": "$:/language/Docs/Fields/plugin-type",
"text": "The type of plugin in a plugin tiddler"
},
"$:/language/Docs/Fields/revision": {
"title": "$:/language/Docs/Fields/revision",
"text": "The revision of the tiddler held at the server"
},
"$:/language/Docs/Fields/released": {
"title": "$:/language/Docs/Fields/released",
"text": "Date of a TiddlyWiki release"
},
"$:/language/Docs/Fields/source": {
"title": "$:/language/Docs/Fields/source",
"text": "The source URL associated with a tiddler"
},
"$:/language/Docs/Fields/subtitle": {
"title": "$:/language/Docs/Fields/subtitle",
"text": "The subtitle text for a wizard"
},
"$:/language/Docs/Fields/tags": {
"title": "$:/language/Docs/Fields/tags",
"text": "A list of tags associated with a tiddler"
},
"$:/language/Docs/Fields/text": {
"title": "$:/language/Docs/Fields/text",
"text": "The body text of a tiddler"
},
"$:/language/Docs/Fields/title": {
"title": "$:/language/Docs/Fields/title",
"text": "The unique name of a tiddler"
},
"$:/language/Docs/Fields/type": {
"title": "$:/language/Docs/Fields/type",
"text": "The content type of a tiddler"
},
"$:/language/Docs/Fields/version": {
"title": "$:/language/Docs/Fields/version",
"text": "Version information for a plugin"
},
"$:/language/Filters/AllTiddlers": {
"title": "$:/language/Filters/AllTiddlers",
"text": "All tiddlers except system tiddlers"
},
"$:/language/Filters/RecentSystemTiddlers": {
"title": "$:/language/Filters/RecentSystemTiddlers",
"text": "Recently modified tiddlers, including system tiddlers"
},
"$:/language/Filters/RecentTiddlers": {
"title": "$:/language/Filters/RecentTiddlers",
"text": "Recently modified tiddlers"
},
"$:/language/Filters/AllTags": {
"title": "$:/language/Filters/AllTags",
"text": "All tags except system tags"
},
"$:/language/Filters/Missing": {
"title": "$:/language/Filters/Missing",
"text": "Missing tiddlers"
},
"$:/language/Filters/Drafts": {
"title": "$:/language/Filters/Drafts",
"text": "Draft tiddlers"
},
"$:/language/Filters/Orphans": {
"title": "$:/language/Filters/Orphans",
"text": "Orphan tiddlers"
},
"$:/language/Filters/SystemTiddlers": {
"title": "$:/language/Filters/SystemTiddlers",
"text": "System tiddlers"
},
"$:/language/Filters/ShadowTiddlers": {
"title": "$:/language/Filters/ShadowTiddlers",
"text": "Shadow tiddlers"
},
"$:/language/Filters/OverriddenShadowTiddlers": {
"title": "$:/language/Filters/OverriddenShadowTiddlers",
"text": "Overridden shadow tiddlers"
},
"$:/language/Filters/SystemTags": {
"title": "$:/language/Filters/SystemTags",
"text": "System tags"
},
"$:/language/Filters/TypedTiddlers": {
"title": "$:/language/Filters/TypedTiddlers",
"text": "Non wiki-text tiddlers"
},
"GettingStarted": {
"title": "GettingStarted",
"text": "\\define lingo-base() $:/language/ControlPanel/Basics/\nWelcome to ~TiddlyWiki and the ~TiddlyWiki community\n\nBefore you start storing important information in ~TiddlyWiki it is important to make sure that you can reliably save changes. See http://tiddlywiki.com/#GettingStarted for details\n\n!! Set up this ~TiddlyWiki\n\n<div class=\"tc-control-panel\">\n\n|<$link to=\"$:/SiteTitle\"><<lingo Title/Prompt>></$link> |<$edit-text tiddler=\"$:/SiteTitle\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/SiteSubtitle\"><<lingo Subtitle/Prompt>></$link> |<$edit-text tiddler=\"$:/SiteSubtitle\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/DefaultTiddlers\"><<lingo DefaultTiddlers/Prompt>></$link> |<<lingo DefaultTiddlers/TopHint>><br> <$edit tag=\"textarea\" tiddler=\"$:/DefaultTiddlers\"/><br>//<<lingo DefaultTiddlers/BottomHint>>// |\n</div>\n\nSee the [[control panel|$:/ControlPanel]] for more options.\n"
},
"$:/language/Help/build": {
"title": "$:/language/Help/build",
"description": "Automatically run configured commands",
"text": "Build the specified build targets for the current wiki. If no build targets are specified then all available targets will be built.\n\n```\n--build <target> [<target> ...]\n```\n\nBuild targets are defined in the `tiddlywiki.info` file of a wiki folder.\n\n"
},
"$:/language/Help/clearpassword": {
"title": "$:/language/Help/clearpassword",
"description": "Clear a password for subsequent crypto operations",
"text": "Clear the password for subsequent crypto operations\n\n```\n--clearpassword\n```\n"
},
"$:/language/Help/default": {
"title": "$:/language/Help/default",
"text": "\\define commandTitle()\n$:/language/Help/$(command)$\n\\end\n```\nusage: tiddlywiki [<wikifolder>] [--<command> [<args>...]...]\n```\n\nAvailable commands:\n\n<ul>\n<$list filter=\"[commands[]sort[title]]\" variable=\"command\">\n<li><$link to=<<commandTitle>>><$macrocall $name=\"command\" $type=\"text/plain\" $output=\"text/plain\"/></$link>: <$transclude tiddler=<<commandTitle>> field=\"description\"/></li>\n</$list>\n</ul>\n\nTo get detailed help on a command:\n\n```\ntiddlywiki --help <command>\n```\n"
},
"$:/language/Help/editions": {
"title": "$:/language/Help/editions",
"description": "Lists the available editions of TiddlyWiki",
"text": "Lists the names and descriptions of the available editions. You can create a new wiki of a specified edition with the `--init` command.\n\n```\n--editions\n```\n"
},
"$:/language/Help/help": {
"title": "$:/language/Help/help",
"description": "Display help for TiddlyWiki commands",
"text": "Displays help text for a command:\n\n```\n--help [<command>]\n```\n\nIf the command name is omitted then a list of available commands is displayed.\n"
},
"$:/language/Help/init": {
"title": "$:/language/Help/init",
"description": "Initialise a new wiki folder",
"text": "Initialise an empty [[WikiFolder|WikiFolders]] with a copy of the specified edition.\n\n```\n--init <edition> [<edition> ...]\n```\n\nFor example:\n\n```\ntiddlywiki ./MyWikiFolder --init empty\n```\n\nNote:\n\n* The wiki folder directory will be created if necessary\n* The \"edition\" defaults to ''empty''\n* The init command will fail if the wiki folder is not empty\n* The init command removes any `includeWikis` definitions in the edition's `tiddlywiki.info` file\n* When multiple editions are specified, editions initialised later will overwrite any files shared with earlier editions (so, the final `tiddlywiki.info` file will be copied from the last edition)\n* `--editions` returns a list of available editions\n"
},
"$:/language/Help/load": {
"title": "$:/language/Help/load",
"description": "Load tiddlers from a file",
"text": "Load tiddlers from 2.x.x TiddlyWiki files (`.html`), `.tiddler`, `.tid`, `.json` or other files\n\n```\n--load <filepath>\n```\n\nTo load tiddlers from an encrypted TiddlyWiki file you should first specify the password with the PasswordCommand. For example:\n\n```\ntiddlywiki ./MyWiki --password pa55w0rd --load my_encrypted_wiki.html\n```\n\nNote that TiddlyWiki will not load an older version of an already loaded plugin.\n"
},
"$:/language/Help/makelibrary": {
"title": "$:/language/Help/makelibrary",
"description": "Construct library plugin required by upgrade process",
"text": "Constructs the `$:/UpgradeLibrary` tiddler for the upgrade process.\n\nThe upgrade library is formatted as an ordinary plugin tiddler with the plugin type `library`. It contains a copy of each of the plugins, themes and language packs available within the TiddlyWiki5 repository.\n\nThis command is intended for internal use; it is only relevant to users constructing a custom upgrade procedure.\n\n```\n--makelibrary <title>\n```\n\nThe title argument defaults to `$:/UpgradeLibrary`.\n"
},
"$:/language/Help/notfound": {
"title": "$:/language/Help/notfound",
"text": "No such help item"
},
"$:/language/Help/output": {
"title": "$:/language/Help/output",
"description": "Set the base output directory for subsequent commands",
"text": "Sets the base output directory for subsequent commands. The default output directory is the `output` subdirectory of the edition directory.\n\n```\n--output <pathname>\n```\n\nIf the specified pathname is relative then it is resolved relative to the current working directory. For example `--output .` sets the output directory to the current working directory.\n\n"
},
"$:/language/Help/password": {
"title": "$:/language/Help/password",
"description": "Set a password for subsequent crypto operations",
"text": "Set a password for subsequent crypto operations\n\n```\n--password <password>\n```\n\n''Note'': This should not be used for serving TiddlyWiki with password protection. Instead, see the password option under the [[ServerCommand]].\n"
},
"$:/language/Help/rendertiddler": {
"title": "$:/language/Help/rendertiddler",
"description": "Render an individual tiddler as a specified ContentType",
"text": "Render an individual tiddler as a specified ContentType, defaulting to `text/html` and save it to the specified filename. Optionally a template can be specified, in which case the template tiddler is rendered with the \"currentTiddler\" variable set to the tiddler that is being rendered (the first parameter value).\n\n```\n--rendertiddler <title> <filename> [<type>] [<template>]\n```\n\nBy default, the filename is resolved relative to the `output` subdirectory of the edition directory. The `--output` command can be used to direct output to a different directory.\n\nAny missing directories in the path to the filename are automatically created.\n"
},
"$:/language/Help/rendertiddlers": {
"title": "$:/language/Help/rendertiddlers",
"description": "Render tiddlers matching a filter to a specified ContentType",
"text": "Render a set of tiddlers matching a filter to separate files of a specified ContentType (defaults to `text/html`) and extension (defaults to `.html`).\n\n```\n--rendertiddlers <filter> <template> <pathname> [<type>] [<extension>] [\"noclean\"]\n```\n\nFor example:\n\n```\n--rendertiddlers [!is[system]] $:/core/templates/static.tiddler.html ./static text/plain\n```\n\nBy default, the pathname is resolved relative to the `output` subdirectory of the edition directory. The `--output` command can be used to direct output to a different directory.\n\nAny files in the target directory are deleted unless the ''noclean'' flag is specified. The target directory is recursively created if it is missing.\n"
},
"$:/language/Help/savetiddler": {
"title": "$:/language/Help/savetiddler",
"description": "Saves a raw tiddler to a file",
"text": "Saves an individual tiddler in its raw text or binary format to the specified filename.\n\n```\n--savetiddler <title> <filename>\n```\n\nBy default, the filename is resolved relative to the `output` subdirectory of the edition directory. The `--output` command can be used to direct output to a different directory.\n\nAny missing directories in the path to the filename are automatically created.\n"
},
"$:/language/Help/savetiddlers": {
"title": "$:/language/Help/savetiddlers",
"description": "Saves a group of raw tiddlers to a directory",
"text": "Saves a group of tiddlers in their raw text or binary format to the specified directory.\n\n```\n--savetiddlers <filter> <pathname> [\"noclean\"]\n```\n\nBy default, the pathname is resolved relative to the `output` subdirectory of the edition directory. The `--output` command can be used to direct output to a different directory.\n\nThe output directory is cleared of existing files before saving the specified files. The deletion can be disabled by specifying the ''noclean'' flag.\n\nAny missing directories in the pathname are automatically created.\n"
},
"$:/language/Help/server": {
"title": "$:/language/Help/server",
"description": "Provides an HTTP server interface to TiddlyWiki",
"text": "The server built in to TiddlyWiki5 is very simple. Although compatible with TiddlyWeb it doesn't support many of the features needed for robust Internet-facing usage.\n\nAt the root, it serves a rendering of a specified tiddler. Away from the root, it serves individual tiddlers encoded in JSON, and supports the basic HTTP operations for `GET`, `PUT` and `DELETE`.\n\n```\n--server <port> <roottiddler> <rendertype> <servetype> <username> <password> <host> <pathprefix>\n```\n\nThe parameters are:\n\n* ''port'' - port number to serve from (defaults to \"8080\")\n* ''roottiddler'' - the tiddler to serve at the root (defaults to \"$:/core/save/all\")\n* ''rendertype'' - the content type to which the root tiddler should be rendered (defaults to \"text/plain\")\n* ''servetype'' - the content type with which the root tiddler should be served (defaults to \"text/html\")\n* ''username'' - the default username for signing edits\n* ''password'' - optional password for basic authentication\n* ''host'' - optional hostname to serve from (defaults to \"127.0.0.1\" aka \"localhost\")\n* ''pathprefix'' - optional prefix for paths\n\nIf the password parameter is specified then the browser will prompt the user for the username and password. Note that the password is transmitted in plain text so this implementation isn't suitable for general use.\n\nFor example:\n\n```\n--server 8080 $:/core/save/all text/plain text/html MyUserName passw0rd\n```\n\nThe username and password can be specified as empty strings if you need to set the hostname or pathprefix and don't want to require a password:\n\n```\n--server 8080 $:/core/save/all text/plain text/html \"\" \"\" 192.168.0.245\n```\n\nTo run multiple TiddlyWiki servers at the same time you'll need to put each one on a different port.\n"
},
"$:/language/Help/setfield": {
"title": "$:/language/Help/setfield",
"description": "Prepares external tiddlers for use",
"text": "//Note that this command is experimental and may change or be replaced before being finalised//\n\nSets the specified field of a group of tiddlers to the result of wikifying a template tiddler with the `currentTiddler` variable set to the tiddler.\n\n```\n--setfield <filter> <fieldname> <templatetitle> <rendertype>\n```\n\nThe parameters are:\n\n* ''filter'' - filter identifying the tiddlers to be affected\n* ''fieldname'' - the field to modify (defaults to \"text\")\n* ''templatetitle'' - the tiddler to wikify into the specified field. If blank or missing then the specified field is deleted\n* ''rendertype'' - the text type to render (defaults to \"text/plain\"; \"text/html\" can be used to include HTML tags)\n"
},
"$:/language/Help/unpackplugin": {
"title": "$:/language/Help/unpackplugin",
"description": "Unpack the payload tiddlers from a plugin",
"text": "Extract the payload tiddlers from a plugin, creating them as ordinary tiddlers:\n\n```\n--unpackplugin <title>\n```\n"
},
"$:/language/Help/verbose": {
"title": "$:/language/Help/verbose",
"description": "Triggers verbose output mode",
"text": "Triggers verbose output, useful for debugging\n\n```\n--verbose\n```\n"
},
"$:/language/Help/version": {
"title": "$:/language/Help/version",
"description": "Displays the version number of TiddlyWiki",
"text": "Displays the version number of TiddlyWiki.\n\n```\n--version\n```\n"
},
"$:/languages/en-GB/icon": {
"title": "$:/languages/en-GB/icon",
"type": "image/svg+xml",
"text": "<svg xmlns=\"http://www.w3.org/2000/svg\" viewBox=\"0 0 60 30\" width=\"1200\" height=\"600\">\n<clipPath id=\"t\">\n\t<path d=\"M30,15 h30 v15 z v15 h-30 z h-30 v-15 z v-15 h30 z\"/>\n</clipPath>\n<path d=\"M0,0 v30 h60 v-30 z\" fill=\"#00247d\"/>\n<path d=\"M0,0 L60,30 M60,0 L0,30\" stroke=\"#fff\" stroke-width=\"6\"/>\n<path d=\"M0,0 L60,30 M60,0 L0,30\" clip-path=\"url(#t)\" stroke=\"#cf142b\" stroke-width=\"4\"/>\n<path d=\"M30,0 v30 M0,15 h60\" stroke=\"#fff\" stroke-width=\"10\"/>\n<path d=\"M30,0 v30 M0,15 h60\" stroke=\"#cf142b\" stroke-width=\"6\"/>\n</svg>\n"
},
"$:/language/Import/Imported/Hint": {
"title": "$:/language/Import/Imported/Hint",
"text": "The following tiddlers were imported:"
},
"$:/language/Import/Listing/Cancel/Caption": {
"title": "$:/language/Import/Listing/Cancel/Caption",
"text": "Cancel"
},
"$:/language/Import/Listing/Hint": {
"title": "$:/language/Import/Listing/Hint",
"text": "These tiddlers are ready to import:"
},
"$:/language/Import/Listing/Import/Caption": {
"title": "$:/language/Import/Listing/Import/Caption",
"text": "Import"
},
"$:/language/Import/Listing/Select/Caption": {
"title": "$:/language/Import/Listing/Select/Caption",
"text": "Select"
},
"$:/language/Import/Listing/Status/Caption": {
"title": "$:/language/Import/Listing/Status/Caption",
"text": "Status"
},
"$:/language/Import/Listing/Title/Caption": {
"title": "$:/language/Import/Listing/Title/Caption",
"text": "Title"
},
"$:/language/Import/Upgrader/Plugins/Suppressed/Incompatible": {
"title": "$:/language/Import/Upgrader/Plugins/Suppressed/Incompatible",
"text": "Blocked incompatible or obsolete plugin"
},
"$:/language/Import/Upgrader/Plugins/Suppressed/Version": {
"title": "$:/language/Import/Upgrader/Plugins/Suppressed/Version",
"text": "Blocked plugin (due to incoming <<incoming>> being older than existing <<existing>>)"
},
"$:/language/Import/Upgrader/Plugins/Upgraded": {
"title": "$:/language/Import/Upgrader/Plugins/Upgraded",
"text": "Upgraded plugin from <<incoming>> to <<upgraded>>"
},
"$:/language/Import/Upgrader/State/Suppressed": {
"title": "$:/language/Import/Upgrader/State/Suppressed",
"text": "Blocked temporary state tiddler"
},
"$:/language/Import/Upgrader/System/Suppressed": {
"title": "$:/language/Import/Upgrader/System/Suppressed",
"text": "Blocked system tiddler"
},
"$:/language/Import/Upgrader/ThemeTweaks/Created": {
"title": "$:/language/Import/Upgrader/ThemeTweaks/Created",
"text": "Migrated theme tweak from <$text text=<<from>>/>"
},
"$:/language/AboveStory/ClassicPlugin/Warning": {
"title": "$:/language/AboveStory/ClassicPlugin/Warning",
"text": "It looks like you are trying to load a plugin designed for ~TiddlyWiki Classic. Please note that [[these plugins do not work with TiddlyWiki version 5.x.x|http://tiddlywiki.com/#TiddlyWikiClassic]]. ~TiddlyWiki Classic plugins detected:"
},
"$:/language/BinaryWarning/Prompt": {
"title": "$:/language/BinaryWarning/Prompt",
"text": "This tiddler contains binary data"
},
"$:/language/ClassicWarning/Hint": {
"title": "$:/language/ClassicWarning/Hint",
"text": "This tiddler is written in TiddlyWiki Classic wiki text format, which is not fully compatible with TiddlyWiki version 5. See http://tiddlywiki.com/static/Upgrading.html for more details."
},
"$:/language/ClassicWarning/Upgrade/Caption": {
"title": "$:/language/ClassicWarning/Upgrade/Caption",
"text": "upgrade"
},
"$:/language/CloseAll/Button": {
"title": "$:/language/CloseAll/Button",
"text": "close all"
},
"$:/language/ColourPicker/Recent": {
"title": "$:/language/ColourPicker/Recent",
"text": "Recent:"
},
"$:/language/ConfirmCancelTiddler": {
"title": "$:/language/ConfirmCancelTiddler",
"text": "Do you wish to discard changes to the tiddler \"<$text text=<<title>>/>\"?"
},
"$:/language/ConfirmDeleteTiddler": {
"title": "$:/language/ConfirmDeleteTiddler",
"text": "Do you wish to delete the tiddler \"<$text text=<<title>>/>\"?"
},
"$:/language/ConfirmOverwriteTiddler": {
"title": "$:/language/ConfirmOverwriteTiddler",
"text": "Do you wish to overwrite the tiddler \"<$text text=<<title>>/>\"?"
},
"$:/language/ConfirmEditShadowTiddler": {
"title": "$:/language/ConfirmEditShadowTiddler",
"text": "You are about to edit a ShadowTiddler. Any changes will override the default system making future upgrades non-trivial. Are you sure you want to edit \"<$text text=<<title>>/>\"?"
},
"$:/language/Count": {
"title": "$:/language/Count",
"text": "count"
},
"$:/language/DefaultNewTiddlerTitle": {
"title": "$:/language/DefaultNewTiddlerTitle",
"text": "New Tiddler"
},
"$:/language/DropMessage": {
"title": "$:/language/DropMessage",
"text": "Drop here (or use the 'Escape' key to cancel)"
},
"$:/language/Encryption/Cancel": {
"title": "$:/language/Encryption/Cancel",
"text": "Cancel"
},
"$:/language/Encryption/ConfirmClearPassword": {
"title": "$:/language/Encryption/ConfirmClearPassword",
"text": "Do you wish to clear the password? This will remove the encryption applied when saving this wiki"
},
"$:/language/Encryption/PromptSetPassword": {
"title": "$:/language/Encryption/PromptSetPassword",
"text": "Set a new password for this TiddlyWiki"
},
"$:/language/Encryption/Username": {
"title": "$:/language/Encryption/Username",
"text": "Username"
},
"$:/language/Encryption/Password": {
"title": "$:/language/Encryption/Password",
"text": "Password"
},
"$:/language/Encryption/RepeatPassword": {
"title": "$:/language/Encryption/RepeatPassword",
"text": "Repeat password"
},
"$:/language/Encryption/PasswordNoMatch": {
"title": "$:/language/Encryption/PasswordNoMatch",
"text": "Passwords do not match"
},
"$:/language/Encryption/SetPassword": {
"title": "$:/language/Encryption/SetPassword",
"text": "Set password"
},
"$:/language/Error/Caption": {
"title": "$:/language/Error/Caption",
"text": "Error"
},
"$:/language/Error/Filter": {
"title": "$:/language/Error/Filter",
"text": "Filter error"
},
"$:/language/Error/FilterSyntax": {
"title": "$:/language/Error/FilterSyntax",
"text": "Syntax error in filter expression"
},
"$:/language/Error/IsFilterOperator": {
"title": "$:/language/Error/IsFilterOperator",
"text": "Filter Error: Unknown operand for the 'is' filter operator"
},
"$:/language/Error/LoadingPluginLibrary": {
"title": "$:/language/Error/LoadingPluginLibrary",
"text": "Error loading plugin library"
},
"$:/language/Error/RecursiveTransclusion": {
"title": "$:/language/Error/RecursiveTransclusion",
"text": "Recursive transclusion error in transclude widget"
},
"$:/language/Error/RetrievingSkinny": {
"title": "$:/language/Error/RetrievingSkinny",
"text": "Error retrieving skinny tiddler list"
},
"$:/language/Error/SavingToTWEdit": {
"title": "$:/language/Error/SavingToTWEdit",
"text": "Error saving to TWEdit"
},
"$:/language/Error/WhileSaving": {
"title": "$:/language/Error/WhileSaving",
"text": "Error while saving"
},
"$:/language/Error/XMLHttpRequest": {
"title": "$:/language/Error/XMLHttpRequest",
"text": "XMLHttpRequest error code"
},
"$:/language/InternalJavaScriptError/Title": {
"title": "$:/language/InternalJavaScriptError/Title",
"text": "Internal JavaScript Error"
},
"$:/language/InternalJavaScriptError/Hint": {
"title": "$:/language/InternalJavaScriptError/Hint",
"text": "Well, this is embarrassing. It is recommended that you restart TiddlyWiki by refreshing your browser"
},
"$:/language/InvalidFieldName": {
"title": "$:/language/InvalidFieldName",
"text": "Illegal characters in field name \"<$text text=<<fieldName>>/>\". Fields can only contain lowercase letters, digits and the characters underscore (`_`), hyphen (`-`) and period (`.`)"
},
"$:/language/LazyLoadingWarning": {
"title": "$:/language/LazyLoadingWarning",
"text": "<p>Loading external text from ''<$text text={{!!_canonical_uri}}/>''</p><p>If this message doesn't disappear, you may be using a browser that doesn't support external text in this configuration. See http://tiddlywiki.com/#ExternalText</p>"
},
"$:/language/LoginToTiddlySpace": {
"title": "$:/language/LoginToTiddlySpace",
"text": "Login to TiddlySpace"
},
"$:/language/MissingTiddler/Hint": {
"title": "$:/language/MissingTiddler/Hint",
"text": "Missing tiddler \"<$text text=<<currentTiddler>>/>\" - click {{$:/core/images/edit-button}} to create"
},
"$:/language/No": {
"title": "$:/language/No",
"text": "No"
},
"$:/language/OfficialPluginLibrary": {
"title": "$:/language/OfficialPluginLibrary",
"text": "Official ~TiddlyWiki Plugin Library"
},
"$:/language/OfficialPluginLibrary/Hint": {
"title": "$:/language/OfficialPluginLibrary/Hint",
"text": "The official ~TiddlyWiki plugin library at tiddlywiki.com. Plugins, themes and language packs are maintained by the core team."
},
"$:/language/PluginReloadWarning": {
"title": "$:/language/PluginReloadWarning",
"text": "Please save {{$:/core/ui/Buttons/save-wiki}} and reload {{$:/core/ui/Buttons/refresh}} to allow changes to plugins to take effect"
},
"$:/language/RecentChanges/DateFormat": {
"title": "$:/language/RecentChanges/DateFormat",
"text": "DDth MMM YYYY"
},
"$:/language/SystemTiddler/Tooltip": {
"title": "$:/language/SystemTiddler/Tooltip",
"text": "This is a system tiddler"
},
"$:/language/TagManager/Colour/Heading": {
"title": "$:/language/TagManager/Colour/Heading",
"text": "Colour"
},
"$:/language/TagManager/Count/Heading": {
"title": "$:/language/TagManager/Count/Heading",
"text": "Count"
},
"$:/language/TagManager/Icon/Heading": {
"title": "$:/language/TagManager/Icon/Heading",
"text": "Icon"
},
"$:/language/TagManager/Info/Heading": {
"title": "$:/language/TagManager/Info/Heading",
"text": "Info"
},
"$:/language/TagManager/Tag/Heading": {
"title": "$:/language/TagManager/Tag/Heading",
"text": "Tag"
},
"$:/language/Tiddler/DateFormat": {
"title": "$:/language/Tiddler/DateFormat",
"text": "DDth MMM YYYY at hh12:0mmam"
},
"$:/language/UnsavedChangesWarning": {
"title": "$:/language/UnsavedChangesWarning",
"text": "You have unsaved changes in TiddlyWiki"
},
"$:/language/Yes": {
"title": "$:/language/Yes",
"text": "Yes"
},
"$:/language/Modals/Download": {
"title": "$:/language/Modals/Download",
"type": "text/vnd.tiddlywiki",
"subtitle": "Download changes",
"footer": "<$button message=\"tm-close-tiddler\">Close</$button>",
"help": "http://tiddlywiki.com/static/DownloadingChanges.html",
"text": "Your browser only supports manual saving.\n\nTo save your modified wiki, right click on the download link below and select \"Download file\" or \"Save file\", and then choose the folder and filename.\n\n//You can marginally speed things up by clicking the link with the control key (Windows) or the options/alt key (Mac OS X). You will not be prompted for the folder or filename, but your browser is likely to give it an unrecognisable name -- you may need to rename the file to include an `.html` extension before you can do anything useful with it.//\n\nOn smartphones that do not allow files to be downloaded you can instead bookmark the link, and then sync your bookmarks to a desktop computer from where the wiki can be saved normally.\n"
},
"$:/language/Modals/SaveInstructions": {
"title": "$:/language/Modals/SaveInstructions",
"type": "text/vnd.tiddlywiki",
"subtitle": "Save your work",
"footer": "<$button message=\"tm-close-tiddler\">Close</$button>",
"help": "http://tiddlywiki.com/static/SavingChanges.html",
"text": "Your changes to this wiki need to be saved as a ~TiddlyWiki HTML file.\n\n!!! Desktop browsers\n\n# Select ''Save As'' from the ''File'' menu\n# Choose a filename and location\n#* Some browsers also require you to explicitly specify the file saving format as ''Webpage, HTML only'' or similar\n# Close this tab\n\n!!! Smartphone browsers\n\n# Create a bookmark to this page\n#* If you've got iCloud or Google Sync set up then the bookmark will automatically sync to your desktop where you can open it and save it as above\n# Close this tab\n\n//If you open the bookmark again in Mobile Safari you will see this message again. If you want to go ahead and use the file, just click the ''close'' button below//\n"
},
"$:/config/NewJournal/Title": {
"title": "$:/config/NewJournal/Title",
"text": "DDth MMM YYYY"
},
"$:/config/NewJournal/Tags": {
"title": "$:/config/NewJournal/Tags",
"text": "Journal"
},
"$:/language/Notifications/Save/Done": {
"title": "$:/language/Notifications/Save/Done",
"text": "Saved wiki"
},
"$:/language/Notifications/Save/Starting": {
"title": "$:/language/Notifications/Save/Starting",
"text": "Starting to save wiki"
},
"$:/language/Search/DefaultResults/Caption": {
"title": "$:/language/Search/DefaultResults/Caption",
"text": "List"
},
"$:/language/Search/Filter/Caption": {
"title": "$:/language/Search/Filter/Caption",
"text": "Filter"
},
"$:/language/Search/Filter/Hint": {
"title": "$:/language/Search/Filter/Hint",
"text": "Search via a [[filter expression|http://tiddlywiki.com/static/Filters.html]]"
},
"$:/language/Search/Filter/Matches": {
"title": "$:/language/Search/Filter/Matches",
"text": "//<small><<resultCount>> matches</small>//"
},
"$:/language/Search/Matches": {
"title": "$:/language/Search/Matches",
"text": "//<small><<resultCount>> matches</small>//"
},
"$:/language/Search/Matches/All": {
"title": "$:/language/Search/Matches/All",
"text": "All matches:"
},
"$:/language/Search/Matches/Title": {
"title": "$:/language/Search/Matches/Title",
"text": "Title matches:"
},
"$:/language/Search/Search": {
"title": "$:/language/Search/Search",
"text": "Search"
},
"$:/language/Search/Shadows/Caption": {
"title": "$:/language/Search/Shadows/Caption",
"text": "Shadows"
},
"$:/language/Search/Shadows/Hint": {
"title": "$:/language/Search/Shadows/Hint",
"text": "Search for shadow tiddlers"
},
"$:/language/Search/Shadows/Matches": {
"title": "$:/language/Search/Shadows/Matches",
"text": "//<small><<resultCount>> matches</small>//"
},
"$:/language/Search/Standard/Caption": {
"title": "$:/language/Search/Standard/Caption",
"text": "Standard"
},
"$:/language/Search/Standard/Hint": {
"title": "$:/language/Search/Standard/Hint",
"text": "Search for standard tiddlers"
},
"$:/language/Search/Standard/Matches": {
"title": "$:/language/Search/Standard/Matches",
"text": "//<small><<resultCount>> matches</small>//"
},
"$:/language/Search/System/Caption": {
"title": "$:/language/Search/System/Caption",
"text": "System"
},
"$:/language/Search/System/Hint": {
"title": "$:/language/Search/System/Hint",
"text": "Search for system tiddlers"
},
"$:/language/Search/System/Matches": {
"title": "$:/language/Search/System/Matches",
"text": "//<small><<resultCount>> matches</small>//"
},
"$:/language/SideBar/All/Caption": {
"title": "$:/language/SideBar/All/Caption",
"text": "All"
},
"$:/language/SideBar/Contents/Caption": {
"title": "$:/language/SideBar/Contents/Caption",
"text": "Contents"
},
"$:/language/SideBar/Drafts/Caption": {
"title": "$:/language/SideBar/Drafts/Caption",
"text": "Drafts"
},
"$:/language/SideBar/Missing/Caption": {
"title": "$:/language/SideBar/Missing/Caption",
"text": "Missing"
},
"$:/language/SideBar/More/Caption": {
"title": "$:/language/SideBar/More/Caption",
"text": "More"
},
"$:/language/SideBar/Open/Caption": {
"title": "$:/language/SideBar/Open/Caption",
"text": "Open"
},
"$:/language/SideBar/Orphans/Caption": {
"title": "$:/language/SideBar/Orphans/Caption",
"text": "Orphans"
},
"$:/language/SideBar/Recent/Caption": {
"title": "$:/language/SideBar/Recent/Caption",
"text": "Recent"
},
"$:/language/SideBar/Shadows/Caption": {
"title": "$:/language/SideBar/Shadows/Caption",
"text": "Shadows"
},
"$:/language/SideBar/System/Caption": {
"title": "$:/language/SideBar/System/Caption",
"text": "System"
},
"$:/language/SideBar/Tags/Caption": {
"title": "$:/language/SideBar/Tags/Caption",
"text": "Tags"
},
"$:/language/SideBar/Tags/Untagged/Caption": {
"title": "$:/language/SideBar/Tags/Untagged/Caption",
"text": "untagged"
},
"$:/language/SideBar/Tools/Caption": {
"title": "$:/language/SideBar/Tools/Caption",
"text": "Tools"
},
"$:/language/SideBar/Types/Caption": {
"title": "$:/language/SideBar/Types/Caption",
"text": "Types"
},
"$:/SiteSubtitle": {
"title": "$:/SiteSubtitle",
"text": "a non-linear personal web notebook"
},
"$:/SiteTitle": {
"title": "$:/SiteTitle",
"text": "My ~TiddlyWiki"
},
"$:/language/Snippets/ListByTag": {
"title": "$:/language/Snippets/ListByTag",
"tags": "$:/tags/TextEditor/Snippet",
"caption": "List of tiddlers by tag",
"text": "<<list-links \"[tag[task]sort[title]]\">>\n"
},
"$:/language/Snippets/MacroDefinition": {
"title": "$:/language/Snippets/MacroDefinition",
"tags": "$:/tags/TextEditor/Snippet",
"caption": "Macro definition",
"text": "\\define macroName(param1:\"default value\",param2)\nText of the macro\n\\end\n"
},
"$:/language/Snippets/Table4x3": {
"title": "$:/language/Snippets/Table4x3",
"tags": "$:/tags/TextEditor/Snippet",
"caption": "Table with 4 columns by 3 rows",
"text": "|! |!Alpha |!Beta |!Gamma |!Delta |\n|!One | | | | |\n|!Two | | | | |\n|!Three | | | | |\n"
},
"$:/language/Snippets/TableOfContents": {
"title": "$:/language/Snippets/TableOfContents",
"tags": "$:/tags/TextEditor/Snippet",
"caption": "Table of Contents",
"text": "<div class=\"tc-table-of-contents\">\n\n<<toc-selective-expandable 'TableOfContents'>>\n\n</div>"
},
"$:/language/ThemeTweaks/ThemeTweaks": {
"title": "$:/language/ThemeTweaks/ThemeTweaks",
"text": "Theme Tweaks"
},
"$:/language/ThemeTweaks/ThemeTweaks/Hint": {
"title": "$:/language/ThemeTweaks/ThemeTweaks/Hint",
"text": "You can tweak certain aspects of the ''Vanilla'' theme."
},
"$:/language/ThemeTweaks/Options": {
"title": "$:/language/ThemeTweaks/Options",
"text": "Options"
},
"$:/language/ThemeTweaks/Options/SidebarLayout": {
"title": "$:/language/ThemeTweaks/Options/SidebarLayout",
"text": "Sidebar layout"
},
"$:/language/ThemeTweaks/Options/SidebarLayout/Fixed-Fluid": {
"title": "$:/language/ThemeTweaks/Options/SidebarLayout/Fixed-Fluid",
"text": "Fixed story, fluid sidebar"
},
"$:/language/ThemeTweaks/Options/SidebarLayout/Fluid-Fixed": {
"title": "$:/language/ThemeTweaks/Options/SidebarLayout/Fluid-Fixed",
"text": "Fluid story, fixed sidebar"
},
"$:/language/ThemeTweaks/Options/StickyTitles": {
"title": "$:/language/ThemeTweaks/Options/StickyTitles",
"text": "Sticky titles"
},
"$:/language/ThemeTweaks/Options/StickyTitles/Hint": {
"title": "$:/language/ThemeTweaks/Options/StickyTitles/Hint",
"text": "Causes tiddler titles to \"stick\" to the top of the browser window. Caution: Does not work at all with Chrome, and causes some layout issues in Firefox"
},
"$:/language/ThemeTweaks/Options/CodeWrapping": {
"title": "$:/language/ThemeTweaks/Options/CodeWrapping",
"text": "Wrap long lines in code blocks"
},
"$:/language/ThemeTweaks/Settings": {
"title": "$:/language/ThemeTweaks/Settings",
"text": "Settings"
},
"$:/language/ThemeTweaks/Settings/FontFamily": {
"title": "$:/language/ThemeTweaks/Settings/FontFamily",
"text": "Font family"
},
"$:/language/ThemeTweaks/Settings/CodeFontFamily": {
"title": "$:/language/ThemeTweaks/Settings/CodeFontFamily",
"text": "Code font family"
},
"$:/language/ThemeTweaks/Settings/BackgroundImage": {
"title": "$:/language/ThemeTweaks/Settings/BackgroundImage",
"text": "Page background image"
},
"$:/language/ThemeTweaks/Settings/BackgroundImageAttachment": {
"title": "$:/language/ThemeTweaks/Settings/BackgroundImageAttachment",
"text": "Page background image attachment"
},
"$:/language/ThemeTweaks/Settings/BackgroundImageAttachment/Scroll": {
"title": "$:/language/ThemeTweaks/Settings/BackgroundImageAttachment/Scroll",
"text": "Scroll with tiddlers"
},
"$:/language/ThemeTweaks/Settings/BackgroundImageAttachment/Fixed": {
"title": "$:/language/ThemeTweaks/Settings/BackgroundImageAttachment/Fixed",
"text": "Fixed to window"
},
"$:/language/ThemeTweaks/Settings/BackgroundImageSize": {
"title": "$:/language/ThemeTweaks/Settings/BackgroundImageSize",
"text": "Page background image size"
},
"$:/language/ThemeTweaks/Settings/BackgroundImageSize/Auto": {
"title": "$:/language/ThemeTweaks/Settings/BackgroundImageSize/Auto",
"text": "Auto"
},
"$:/language/ThemeTweaks/Settings/BackgroundImageSize/Cover": {
"title": "$:/language/ThemeTweaks/Settings/BackgroundImageSize/Cover",
"text": "Cover"
},
"$:/language/ThemeTweaks/Settings/BackgroundImageSize/Contain": {
"title": "$:/language/ThemeTweaks/Settings/BackgroundImageSize/Contain",
"text": "Contain"
},
"$:/language/ThemeTweaks/Metrics": {
"title": "$:/language/ThemeTweaks/Metrics",
"text": "Sizes"
},
"$:/language/ThemeTweaks/Metrics/FontSize": {
"title": "$:/language/ThemeTweaks/Metrics/FontSize",
"text": "Font size"
},
"$:/language/ThemeTweaks/Metrics/LineHeight": {
"title": "$:/language/ThemeTweaks/Metrics/LineHeight",
"text": "Line height"
},
"$:/language/ThemeTweaks/Metrics/BodyFontSize": {
"title": "$:/language/ThemeTweaks/Metrics/BodyFontSize",
"text": "Font size for tiddler body"
},
"$:/language/ThemeTweaks/Metrics/BodyLineHeight": {
"title": "$:/language/ThemeTweaks/Metrics/BodyLineHeight",
"text": "Line height for tiddler body"
},
"$:/language/ThemeTweaks/Metrics/StoryLeft": {
"title": "$:/language/ThemeTweaks/Metrics/StoryLeft",
"text": "Story left position"
},
"$:/language/ThemeTweaks/Metrics/StoryLeft/Hint": {
"title": "$:/language/ThemeTweaks/Metrics/StoryLeft/Hint",
"text": "how far the left margin of the story river<br>(tiddler area) is from the left of the page"
},
"$:/language/ThemeTweaks/Metrics/StoryTop": {
"title": "$:/language/ThemeTweaks/Metrics/StoryTop",
"text": "Story top position"
},
"$:/language/ThemeTweaks/Metrics/StoryTop/Hint": {
"title": "$:/language/ThemeTweaks/Metrics/StoryTop/Hint",
"text": "how far the top margin of the story river<br>is from the top of the page"
},
"$:/language/ThemeTweaks/Metrics/StoryRight": {
"title": "$:/language/ThemeTweaks/Metrics/StoryRight",
"text": "Story right position"
},
"$:/language/ThemeTweaks/Metrics/StoryRight/Hint": {
"title": "$:/language/ThemeTweaks/Metrics/StoryRight/Hint",
"text": "how far the left margin of the sidebar <br>is from the left of the page"
},
"$:/language/ThemeTweaks/Metrics/StoryWidth": {
"title": "$:/language/ThemeTweaks/Metrics/StoryWidth",
"text": "Story width"
},
"$:/language/ThemeTweaks/Metrics/StoryWidth/Hint": {
"title": "$:/language/ThemeTweaks/Metrics/StoryWidth/Hint",
"text": "the overall width of the story river"
},
"$:/language/ThemeTweaks/Metrics/TiddlerWidth": {
"title": "$:/language/ThemeTweaks/Metrics/TiddlerWidth",
"text": "Tiddler width"
},
"$:/language/ThemeTweaks/Metrics/TiddlerWidth/Hint": {
"title": "$:/language/ThemeTweaks/Metrics/TiddlerWidth/Hint",
"text": "within the story river"
},
"$:/language/ThemeTweaks/Metrics/SidebarBreakpoint": {
"title": "$:/language/ThemeTweaks/Metrics/SidebarBreakpoint",
"text": "Sidebar breakpoint"
},
"$:/language/ThemeTweaks/Metrics/SidebarBreakpoint/Hint": {
"title": "$:/language/ThemeTweaks/Metrics/SidebarBreakpoint/Hint",
"text": "the minimum page width at which the story<br>river and sidebar will appear side by side"
},
"$:/language/ThemeTweaks/Metrics/SidebarWidth": {
"title": "$:/language/ThemeTweaks/Metrics/SidebarWidth",
"text": "Sidebar width"
},
"$:/language/ThemeTweaks/Metrics/SidebarWidth/Hint": {
"title": "$:/language/ThemeTweaks/Metrics/SidebarWidth/Hint",
"text": "the width of the sidebar in fluid-fixed layout"
},
"$:/language/TiddlerInfo/Advanced/Caption": {
"title": "$:/language/TiddlerInfo/Advanced/Caption",
"text": "Advanced"
},
"$:/language/TiddlerInfo/Advanced/PluginInfo/Empty/Hint": {
"title": "$:/language/TiddlerInfo/Advanced/PluginInfo/Empty/Hint",
"text": "none"
},
"$:/language/TiddlerInfo/Advanced/PluginInfo/Heading": {
"title": "$:/language/TiddlerInfo/Advanced/PluginInfo/Heading",
"text": "Plugin Details"
},
"$:/language/TiddlerInfo/Advanced/PluginInfo/Hint": {
"title": "$:/language/TiddlerInfo/Advanced/PluginInfo/Hint",
"text": "This plugin contains the following shadow tiddlers:"
},
"$:/language/TiddlerInfo/Advanced/ShadowInfo/Heading": {
"title": "$:/language/TiddlerInfo/Advanced/ShadowInfo/Heading",
"text": "Shadow Status"
},
"$:/language/TiddlerInfo/Advanced/ShadowInfo/NotShadow/Hint": {
"title": "$:/language/TiddlerInfo/Advanced/ShadowInfo/NotShadow/Hint",
"text": "The tiddler <$link to=<<infoTiddler>>><$text text=<<infoTiddler>>/></$link> is not a shadow tiddler"
},
"$:/language/TiddlerInfo/Advanced/ShadowInfo/Shadow/Hint": {
"title": "$:/language/TiddlerInfo/Advanced/ShadowInfo/Shadow/Hint",
"text": "The tiddler <$link to=<<infoTiddler>>><$text text=<<infoTiddler>>/></$link> is a shadow tiddler"
},
"$:/language/TiddlerInfo/Advanced/ShadowInfo/Shadow/Source": {
"title": "$:/language/TiddlerInfo/Advanced/ShadowInfo/Shadow/Source",
"text": "It is defined in the plugin <$link to=<<pluginTiddler>>><$text text=<<pluginTiddler>>/></$link>"
},
"$:/language/TiddlerInfo/Advanced/ShadowInfo/OverriddenShadow/Hint": {
"title": "$:/language/TiddlerInfo/Advanced/ShadowInfo/OverriddenShadow/Hint",
"text": "It is overridden by an ordinary tiddler"
},
"$:/language/TiddlerInfo/Fields/Caption": {
"title": "$:/language/TiddlerInfo/Fields/Caption",
"text": "Fields"
},
"$:/language/TiddlerInfo/List/Caption": {
"title": "$:/language/TiddlerInfo/List/Caption",
"text": "List"
},
"$:/language/TiddlerInfo/List/Empty": {
"title": "$:/language/TiddlerInfo/List/Empty",
"text": "This tiddler does not have a list"
},
"$:/language/TiddlerInfo/Listed/Caption": {
"title": "$:/language/TiddlerInfo/Listed/Caption",
"text": "Listed"
},
"$:/language/TiddlerInfo/Listed/Empty": {
"title": "$:/language/TiddlerInfo/Listed/Empty",
"text": "This tiddler is not listed by any others"
},
"$:/language/TiddlerInfo/References/Caption": {
"title": "$:/language/TiddlerInfo/References/Caption",
"text": "References"
},
"$:/language/TiddlerInfo/References/Empty": {
"title": "$:/language/TiddlerInfo/References/Empty",
"text": "No tiddlers link to this one"
},
"$:/language/TiddlerInfo/Tagging/Caption": {
"title": "$:/language/TiddlerInfo/Tagging/Caption",
"text": "Tagging"
},
"$:/language/TiddlerInfo/Tagging/Empty": {
"title": "$:/language/TiddlerInfo/Tagging/Empty",
"text": "No tiddlers are tagged with this one"
},
"$:/language/TiddlerInfo/Tools/Caption": {
"title": "$:/language/TiddlerInfo/Tools/Caption",
"text": "Tools"
},
"$:/language/Docs/Types/application/javascript": {
"title": "$:/language/Docs/Types/application/javascript",
"description": "JavaScript code",
"name": "application/javascript",
"group": "Developer"
},
"$:/language/Docs/Types/application/json": {
"title": "$:/language/Docs/Types/application/json",
"description": "JSON data",
"name": "application/json",
"group": "Developer"
},
"$:/language/Docs/Types/application/x-tiddler-dictionary": {
"title": "$:/language/Docs/Types/application/x-tiddler-dictionary",
"description": "Data dictionary",
"name": "application/x-tiddler-dictionary",
"group": "Developer"
},
"$:/language/Docs/Types/image/gif": {
"title": "$:/language/Docs/Types/image/gif",
"description": "GIF image",
"name": "image/gif",
"group": "Image"
},
"$:/language/Docs/Types/image/jpeg": {
"title": "$:/language/Docs/Types/image/jpeg",
"description": "JPEG image",
"name": "image/jpeg",
"group": "Image"
},
"$:/language/Docs/Types/image/png": {
"title": "$:/language/Docs/Types/image/png",
"description": "PNG image",
"name": "image/png",
"group": "Image"
},
"$:/language/Docs/Types/image/svg+xml": {
"title": "$:/language/Docs/Types/image/svg+xml",
"description": "Scalable Vector Graphics image",
"name": "image/svg+xml",
"group": "Image"
},
"$:/language/Docs/Types/image/x-icon": {
"title": "$:/language/Docs/Types/image/x-icon",
"description": "ICO format icon file",
"name": "image/x-icon",
"group": "Image"
},
"$:/language/Docs/Types/text/css": {
"title": "$:/language/Docs/Types/text/css",
"description": "Static stylesheet",
"name": "text/css",
"group": "Developer"
},
"$:/language/Docs/Types/text/html": {
"title": "$:/language/Docs/Types/text/html",
"description": "HTML markup",
"name": "text/html",
"group": "Text"
},
"$:/language/Docs/Types/text/plain": {
"title": "$:/language/Docs/Types/text/plain",
"description": "Plain text",
"name": "text/plain",
"group": "Text"
},
"$:/language/Docs/Types/text/vnd.tiddlywiki": {
"title": "$:/language/Docs/Types/text/vnd.tiddlywiki",
"description": "TiddlyWiki 5",
"name": "text/vnd.tiddlywiki",
"group": "Text"
},
"$:/language/Docs/Types/text/x-tiddlywiki": {
"title": "$:/language/Docs/Types/text/x-tiddlywiki",
"description": "TiddlyWiki Classic",
"name": "text/x-tiddlywiki",
"group": "Text"
},
"$:/languages/en-GB": {
"title": "$:/languages/en-GB",
"name": "en-GB",
"description": "English (British)",
"author": "JeremyRuston",
"core-version": ">=5.0.0",
"text": "Stub pseudo-plugin for the default language"
},
"$:/core/modules/commander.js": {
"text": "/*\\\ntitle: $:/core/modules/commander.js\ntype: application/javascript\nmodule-type: global\n\nThe $tw.Commander class is a command interpreter\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nParse a sequence of commands\n\tcommandTokens: an array of command string tokens\n\twiki: reference to the wiki store object\n\tstreams: {output:, error:}, each of which has a write(string) method\n\tcallback: a callback invoked as callback(err) where err is null if there was no error\n*/\nvar Commander = function(commandTokens,callback,wiki,streams) {\n\tvar path = require(\"path\");\n\tthis.commandTokens = commandTokens;\n\tthis.nextToken = 0;\n\tthis.callback = callback;\n\tthis.wiki = wiki;\n\tthis.streams = streams;\n\tthis.outputPath = path.resolve($tw.boot.wikiPath,$tw.config.wikiOutputSubDir);\n};\n\n/*\nAdd a string of tokens to the command queue\n*/\nCommander.prototype.addCommandTokens = function(commandTokens) {\n\tvar params = commandTokens.slice(0);\n\tparams.unshift(0);\n\tparams.unshift(this.nextToken);\n\tArray.prototype.splice.apply(this.commandTokens,params);\n};\n\n/*\nExecute the sequence of commands and invoke a callback on completion\n*/\nCommander.prototype.execute = function() {\n\tthis.executeNextCommand();\n};\n\n/*\nExecute the next command in the sequence\n*/\nCommander.prototype.executeNextCommand = function() {\n\tvar self = this;\n\t// Invoke the callback if there are no more commands\n\tif(this.nextToken >= this.commandTokens.length) {\n\t\tthis.callback(null);\n\t} else {\n\t\t// Get and check the command token\n\t\tvar commandName = this.commandTokens[this.nextToken++];\n\t\tif(commandName.substr(0,2) !== \"--\") {\n\t\t\tthis.callback(\"Missing command: \" + commandName);\n\t\t} else {\n\t\t\tcommandName = commandName.substr(2); // Trim off the --\n\t\t\t// Accumulate the parameters to the command\n\t\t\tvar params = [];\n\t\t\twhile(this.nextToken < 
this.commandTokens.length && \n\t\t\t\tthis.commandTokens[this.nextToken].substr(0,2) !== \"--\") {\n\t\t\t\tparams.push(this.commandTokens[this.nextToken++]);\n\t\t\t}\n\t\t\t// Get the command info\n\t\t\tvar command = $tw.commands[commandName],\n\t\t\t\tc,err;\n\t\t\tif(!command) {\n\t\t\t\tthis.callback(\"Unknown command: \" + commandName);\n\t\t\t} else {\n\t\t\t\tif(this.verbose) {\n\t\t\t\t\tthis.streams.output.write(\"Executing command: \" + commandName + \" \" + params.join(\" \") + \"\\n\");\n\t\t\t\t}\n\t\t\t\tif(command.info.synchronous) {\n\t\t\t\t\t// Synchronous command\n\t\t\t\t\tc = new command.Command(params,this);\n\t\t\t\t\terr = c.execute();\n\t\t\t\t\tif(err) {\n\t\t\t\t\t\tthis.callback(err);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tthis.executeNextCommand();\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\t// Asynchronous command\n\t\t\t\t\tc = new command.Command(params,this,function(err) {\n\t\t\t\t\t\tif(err) {\n\t\t\t\t\t\t\tself.callback(err);\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tself.executeNextCommand();\n\t\t\t\t\t\t}\n\t\t\t\t\t});\n\t\t\t\t\terr = c.execute();\n\t\t\t\t\tif(err) {\n\t\t\t\t\t\tthis.callback(err);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n};\n\nCommander.initCommands = function(moduleType) {\n\tmoduleType = moduleType || \"command\";\n\t$tw.commands = {};\n\t$tw.modules.forEachModuleOfType(moduleType,function(title,module) {\n\t\tvar c = $tw.commands[module.info.name] = {};\n\t\t// Add the methods defined by the module\n\t\tfor(var f in module) {\n\t\t\tif($tw.utils.hop(module,f)) {\n\t\t\t\tc[f] = module[f];\n\t\t\t}\n\t\t}\n\t});\n};\n\nexports.Commander = Commander;\n\n})();\n",
"title": "$:/core/modules/commander.js",
"type": "application/javascript",
"module-type": "global"
},
"$:/core/modules/commands/build.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/build.js\ntype: application/javascript\nmodule-type: command\n\nCommand to build a build target\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"build\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander) {\n\tthis.params = params;\n\tthis.commander = commander;\n};\n\nCommand.prototype.execute = function() {\n\t// Get the build targets defined in the wiki\n\tvar buildTargets = $tw.boot.wikiInfo.build;\n\tif(!buildTargets) {\n\t\treturn \"No build targets defined\";\n\t}\n\t// Loop through each of the specified targets\n\tvar targets;\n\tif(this.params.length > 0) {\n\t\ttargets = this.params;\n\t} else {\n\t\ttargets = Object.keys(buildTargets);\n\t}\n\tfor(var targetIndex=0; targetIndex<targets.length; targetIndex++) {\n\t\tvar target = targets[targetIndex],\n\t\t\tcommands = buildTargets[target];\n\t\tif(!commands) {\n\t\t\treturn \"Build target '\" + target + \"' not found\";\n\t\t}\n\t\t// Add the commands to the queue\n\t\tthis.commander.addCommandTokens(commands);\n\t}\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/build.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/clearpassword.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/clearpassword.js\ntype: application/javascript\nmodule-type: command\n\nClear password for crypto operations\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"clearpassword\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\t$tw.crypto.setPassword(null);\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/clearpassword.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/editions.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/editions.js\ntype: application/javascript\nmodule-type: command\n\nCommand to list the available editions\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"editions\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander) {\n\tthis.params = params;\n\tthis.commander = commander;\n};\n\nCommand.prototype.execute = function() {\n\tvar self = this;\n\t// Output the list\n\tthis.commander.streams.output.write(\"Available editions:\\n\\n\");\n\tvar editionInfo = $tw.utils.getEditionInfo();\n\t$tw.utils.each(editionInfo,function(info,name) {\n\t\tself.commander.streams.output.write(\" \" + name + \": \" + info.description + \"\\n\");\n\t});\n\tthis.commander.streams.output.write(\"\\n\");\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/editions.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/help.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/help.js\ntype: application/javascript\nmodule-type: command\n\nHelp command\n\n\\*/\n(function(){\n\n/*jshint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"help\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander) {\n\tthis.params = params;\n\tthis.commander = commander;\n};\n\nCommand.prototype.execute = function() {\n\tvar subhelp = this.params[0] || \"default\",\n\t\thelpBase = \"$:/language/Help/\",\n\t\ttext;\n\tif(!this.commander.wiki.getTiddler(helpBase + subhelp)) {\n\t\tsubhelp = \"notfound\";\n\t}\n\t// Wikify the help as formatted text (ie block elements generate newlines)\n\ttext = this.commander.wiki.renderTiddler(\"text/plain-formatted\",helpBase + subhelp);\n\t// Remove any leading linebreaks\n\ttext = text.replace(/^(\\r?\\n)*/g,\"\");\n\tthis.commander.streams.output.write(text);\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/help.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/init.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/init.js\ntype: application/javascript\nmodule-type: command\n\nCommand to initialise an empty wiki folder\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"init\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander) {\n\tthis.params = params;\n\tthis.commander = commander;\n};\n\nCommand.prototype.execute = function() {\n\tvar fs = require(\"fs\"),\n\t\tpath = require(\"path\");\n\t// Check that we don't already have a valid wiki folder\n\tif($tw.boot.wikiTiddlersPath || ($tw.utils.isDirectory($tw.boot.wikiPath) && !$tw.utils.isDirectoryEmpty($tw.boot.wikiPath))) {\n\t\treturn \"Wiki folder is not empty\";\n\t}\n\t// Loop through each of the specified editions\n\tvar editions = this.params.length > 0 ? this.params : [\"empty\"];\n\tfor(var editionIndex=0; editionIndex<editions.length; editionIndex++) {\n\t\tvar editionName = editions[editionIndex];\n\t\t// Check the edition exists\n\t\tvar editionPath = $tw.findLibraryItem(editionName,$tw.getLibraryItemSearchPaths($tw.config.editionsPath,$tw.config.editionsEnvVar));\n\t\tif(!$tw.utils.isDirectory(editionPath)) {\n\t\t\treturn \"Edition '\" + editionName + \"' not found\";\n\t\t}\n\t\t// Copy the edition content\n\t\tvar err = $tw.utils.copyDirectory(editionPath,$tw.boot.wikiPath);\n\t\tif(!err) {\n\t\t\tthis.commander.streams.output.write(\"Copied edition '\" + editionName + \"' to \" + $tw.boot.wikiPath + \"\\n\");\n\t\t} else {\n\t\t\treturn err;\n\t\t}\n\t}\n\t// Tweak the tiddlywiki.info to remove any included wikis\n\tvar packagePath = $tw.boot.wikiPath + \"/tiddlywiki.info\",\n\t\tpackageJson = JSON.parse(fs.readFileSync(packagePath));\n\tdelete packageJson.includeWikis;\n\tfs.writeFileSync(packagePath,JSON.stringify(packageJson,null,$tw.config.preferences.jsonSpaces));\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/init.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/load.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/load.js\ntype: application/javascript\nmodule-type: command\n\nCommand to load tiddlers from a file\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"load\",\n\tsynchronous: false\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tvar self = this,\n\t\tfs = require(\"fs\"),\n\t\tpath = require(\"path\");\n\tif(this.params.length < 1) {\n\t\treturn \"Missing filename\";\n\t}\n\tvar ext = path.extname(self.params[0]);\n\tfs.readFile(this.params[0],$tw.utils.getTypeEncoding(ext),function(err,data) {\n\t\tif (err) {\n\t\t\tself.callback(err);\n\t\t} else {\n\t\t\tvar fields = {title: self.params[0]},\n\t\t\t\ttype = path.extname(self.params[0]);\n\t\t\tvar tiddlers = self.commander.wiki.deserializeTiddlers(type,data,fields);\n\t\t\tif(!tiddlers) {\n\t\t\t\tself.callback(\"No tiddlers found in file \\\"\" + self.params[0] + \"\\\"\");\n\t\t\t} else {\n\t\t\t\tfor(var t=0; t<tiddlers.length; t++) {\n\t\t\t\t\tself.commander.wiki.importTiddler(new $tw.Tiddler(tiddlers[t]));\n\t\t\t\t}\n\t\t\t\tself.callback(null);\t\n\t\t\t}\n\t\t}\n\t});\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/load.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/makelibrary.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/makelibrary.js\ntype: application/javascript\nmodule-type: command\n\nCommand to pack all of the plugins in the library into a plugin tiddler of type \"library\"\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"makelibrary\",\n\tsynchronous: true\n};\n\nvar UPGRADE_LIBRARY_TITLE = \"$:/UpgradeLibrary\";\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tvar wiki = this.commander.wiki,\n\t\tfs = require(\"fs\"),\n\t\tpath = require(\"path\"),\n\t\tupgradeLibraryTitle = this.params[0] || UPGRADE_LIBRARY_TITLE,\n\t\ttiddlers = {};\n\t// Collect up the library plugins\n\tvar collectPlugins = function(folder) {\n\t\t\tvar pluginFolders = fs.readdirSync(folder);\n\t\t\tfor(var p=0; p<pluginFolders.length; p++) {\n\t\t\t\tif(!$tw.boot.excludeRegExp.test(pluginFolders[p])) {\n\t\t\t\t\tpluginFields = $tw.loadPluginFolder(path.resolve(folder,\"./\" + pluginFolders[p]));\n\t\t\t\t\tif(pluginFields && pluginFields.title) {\n\t\t\t\t\t\ttiddlers[pluginFields.title] = pluginFields;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t\tcollectPublisherPlugins = function(folder) {\n\t\t\tvar publisherFolders = fs.readdirSync(folder);\n\t\t\tfor(var t=0; t<publisherFolders.length; t++) {\n\t\t\t\tif(!$tw.boot.excludeRegExp.test(publisherFolders[t])) {\n\t\t\t\t\tcollectPlugins(path.resolve(folder,\"./\" + publisherFolders[t]));\n\t\t\t\t}\n\t\t\t}\n\t\t};\n\tcollectPublisherPlugins(path.resolve($tw.boot.corePath,$tw.config.pluginsPath));\n\tcollectPublisherPlugins(path.resolve($tw.boot.corePath,$tw.config.themesPath));\n\tcollectPlugins(path.resolve($tw.boot.corePath,$tw.config.languagesPath));\n\t// Save the upgrade library tiddler\n\tvar pluginFields = {\n\t\ttitle: upgradeLibraryTitle,\n\t\ttype: 
\"application/json\",\n\t\t\"plugin-type\": \"library\",\n\t\t\"text\": JSON.stringify({tiddlers: tiddlers},null,$tw.config.preferences.jsonSpaces)\n\t};\n\twiki.addTiddler(new $tw.Tiddler(pluginFields));\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/makelibrary.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/output.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/output.js\ntype: application/javascript\nmodule-type: command\n\nCommand to set the default output location (defaults to current working directory)\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"output\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tvar fs = require(\"fs\"),\n\t\tpath = require(\"path\");\n\tif(this.params.length < 1) {\n\t\treturn \"Missing output path\";\n\t}\n\tthis.commander.outputPath = path.resolve(process.cwd(),this.params[0]);\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/output.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/password.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/password.js\ntype: application/javascript\nmodule-type: command\n\nSave password for crypto operations\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"password\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tif(this.params.length < 1) {\n\t\treturn \"Missing password\";\n\t}\n\t$tw.crypto.setPassword(this.params[0]);\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/password.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/rendertiddler.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/rendertiddler.js\ntype: application/javascript\nmodule-type: command\n\nCommand to render a tiddler and save it to a file\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"rendertiddler\",\n\tsynchronous: false\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tif(this.params.length < 2) {\n\t\treturn \"Missing filename\";\n\t}\n\tvar self = this,\n\t\tfs = require(\"fs\"),\n\t\tpath = require(\"path\"),\n\t\ttitle = this.params[0],\n\t\tfilename = path.resolve(this.commander.outputPath,this.params[1]),\n\t\ttype = this.params[2] || \"text/html\",\n\t\ttemplate = this.params[3],\n\t\tvariables = {};\n\t$tw.utils.createFileDirectories(filename);\n\tif(template) {\n\t\tvariables.currentTiddler = title;\n\t\ttitle = template;\n\t}\n\tfs.writeFile(filename,this.commander.wiki.renderTiddler(type,title,{variables: variables}),\"utf8\",function(err) {\n\t\tself.callback(err);\n\t});\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/rendertiddler.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/rendertiddlers.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/rendertiddlers.js\ntype: application/javascript\nmodule-type: command\n\nCommand to render several tiddlers to a folder of files\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar widget = require(\"$:/core/modules/widgets/widget.js\");\n\nexports.info = {\n\tname: \"rendertiddlers\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tif(this.params.length < 2) {\n\t\treturn \"Missing filename\";\n\t}\n\tvar self = this,\n\t\tfs = require(\"fs\"),\n\t\tpath = require(\"path\"),\n\t\twiki = this.commander.wiki,\n\t\tfilter = this.params[0],\n\t\ttemplate = this.params[1],\n\t\toutputPath = this.commander.outputPath,\n\t\tpathname = path.resolve(outputPath,this.params[2]),\t\t\n\t\ttype = this.params[3] || \"text/html\",\n\t\textension = this.params[4] || \".html\",\n\t\tdeleteDirectory = (this.params[5] || \"\").toLowerCase() !== \"noclean\",\n\t\ttiddlers = wiki.filterTiddlers(filter);\n\tif(deleteDirectory) {\n\t\t$tw.utils.deleteDirectory(pathname);\n\t}\n\t$tw.utils.each(tiddlers,function(title) {\n\t\tvar parser = wiki.parseTiddler(template),\n\t\t\twidgetNode = wiki.makeWidget(parser,{variables: {currentTiddler: title}}),\n\t\t\tcontainer = $tw.fakeDocument.createElement(\"div\");\n\t\twidgetNode.render(container,null);\n\t\tvar text = type === \"text/html\" ? 
container.innerHTML : container.textContent,\n\t\t\texportPath = null;\n\t\tif($tw.utils.hop($tw.macros,\"tv-get-export-path\")) {\n\t\t\tvar macroPath = $tw.macros[\"tv-get-export-path\"].run.apply(self,[title]);\n\t\t\tif(macroPath) {\n\t\t\t\texportPath = path.resolve(outputPath,macroPath + extension);\n\t\t\t}\n\t\t}\n\t\tvar finalPath = exportPath || path.resolve(pathname,encodeURIComponent(title) + extension);\n\t\t$tw.utils.createFileDirectories(finalPath);\n\t\tfs.writeFileSync(finalPath,text,\"utf8\");\n\t});\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/rendertiddlers.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/savelibrarytiddlers.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/savelibrarytiddlers.js\ntype: application/javascript\nmodule-type: command\n\nCommand to save the subtiddlers of a bundle tiddler as a series of JSON files\n\n--savelibrarytiddlers <tiddler> <pathname> <skinnylisting>\n\nThe tiddler identifies the bundle tiddler that contains the subtiddlers.\n\nThe pathname specifies the pathname to the folder in which the JSON files should be saved. The filename is the URL encoded title of the subtiddler.\n\nThe skinnylisting specifies the title of the tiddler to which a JSON catalogue of the subtiddlers will be saved. The JSON file contains the same data as the bundle tiddler but with the `text` field removed.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"savelibrarytiddlers\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tif(this.params.length < 2) {\n\t\treturn \"Missing filename\";\n\t}\n\tvar self = this,\n\t\tfs = require(\"fs\"),\n\t\tpath = require(\"path\"),\n\t\tcontainerTitle = this.params[0],\n\t\tfilter = this.params[1],\n\t\tbasepath = this.params[2],\n\t\tskinnyListTitle = this.params[3];\n\t// Get the container tiddler as data\n\tvar containerData = self.commander.wiki.getTiddlerDataCached(containerTitle,undefined);\n\tif(!containerData) {\n\t\treturn \"'\" + containerTitle + \"' is not a tiddler bundle\";\n\t}\n\t// Filter the list of plugins\n\tvar pluginList = [];\n\t$tw.utils.each(containerData.tiddlers,function(tiddler,title) {\n\t\tpluginList.push(title);\n\t});\n\tvar filteredPluginList;\n\tif(filter) {\n\t\tfilteredPluginList = self.commander.wiki.filterTiddlers(filter,null,self.commander.wiki.makeTiddlerIterator(pluginList));\n\t} else {\n\t\tfilteredPluginList = pluginList;\n\t}\n\t// Iterate through the 
plugins\n\tvar skinnyList = [];\n\t$tw.utils.each(filteredPluginList,function(title) {\n\t\tvar tiddler = containerData.tiddlers[title];\n\t\t// Save each JSON file and collect the skinny data\n\t\tvar pathname = path.resolve(self.commander.outputPath,basepath + encodeURIComponent(title) + \".json\");\n\t\t$tw.utils.createFileDirectories(pathname);\n\t\tfs.writeFileSync(pathname,JSON.stringify(tiddler,null,$tw.config.preferences.jsonSpaces),\"utf8\");\n\t\t// Collect the skinny list data\n\t\tvar pluginTiddlers = JSON.parse(tiddler.text),\n\t\t\treadmeContent = (pluginTiddlers.tiddlers[title + \"/readme\"] || {}).text,\n\t\t\ticonTiddler = pluginTiddlers.tiddlers[title + \"/icon\"] || {},\n\t\t\ticonType = iconTiddler.type,\n\t\t\ticonText = iconTiddler.text,\n\t\t\ticonContent;\n\t\tif(iconType && iconText) {\n\t\t\ticonContent = $tw.utils.makeDataUri(iconText,iconType);\n\t\t}\n\t\tskinnyList.push($tw.utils.extend({},tiddler,{text: undefined, readme: readmeContent, icon: iconContent}));\n\t});\n\t// Save the catalogue tiddler\n\tif(skinnyListTitle) {\n\t\tself.commander.wiki.setTiddlerData(skinnyListTitle,skinnyList);\n\t}\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/savelibrarytiddlers.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/savetiddler.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/savetiddler.js\ntype: application/javascript\nmodule-type: command\n\nCommand to save the content of a tiddler to a file\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"savetiddler\",\n\tsynchronous: false\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tif(this.params.length < 2) {\n\t\treturn \"Missing filename\";\n\t}\n\tvar self = this,\n\t\tfs = require(\"fs\"),\n\t\tpath = require(\"path\"),\n\t\ttitle = this.params[0],\n\t\tfilename = path.resolve(this.commander.outputPath,this.params[1]),\n\t\ttiddler = this.commander.wiki.getTiddler(title);\n\tif(tiddler) {\n\t\tvar type = tiddler.fields.type || \"text/vnd.tiddlywiki\",\n\t\t\tcontentTypeInfo = $tw.config.contentTypeInfo[type] || {encoding: \"utf8\"};\n\t\t$tw.utils.createFileDirectories(filename);\n\t\tfs.writeFile(filename,tiddler.fields.text,contentTypeInfo.encoding,function(err) {\n\t\t\tself.callback(err);\n\t\t});\n\t} else {\n\t\treturn \"Missing tiddler: \" + title;\n\t}\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/savetiddler.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/savetiddlers.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/savetiddlers.js\ntype: application/javascript\nmodule-type: command\n\nCommand to save several tiddlers to a folder of files\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar widget = require(\"$:/core/modules/widgets/widget.js\");\n\nexports.info = {\n\tname: \"savetiddlers\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tif(this.params.length < 1) {\n\t\treturn \"Missing filename\";\n\t}\n\tvar self = this,\n\t\tfs = require(\"fs\"),\n\t\tpath = require(\"path\"),\n\t\twiki = this.commander.wiki,\n\t\tfilter = this.params[0],\n\t\tpathname = path.resolve(this.commander.outputPath,this.params[1]),\n\t\tdeleteDirectory = (this.params[2] || \"\").toLowerCase() !== \"noclean\",\n\t\ttiddlers = wiki.filterTiddlers(filter);\n\tif(deleteDirectory) {\n\t\t$tw.utils.deleteDirectory(pathname);\n\t}\n\t$tw.utils.createDirectory(pathname);\n\t$tw.utils.each(tiddlers,function(title) {\n\t\tvar tiddler = self.commander.wiki.getTiddler(title),\n\t\t\ttype = tiddler.fields.type || \"text/vnd.tiddlywiki\",\n\t\t\tcontentTypeInfo = $tw.config.contentTypeInfo[type] || {encoding: \"utf8\"},\n\t\t\tfilename = path.resolve(pathname,encodeURIComponent(title));\n\t\tfs.writeFileSync(filename,tiddler.fields.text,contentTypeInfo.encoding);\n\t});\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/savetiddlers.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/server.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/server.js\ntype: application/javascript\nmodule-type: command\n\nServe tiddlers over http\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nif($tw.node) {\n\tvar util = require(\"util\"),\n\t\tfs = require(\"fs\"),\n\t\turl = require(\"url\"),\n\t\tpath = require(\"path\"),\n\t\thttp = require(\"http\");\n}\n\nexports.info = {\n\tname: \"server\",\n\tsynchronous: true\n};\n\n/*\nA simple HTTP server with regexp-based routes\n*/\nfunction SimpleServer(options) {\n\tthis.routes = options.routes || [];\n\tthis.wiki = options.wiki;\n\tthis.variables = options.variables || {};\n}\n\nSimpleServer.prototype.set = function(obj) {\n\tvar self = this;\n\t$tw.utils.each(obj,function(value,name) {\n\t\tself.variables[name] = value;\n\t});\n};\n\nSimpleServer.prototype.get = function(name) {\n\treturn this.variables[name];\n};\n\nSimpleServer.prototype.addRoute = function(route) {\n\tthis.routes.push(route);\n};\n\nSimpleServer.prototype.findMatchingRoute = function(request,state) {\n\tvar pathprefix = this.get(\"pathprefix\") || \"\";\n\tfor(var t=0; t<this.routes.length; t++) {\n\t\tvar potentialRoute = this.routes[t],\n\t\t\tpathRegExp = potentialRoute.path,\n\t\t\tpathname = state.urlInfo.pathname,\n\t\t\tmatch;\n\t\tif(pathprefix) {\n\t\t\tif(pathname.substr(0,pathprefix.length) === pathprefix) {\n\t\t\t\tpathname = pathname.substr(pathprefix.length);\n\t\t\t\tmatch = potentialRoute.path.exec(pathname);\n\t\t\t} else {\n\t\t\t\tmatch = false;\n\t\t\t}\n\t\t} else {\n\t\t\tmatch = potentialRoute.path.exec(pathname);\n\t\t}\n\t\tif(match && request.method === potentialRoute.method) {\n\t\t\tstate.params = [];\n\t\t\tfor(var p=1; p<match.length; p++) {\n\t\t\t\tstate.params.push(match[p]);\n\t\t\t}\n\t\t\treturn potentialRoute;\n\t\t}\n\t}\n\treturn null;\n};\n\nSimpleServer.prototype.checkCredentials = function(request,incomingUsername,incomingPassword) {\n\tvar header = 
request.headers.authorization || \"\",\n\t\ttoken = header.split(/\\s+/).pop() || \"\",\n\t\tauth = $tw.utils.base64Decode(token),\n\t\tparts = auth.split(/:/),\n\t\tusername = parts[0],\n\t\tpassword = parts[1];\n\tif(incomingUsername === username && incomingPassword === password) {\n\t\treturn \"ALLOWED\";\n\t} else {\n\t\treturn \"DENIED\";\n\t}\n};\n\nSimpleServer.prototype.listen = function(port,host) {\n\tvar self = this;\n\thttp.createServer(function(request,response) {\n\t\t// Compose the state object\n\t\tvar state = {};\n\t\tstate.wiki = self.wiki;\n\t\tstate.server = self;\n\t\tstate.urlInfo = url.parse(request.url);\n\t\t// Find the route that matches this path\n\t\tvar route = self.findMatchingRoute(request,state);\n\t\t// Check for the username and password if we've got one\n\t\tvar username = self.get(\"username\"),\n\t\t\tpassword = self.get(\"password\");\n\t\tif(username && password) {\n\t\t\t// Check they match\n\t\t\tif(self.checkCredentials(request,username,password) !== \"ALLOWED\") {\n\t\t\t\tvar servername = state.wiki.getTiddlerText(\"$:/SiteTitle\") || \"TiddlyWiki5\";\n\t\t\t\tresponse.writeHead(401,\"Authentication required\",{\n\t\t\t\t\t\"WWW-Authenticate\": 'Basic realm=\"Please provide your username and password to login to ' + servername + '\"'\n\t\t\t\t});\n\t\t\t\tresponse.end();\n\t\t\t\treturn;\n\t\t\t}\n\t\t}\n\t\t// Return a 404 if we didn't find a route\n\t\tif(!route) {\n\t\t\tresponse.writeHead(404);\n\t\t\tresponse.end();\n\t\t\treturn;\n\t\t}\n\t\t// Set the encoding for the incoming request\n\t\t// TODO: Presumably this would need tweaking if we supported PUTting binary tiddlers\n\t\trequest.setEncoding(\"utf8\");\n\t\t// Dispatch the appropriate method\n\t\tswitch(request.method) {\n\t\t\tcase \"GET\": // Intentional fall-through\n\t\t\tcase \"DELETE\":\n\t\t\t\troute.handler(request,response,state);\n\t\t\t\tbreak;\n\t\t\tcase \"PUT\":\n\t\t\t\tvar data = \"\";\n\t\t\t\trequest.on(\"data\",function(chunk) 
{\n\t\t\t\t\tdata += chunk.toString();\n\t\t\t\t});\n\t\t\t\trequest.on(\"end\",function() {\n\t\t\t\t\tstate.data = data;\n\t\t\t\t\troute.handler(request,response,state);\n\t\t\t\t});\n\t\t\t\tbreak;\n\t\t}\n\t}).listen(port,host);\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n\t// Set up server\n\tthis.server = new SimpleServer({\n\t\twiki: this.commander.wiki\n\t});\n\t// Add route handlers\n\tthis.server.addRoute({\n\t\tmethod: \"PUT\",\n\t\tpath: /^\\/recipes\\/default\\/tiddlers\\/(.+)$/,\n\t\thandler: function(request,response,state) {\n\t\t\tvar title = decodeURIComponent(state.params[0]),\n\t\t\t\tfields = JSON.parse(state.data);\n\t\t\t// Pull up any subfields in the `fields` object\n\t\t\tif(fields.fields) {\n\t\t\t\t$tw.utils.each(fields.fields,function(field,name) {\n\t\t\t\t\tfields[name] = field;\n\t\t\t\t});\n\t\t\t\tdelete fields.fields;\n\t\t\t}\n\t\t\t// Remove any revision field\n\t\t\tif(fields.revision) {\n\t\t\t\tdelete fields.revision;\n\t\t\t}\n\t\t\tstate.wiki.addTiddler(new $tw.Tiddler(state.wiki.getCreationFields(),fields,{title: title},state.wiki.getModificationFields()));\n\t\t\tvar changeCount = state.wiki.getChangeCount(title).toString();\n\t\t\tresponse.writeHead(204, \"OK\",{\n\t\t\t\tEtag: \"\\\"default/\" + encodeURIComponent(title) + \"/\" + changeCount + \":\\\"\",\n\t\t\t\t\"Content-Type\": \"text/plain\"\n\t\t\t});\n\t\t\tresponse.end();\n\t\t}\n\t});\n\tthis.server.addRoute({\n\t\tmethod: \"DELETE\",\n\t\tpath: /^\\/bags\\/default\\/tiddlers\\/(.+)$/,\n\t\thandler: function(request,response,state) {\n\t\t\tvar title = decodeURIComponent(state.params[0]);\n\t\t\tstate.wiki.deleteTiddler(title);\n\t\t\tresponse.writeHead(204, \"OK\", {\n\t\t\t\t\"Content-Type\": \"text/plain\"\n\t\t\t});\n\t\t\tresponse.end();\n\t\t}\n\t});\n\tthis.server.addRoute({\n\t\tmethod: \"GET\",\n\t\tpath: /^\\/$/,\n\t\thandler: 
function(request,response,state) {\n\t\t\tresponse.writeHead(200, {\"Content-Type\": state.server.get(\"serveType\")});\n\t\t\tvar text = state.wiki.renderTiddler(state.server.get(\"renderType\"),state.server.get(\"rootTiddler\"));\n\t\t\tresponse.end(text,\"utf8\");\n\t\t}\n\t});\n\tthis.server.addRoute({\n\t\tmethod: \"GET\",\n\t\tpath: /^\\/status$/,\n\t\thandler: function(request,response,state) {\n\t\t\tresponse.writeHead(200, {\"Content-Type\": \"application/json\"});\n\t\t\tvar text = JSON.stringify({\n\t\t\t\tusername: state.server.get(\"username\"),\n\t\t\t\tspace: {\n\t\t\t\t\trecipe: \"default\"\n\t\t\t\t},\n\t\t\t\ttiddlywiki_version: $tw.version\n\t\t\t});\n\t\t\tresponse.end(text,\"utf8\");\n\t\t}\n\t});\n\tthis.server.addRoute({\n\t\tmethod: \"GET\",\n\t\tpath: /^\\/favicon.ico$/,\n\t\thandler: function(request,response,state) {\n\t\t\tresponse.writeHead(200, {\"Content-Type\": \"image/x-icon\"});\n\t\t\tvar buffer = state.wiki.getTiddlerText(\"$:/favicon.ico\",\"\");\n\t\t\tresponse.end(buffer,\"base64\");\n\t\t}\n\t});\n\tthis.server.addRoute({\n\t\tmethod: \"GET\",\n\t\tpath: /^\\/recipes\\/default\\/tiddlers.json$/,\n\t\thandler: function(request,response,state) {\n\t\t\tresponse.writeHead(200, {\"Content-Type\": \"application/json\"});\n\t\t\tvar tiddlers = [];\n\t\t\tstate.wiki.forEachTiddler({sortField: \"title\"},function(title,tiddler) {\n\t\t\t\tvar tiddlerFields = {};\n\t\t\t\t$tw.utils.each(tiddler.fields,function(field,name) {\n\t\t\t\t\tif(name !== \"text\") {\n\t\t\t\t\t\ttiddlerFields[name] = tiddler.getFieldString(name);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\ttiddlerFields.revision = state.wiki.getChangeCount(title);\n\t\t\t\ttiddlerFields.type = tiddlerFields.type || \"text/vnd.tiddlywiki\";\n\t\t\t\ttiddlers.push(tiddlerFields);\n\t\t\t});\n\t\t\tvar text = JSON.stringify(tiddlers);\n\t\t\tresponse.end(text,\"utf8\");\n\t\t}\n\t});\n\tthis.server.addRoute({\n\t\tmethod: \"GET\",\n\t\tpath: 
/^\\/recipes\\/default\\/tiddlers\\/(.+)$/,\n\t\thandler: function(request,response,state) {\n\t\t\tvar title = decodeURIComponent(state.params[0]),\n\t\t\t\ttiddler = state.wiki.getTiddler(title),\n\t\t\t\ttiddlerFields = {},\n\t\t\t\tknownFields = [\n\t\t\t\t\t\"bag\", \"created\", \"creator\", \"modified\", \"modifier\", \"permissions\", \"recipe\", \"revision\", \"tags\", \"text\", \"title\", \"type\", \"uri\"\n\t\t\t\t];\n\t\t\tif(tiddler) {\n\t\t\t\t$tw.utils.each(tiddler.fields,function(field,name) {\n\t\t\t\t\tvar value = tiddler.getFieldString(name);\n\t\t\t\t\tif(knownFields.indexOf(name) !== -1) {\n\t\t\t\t\t\ttiddlerFields[name] = value;\n\t\t\t\t\t} else {\n\t\t\t\t\t\ttiddlerFields.fields = tiddlerFields.fields || {};\n\t\t\t\t\t\ttiddlerFields.fields[name] = value;\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\ttiddlerFields.revision = state.wiki.getChangeCount(title);\n\t\t\t\ttiddlerFields.type = tiddlerFields.type || \"text/vnd.tiddlywiki\";\n\t\t\t\tresponse.writeHead(200, {\"Content-Type\": \"application/json\"});\n\t\t\t\tresponse.end(JSON.stringify(tiddlerFields),\"utf8\");\n\t\t\t} else {\n\t\t\t\tresponse.writeHead(404);\n\t\t\t\tresponse.end();\n\t\t\t}\n\t\t}\n\t});\n};\n\nCommand.prototype.execute = function() {\n\tif(!$tw.boot.wikiTiddlersPath) {\n\t\t$tw.utils.warning(\"Warning: Wiki folder '\" + $tw.boot.wikiPath + \"' does not exist or is missing a tiddlywiki.info file\");\n\t}\n\tvar port = this.params[0] || \"8080\",\n\t\trootTiddler = this.params[1] || \"$:/core/save/all\",\n\t\trenderType = this.params[2] || \"text/plain\",\n\t\tserveType = this.params[3] || \"text/html\",\n\t\tusername = this.params[4],\n\t\tpassword = this.params[5],\n\t\thost = this.params[6] || \"127.0.0.1\",\n\t\tpathprefix = this.params[7];\n\tthis.server.set({\n\t\trootTiddler: rootTiddler,\n\t\trenderType: renderType,\n\t\tserveType: serveType,\n\t\tusername: username,\n\t\tpassword: password,\n\t\tpathprefix: 
pathprefix\n\t});\n\tthis.server.listen(port,host);\n\tconsole.log(\"Serving on \" + host + \":\" + port);\n\tconsole.log(\"(press ctrl-C to exit)\");\n\t// Warn if required plugins are missing\n\tif(!$tw.wiki.getTiddler(\"$:/plugins/tiddlywiki/tiddlyweb\") || !$tw.wiki.getTiddler(\"$:/plugins/tiddlywiki/filesystem\")) {\n\t\t$tw.utils.warning(\"Warning: Plugins required for client-server operation (\\\"tiddlywiki/filesystem\\\" and \\\"tiddlywiki/tiddlyweb\\\") are missing from tiddlywiki.info file\");\n\t}\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/server.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/setfield.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/setfield.js\ntype: application/javascript\nmodule-type: command\n\nCommand to modify selected tiddlers to set a field to the text of a template tiddler that has been wikified with the selected tiddler as the current tiddler.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar widget = require(\"$:/core/modules/widgets/widget.js\");\n\nexports.info = {\n\tname: \"setfield\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tif(this.params.length < 4) {\n\t\treturn \"Missing parameters\";\n\t}\n\tvar self = this,\n\t\twiki = this.commander.wiki,\n\t\tfilter = this.params[0],\n\t\tfieldname = this.params[1] || \"text\",\n\t\ttemplatetitle = this.params[2],\n\t\trendertype = this.params[3] || \"text/plain\",\n\t\ttiddlers = wiki.filterTiddlers(filter);\n\t$tw.utils.each(tiddlers,function(title) {\n\t\tvar parser = wiki.parseTiddler(templatetitle),\n\t\t\tnewFields = {},\n\t\t\ttiddler = wiki.getTiddler(title);\n\t\tif(parser) {\n\t\t\tvar widgetNode = wiki.makeWidget(parser,{variables: {currentTiddler: title}});\n\t\t\tvar container = $tw.fakeDocument.createElement(\"div\");\n\t\t\twidgetNode.render(container,null);\n\t\t\tnewFields[fieldname] = rendertype === \"text/html\" ? container.innerHTML : container.textContent;\n\t\t} else {\n\t\t\tnewFields[fieldname] = undefined;\n\t\t}\n\t\twiki.addTiddler(new $tw.Tiddler(tiddler,newFields));\n\t});\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/setfield.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/unpackplugin.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/unpackplugin.js\ntype: application/javascript\nmodule-type: command\n\nCommand to extract the shadow tiddlers from within a plugin\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"unpackplugin\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tif(this.params.length < 1) {\n\t\treturn \"Missing plugin name\";\n\t}\n\tvar self = this,\n\t\ttitle = this.params[0],\n\t\tpluginData = this.commander.wiki.getTiddlerDataCached(title);\n\tif(!pluginData) {\n\t\treturn \"Plugin '\" + title + \"' not found\";\n\t}\n\t$tw.utils.each(pluginData.tiddlers,function(tiddler) {\n\t\tself.commander.wiki.addTiddler(new $tw.Tiddler(tiddler));\n\t});\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/unpackplugin.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/verbose.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/verbose.js\ntype: application/javascript\nmodule-type: command\n\nVerbose command\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"verbose\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander) {\n\tthis.params = params;\n\tthis.commander = commander;\n};\n\nCommand.prototype.execute = function() {\n\tthis.commander.verbose = true;\n\t// Output the boot message log\n\tthis.commander.streams.output.write(\"Boot log:\\n \" + $tw.boot.logMessages.join(\"\\n \") + \"\\n\");\n\treturn null; // No error\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/verbose.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/version.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/version.js\ntype: application/javascript\nmodule-type: command\n\nVersion command\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"version\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander) {\n\tthis.params = params;\n\tthis.commander = commander;\n};\n\nCommand.prototype.execute = function() {\n\tthis.commander.streams.output.write($tw.version + \"\\n\");\n\treturn null; // No error\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/version.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/config.js": {
"text": "/*\\\ntitle: $:/core/modules/config.js\ntype: application/javascript\nmodule-type: config\n\nCore configuration constants\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.preferences = {};\n\nexports.preferences.notificationDuration = 3 * 1000;\nexports.preferences.jsonSpaces = 4;\n\nexports.textPrimitives = {\n\tupperLetter: \"[A-Z\\u00c0-\\u00d6\\u00d8-\\u00de\\u0150\\u0170]\",\n\tlowerLetter: \"[a-z\\u00df-\\u00f6\\u00f8-\\u00ff\\u0151\\u0171]\",\n\tanyLetter: \"[A-Za-z0-9\\u00c0-\\u00d6\\u00d8-\\u00de\\u00df-\\u00f6\\u00f8-\\u00ff\\u0150\\u0170\\u0151\\u0171]\",\n\tblockPrefixLetters:\t\"[A-Za-z0-9-_\\u00c0-\\u00d6\\u00d8-\\u00de\\u00df-\\u00f6\\u00f8-\\u00ff\\u0150\\u0170\\u0151\\u0171]\"\n};\n\nexports.textPrimitives.unWikiLink = \"~\";\nexports.textPrimitives.wikiLink = exports.textPrimitives.upperLetter + \"+\" +\n\texports.textPrimitives.lowerLetter + \"+\" +\n\texports.textPrimitives.upperLetter +\n\texports.textPrimitives.anyLetter + \"*\";\n\nexports.htmlEntities = {quot:34, amp:38, apos:39, lt:60, gt:62, nbsp:160, iexcl:161, cent:162, pound:163, curren:164, yen:165, brvbar:166, sect:167, uml:168, copy:169, ordf:170, laquo:171, not:172, shy:173, reg:174, macr:175, deg:176, plusmn:177, sup2:178, sup3:179, acute:180, micro:181, para:182, middot:183, cedil:184, sup1:185, ordm:186, raquo:187, frac14:188, frac12:189, frac34:190, iquest:191, Agrave:192, Aacute:193, Acirc:194, Atilde:195, Auml:196, Aring:197, AElig:198, Ccedil:199, Egrave:200, Eacute:201, Ecirc:202, Euml:203, Igrave:204, Iacute:205, Icirc:206, Iuml:207, ETH:208, Ntilde:209, Ograve:210, Oacute:211, Ocirc:212, Otilde:213, Ouml:214, times:215, Oslash:216, Ugrave:217, Uacute:218, Ucirc:219, Uuml:220, Yacute:221, THORN:222, szlig:223, agrave:224, aacute:225, acirc:226, atilde:227, auml:228, aring:229, aelig:230, ccedil:231, egrave:232, eacute:233, ecirc:234, euml:235, igrave:236, iacute:237, icirc:238, iuml:239, eth:240, 
ntilde:241, ograve:242, oacute:243, ocirc:244, otilde:245, ouml:246, divide:247, oslash:248, ugrave:249, uacute:250, ucirc:251, uuml:252, yacute:253, thorn:254, yuml:255, OElig:338, oelig:339, Scaron:352, scaron:353, Yuml:376, fnof:402, circ:710, tilde:732, Alpha:913, Beta:914, Gamma:915, Delta:916, Epsilon:917, Zeta:918, Eta:919, Theta:920, Iota:921, Kappa:922, Lambda:923, Mu:924, Nu:925, Xi:926, Omicron:927, Pi:928, Rho:929, Sigma:931, Tau:932, Upsilon:933, Phi:934, Chi:935, Psi:936, Omega:937, alpha:945, beta:946, gamma:947, delta:948, epsilon:949, zeta:950, eta:951, theta:952, iota:953, kappa:954, lambda:955, mu:956, nu:957, xi:958, omicron:959, pi:960, rho:961, sigmaf:962, sigma:963, tau:964, upsilon:965, phi:966, chi:967, psi:968, omega:969, thetasym:977, upsih:978, piv:982, ensp:8194, emsp:8195, thinsp:8201, zwnj:8204, zwj:8205, lrm:8206, rlm:8207, ndash:8211, mdash:8212, lsquo:8216, rsquo:8217, sbquo:8218, ldquo:8220, rdquo:8221, bdquo:8222, dagger:8224, Dagger:8225, bull:8226, hellip:8230, permil:8240, prime:8242, Prime:8243, lsaquo:8249, rsaquo:8250, oline:8254, frasl:8260, euro:8364, image:8465, weierp:8472, real:8476, trade:8482, alefsym:8501, larr:8592, uarr:8593, rarr:8594, darr:8595, harr:8596, crarr:8629, lArr:8656, uArr:8657, rArr:8658, dArr:8659, hArr:8660, forall:8704, part:8706, exist:8707, empty:8709, nabla:8711, isin:8712, notin:8713, ni:8715, prod:8719, sum:8721, minus:8722, lowast:8727, radic:8730, prop:8733, infin:8734, ang:8736, and:8743, or:8744, cap:8745, cup:8746, int:8747, there4:8756, sim:8764, cong:8773, asymp:8776, ne:8800, equiv:8801, le:8804, ge:8805, sub:8834, sup:8835, nsub:8836, sube:8838, supe:8839, oplus:8853, otimes:8855, perp:8869, sdot:8901, lceil:8968, rceil:8969, lfloor:8970, rfloor:8971, lang:9001, rang:9002, loz:9674, spades:9824, clubs:9827, hearts:9829, diams:9830 };\n\nexports.htmlVoidElements = 
\"area,base,br,col,command,embed,hr,img,input,keygen,link,meta,param,source,track,wbr\".split(\",\");\n\nexports.htmlBlockElements = \"address,article,aside,audio,blockquote,canvas,dd,div,dl,fieldset,figcaption,figure,footer,form,h1,h2,h3,h4,h5,h6,header,hgroup,hr,li,noscript,ol,output,p,pre,section,table,tfoot,ul,video\".split(\",\");\n\nexports.htmlUnsafeElements = \"script\".split(\",\");\n\n})();\n",
"title": "$:/core/modules/config.js",
"type": "application/javascript",
"module-type": "config"
},
"$:/core/modules/deserializers.js": {
"text": "/*\\\ntitle: $:/core/modules/deserializers.js\ntype: application/javascript\nmodule-type: tiddlerdeserializer\n\nFunctions to deserialise tiddlers from a block of text\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nUtility function to parse an old-style tiddler DIV in a *.tid file. It looks like this:\n\n<div title=\"Title\" creator=\"JoeBloggs\" modifier=\"JoeBloggs\" created=\"201102111106\" modified=\"201102111310\" tags=\"myTag [[my long tag]]\">\n<pre>The text of the tiddler (without the expected HTML encoding).\n</pre>\n</div>\n\nNote that the field attributes are HTML encoded, but that the body of the <PRE> tag is not encoded.\n\nWhen these tiddler DIVs are encountered within a TiddlyWiki HTML file then the body is encoded in the usual way.\n*/\nvar parseTiddlerDiv = function(text /* [,fields] */) {\n\t// Slot together the default results\n\tvar result = {};\n\tif(arguments.length > 1) {\n\t\tfor(var f=1; f<arguments.length; f++) {\n\t\t\tvar fields = arguments[f];\n\t\t\tfor(var t in fields) {\n\t\t\t\tresult[t] = fields[t];\t\t\n\t\t\t}\n\t\t}\n\t}\n\t// Parse the DIV body\n\tvar startRegExp = /^\\s*<div\\s+([^>]*)>(\\s*<pre>)?/gi,\n\t\tendRegExp,\n\t\tmatch = startRegExp.exec(text);\n\tif(match) {\n\t\t// Old-style DIVs don't have the <pre> tag\n\t\tif(match[2]) {\n\t\t\tendRegExp = /<\\/pre>\\s*<\\/div>\\s*$/gi;\n\t\t} else {\n\t\t\tendRegExp = /<\\/div>\\s*$/gi;\n\t\t}\n\t\tvar endMatch = endRegExp.exec(text);\n\t\tif(endMatch) {\n\t\t\t// Extract the text\n\t\t\tresult.text = text.substring(match.index + match[0].length,endMatch.index);\n\t\t\t// Process the attributes\n\t\t\tvar attrRegExp = /\\s*([^=\\s]+)\\s*=\\s*(?:\"([^\"]*)\"|'([^']*)')/gi,\n\t\t\t\tattrMatch;\n\t\t\tdo {\n\t\t\t\tattrMatch = attrRegExp.exec(match[1]);\n\t\t\t\tif(attrMatch) {\n\t\t\t\t\tvar name = attrMatch[1];\n\t\t\t\t\tvar value = attrMatch[2] !== undefined ? 
attrMatch[2] : attrMatch[3];\n\t\t\t\t\tresult[name] = value;\n\t\t\t\t}\n\t\t\t} while(attrMatch);\n\t\t\treturn result;\n\t\t}\n\t}\n\treturn undefined;\n};\n\nexports[\"application/x-tiddler-html-div\"] = function(text,fields) {\n\treturn [parseTiddlerDiv(text,fields)];\n};\n\nexports[\"application/json\"] = function(text,fields) {\n\tvar incoming = JSON.parse(text),\n\t\tresults = [];\n\tif($tw.utils.isArray(incoming)) {\n\t\tfor(var t=0; t<incoming.length; t++) {\n\t\t\tvar incomingFields = incoming[t],\n\t\t\t\tfields = {};\n\t\t\tfor(var f in incomingFields) {\n\t\t\t\tif(typeof incomingFields[f] === \"string\") {\n\t\t\t\t\tfields[f] = incomingFields[f];\n\t\t\t\t}\n\t\t\t}\n\t\t\tresults.push(fields);\n\t\t}\n\t}\n\treturn results;\n};\n\n/*\nParse an HTML file into tiddlers. There are three possibilities:\n# A TiddlyWiki classic HTML file containing `text/x-tiddlywiki` tiddlers\n# A TiddlyWiki5 HTML file containing `text/vnd.tiddlywiki` tiddlers\n# An ordinary HTML file\n*/\nexports[\"text/html\"] = function(text,fields) {\n\t// Check if we've got a store area\n\tvar storeAreaMarkerRegExp = /<div id=[\"']?storeArea['\"]?( style=[\"']?display:none;[\"']?)?>/gi,\n\t\tmatch = storeAreaMarkerRegExp.exec(text);\n\tif(match) {\n\t\t// If so, it's either a classic TiddlyWiki file or an unencrypted TW5 file\n\t\t// First read the normal tiddlers\n\t\tvar results = deserializeTiddlyWikiFile(text,storeAreaMarkerRegExp.lastIndex,!!match[1],fields);\n\t\t// Then any system tiddlers\n\t\tvar systemAreaMarkerRegExp = /<div id=[\"']?systemArea['\"]?( style=[\"']?display:none;[\"']?)?>/gi,\n\t\t\tsysMatch = systemAreaMarkerRegExp.exec(text);\n\t\tif(sysMatch) {\n\t\t\tresults.push.apply(results,deserializeTiddlyWikiFile(text,systemAreaMarkerRegExp.lastIndex,!!sysMatch[1],fields));\n\t\t}\n\t\treturn results;\n\t} else {\n\t\t// Check whether we've got an encrypted file\n\t\tvar encryptedStoreArea = $tw.utils.extractEncryptedStoreArea(text);\n\t\tif(encryptedStoreArea) 
{\n\t\t\t// If so, attempt to decrypt it using the current password\n\t\t\treturn $tw.utils.decryptStoreArea(encryptedStoreArea);\n\t\t} else {\n\t\t\t// It's not a TiddlyWiki so we'll return the entire HTML file as a tiddler\n\t\t\treturn deserializeHtmlFile(text,fields);\n\t\t}\n\t}\n};\n\nfunction deserializeHtmlFile(text,fields) {\n\tvar result = {};\n\t$tw.utils.each(fields,function(value,name) {\n\t\tresult[name] = value;\n\t});\n\tresult.text = text;\n\tresult.type = \"text/html\";\n\treturn [result];\n}\n\nfunction deserializeTiddlyWikiFile(text,storeAreaEnd,isTiddlyWiki5,fields) {\n\tvar results = [],\n\t\tendOfDivRegExp = /(<\\/div>\\s*)/gi,\n\t\tstartPos = storeAreaEnd,\n\t\tdefaultType = isTiddlyWiki5 ? undefined : \"text/x-tiddlywiki\";\n\tendOfDivRegExp.lastIndex = startPos;\n\tvar match = endOfDivRegExp.exec(text);\n\twhile(match) {\n\t\tvar endPos = endOfDivRegExp.lastIndex,\n\t\t\ttiddlerFields = parseTiddlerDiv(text.substring(startPos,endPos),fields,{type: defaultType});\n\t\tif(!tiddlerFields) {\n\t\t\tbreak;\n\t\t}\n\t\t$tw.utils.each(tiddlerFields,function(value,name) {\n\t\t\tif(typeof value === \"string\") {\n\t\t\t\ttiddlerFields[name] = $tw.utils.htmlDecode(value);\n\t\t\t}\n\t\t});\n\t\tif(tiddlerFields.text !== null) {\n\t\t\tresults.push(tiddlerFields);\n\t\t}\n\t\tstartPos = endPos;\n\t\tmatch = endOfDivRegExp.exec(text);\n\t}\n\treturn results;\n}\n\n})();\n",
"title": "$:/core/modules/deserializers.js",
"type": "application/javascript",
"module-type": "tiddlerdeserializer"
},
"$:/core/modules/editor/engines/framed.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/engines/framed.js\ntype: application/javascript\nmodule-type: library\n\nText editor engine based on a simple input or textarea within an iframe. This is done so that the selection is preserved even when clicking away from the textarea\n\n\\*/\n(function(){\n\n/*jslint node: true,browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar HEIGHT_VALUE_TITLE = \"$:/config/TextEditor/EditorHeight/Height\";\n\nfunction FramedEngine(options) {\n\t// Save our options\n\toptions = options || {};\n\tthis.widget = options.widget;\n\tthis.value = options.value;\n\tthis.parentNode = options.parentNode;\n\tthis.nextSibling = options.nextSibling;\n\t// Create our hidden dummy text area for reading styles\n\tthis.dummyTextArea = this.widget.document.createElement(\"textarea\");\n\tif(this.widget.editClass) {\n\t\tthis.dummyTextArea.className = this.widget.editClass;\n\t}\n\tthis.dummyTextArea.setAttribute(\"hidden\",\"true\");\n\tthis.parentNode.insertBefore(this.dummyTextArea,this.nextSibling);\n\tthis.widget.domNodes.push(this.dummyTextArea);\n\t// Create the iframe\n\tthis.iframeNode = this.widget.document.createElement(\"iframe\");\n\tthis.parentNode.insertBefore(this.iframeNode,this.nextSibling);\n\tthis.iframeDoc = this.iframeNode.contentWindow.document;\n\t// (Firefox requires us to put some empty content in the iframe)\n\tthis.iframeDoc.open();\n\tthis.iframeDoc.write(\"\");\n\tthis.iframeDoc.close();\n\t// Style the iframe\n\tthis.iframeNode.className = this.dummyTextArea.className;\n\tthis.iframeNode.style.border = \"none\";\n\tthis.iframeNode.style.padding = \"0\";\n\tthis.iframeNode.style.resize = \"none\";\n\tthis.iframeDoc.body.style.margin = \"0\";\n\tthis.iframeDoc.body.style.padding = \"0\";\n\tthis.widget.domNodes.push(this.iframeNode);\n\t// Construct the textarea or input node\n\tvar tag = this.widget.editTag;\n\tif($tw.config.htmlUnsafeElements.indexOf(tag) !== -1) {\n\t\ttag = 
\"input\";\n\t}\n\tthis.domNode = this.iframeDoc.createElement(tag);\n\t// Set the text\n\tif(this.widget.editTag === \"textarea\") {\n\t\tthis.domNode.appendChild(this.iframeDoc.createTextNode(this.value));\n\t} else {\n\t\tthis.domNode.value = this.value;\n\t}\n\t// Set the attributes\n\tif(this.widget.editType) {\n\t\tthis.domNode.setAttribute(\"type\",this.widget.editType);\n\t}\n\tif(this.widget.editPlaceholder) {\n\t\tthis.domNode.setAttribute(\"placeholder\",this.widget.editPlaceholder);\n\t}\n\tif(this.widget.editSize) {\n\t\tthis.domNode.setAttribute(\"size\",this.widget.editSize);\n\t}\n\tif(this.widget.editRows) {\n\t\tthis.domNode.setAttribute(\"rows\",this.widget.editRows);\n\t}\n\t// Copy the styles from the dummy textarea\n\tthis.copyStyles();\n\t// Add event listeners\n\t$tw.utils.addEventListeners(this.domNode,[\n\t\t{name: \"input\",handlerObject: this,handlerMethod: \"handleInputEvent\"},\n\t\t{name: \"keydown\",handlerObject: this.widget,handlerMethod: \"handleKeydownEvent\"}\n\t]);\n\t// Insert the element into the DOM\n\tthis.iframeDoc.body.appendChild(this.domNode);\n}\n\n/*\nCopy styles from the dummy text area to the textarea in the iframe\n*/\nFramedEngine.prototype.copyStyles = function() {\n\t// Copy all styles\n\t$tw.utils.copyStyles(this.dummyTextArea,this.domNode);\n\t// Override the ones that should not be set the same as the dummy textarea\n\tthis.domNode.style.display = \"block\";\n\tthis.domNode.style.width = \"100%\";\n\tthis.domNode.style.margin = \"0\";\n\t// In Chrome setting -webkit-text-fill-color overrides the placeholder text colour\n\tthis.domNode.style[\"-webkit-text-fill-color\"] = \"currentcolor\";\n};\n\n/*\nSet the text of the engine if it doesn't currently have focus\n*/\nFramedEngine.prototype.setText = function(text,type) {\n\tif(!this.domNode.isTiddlyWikiFakeDom) {\n\t\tif(this.domNode.ownerDocument.activeElement !== this.domNode) {\n\t\t\tthis.domNode.value = text;\n\t\t}\n\t\t// Fix the height if 
needed\n\t\tthis.fixHeight();\n\t}\n};\n\n/*\nGet the text of the engine\n*/\nFramedEngine.prototype.getText = function() {\n\treturn this.domNode.value;\n};\n\n/*\nFix the height of textarea to fit content\n*/\nFramedEngine.prototype.fixHeight = function() {\n\t// Make sure styles are updated\n\tthis.copyStyles();\n\t// Adjust height\n\tif(this.widget.editTag === \"textarea\") {\n\t\tif(this.widget.editAutoHeight) {\n\t\t\tif(this.domNode && !this.domNode.isTiddlyWikiFakeDom) {\n\t\t\t\tvar newHeight = $tw.utils.resizeTextAreaToFit(this.domNode,this.widget.editMinHeight);\n\t\t\t\tthis.iframeNode.style.height = (newHeight + 14) + \"px\"; // +14 for the border on the textarea\n\t\t\t}\n\t\t} else {\n\t\t\tvar fixedHeight = parseInt(this.widget.wiki.getTiddlerText(HEIGHT_VALUE_TITLE,\"400px\"),10);\n\t\t\tfixedHeight = Math.max(fixedHeight,20);\n\t\t\tthis.domNode.style.height = fixedHeight + \"px\";\n\t\t\tthis.iframeNode.style.height = (fixedHeight + 14) + \"px\";\n\t\t}\n\t}\n};\n\n/*\nFocus the engine node\n*/\nFramedEngine.prototype.focus = function() {\n\tif(this.domNode.focus && this.domNode.select) {\n\t\tthis.domNode.focus();\n\t\tthis.domNode.select();\n\t}\n};\n\n/*\nHandle a dom \"input\" event which occurs when the text has changed\n*/\nFramedEngine.prototype.handleInputEvent = function(event) {\n\tthis.widget.saveChanges(this.getText());\n\tthis.fixHeight();\n\treturn true;\n};\n\n/*\nCreate a blank structure representing a text operation\n*/\nFramedEngine.prototype.createTextOperation = function() {\n\tvar operation = {\n\t\ttext: this.domNode.value,\n\t\tselStart: this.domNode.selectionStart,\n\t\tselEnd: this.domNode.selectionEnd,\n\t\tcutStart: null,\n\t\tcutEnd: null,\n\t\treplacement: null,\n\t\tnewSelStart: null,\n\t\tnewSelEnd: null\n\t};\n\toperation.selection = operation.text.substring(operation.selStart,operation.selEnd);\n\treturn operation;\n};\n\n/*\nExecute a text operation\n*/\nFramedEngine.prototype.executeTextOperation = 
function(operation) {\n\t// Perform the required changes to the text area and the underlying tiddler\n\tvar newText = operation.text;\n\tif(operation.replacement !== null) {\n\t\tnewText = operation.text.substring(0,operation.cutStart) + operation.replacement + operation.text.substring(operation.cutEnd);\n\t\t// Attempt to use a execCommand to modify the value of the control\n\t\tif(this.iframeDoc.queryCommandSupported(\"insertText\") && this.iframeDoc.queryCommandSupported(\"delete\") && !$tw.browser.isFirefox) {\n\t\t\tthis.domNode.focus();\n\t\t\tthis.domNode.setSelectionRange(operation.cutStart,operation.cutEnd);\n\t\t\tif(operation.replacement === \"\") {\n\t\t\t\tthis.iframeDoc.execCommand(\"delete\",false,\"\");\n\t\t\t} else {\n\t\t\t\tthis.iframeDoc.execCommand(\"insertText\",false,operation.replacement);\n\t\t\t}\n\t\t} else {\n\t\t\tthis.domNode.value = newText;\n\t\t}\n\t\tthis.domNode.focus();\n\t\tthis.domNode.setSelectionRange(operation.newSelStart,operation.newSelEnd);\n\t}\n\tthis.domNode.focus();\n\treturn newText;\n};\n\nexports.FramedEngine = FramedEngine;\n\n})();\n",
"title": "$:/core/modules/editor/engines/framed.js",
"type": "application/javascript",
"module-type": "library"
},
"$:/core/modules/editor/engines/simple.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/engines/simple.js\ntype: application/javascript\nmodule-type: library\n\nText editor engine based on a simple input or textarea tag\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar HEIGHT_VALUE_TITLE = \"$:/config/TextEditor/EditorHeight/Height\";\n\nfunction SimpleEngine(options) {\n\t// Save our options\n\toptions = options || {};\n\tthis.widget = options.widget;\n\tthis.value = options.value;\n\tthis.parentNode = options.parentNode;\n\tthis.nextSibling = options.nextSibling;\n\t// Construct the textarea or input node\n\tvar tag = this.widget.editTag;\n\tif($tw.config.htmlUnsafeElements.indexOf(tag) !== -1) {\n\t\ttag = \"input\";\n\t}\n\tthis.domNode = this.widget.document.createElement(tag);\n\t// Set the text\n\tif(this.widget.editTag === \"textarea\") {\n\t\tthis.domNode.appendChild(this.widget.document.createTextNode(this.value));\n\t} else {\n\t\tthis.domNode.value = this.value;\n\t}\n\t// Set the attributes\n\tif(this.widget.editType) {\n\t\tthis.domNode.setAttribute(\"type\",this.widget.editType);\n\t}\n\tif(this.widget.editPlaceholder) {\n\t\tthis.domNode.setAttribute(\"placeholder\",this.widget.editPlaceholder);\n\t}\n\tif(this.widget.editSize) {\n\t\tthis.domNode.setAttribute(\"size\",this.widget.editSize);\n\t}\n\tif(this.widget.editRows) {\n\t\tthis.domNode.setAttribute(\"rows\",this.widget.editRows);\n\t}\n\tif(this.widget.editClass) {\n\t\tthis.domNode.className = this.widget.editClass;\n\t}\n\t// Add an input event handler\n\t$tw.utils.addEventListeners(this.domNode,[\n\t\t{name: \"focus\", handlerObject: this, handlerMethod: \"handleFocusEvent\"},\n\t\t{name: \"input\", handlerObject: this, handlerMethod: \"handleInputEvent\"}\n\t]);\n\t// Insert the element into the DOM\n\tthis.parentNode.insertBefore(this.domNode,this.nextSibling);\n\tthis.widget.domNodes.push(this.domNode);\n}\n\n/*\nSet the text of the engine if it doesn't currently have 
focus\n*/\nSimpleEngine.prototype.setText = function(text,type) {\n\tif(!this.domNode.isTiddlyWikiFakeDom) {\n\t\tif(this.domNode.ownerDocument.activeElement !== this.domNode) {\n\t\t\tthis.domNode.value = text;\n\t\t}\n\t\t// Fix the height if needed\n\t\tthis.fixHeight();\n\t}\n};\n\n/*\nGet the text of the engine\n*/\nSimpleEngine.prototype.getText = function() {\n\treturn this.domNode.value;\n};\n\n/*\nFix the height of textarea to fit content\n*/\nSimpleEngine.prototype.fixHeight = function() {\n\tif(this.widget.editTag === \"textarea\") {\n\t\tif(this.widget.editAutoHeight) {\n\t\t\tif(this.domNode && !this.domNode.isTiddlyWikiFakeDom) {\n\t\t\t\t$tw.utils.resizeTextAreaToFit(this.domNode,this.widget.editMinHeight);\n\t\t\t}\n\t\t} else {\n\t\t\tvar fixedHeight = parseInt(this.widget.wiki.getTiddlerText(HEIGHT_VALUE_TITLE,\"400px\"),10);\n\t\t\tfixedHeight = Math.max(fixedHeight,20);\n\t\t\tthis.domNode.style.height = fixedHeight + \"px\";\n\t\t}\n\t}\n};\n\n/*\nFocus the engine node\n*/\nSimpleEngine.prototype.focus = function() {\n\tif(this.domNode.focus && this.domNode.select) {\n\t\tthis.domNode.focus();\n\t\tthis.domNode.select();\n\t}\n};\n\n/*\nHandle a dom \"input\" event which occurs when the text has changed\n*/\nSimpleEngine.prototype.handleInputEvent = function(event) {\n\tthis.widget.saveChanges(this.getText());\n\tthis.fixHeight();\n\treturn true;\n};\n\n/*\nHandle a dom \"focus\" event\n*/\nSimpleEngine.prototype.handleFocusEvent = function(event) {\n\tif(this.widget.editFocusPopup) {\n\t\t$tw.popup.triggerPopup({\n\t\t\tdomNode: this.domNode,\n\t\t\ttitle: this.widget.editFocusPopup,\n\t\t\twiki: this.widget.wiki,\n\t\t\tforce: true\n\t\t});\n\t}\n\treturn true;\n};\n\n/*\nCreate a blank structure representing a text operation\n*/\nSimpleEngine.prototype.createTextOperation = function() {\n\treturn null;\n};\n\n/*\nExecute a text operation\n*/\nSimpleEngine.prototype.executeTextOperation = function(operation) {\n};\n\nexports.SimpleEngine = 
SimpleEngine;\n\n})();\n",
"title": "$:/core/modules/editor/engines/simple.js",
"type": "application/javascript",
"module-type": "library"
},
"$:/core/modules/editor/factory.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/factory.js\ntype: application/javascript\nmodule-type: library\n\nFactory for constructing text editor widgets with specified engines for the toolbar and non-toolbar cases\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar DEFAULT_MIN_TEXT_AREA_HEIGHT = \"100px\"; // Minimum height of textareas in pixels\n\n// Configuration tiddlers\nvar HEIGHT_MODE_TITLE = \"$:/config/TextEditor/EditorHeight/Mode\";\nvar ENABLE_TOOLBAR_TITLE = \"$:/config/TextEditor/EnableToolbar\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nfunction editTextWidgetFactory(toolbarEngine,nonToolbarEngine) {\n\n\tvar EditTextWidget = function(parseTreeNode,options) {\n\t\t// Initialise the editor operations if they've not been done already\n\t\tif(!this.editorOperations) {\n\t\t\tEditTextWidget.prototype.editorOperations = {};\n\t\t\t$tw.modules.applyMethods(\"texteditoroperation\",this.editorOperations);\n\t\t}\n\t\tthis.initialise(parseTreeNode,options);\n\t};\n\n\t/*\n\tInherit from the base widget class\n\t*/\n\tEditTextWidget.prototype = new Widget();\n\n\t/*\n\tRender this widget into the DOM\n\t*/\n\tEditTextWidget.prototype.render = function(parent,nextSibling) {\n\t\t// Save the parent dom node\n\t\tthis.parentDomNode = parent;\n\t\t// Compute our attributes\n\t\tthis.computeAttributes();\n\t\t// Execute our logic\n\t\tthis.execute();\n\t\t// Create the wrapper for the toolbar and render its content\n\t\tif(this.editShowToolbar) {\n\t\t\tthis.toolbarNode = this.document.createElement(\"div\");\n\t\t\tthis.toolbarNode.className = \"tc-editor-toolbar\";\n\t\t\tparent.insertBefore(this.toolbarNode,nextSibling);\n\t\t\tthis.renderChildren(this.toolbarNode,null);\n\t\t\tthis.domNodes.push(this.toolbarNode);\n\t\t}\n\t\t// Create our element\n\t\tvar editInfo = this.getEditInfo(),\n\t\t\tEngine = this.editShowToolbar ? 
toolbarEngine : nonToolbarEngine;\n\t\tthis.engine = new Engine({\n\t\t\t\twidget: this,\n\t\t\t\tvalue: editInfo.value,\n\t\t\t\ttype: editInfo.type,\n\t\t\t\tparentNode: parent,\n\t\t\t\tnextSibling: nextSibling\n\t\t\t});\n\t\t// Call the postRender hook\n\t\tif(this.postRender) {\n\t\t\tthis.postRender();\n\t\t}\n\t\t// Fix height\n\t\tthis.engine.fixHeight();\n\t\t// Focus if required\n\t\tif(this.editFocus === \"true\" || this.editFocus === \"yes\") {\n\t\t\tthis.engine.focus();\n\t\t}\n\t\t// Add widget message listeners\n\t\tthis.addEventListeners([\n\t\t\t{type: \"tm-edit-text-operation\", handler: \"handleEditTextOperationMessage\"}\n\t\t]);\n\t};\n\n\t/*\n\tGet the tiddler being edited and current value\n\t*/\n\tEditTextWidget.prototype.getEditInfo = function() {\n\t\t// Get the edit value\n\t\tvar self = this,\n\t\t\tvalue,\n\t\t\ttype = \"text/plain\",\n\t\t\tupdate;\n\t\tif(this.editIndex) {\n\t\t\tvalue = this.wiki.extractTiddlerDataItem(this.editTitle,this.editIndex,this.editDefault);\n\t\t\tupdate = function(value) {\n\t\t\t\tvar data = self.wiki.getTiddlerData(self.editTitle,{});\n\t\t\t\tif(data[self.editIndex] !== value) {\n\t\t\t\t\tdata[self.editIndex] = value;\n\t\t\t\t\tself.wiki.setTiddlerData(self.editTitle,data);\n\t\t\t\t}\n\t\t\t};\n\t\t} else {\n\t\t\t// Get the current tiddler and the field name\n\t\t\tvar tiddler = this.wiki.getTiddler(this.editTitle);\n\t\t\tif(tiddler) {\n\t\t\t\t// If we've got a tiddler, the value to display is the field string value\n\t\t\t\tvalue = tiddler.getFieldString(this.editField);\n\t\t\t\tif(this.editField === \"text\") {\n\t\t\t\t\ttype = tiddler.fields.type || \"text/vnd.tiddlywiki\";\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// Otherwise, we need to construct a default value for the editor\n\t\t\t\tswitch(this.editField) {\n\t\t\t\t\tcase \"text\":\n\t\t\t\t\t\tvalue = \"Type the text for the tiddler '\" + this.editTitle + \"'\";\n\t\t\t\t\t\ttype = 
\"text/vnd.tiddlywiki\";\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase \"title\":\n\t\t\t\t\t\tvalue = this.editTitle;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tdefault:\n\t\t\t\t\t\tvalue = \"\";\n\t\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t\tif(this.editDefault !== undefined) {\n\t\t\t\t\tvalue = this.editDefault;\n\t\t\t\t}\n\t\t\t}\n\t\t\tupdate = function(value) {\n\t\t\t\tvar tiddler = self.wiki.getTiddler(self.editTitle),\n\t\t\t\t\tupdateFields = {\n\t\t\t\t\t\ttitle: self.editTitle\n\t\t\t\t\t};\n\t\t\t\tupdateFields[self.editField] = value;\n\t\t\t\tself.wiki.addTiddler(new $tw.Tiddler(self.wiki.getCreationFields(),tiddler,updateFields,self.wiki.getModificationFields()));\n\t\t\t};\n\t\t}\n\t\tif(this.editType) {\n\t\t\ttype = this.editType;\n\t\t}\n\t\treturn {value: value || \"\", type: type, update: update};\n\t};\n\n\t/*\n\tHandle an edit text operation message from the toolbar\n\t*/\n\tEditTextWidget.prototype.handleEditTextOperationMessage = function(event) {\n\t\t// Prepare information about the operation\n\t\tvar operation = this.engine.createTextOperation();\n\t\t// Invoke the handler for the selected operation\n\t\tvar handler = this.editorOperations[event.param];\n\t\tif(handler) {\n\t\t\thandler.call(this,event,operation);\n\t\t}\n\t\t// Execute the operation via the engine\n\t\tvar newText = this.engine.executeTextOperation(operation);\n\t\t// Fix the tiddler height and save changes\n\t\tthis.engine.fixHeight();\n\t\tthis.saveChanges(newText);\n\t};\n\n\t/*\n\tCompute the internal state of the widget\n\t*/\n\tEditTextWidget.prototype.execute = function() {\n\t\t// Get our parameters\n\t\tthis.editTitle = this.getAttribute(\"tiddler\",this.getVariable(\"currentTiddler\"));\n\t\tthis.editField = this.getAttribute(\"field\",\"text\");\n\t\tthis.editIndex = this.getAttribute(\"index\");\n\t\tthis.editDefault = this.getAttribute(\"default\");\n\t\tthis.editClass = this.getAttribute(\"class\");\n\t\tthis.editPlaceholder = this.getAttribute(\"placeholder\");\n\t\tthis.editSize 
= this.getAttribute(\"size\");\n\t\tthis.editRows = this.getAttribute(\"rows\");\n\t\tthis.editAutoHeight = this.wiki.getTiddlerText(HEIGHT_MODE_TITLE,\"auto\");\n\t\tthis.editAutoHeight = this.getAttribute(\"autoHeight\",this.editAutoHeight === \"auto\" ? \"yes\" : \"no\") === \"yes\";\n\t\tthis.editMinHeight = this.getAttribute(\"minHeight\",DEFAULT_MIN_TEXT_AREA_HEIGHT);\n\t\tthis.editFocusPopup = this.getAttribute(\"focusPopup\");\n\t\tthis.editFocus = this.getAttribute(\"focus\");\n\t\t// Get the default editor element tag and type\n\t\tvar tag,type;\n\t\tif(this.editField === \"text\") {\n\t\t\ttag = \"textarea\";\n\t\t} else {\n\t\t\ttag = \"input\";\n\t\t\tvar fieldModule = $tw.Tiddler.fieldModules[this.editField];\n\t\t\tif(fieldModule && fieldModule.editTag) {\n\t\t\t\ttag = fieldModule.editTag;\n\t\t\t}\n\t\t\tif(fieldModule && fieldModule.editType) {\n\t\t\t\ttype = fieldModule.editType;\n\t\t\t}\n\t\t\ttype = type || \"text\";\n\t\t}\n\t\t// Get the rest of our parameters\n\t\tthis.editTag = this.getAttribute(\"tag\",tag);\n\t\tthis.editType = this.getAttribute(\"type\",type);\n\t\t// Make the child widgets\n\t\tthis.makeChildWidgets();\n\t\t// Determine whether to show the toolbar\n\t\tthis.editShowToolbar = this.wiki.getTiddlerText(ENABLE_TOOLBAR_TITLE,\"yes\");\n\t\tthis.editShowToolbar = (this.editShowToolbar === \"yes\") && !!(this.children && this.children.length > 0);\n\t};\n\n\t/*\n\tSelectively refreshes the widget if needed. 
Returns true if the widget or any of its children needed re-rendering\n\t*/\n\tEditTextWidget.prototype.refresh = function(changedTiddlers) {\n\t\tvar changedAttributes = this.computeAttributes();\n\t\t// Completely rerender if any of our attributes have changed\n\t\tif(changedAttributes.tiddler || changedAttributes.field || changedAttributes.index || changedAttributes[\"default\"] || changedAttributes[\"class\"] || changedAttributes.placeholder || changedAttributes.size || changedAttributes.autoHeight || changedAttributes.minHeight || changedAttributes.focusPopup || changedAttributes.rows || changedTiddlers[HEIGHT_MODE_TITLE] || changedTiddlers[ENABLE_TOOLBAR_TITLE]) {\n\t\t\tthis.refreshSelf();\n\t\t\treturn true;\n\t\t} else if(changedTiddlers[this.editTitle]) {\n\t\t\tvar editInfo = this.getEditInfo();\n\t\t\tthis.updateEditor(editInfo.value,editInfo.type);\n\t\t}\n\t\tthis.engine.fixHeight();\n\t\tif(this.editShowToolbar) {\n\t\t\treturn this.refreshChildren(changedTiddlers);\t\t\t\n\t\t} else {\n\t\t\treturn false;\n\t\t}\n\t};\n\n\t/*\n\tUpdate the editor with new text. 
This method is separate from updateEditorDomNode()\n\tso that subclasses can override updateEditor() and still use updateEditorDomNode()\n\t*/\n\tEditTextWidget.prototype.updateEditor = function(text,type) {\n\t\tthis.updateEditorDomNode(text,type);\n\t};\n\n\t/*\n\tUpdate the editor dom node with new text\n\t*/\n\tEditTextWidget.prototype.updateEditorDomNode = function(text,type) {\n\t\tthis.engine.setText(text,type);\n\t};\n\n\t/*\n\tSave changes back to the tiddler store\n\t*/\n\tEditTextWidget.prototype.saveChanges = function(text) {\n\t\tvar editInfo = this.getEditInfo();\n\t\tif(text !== editInfo.value) {\n\t\t\teditInfo.update(text);\n\t\t}\n\t};\n\n\t/*\n\tHandle a dom \"keydown\" event, which we'll bubble up to our container for the keyboard widgets benefit\n\t*/\n\tEditTextWidget.prototype.handleKeydownEvent = function(event) {\n\t\t// Check for a keyboard shortcut\n\t\tif(this.toolbarNode) {\n\t\t\tvar shortcutElements = this.toolbarNode.querySelectorAll(\"[data-tw-keyboard-shortcut]\");\n\t\t\tfor(var index=0; index<shortcutElements.length; index++) {\n\t\t\t\tvar el = shortcutElements[index],\n\t\t\t\t\tshortcutData = el.getAttribute(\"data-tw-keyboard-shortcut\"),\n\t\t\t\t\tkeyInfoArray = $tw.keyboardManager.parseKeyDescriptors(shortcutData,{\n\t\t\t\t\t\twiki: this.wiki\n\t\t\t\t\t});\n\t\t\t\tif($tw.keyboardManager.checkKeyDescriptors(event,keyInfoArray)) {\n\t\t\t\t\tvar clickEvent = this.document.createEvent(\"Events\");\n\t\t\t\t clickEvent.initEvent(\"click\",true,false);\n\t\t\t\t el.dispatchEvent(clickEvent);\n\t\t\t\t\tevent.preventDefault();\n\t\t\t\t\tevent.stopPropagation();\n\t\t\t\t\treturn true;\t\t\t\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\t// Propogate the event to the container\n\t\tif(this.propogateKeydownEvent(event)) {\n\t\t\t// Ignore the keydown if it was already handled\n\t\t\tevent.preventDefault();\n\t\t\tevent.stopPropagation();\n\t\t\treturn true;\n\t\t}\n\t\t// Otherwise, process the keydown normally\n\t\treturn 
false;\n\t};\n\n\t/*\n\tPropogate keydown events to our container for the keyboard widgets benefit\n\t*/\n\tEditTextWidget.prototype.propogateKeydownEvent = function(event) {\n\t\tvar newEvent = this.document.createEventObject ? this.document.createEventObject() : this.document.createEvent(\"Events\");\n\t\tif(newEvent.initEvent) {\n\t\t\tnewEvent.initEvent(\"keydown\", true, true);\n\t\t}\n\t\tnewEvent.keyCode = event.keyCode;\n\t\tnewEvent.which = event.which;\n\t\tnewEvent.metaKey = event.metaKey;\n\t\tnewEvent.ctrlKey = event.ctrlKey;\n\t\tnewEvent.altKey = event.altKey;\n\t\tnewEvent.shiftKey = event.shiftKey;\n\t\treturn !this.parentDomNode.dispatchEvent(newEvent);\n\t};\n\n\treturn EditTextWidget;\n\n}\n\nexports.editTextWidgetFactory = editTextWidgetFactory;\n\n})();\n",
"title": "$:/core/modules/editor/factory.js",
"type": "application/javascript",
"module-type": "library"
},
"$:/core/modules/editor/operations/bitmap/clear.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/operations/bitmap/clear.js\ntype: application/javascript\nmodule-type: bitmapeditoroperation\n\nBitmap editor operation to clear the image\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports[\"clear\"] = function(event) {\n\tvar ctx = this.canvasDomNode.getContext(\"2d\");\n\tctx.globalAlpha = 1;\n\tctx.fillStyle = event.paramObject.colour || \"white\";\n\tctx.fillRect(0,0,this.canvasDomNode.width,this.canvasDomNode.height);\n\t// Save changes\n\tthis.strokeEnd();\n};\n\n})();\n",
"title": "$:/core/modules/editor/operations/bitmap/clear.js",
"type": "application/javascript",
"module-type": "bitmapeditoroperation"
},
"$:/core/modules/editor/operations/bitmap/resize.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/operations/bitmap/resize.js\ntype: application/javascript\nmodule-type: bitmapeditoroperation\n\nBitmap editor operation to resize the image\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports[\"resize\"] = function(event) {\n\t// Get the new width\n\tvar newWidth = parseInt(event.paramObject.width || this.canvasDomNode.width,10),\n\t\tnewHeight = parseInt(event.paramObject.height || this.canvasDomNode.height,10);\n\t// Update if necessary\n\tif(newWidth > 0 && newHeight > 0 && !(newWidth === this.currCanvas.width && newHeight === this.currCanvas.height)) {\n\t\tthis.changeCanvasSize(newWidth,newHeight);\n\t}\n\t// Update the input controls\n\tthis.refreshToolbar();\n\t// Save the image into the tiddler\n\tthis.saveChanges();\n};\n\n})();\n",
"title": "$:/core/modules/editor/operations/bitmap/resize.js",
"type": "application/javascript",
"module-type": "bitmapeditoroperation"
},
"$:/core/modules/editor/operations/text/excise.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/operations/text/excise.js\ntype: application/javascript\nmodule-type: texteditoroperation\n\nText editor operation to excise the selection to a new tiddler\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports[\"excise\"] = function(event,operation) {\n\tvar editTiddler = this.wiki.getTiddler(this.editTitle),\n\t\teditTiddlerTitle = this.editTitle;\n\tif(editTiddler && editTiddler.fields[\"draft.of\"]) {\n\t\teditTiddlerTitle = editTiddler.fields[\"draft.of\"];\n\t}\n\tvar excisionTitle = event.paramObject.title || this.wiki.generateNewTitle(\"New Excision\");\n\tthis.wiki.addTiddler(new $tw.Tiddler(\n\t\tthis.wiki.getCreationFields(),\n\t\tthis.wiki.getModificationFields(),\n\t\t{\n\t\t\ttitle: excisionTitle,\n\t\t\ttext: operation.selection,\n\t\t\ttags: event.paramObject.tagnew === \"yes\" ? [editTiddlerTitle] : []\n\t\t}\n\t));\n\toperation.replacement = excisionTitle;\n\tswitch(event.paramObject.type || \"transclude\") {\n\t\tcase \"transclude\":\n\t\t\toperation.replacement = \"{{\" + operation.replacement+ \"}}\";\n\t\t\tbreak;\n\t\tcase \"link\":\n\t\t\toperation.replacement = \"[[\" + operation.replacement+ \"]]\";\n\t\t\tbreak;\n\t\tcase \"macro\":\n\t\t\toperation.replacement = \"<<\" + (event.paramObject.macro || \"translink\") + \" \\\"\\\"\\\"\" + operation.replacement + \"\\\"\\\"\\\">>\";\n\t\t\tbreak;\n\t}\n\toperation.cutStart = operation.selStart;\n\toperation.cutEnd = operation.selEnd;\n\toperation.newSelStart = operation.selStart;\n\toperation.newSelEnd = operation.selStart + operation.replacement.length;\n};\n\n})();\n",
"title": "$:/core/modules/editor/operations/text/excise.js",
"type": "application/javascript",
"module-type": "texteditoroperation"
},
"$:/core/modules/editor/operations/text/make-link.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/operations/text/make-link.js\ntype: application/javascript\nmodule-type: texteditoroperation\n\nText editor operation to make a link\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports[\"make-link\"] = function(event,operation) {\n\tif(operation.selection) {\n\t\toperation.replacement = \"[[\" + operation.selection + \"|\" + event.paramObject.text + \"]]\";\n\t\toperation.cutStart = operation.selStart;\n\t\toperation.cutEnd = operation.selEnd;\n\t} else {\n\t\toperation.replacement = \"[[\" + event.paramObject.text + \"]]\";\n\t\toperation.cutStart = operation.selStart;\n\t\toperation.cutEnd = operation.selEnd;\n\t}\n\toperation.newSelStart = operation.selStart + operation.replacement.length;\n\toperation.newSelEnd = operation.newSelStart;\n};\n\n})();\n",
"title": "$:/core/modules/editor/operations/text/make-link.js",
"type": "application/javascript",
"module-type": "texteditoroperation"
},
"$:/core/modules/editor/operations/text/prefix-lines.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/operations/text/prefix-lines.js\ntype: application/javascript\nmodule-type: texteditoroperation\n\nText editor operation to add a prefix to the selected lines\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports[\"prefix-lines\"] = function(event,operation) {\n\t// Cut just past the preceding line break, or the start of the text\n\toperation.cutStart = $tw.utils.findPrecedingLineBreak(operation.text,operation.selStart);\n\t// Cut to just past the following line break, or to the end of the text\n\toperation.cutEnd = $tw.utils.findFollowingLineBreak(operation.text,operation.selEnd);\n\t// Compose the required prefix\n\tvar prefix = $tw.utils.repeat(event.paramObject.character,event.paramObject.count);\n\t// Process each line\n\tvar lines = operation.text.substring(operation.cutStart,operation.cutEnd).split(/\\r?\\n/mg);\n\t$tw.utils.each(lines,function(line,index) {\n\t\t// Remove and count any existing prefix characters\n\t\tvar count = 0;\n\t\twhile(line.charAt(0) === event.paramObject.character) {\n\t\t\tline = line.substring(1);\n\t\t\tcount++;\n\t\t}\n\t\t// Remove any whitespace\n\t\twhile(line.charAt(0) === \" \") {\n\t\t\tline = line.substring(1);\n\t\t}\n\t\t// We're done if we removed the exact required prefix, otherwise add it\n\t\tif(count !== event.paramObject.count) {\n\t\t\t// Apply the prefix\n\t\t\tline = prefix + \" \" + line;\n\t\t}\n\t\t// Save the modified line\n\t\tlines[index] = line;\n\t});\n\t// Stitch the replacement text together and set the selection\n\toperation.replacement = lines.join(\"\\n\");\n\tif(lines.length === 1) {\n\t\toperation.newSelStart = operation.cutStart + operation.replacement.length;\n\t\toperation.newSelEnd = operation.newSelStart;\n\t} else {\n\t\toperation.newSelStart = operation.cutStart;\n\t\toperation.newSelEnd = operation.newSelStart + operation.replacement.length;\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/editor/operations/text/prefix-lines.js",
"type": "application/javascript",
"module-type": "texteditoroperation"
},
"$:/core/modules/editor/operations/text/replace-all.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/operations/text/replace-all.js\ntype: application/javascript\nmodule-type: texteditoroperation\n\nText editor operation to replace the entire text\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports[\"replace-all\"] = function(event,operation) {\n\toperation.cutStart = 0;\n\toperation.cutEnd = operation.text.length;\n\toperation.replacement = event.paramObject.text;\n\toperation.newSelStart = 0;\n\toperation.newSelEnd = operation.replacement.length;\n};\n\n})();\n",
"title": "$:/core/modules/editor/operations/text/replace-all.js",
"type": "application/javascript",
"module-type": "texteditoroperation"
},
"$:/core/modules/editor/operations/text/replace-selection.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/operations/text/replace-selection.js\ntype: application/javascript\nmodule-type: texteditoroperation\n\nText editor operation to replace the selection\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports[\"replace-selection\"] = function(event,operation) {\n\toperation.replacement = event.paramObject.text;\n\toperation.cutStart = operation.selStart;\n\toperation.cutEnd = operation.selEnd;\n\toperation.newSelStart = operation.selStart;\n\toperation.newSelEnd = operation.selStart + operation.replacement.length;\n};\n\n})();\n",
"title": "$:/core/modules/editor/operations/text/replace-selection.js",
"type": "application/javascript",
"module-type": "texteditoroperation"
},
"$:/core/modules/editor/operations/text/wrap-lines.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/operations/text/wrap-lines.js\ntype: application/javascript\nmodule-type: texteditoroperation\n\nText editor operation to wrap the selected lines with a prefix and suffix\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports[\"wrap-lines\"] = function(event,operation) {\n\t// Cut just past the preceding line break, or the start of the text\n\toperation.cutStart = $tw.utils.findPrecedingLineBreak(operation.text,operation.selStart);\n\t// Cut to just past the following line break, or to the end of the text\n\toperation.cutEnd = $tw.utils.findFollowingLineBreak(operation.text,operation.selEnd);\n\t// Add the prefix and suffix\n\toperation.replacement = event.paramObject.prefix + \"\\n\" +\n\t\t\t\toperation.text.substring(operation.cutStart,operation.cutEnd) + \"\\n\" +\n\t\t\t\tevent.paramObject.suffix + \"\\n\";\n\toperation.newSelStart = operation.cutStart + event.paramObject.prefix.length + 1;\n\toperation.newSelEnd = operation.newSelStart + (operation.cutEnd - operation.cutStart);\n};\n\n})();\n",
"title": "$:/core/modules/editor/operations/text/wrap-lines.js",
"type": "application/javascript",
"module-type": "texteditoroperation"
},
"$:/core/modules/editor/operations/text/wrap-selection.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/operations/text/wrap-selection.js\ntype: application/javascript\nmodule-type: texteditoroperation\n\nText editor operation to wrap the selection with the specified prefix and suffix\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports[\"wrap-selection\"] = function(event,operation) {\n\tif(operation.selStart === operation.selEnd) {\n\t\t// No selection; check if we're within the prefix/suffix\n\t\tif(operation.text.substring(operation.selStart - event.paramObject.prefix.length,operation.selStart + event.paramObject.suffix.length) === event.paramObject.prefix + event.paramObject.suffix) {\n\t\t\t// Remove the prefix and suffix unless they comprise the entire text\n\t\t\tif(operation.selStart > event.paramObject.prefix.length || (operation.selEnd + event.paramObject.suffix.length) < operation.text.length ) {\n\t\t\t\toperation.cutStart = operation.selStart - event.paramObject.prefix.length;\n\t\t\t\toperation.cutEnd = operation.selEnd + event.paramObject.suffix.length;\n\t\t\t\toperation.replacement = \"\";\n\t\t\t\toperation.newSelStart = operation.cutStart;\n\t\t\t\toperation.newSelEnd = operation.newSelStart;\n\t\t\t}\n\t\t} else {\n\t\t\t// Wrap the cursor instead\n\t\t\toperation.cutStart = operation.selStart;\n\t\t\toperation.cutEnd = operation.selEnd;\n\t\t\toperation.replacement = event.paramObject.prefix + event.paramObject.suffix;\n\t\t\toperation.newSelStart = operation.selStart + event.paramObject.prefix.length;\n\t\t\toperation.newSelEnd = operation.newSelStart;\n\t\t}\n\t} else if(operation.text.substring(operation.selStart,operation.selStart + event.paramObject.prefix.length) === event.paramObject.prefix && operation.text.substring(operation.selEnd - event.paramObject.suffix.length,operation.selEnd) === event.paramObject.suffix) {\n\t\t// Prefix and suffix are already present, so remove them\n\t\toperation.cutStart = 
operation.selStart;\n\t\toperation.cutEnd = operation.selEnd;\n\t\toperation.replacement = operation.selection.substring(event.paramObject.prefix.length,operation.selection.length - event.paramObject.suffix.length);\n\t\toperation.newSelStart = operation.selStart;\n\t\toperation.newSelEnd = operation.selStart + operation.replacement.length;\n\t} else {\n\t\t// Add the prefix and suffix\n\t\toperation.cutStart = operation.selStart;\n\t\toperation.cutEnd = operation.selEnd;\n\t\toperation.replacement = event.paramObject.prefix + operation.selection + event.paramObject.suffix;\n\t\toperation.newSelStart = operation.selStart;\n\t\toperation.newSelEnd = operation.selStart + operation.replacement.length;\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/editor/operations/text/wrap-selection.js",
"type": "application/javascript",
"module-type": "texteditoroperation"
},
"$:/core/modules/filters/addprefix.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/addprefix.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for adding a prefix to each title in the list. This is\nespecially useful in contexts where only a filter expression is allowed\nand macro substitution isn't available.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.addprefix = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tresults.push(operator.operand + title);\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/addprefix.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/addsuffix.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/addsuffix.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for adding a suffix to each title in the list. This is\nespecially useful in contexts where only a filter expression is allowed\nand macro substitution isn't available.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.addsuffix = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tresults.push(title + operator.operand);\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/addsuffix.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/after.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/after.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator returning the tiddler from the current list that is after the tiddler named in the operand.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.after = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tresults.push(title);\n\t});\n\tvar index = results.indexOf(operator.operand);\n\tif(index === -1 || index > (results.length - 2)) {\n\t\treturn [];\n\t} else {\n\t\treturn [results[index + 1]];\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/filters/after.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/all/current.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/all/current.js\ntype: application/javascript\nmodule-type: allfilteroperator\n\nFilter function for [all[current]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.current = function(source,prefix,options) {\n\tvar currTiddlerTitle = options.widget && options.widget.getVariable(\"currentTiddler\");\n\tif(currTiddlerTitle) {\n\t\treturn [currTiddlerTitle];\n\t} else {\n\t\treturn [];\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/filters/all/current.js",
"type": "application/javascript",
"module-type": "allfilteroperator"
},
"$:/core/modules/filters/all/missing.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/all/missing.js\ntype: application/javascript\nmodule-type: allfilteroperator\n\nFilter function for [all[missing]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.missing = function(source,prefix,options) {\n\treturn options.wiki.getMissingTitles();\n};\n\n})();\n",
"title": "$:/core/modules/filters/all/missing.js",
"type": "application/javascript",
"module-type": "allfilteroperator"
},
"$:/core/modules/filters/all/orphans.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/all/orphans.js\ntype: application/javascript\nmodule-type: allfilteroperator\n\nFilter function for [all[orphans]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.orphans = function(source,prefix,options) {\n\treturn options.wiki.getOrphanTitles();\n};\n\n})();\n",
"title": "$:/core/modules/filters/all/orphans.js",
"type": "application/javascript",
"module-type": "allfilteroperator"
},
"$:/core/modules/filters/all/shadows.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/all/shadows.js\ntype: application/javascript\nmodule-type: allfilteroperator\n\nFilter function for [all[shadows]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.shadows = function(source,prefix,options) {\n\treturn options.wiki.allShadowTitles();\n};\n\n})();\n",
"title": "$:/core/modules/filters/all/shadows.js",
"type": "application/javascript",
"module-type": "allfilteroperator"
},
"$:/core/modules/filters/all/tiddlers.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/all/tiddlers.js\ntype: application/javascript\nmodule-type: allfilteroperator\n\nFilter function for [all[tiddlers]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.tiddlers = function(source,prefix,options) {\n\treturn options.wiki.allTitles();\n};\n\n})();\n",
"title": "$:/core/modules/filters/all/tiddlers.js",
"type": "application/javascript",
"module-type": "allfilteroperator"
},
"$:/core/modules/filters/all.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/all.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for selecting tiddlers\n\n[all[shadows+tiddlers]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar allFilterOperators;\n\nfunction getAllFilterOperators() {\n\tif(!allFilterOperators) {\n\t\tallFilterOperators = {};\n\t\t$tw.modules.applyMethods(\"allfilteroperator\",allFilterOperators);\n\t}\n\treturn allFilterOperators;\n}\n\n/*\nExport our filter function\n*/\nexports.all = function(source,operator,options) {\n\t// Get our suboperators\n\tvar allFilterOperators = getAllFilterOperators();\n\t// Cycle through the suboperators accumulating their results\n\tvar results = [],\n\t\tsubops = operator.operand.split(\"+\");\n\t// Check for common optimisations\n\tif(subops.length === 1 && subops[0] === \"\") {\n\t\treturn source;\n\t} else if(subops.length === 1 && subops[0] === \"tiddlers\") {\n\t\treturn options.wiki.each;\n\t} else if(subops.length === 1 && subops[0] === \"shadows\") {\n\t\treturn options.wiki.eachShadow;\n\t} else if(subops.length === 2 && subops[0] === \"tiddlers\" && subops[1] === \"shadows\") {\n\t\treturn options.wiki.eachTiddlerPlusShadows;\n\t} else if(subops.length === 2 && subops[0] === \"shadows\" && subops[1] === \"tiddlers\") {\n\t\treturn options.wiki.eachShadowPlusTiddlers;\n\t}\n\t// Do it the hard way\n\tfor(var t=0; t<subops.length; t++) {\n\t\tvar subop = allFilterOperators[subops[t]];\n\t\tif(subop) {\n\t\t\t$tw.utils.pushTop(results,subop(source,operator.prefix,options));\n\t\t}\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/all.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/backlinks.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/backlinks.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning all the backlinks from a tiddler\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.backlinks = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\t$tw.utils.pushTop(results,options.wiki.getTiddlerBacklinks(title));\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/backlinks.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/before.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/before.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator returning the tiddler from the current list that is before the tiddler named in the operand.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.before = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tresults.push(title);\n\t});\n\tvar index = results.indexOf(operator.operand);\n\tif(index <= 0) {\n\t\treturn [];\n\t} else {\n\t\treturn [results[index - 1]];\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/filters/before.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/commands.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/commands.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the names of the commands available in this wiki\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.commands = function(source,operator,options) {\n\tvar results = [];\n\t$tw.utils.each($tw.commands,function(commandInfo,name) {\n\t\tresults.push(name);\n\t});\n\tresults.sort();\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/commands.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/days.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/days.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator that selects tiddlers with a specified date field within a specified date interval.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.days = function(source,operator,options) {\n\tvar results = [],\n\t\tfieldName = operator.suffix || \"modified\",\n\t\tdayInterval = (parseInt(operator.operand,10)||0),\n\t\tdayIntervalSign = $tw.utils.sign(dayInterval),\n\t\ttargetTimeStamp = (new Date()).setHours(0,0,0,0) + 1000*60*60*24*dayInterval,\n\t\tisWithinDays = function(dateField) {\n\t\t\tvar sign = $tw.utils.sign(targetTimeStamp - (new Date(dateField)).setHours(0,0,0,0));\n\t\t\treturn sign === 0 || sign === dayIntervalSign;\n\t\t};\n\n\tif(operator.prefix === \"!\") {\n\t\ttargetTimeStamp = targetTimeStamp - 1000*60*60*24*dayIntervalSign;\n\t\tsource(function(tiddler,title) {\n\t\t\tif(tiddler && tiddler.fields[fieldName]) {\n\t\t\t\tif(!isWithinDays($tw.utils.parseDate(tiddler.fields[fieldName]))) {\n\t\t\t\t\tresults.push(title);\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(tiddler && tiddler.fields[fieldName]) {\n\t\t\t\tif(isWithinDays($tw.utils.parseDate(tiddler.fields[fieldName]))) {\n\t\t\t\t\tresults.push(title);\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/days.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/each.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/each.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator that selects one tiddler for each unique value of the specified field.\nWith suffix \"list\", selects all tiddlers that are values in a specified list field.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.each = function(source,operator,options) {\n\tvar results =[] ,\n\t\tvalue,values = {},\n\t\tfield = operator.operand || \"title\";\n\tif(operator.suffix !== \"list-item\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(tiddler) {\n\t\t\t\tvalue = (field === \"title\") ? title : tiddler.getFieldString(field);\n\t\t\t\tif(!$tw.utils.hop(values,value)) {\n\t\t\t\t\tvalues[value] = true;\n\t\t\t\t\tresults.push(title);\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(tiddler) {\n\t\t\t\t$tw.utils.each(\n\t\t\t\t\toptions.wiki.getTiddlerList(title,field),\n\t\t\t\t\tfunction(value) {\n\t\t\t\t\t\tif(!$tw.utils.hop(values,value)) {\n\t\t\t\t\t\t\tvalues[value] = true;\n\t\t\t\t\t\t\tresults.push(value);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/each.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/eachday.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/eachday.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator that selects one tiddler for each unique day covered by the specified date field\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.eachday = function(source,operator,options) {\n\tvar results = [],\n\t\tvalues = [],\n\t\tfieldName = operator.operand || \"modified\";\n\t// Function to convert a date/time to a date integer\n\tvar toDate = function(value) {\n\t\tvalue = (new Date(value)).setHours(0,0,0,0);\n\t\treturn value+0;\n\t};\n\tsource(function(tiddler,title) {\n\t\tif(tiddler && tiddler.fields[fieldName]) {\n\t\t\tvar value = toDate($tw.utils.parseDate(tiddler.fields[fieldName]));\n\t\t\tif(values.indexOf(value) === -1) {\n\t\t\t\tvalues.push(value);\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t}\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/eachday.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/editiondescription.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/editiondescription.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the descriptions of the specified edition names\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.editiondescription = function(source,operator,options) {\n\tvar results = [],\n\t\teditionInfo = $tw.utils.getEditionInfo();\n\tif(editionInfo) {\n\t\tsource(function(tiddler,title) {\n\t\t\tif($tw.utils.hop(editionInfo,title)) {\n\t\t\t\tresults.push(editionInfo[title].description || \"\");\t\t\t\t\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/editiondescription.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/editions.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/editions.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the names of the available editions in this wiki\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.editions = function(source,operator,options) {\n\tvar results = [],\n\t\teditionInfo = $tw.utils.getEditionInfo();\n\tif(editionInfo) {\n\t\t$tw.utils.each(editionInfo,function(info,name) {\n\t\t\tresults.push(name);\n\t\t});\n\t}\n\tresults.sort();\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/editions.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/field.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/field.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for comparing fields for equality\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.field = function(source,operator,options) {\n\tvar results = [],\n\t\tfieldname = (operator.suffix || operator.operator || \"title\").toLowerCase();\n\tif(operator.prefix === \"!\") {\n\t\tif(operator.regexp) {\n\t\t\tsource(function(tiddler,title) {\n\t\t\t\tif(tiddler) {\n\t\t\t\t\tvar text = tiddler.getFieldString(fieldname);\n\t\t\t\t\tif(text !== null && !operator.regexp.exec(text)) {\n\t\t\t\t\t\tresults.push(title);\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\tresults.push(title);\n\t\t\t\t}\n\t\t\t});\n\t\t} else {\n\t\t\tsource(function(tiddler,title) {\n\t\t\t\tif(tiddler) {\n\t\t\t\t\tvar text = tiddler.getFieldString(fieldname);\n\t\t\t\t\tif(text !== null && text !== operator.operand) {\n\t\t\t\t\t\tresults.push(title);\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\tresults.push(title);\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\t} else {\n\t\tif(operator.regexp) {\n\t\t\tsource(function(tiddler,title) {\n\t\t\t\tif(tiddler) {\n\t\t\t\t\tvar text = tiddler.getFieldString(fieldname);\n\t\t\t\t\tif(text !== null && !!operator.regexp.exec(text)) {\n\t\t\t\t\t\tresults.push(title);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t});\n\t\t} else {\n\t\t\tsource(function(tiddler,title) {\n\t\t\t\tif(tiddler) {\n\t\t\t\t\tvar text = tiddler.getFieldString(fieldname);\n\t\t\t\t\tif(text !== null && text === operator.operand) {\n\t\t\t\t\t\tresults.push(title);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/field.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/fields.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/fields.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the names of the fields on the selected tiddlers\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.fields = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tif(tiddler) {\n\t\t\tfor(var fieldName in tiddler.fields) {\n\t\t\t\t$tw.utils.pushTop(results,fieldName);\n\t\t\t}\n\t\t}\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/fields.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/get.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/get.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for replacing tiddler titles by the value of the field specified in the operand.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.get = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tif(tiddler) {\n\t\t\tvar value = tiddler.getFieldString(operator.operand);\n\t\t\tif(value) {\n\t\t\t\tresults.push(value);\n\t\t\t}\n\t\t}\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/get.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/getindex.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/getindex.js\ntype: application/javascript\nmodule-type: filteroperator\n\nreturns the value at a given index of datatiddlers\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.getindex = function(source,operator,options) {\n\tvar data,title,results = [];\n\tif(operator.operand){\n\t\tsource(function(tiddler,title) {\n\t\t\ttitle = tiddler ? tiddler.fields.title : title;\n\t\t\tdata = options.wiki.extractTiddlerDataItem(tiddler,operator.operand);\n\t\t\tif(data) {\n\t\t\t\tresults.push(data);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/getindex.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/has.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/has.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for checking if a tiddler has the specified field\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.has = function(source,operator,options) {\n\tvar results = [];\n\tif(operator.prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(!tiddler || (tiddler && (!$tw.utils.hop(tiddler.fields,operator.operand) || tiddler.fields[operator.operand] === \"\"))) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(tiddler && $tw.utils.hop(tiddler.fields,operator.operand) && !(tiddler.fields[operator.operand] === \"\" || tiddler.fields[operator.operand].length === 0)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/has.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/haschanged.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/haschanged.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator returns tiddlers from the list that have a non-zero changecount.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.haschanged = function(source,operator,options) {\n\tvar results = [];\n\tif(operator.prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(options.wiki.getChangeCount(title) === 0) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(options.wiki.getChangeCount(title) > 0) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/haschanged.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/indexes.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/indexes.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the indexes of a data tiddler\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.indexes = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tvar data = options.wiki.getTiddlerDataCached(title);\n\t\tif(data) {\n\t\t\t$tw.utils.pushTop(results,Object.keys(data));\n\t\t}\n\t});\n\tresults.sort();\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/indexes.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/is/current.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/is/current.js\ntype: application/javascript\nmodule-type: isfilteroperator\n\nFilter function for [is[current]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.current = function(source,prefix,options) {\n\tvar results = [],\n\t\tcurrTiddlerTitle = options.widget && options.widget.getVariable(\"currentTiddler\");\n\tif(prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(title !== currTiddlerTitle) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(title === currTiddlerTitle) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/is/current.js",
"type": "application/javascript",
"module-type": "isfilteroperator"
},
"$:/core/modules/filters/is/image.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/is/image.js\ntype: application/javascript\nmodule-type: isfilteroperator\n\nFilter function for [is[image]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.image = function(source,prefix,options) {\n\tvar results = [];\n\tif(prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(!options.wiki.isImageTiddler(title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(options.wiki.isImageTiddler(title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/is/image.js",
"type": "application/javascript",
"module-type": "isfilteroperator"
},
"$:/core/modules/filters/is/missing.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/is/missing.js\ntype: application/javascript\nmodule-type: isfilteroperator\n\nFilter function for [is[missing]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.missing = function(source,prefix,options) {\n\tvar results = [];\n\tif(prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(options.wiki.tiddlerExists(title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(!options.wiki.tiddlerExists(title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/is/missing.js",
"type": "application/javascript",
"module-type": "isfilteroperator"
},
"$:/core/modules/filters/is/orphan.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/is/orphan.js\ntype: application/javascript\nmodule-type: isfilteroperator\n\nFilter function for [is[orphan]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.orphan = function(source,prefix,options) {\n\tvar results = [],\n\t\torphanTitles = options.wiki.getOrphanTitles();\n\tif(prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(orphanTitles.indexOf(title) === -1) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(orphanTitles.indexOf(title) !== -1) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/is/orphan.js",
"type": "application/javascript",
"module-type": "isfilteroperator"
},
"$:/core/modules/filters/is/shadow.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/is/shadow.js\ntype: application/javascript\nmodule-type: isfilteroperator\n\nFilter function for [is[shadow]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.shadow = function(source,prefix,options) {\n\tvar results = [];\n\tif(prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(!options.wiki.isShadowTiddler(title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(options.wiki.isShadowTiddler(title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/is/shadow.js",
"type": "application/javascript",
"module-type": "isfilteroperator"
},
"$:/core/modules/filters/is/system.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/is/system.js\ntype: application/javascript\nmodule-type: isfilteroperator\n\nFilter function for [is[system]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.system = function(source,prefix,options) {\n\tvar results = [];\n\tif(prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(!options.wiki.isSystemTiddler(title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(options.wiki.isSystemTiddler(title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/is/system.js",
"type": "application/javascript",
"module-type": "isfilteroperator"
},
"$:/core/modules/filters/is/tag.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/is/tag.js\ntype: application/javascript\nmodule-type: isfilteroperator\n\nFilter function for [is[tag]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.tag = function(source,prefix,options) {\n\tvar results = [],\n\t\ttagMap = options.wiki.getTagMap();\n\tif(prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(!$tw.utils.hop(tagMap,title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif($tw.utils.hop(tagMap,title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/is/tag.js",
"type": "application/javascript",
"module-type": "isfilteroperator"
},
"$:/core/modules/filters/is/tiddler.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/is/tiddler.js\ntype: application/javascript\nmodule-type: isfilteroperator\n\nFilter function for [is[tiddler]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.tiddler = function(source,prefix,options) {\n\tvar results = [];\n\tif(prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(!options.wiki.tiddlerExists(title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(options.wiki.tiddlerExists(title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/is/tiddler.js",
"type": "application/javascript",
"module-type": "isfilteroperator"
},
"$:/core/modules/filters/is.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/is.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for checking tiddler properties\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar isFilterOperators;\n\nfunction getIsFilterOperators() {\n\tif(!isFilterOperators) {\n\t\tisFilterOperators = {};\n\t\t$tw.modules.applyMethods(\"isfilteroperator\",isFilterOperators);\n\t}\n\treturn isFilterOperators;\n}\n\n/*\nExport our filter function\n*/\nexports.is = function(source,operator,options) {\n\t// Dispatch to the correct isfilteroperator\n\tvar isFilterOperators = getIsFilterOperators();\n\tvar isFilterOperator = isFilterOperators[operator.operand];\n\tif(isFilterOperator) {\n\t\treturn isFilterOperator(source,operator.prefix,options);\n\t} else {\n\t\treturn [$tw.language.getString(\"Error/IsFilterOperator\")];\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/filters/is.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/limit.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/limit.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for chopping the results to a specified maximum number of entries\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.limit = function(source,operator,options) {\n\tvar results = [];\n\t// Convert to an array\n\tsource(function(tiddler,title) {\n\t\tresults.push(title);\n\t});\n\t// Slice the array if necessary\n\tvar limit = Math.min(results.length,parseInt(operator.operand,10));\n\tif(operator.prefix === \"!\") {\n\t\tresults = results.slice(-limit);\n\t} else {\n\t\tresults = results.slice(0,limit);\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/limit.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/links.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/links.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning all the links from a tiddler\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.links = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\t$tw.utils.pushTop(results,options.wiki.getTiddlerLinks(title));\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/links.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/list.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/list.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator returning the tiddlers whose title is listed in the operand tiddler\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.list = function(source,operator,options) {\n\tvar results = [],\n\t\ttr = $tw.utils.parseTextReference(operator.operand),\n\t\tcurrTiddlerTitle = options.widget && options.widget.getVariable(\"currentTiddler\"),\n\t\tlist = options.wiki.getTiddlerList(tr.title || currTiddlerTitle,tr.field,tr.index);\n\tif(operator.prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(list.indexOf(title) === -1) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tresults = list;\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/list.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/listed.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/listed.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator returning all tiddlers that have the selected tiddlers in a list\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.listed = function(source,operator,options) {\n\tvar field = operator.operand || \"list\",\n\t\tresults = [];\n\tsource(function(tiddler,title) {\n\t\t$tw.utils.pushTop(results,options.wiki.findListingsOfTiddler(title,field));\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/listed.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/listops.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/listops.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operators for manipulating the current selection list\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nReverse list\n*/\nexports.reverse = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tresults.unshift(title);\n\t});\n\treturn results;\n};\n\n/*\nFirst entry/entries in list\n*/\nexports.first = function(source,operator,options) {\n\tvar count = parseInt(operator.operand) || 1,\n\t\tresults = [];\n\tsource(function(tiddler,title) {\n\t\tresults.push(title);\n\t});\n\treturn results.slice(0,count);\n};\n\n/*\nLast entry/entries in list\n*/\nexports.last = function(source,operator,options) {\n\tvar count = parseInt(operator.operand) || 1,\n\t\tresults = [];\n\tsource(function(tiddler,title) {\n\t\tresults.push(title);\n\t});\n\treturn results.slice(-count);\n};\n\n/*\nAll but the first entry/entries of the list\n*/\nexports.rest = function(source,operator,options) {\n\tvar count = parseInt(operator.operand) || 1,\n\t\tresults = [];\n\tsource(function(tiddler,title) {\n\t\tresults.push(title);\n\t});\n\treturn results.slice(count);\n};\nexports.butfirst = exports.rest;\nexports.bf = exports.rest;\n\n/*\nAll but the last entry/entries of the list\n*/\nexports.butlast = function(source,operator,options) {\n\tvar count = parseInt(operator.operand) || 1,\n\t\tresults = [];\n\tsource(function(tiddler,title) {\n\t\tresults.push(title);\n\t});\n\treturn results.slice(0,-count);\n};\nexports.bl = exports.butlast;\n\n/*\nThe nth member of the list\n*/\nexports.nth = function(source,operator,options) {\n\tvar count = parseInt(operator.operand) || 1,\n\t\tresults = [];\n\tsource(function(tiddler,title) {\n\t\tresults.push(title);\n\t});\n\treturn results.slice(count - 1,count);\n};\n\n})();\n",
"title": "$:/core/modules/filters/listops.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/modules.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/modules.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the titles of the modules of a given type in this wiki\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.modules = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\t$tw.utils.each($tw.modules.types[title],function(moduleInfo,moduleName) {\n\t\t\tresults.push(moduleName);\n\t\t});\n\t});\n\tresults.sort();\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/modules.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/moduletypes.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/moduletypes.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the names of the module types in this wiki\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.moduletypes = function(source,operator,options) {\n\tvar results = [];\n\t$tw.utils.each($tw.modules.types,function(moduleInfo,type) {\n\t\tresults.push(type);\n\t});\n\tresults.sort();\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/moduletypes.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/next.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/next.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator returning the tiddler whose title occurs next in the list supplied in the operand tiddler\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.next = function(source,operator,options) {\n\tvar results = [],\n\t\tlist = options.wiki.getTiddlerList(operator.operand);\n\tsource(function(tiddler,title) {\n\t\tvar match = list.indexOf(title);\n\t\t// increment match and then test if result is in range\n\t\tmatch++;\n\t\tif(match > 0 && match < list.length) {\n\t\t\tresults.push(list[match]);\n\t\t}\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/next.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/plugintiddlers.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/plugintiddlers.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the titles of the shadow tiddlers within a plugin\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.plugintiddlers = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tvar pluginInfo = options.wiki.getPluginInfo(title) || options.wiki.getTiddlerDataCached(title,{tiddlers:[]});\n\t\tif(pluginInfo && pluginInfo.tiddlers) {\n\t\t\t$tw.utils.each(pluginInfo.tiddlers,function(fields,title) {\n\t\t\t\tresults.push(title);\n\t\t\t});\n\t\t}\n\t});\n\tresults.sort();\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/plugintiddlers.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/prefix.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/prefix.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for checking if a title starts with a prefix\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.prefix = function(source,operator,options) {\n\tvar results = [];\n\tif(operator.prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(title.substr(0,operator.operand.length) !== operator.operand) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(title.substr(0,operator.operand.length) === operator.operand) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/prefix.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/previous.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/previous.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator returning the tiddler whose title occurs immediately prior in the list supplied in the operand tiddler\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.previous = function(source,operator,options) {\n\tvar results = [],\n\t\tlist = options.wiki.getTiddlerList(operator.operand);\n\tsource(function(tiddler,title) {\n\t\tvar match = list.indexOf(title);\n\t\t// increment match and then test if result is in range\n\t\tmatch--;\n\t\tif(match >= 0) {\n\t\t\tresults.push(list[match]);\n\t\t}\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/previous.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/regexp.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/regexp.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for regexp matching\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.regexp = function(source,operator,options) {\n\tvar results = [],\n\t\tfieldname = (operator.suffix || \"title\").toLowerCase(),\n\t\tregexpString, regexp, flags = \"\", match,\n\t\tgetFieldString = function(tiddler,title) {\n\t\t\tif(tiddler) {\n\t\t\t\treturn tiddler.getFieldString(fieldname);\n\t\t\t} else if(fieldname === \"title\") {\n\t\t\t\treturn title;\n\t\t\t} else {\n\t\t\t\treturn null;\n\t\t\t}\n\t\t};\n\t// Process flags and construct regexp\n\tregexpString = operator.operand;\n\tmatch = /^\\(\\?([gim]+)\\)/.exec(regexpString);\n\tif(match) {\n\t\tflags = match[1];\n\t\tregexpString = regexpString.substr(match[0].length);\n\t} else {\n\t\tmatch = /\\(\\?([gim]+)\\)$/.exec(regexpString);\n\t\tif(match) {\n\t\t\tflags = match[1];\n\t\t\tregexpString = regexpString.substr(0,regexpString.length - match[0].length);\n\t\t}\n\t}\n\ttry {\n\t\tregexp = new RegExp(regexpString,flags);\n\t} catch(e) {\n\t\treturn [\"\" + e];\n\t}\n\t// Process the incoming tiddlers\n\tif(operator.prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tvar text = getFieldString(tiddler,title);\n\t\t\tif(text !== null) {\n\t\t\t\tif(!regexp.exec(text)) {\n\t\t\t\t\tresults.push(title);\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tvar text = getFieldString(tiddler,title);\n\t\t\tif(text !== null) {\n\t\t\t\tif(!!regexp.exec(text)) {\n\t\t\t\t\tresults.push(title);\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/regexp.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/removeprefix.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/removeprefix.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for removing a prefix from each title in the list. Titles that do not start with the prefix are removed.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.removeprefix = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tif(title.substr(0,operator.operand.length) === operator.operand) {\n\t\t\tresults.push(title.substr(operator.operand.length));\n\t\t}\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/removeprefix.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/removesuffix.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/removesuffix.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for removing a suffix from each title in the list. Titles that do not end with the suffix are removed.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.removesuffix = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tif(title.substr(-operator.operand.length) === operator.operand) {\n\t\t\tresults.push(title.substr(0,title.length - operator.operand.length));\n\t\t}\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/removesuffix.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/sameday.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/sameday.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator that selects tiddlers with a modified date field on the same day as the provided value.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.sameday = function(source,operator,options) {\n\tvar results = [],\n\t\tfieldName = operator.suffix || \"modified\",\n\t\ttargetDate = (new Date($tw.utils.parseDate(operator.operand))).setHours(0,0,0,0);\n\t// Function to convert a date/time to a date integer\n\tvar isSameDay = function(dateField) {\n\t\t\treturn (new Date(dateField)).setHours(0,0,0,0) === targetDate;\n\t\t};\n\tsource(function(tiddler,title) {\n\t\tif(tiddler && tiddler.fields[fieldName]) {\n\t\t\tif(isSameDay($tw.utils.parseDate(tiddler.fields[fieldName]))) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t}\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/sameday.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/search.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/search.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for searching for the text in the operand tiddler\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.search = function(source,operator,options) {\n\tvar invert = operator.prefix === \"!\";\n\tif(operator.suffix) {\n\t\treturn options.wiki.search(operator.operand,{\n\t\t\tsource: source,\n\t\t\tinvert: invert,\n\t\t\tfield: operator.suffix\n\t\t});\n\t} else {\n\t\treturn options.wiki.search(operator.operand,{\n\t\t\tsource: source,\n\t\t\tinvert: invert\n\t\t});\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/filters/search.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/shadowsource.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/shadowsource.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the source plugins for shadow tiddlers\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.shadowsource = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tvar source = options.wiki.getShadowSource(title);\n\t\tif(source) {\n\t\t\t$tw.utils.pushTop(results,source);\n\t\t}\n\t});\n\tresults.sort();\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/shadowsource.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/sort.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/sort.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for sorting\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.sort = function(source,operator,options) {\n\tvar results = prepare_results(source);\n\toptions.wiki.sortTiddlers(results,operator.operand || \"title\",operator.prefix === \"!\",false,false);\n\treturn results;\n};\n\nexports.nsort = function(source,operator,options) {\n\tvar results = prepare_results(source);\n\toptions.wiki.sortTiddlers(results,operator.operand || \"title\",operator.prefix === \"!\",false,true);\n\treturn results;\n};\n\nexports.sortcs = function(source,operator,options) {\n\tvar results = prepare_results(source);\n\toptions.wiki.sortTiddlers(results,operator.operand || \"title\",operator.prefix === \"!\",true,false);\n\treturn results;\n};\n\nexports.nsortcs = function(source,operator,options) {\n\tvar results = prepare_results(source);\n\toptions.wiki.sortTiddlers(results,operator.operand || \"title\",operator.prefix === \"!\",true,true);\n\treturn results;\n};\n\nvar prepare_results = function (source) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tresults.push(title);\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/sort.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/splitbefore.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/splitbefore.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator that splits each result on the first occurance of the specified separator and returns the unique values.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.splitbefore = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tvar parts = title.split(operator.operand);\n\t\tif(parts.length === 1) {\n\t\t\t$tw.utils.pushTop(results,parts[0]);\n\t\t} else {\n\t\t\t$tw.utils.pushTop(results,parts[0] + operator.operand);\n\t\t}\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/splitbefore.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/storyviews.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/storyviews.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the names of the story views in this wiki\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.storyviews = function(source,operator,options) {\n\tvar results = [],\n\t\tstoryviews = {};\n\t$tw.modules.applyMethods(\"storyview\",storyviews);\n\t$tw.utils.each(storyviews,function(info,name) {\n\t\tresults.push(name);\n\t});\n\tresults.sort();\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/storyviews.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/suffix.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/suffix.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for checking if a title ends with a suffix\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.suffix = function(source,operator,options) {\n\tvar results = [];\n\tif(operator.prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(title.substr(-operator.operand.length) !== operator.operand) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(title.substr(-operator.operand.length) === operator.operand) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/suffix.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/tag.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/tag.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for checking for the presence of a tag\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.tag = function(source,operator,options) {\n\tvar results = [];\n\tif(operator.prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(tiddler && !tiddler.hasTag(operator.operand)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(tiddler && tiddler.hasTag(operator.operand)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t\tresults = options.wiki.sortByList(results,operator.operand);\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/tag.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/tagging.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/tagging.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator returning all tiddlers that are tagged with the selected tiddlers\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.tagging = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\t$tw.utils.pushTop(results,options.wiki.getTiddlersWithTag(title));\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/tagging.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/tags.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/tags.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator returning all the tags of the selected tiddlers\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.tags = function(source,operator,options) {\n\tvar tags = {};\n\tsource(function(tiddler,title) {\n\t\tvar t, length;\n\t\tif(tiddler && tiddler.fields.tags) {\n\t\t\tfor(t=0, length=tiddler.fields.tags.length; t<length; t++) {\n\t\t\t\ttags[tiddler.fields.tags[t]] = true;\n\t\t\t}\n\t\t}\n\t});\n\treturn Object.keys(tags);\n};\n\n})();\n",
"title": "$:/core/modules/filters/tags.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/title.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/title.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for comparing title fields for equality\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.title = function(source,operator,options) {\n\tvar results = [];\n\tif(operator.prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(tiddler && tiddler.fields.title !== operator.operand) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tresults.push(operator.operand);\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/title.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/untagged.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/untagged.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator returning all the selected tiddlers that are untagged\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.untagged = function(source,operator,options) {\n\tvar results = [];\n\tif(operator.prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(tiddler && $tw.utils.isArray(tiddler.fields.tags) && tiddler.fields.tags.length > 0) {\n\t\t\t\t$tw.utils.pushTop(results,title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(!tiddler || !tiddler.hasField(\"tags\") || ($tw.utils.isArray(tiddler.fields.tags) && tiddler.fields.tags.length === 0)) {\n\t\t\t\t$tw.utils.pushTop(results,title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/untagged.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/wikiparserrules.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/wikiparserrules.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the names of the wiki parser rules in this wiki\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.wikiparserrules = function(source,operator,options) {\n\tvar results = [];\n\t$tw.utils.each($tw.modules.types.wikirule,function(mod) {\n\t\tvar exp = mod.exports;\n\t\tif(exp.types[operator.operand]) {\n\t\t\tresults.push(exp.name);\n\t\t}\n\t});\n\tresults.sort();\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/wikiparserrules.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/x-listops.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/x-listops.js\ntype: application/javascript\nmodule-type: filteroperator\n\nExtended filter operators to manipulate the current list.\n\n\\*/\n(function () {\n\n /*jslint node: true, browser: true */\n /*global $tw: false */\n \"use strict\";\n\n /*\n Fetch titles from the current list\n */\n var prepare_results = function (source) {\n var results = [];\n source(function (tiddler, title) {\n results.push(title);\n });\n return results;\n };\n\n /*\n Moves a number of items from the tail of the current list before the item named in the operand\n */\n exports.putbefore = function (source, operator) {\n var results = prepare_results(source),\n index = results.indexOf(operator.operand),\n count = parseInt(operator.suffix) || 1;\n return (index === -1) ?\n results.slice(0, -1) :\n results.slice(0, index).concat(results.slice(-count)).concat(results.slice(index, -count));\n };\n\n /*\n Moves a number of items from the tail of the current list after the item named in the operand\n */\n exports.putafter = function (source, operator) {\n var results = prepare_results(source),\n index = results.indexOf(operator.operand),\n count = parseInt(operator.suffix) || 1;\n return (index === -1) ?\n results.slice(0, -1) :\n results.slice(0, index + 1).concat(results.slice(-count)).concat(results.slice(index + 1, -count));\n };\n\n /*\n Replaces the item named in the operand with a number of items from the tail of the current list\n */\n exports.replace = function (source, operator) {\n var results = prepare_results(source),\n index = results.indexOf(operator.operand),\n count = parseInt(operator.suffix) || 1;\n return (index === -1) ?\n results.slice(0, -count) :\n results.slice(0, index).concat(results.slice(-count)).concat(results.slice(index + 1, -count));\n };\n\n /*\n Moves a number of items from the tail of the current list to the head of the list\n */\n exports.putfirst = function (source, operator) {\n var results = 
prepare_results(source),\n count = parseInt(operator.suffix) || 1;\n return results.slice(-count).concat(results.slice(0, -count));\n };\n\n /*\n Moves a number of items from the head of the current list to the tail of the list\n */\n exports.putlast = function (source, operator) {\n var results = prepare_results(source),\n count = parseInt(operator.suffix) || 1;\n return results.slice(count).concat(results.slice(0, count));\n };\n\n /*\n Moves the item named in the operand a number of places forward or backward in the list\n */\n exports.move = function (source, operator) {\n var results = prepare_results(source),\n index = results.indexOf(operator.operand),\n count = parseInt(operator.suffix) || 1,\n marker = results.splice(index, 1);\n return results.slice(0, index + count).concat(marker).concat(results.slice(index + count));\n };\n\n /*\n Returns the items from the current list that are after the item named in the operand\n */\n exports.allafter = function (source, operator) {\n var results = prepare_results(source),\n index = results.indexOf(operator.operand);\n return (index === -1 || index > (results.length - 2)) ? [] :\n (operator.suffix) ? results.slice(index) :\n results.slice(index + 1);\n };\n\n /*\n Returns the items from the current list that are before the item named in the operand\n */\n exports.allbefore = function (source, operator) {\n var results = prepare_results(source),\n index = results.indexOf(operator.operand);\n return (index <= 0) ? [] :\n (operator.suffix) ? results.slice(0, index + 1) :\n results.slice(0, index);\n };\n\n /*\n Appends the items listed in the operand array to the tail of the current list\n */\n exports.append = function (source, operator) {\n var append = $tw.utils.parseStringArray(operator.operand, \"true\"),\n results = prepare_results(source),\n count = parseInt(operator.suffix) || append.length;\n return (append.length === 0) ? results :\n (operator.prefix) ? 
results.concat(append.slice(-count)) :\n results.concat(append.slice(0, count));\n };\n\n /*\n Prepends the items listed in the operand array to the head of the current list\n */\n exports.prepend = function (source, operator) {\n var prepend = $tw.utils.parseStringArray(operator.operand, \"true\"),\n results = prepare_results(source),\n count = parseInt(operator.suffix) || prepend.length;\n return (prepend.length === 0) ? results :\n (operator.prefix) ? prepend.slice(-count).concat(results) :\n prepend.slice(0, count).concat(results);\n };\n\n /*\n Returns all items from the current list except the items listed in the operand array\n */\n exports.remove = function (source, operator) {\n var array = $tw.utils.parseStringArray(operator.operand, \"true\"),\n results = prepare_results(source),\n count = parseInt(operator.suffix) || array.length,\n p,\n len,\n index;\n len = array.length - 1;\n for (p = 0; p < count; ++p) {\n if (operator.prefix) {\n index = results.indexOf(array[len - p]);\n } else {\n index = results.indexOf(array[p]);\n }\n if (index !== -1) {\n results.splice(index, 1);\n }\n }\n return results;\n };\n\n /*\n Returns all items from the current list sorted in the order of the items in the operand array\n */\n exports.sortby = function (source, operator) {\n var results = prepare_results(source);\n if (!results || results.length < 2) {\n return results;\n }\n var lookup = $tw.utils.parseStringArray(operator.operand, \"true\");\n results.sort(function (a, b) {\n return lookup.indexOf(a) - lookup.indexOf(b);\n });\n return results;\n };\n\n /*\n Removes all duplicate items from the current list\n */\n exports.unique = function (source, operator) {\n var results = prepare_results(source);\n var set = results.reduce(function (a, b) {\n if (a.indexOf(b) < 0) {\n a.push(b);\n }\n return a;\n }, []);\n return set;\n };\n})();\n",
"title": "$:/core/modules/filters/x-listops.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters.js": {
"text": "/*\\\ntitle: $:/core/modules/filters.js\ntype: application/javascript\nmodule-type: wikimethod\n\nAdds tiddler filtering methods to the $tw.Wiki object.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nParses an operation (i.e. a run) within a filter string\n\toperators: Array of array of operator nodes into which results should be inserted\n\tfilterString: filter string\n\tp: start position within the string\nReturns the new start position, after the parsed operation\n*/\nfunction parseFilterOperation(operators,filterString,p) {\n\tvar operator, operand, bracketPos, curlyBracketPos;\n\t// Skip the starting square bracket\n\tif(filterString.charAt(p++) !== \"[\") {\n\t\tthrow \"Missing [ in filter expression\";\n\t}\n\t// Process each operator in turn\n\tdo {\n\t\toperator = {};\n\t\t// Check for an operator prefix\n\t\tif(filterString.charAt(p) === \"!\") {\n\t\t\toperator.prefix = filterString.charAt(p++);\n\t\t}\n\t\t// Get the operator name\n\t\tvar nextBracketPos = filterString.substring(p).search(/[\\[\\{<\\/]/);\n\t\tif(nextBracketPos === -1) {\n\t\t\tthrow \"Missing [ in filter expression\";\n\t\t}\n\t\tnextBracketPos += p;\n\t\tvar bracket = filterString.charAt(nextBracketPos);\n\t\toperator.operator = filterString.substring(p,nextBracketPos);\n\t\t\n\t\t// Any suffix?\n\t\tvar colon = operator.operator.indexOf(':');\n\t\tif(colon > -1) {\n\t\t\toperator.suffix = operator.operator.substring(colon + 1);\n\t\t\toperator.operator = operator.operator.substring(0,colon) || \"field\";\n\t\t}\n\t\t// Empty operator means: title\n\t\telse if(operator.operator === \"\") {\n\t\t\toperator.operator = \"title\";\n\t\t}\n\n\t\tp = nextBracketPos + 1;\n\t\tswitch (bracket) {\n\t\t\tcase \"{\": // Curly brackets\n\t\t\t\toperator.indirect = true;\n\t\t\t\tnextBracketPos = filterString.indexOf(\"}\",p);\n\t\t\t\tbreak;\n\t\t\tcase \"[\": // Square brackets\n\t\t\t\tnextBracketPos = 
filterString.indexOf(\"]\",p);\n\t\t\t\tbreak;\n\t\t\tcase \"<\": // Angle brackets\n\t\t\t\toperator.variable = true;\n\t\t\t\tnextBracketPos = filterString.indexOf(\">\",p);\n\t\t\t\tbreak;\n\t\t\tcase \"/\": // regexp brackets\n\t\t\t\tvar rex = /^((?:[^\\\\\\/]*|\\\\.)*)\\/(?:\\(([mygi]+)\\))?/g,\n\t\t\t\t\trexMatch = rex.exec(filterString.substring(p));\n\t\t\t\tif(rexMatch) {\n\t\t\t\t\toperator.regexp = new RegExp(rexMatch[1], rexMatch[2]);\n// DEPRECATION WARNING\nconsole.log(\"WARNING: Filter\",operator.operator,\"has a deprecated regexp operand\",operator.regexp);\n\t\t\t\t\tnextBracketPos = p + rex.lastIndex - 1;\n\t\t\t\t}\n\t\t\t\telse {\n\t\t\t\t\tthrow \"Unterminated regular expression in filter expression\";\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t}\n\t\t\n\t\tif(nextBracketPos === -1) {\n\t\t\tthrow \"Missing closing bracket in filter expression\";\n\t\t}\n\t\tif(!operator.regexp) {\n\t\t\toperator.operand = filterString.substring(p,nextBracketPos);\n\t\t}\n\t\tp = nextBracketPos + 1;\n\t\t\t\n\t\t// Push this operator\n\t\toperators.push(operator);\n\t} while(filterString.charAt(p) !== \"]\");\n\t// Skip the ending square bracket\n\tif(filterString.charAt(p++) !== \"]\") {\n\t\tthrow \"Missing ] in filter expression\";\n\t}\n\t// Return the parsing position\n\treturn p;\n}\n\n/*\nParse a filter string\n*/\nexports.parseFilter = function(filterString) {\n\tfilterString = filterString || \"\";\n\tvar results = [], // Array of arrays of operator nodes {operator:,operand:}\n\t\tp = 0, // Current position in the filter string\n\t\tmatch;\n\tvar whitespaceRegExp = /(\\s+)/mg,\n\t\toperandRegExp = /((?:\\+|\\-)?)(?:(\\[)|(?:\"([^\"]*)\")|(?:'([^']*)')|([^\\s\\[\\]]+))/mg;\n\twhile(p < filterString.length) {\n\t\t// Skip any whitespace\n\t\twhitespaceRegExp.lastIndex = p;\n\t\tmatch = whitespaceRegExp.exec(filterString);\n\t\tif(match && match.index === p) {\n\t\t\tp = p + match[0].length;\n\t\t}\n\t\t// Match the start of the operation\n\t\tif(p < 
filterString.length) {\n\t\t\toperandRegExp.lastIndex = p;\n\t\t\tmatch = operandRegExp.exec(filterString);\n\t\t\tif(!match || match.index !== p) {\n\t\t\t\tthrow $tw.language.getString(\"Error/FilterSyntax\");\n\t\t\t}\n\t\t\tvar operation = {\n\t\t\t\tprefix: \"\",\n\t\t\t\toperators: []\n\t\t\t};\n\t\t\tif(match[1]) {\n\t\t\t\toperation.prefix = match[1];\n\t\t\t\tp++;\n\t\t\t}\n\t\t\tif(match[2]) { // Opening square bracket\n\t\t\t\tp = parseFilterOperation(operation.operators,filterString,p);\n\t\t\t} else {\n\t\t\t\tp = match.index + match[0].length;\n\t\t\t}\n\t\t\tif(match[3] || match[4] || match[5]) { // Double quoted string, single quoted string or unquoted title\n\t\t\t\toperation.operators.push(\n\t\t\t\t\t{operator: \"title\", operand: match[3] || match[4] || match[5]}\n\t\t\t\t);\n\t\t\t}\n\t\t\tresults.push(operation);\n\t\t}\n\t}\n\treturn results;\n};\n\nexports.getFilterOperators = function() {\n\tif(!this.filterOperators) {\n\t\t$tw.Wiki.prototype.filterOperators = {};\n\t\t$tw.modules.applyMethods(\"filteroperator\",this.filterOperators);\n\t}\n\treturn this.filterOperators;\n};\n\nexports.filterTiddlers = function(filterString,widget,source) {\n\tvar fn = this.compileFilter(filterString);\n\treturn fn.call(this,source,widget);\n};\n\n/*\nCompile a filter into a function with the signature fn(source,widget) where:\nsource: an iterator function for the source tiddlers, called source(iterator), where iterator is called as iterator(tiddler,title)\nwidget: an optional widget node for retrieving the current tiddler etc.\n*/\nexports.compileFilter = function(filterString) {\n\tvar filterParseTree;\n\ttry {\n\t\tfilterParseTree = this.parseFilter(filterString);\n\t} catch(e) {\n\t\treturn function(source,widget) {\n\t\t\treturn [$tw.language.getString(\"Error/Filter\") + \": \" + e];\n\t\t};\n\t}\n\t// Get the hashmap of filter operator functions\n\tvar filterOperators = this.getFilterOperators();\n\t// Assemble array of functions, one for each 
operation\n\tvar operationFunctions = [];\n\t// Step through the operations\n\tvar self = this;\n\t$tw.utils.each(filterParseTree,function(operation) {\n\t\t// Create a function for the chain of operators in the operation\n\t\tvar operationSubFunction = function(source,widget) {\n\t\t\tvar accumulator = source,\n\t\t\t\tresults = [],\n\t\t\t\tcurrTiddlerTitle = widget && widget.getVariable(\"currentTiddler\");\n\t\t\t$tw.utils.each(operation.operators,function(operator) {\n\t\t\t\tvar operand = operator.operand,\n\t\t\t\t\toperatorFunction;\n\t\t\t\tif(!operator.operator) {\n\t\t\t\t\toperatorFunction = filterOperators.title;\n\t\t\t\t} else if(!filterOperators[operator.operator]) {\n\t\t\t\t\toperatorFunction = filterOperators.field;\n\t\t\t\t} else {\n\t\t\t\t\toperatorFunction = filterOperators[operator.operator];\n\t\t\t\t}\n\t\t\t\tif(operator.indirect) {\n\t\t\t\t\toperand = self.getTextReference(operator.operand,\"\",currTiddlerTitle);\n\t\t\t\t}\n\t\t\t\tif(operator.variable) {\n\t\t\t\t\toperand = widget.getVariable(operator.operand,{defaultValue: \"\"});\n\t\t\t\t}\n\t\t\t\t// Invoke the appropriate filteroperator module\n\t\t\t\tresults = operatorFunction(accumulator,{\n\t\t\t\t\t\t\toperator: operator.operator,\n\t\t\t\t\t\t\toperand: operand,\n\t\t\t\t\t\t\tprefix: operator.prefix,\n\t\t\t\t\t\t\tsuffix: operator.suffix,\n\t\t\t\t\t\t\tregexp: operator.regexp\n\t\t\t\t\t\t},{\n\t\t\t\t\t\t\twiki: self,\n\t\t\t\t\t\t\twidget: widget\n\t\t\t\t\t\t});\n\t\t\t\tif($tw.utils.isArray(results)) {\n\t\t\t\t\taccumulator = self.makeTiddlerIterator(results);\n\t\t\t\t} else {\n\t\t\t\t\taccumulator = results;\n\t\t\t\t}\n\t\t\t});\n\t\t\tif($tw.utils.isArray(results)) {\n\t\t\t\treturn results;\n\t\t\t} else {\n\t\t\t\tvar resultArray = [];\n\t\t\t\tresults(function(tiddler,title) {\n\t\t\t\t\tresultArray.push(title);\n\t\t\t\t});\n\t\t\t\treturn resultArray;\n\t\t\t}\n\t\t};\n\t\t// Wrap the operator functions in a wrapper function that depends on the 
prefix\n\t\toperationFunctions.push((function() {\n\t\t\tswitch(operation.prefix || \"\") {\n\t\t\t\tcase \"\": // No prefix means that the operation is unioned into the result\n\t\t\t\t\treturn function(results,source,widget) {\n\t\t\t\t\t\t$tw.utils.pushTop(results,operationSubFunction(source,widget));\n\t\t\t\t\t};\n\t\t\t\tcase \"-\": // The results of this operation are removed from the main result\n\t\t\t\t\treturn function(results,source,widget) {\n\t\t\t\t\t\t$tw.utils.removeArrayEntries(results,operationSubFunction(source,widget));\n\t\t\t\t\t};\n\t\t\t\tcase \"+\": // This operation is applied to the main results so far\n\t\t\t\t\treturn function(results,source,widget) {\n\t\t\t\t\t\t// This replaces all the elements of the array, but keeps the actual array so that references to it are preserved\n\t\t\t\t\t\tsource = self.makeTiddlerIterator(results);\n\t\t\t\t\t\tresults.splice(0,results.length);\n\t\t\t\t\t\t$tw.utils.pushTop(results,operationSubFunction(source,widget));\n\t\t\t\t\t};\n\t\t\t}\n\t\t})());\n\t});\n\t// Return a function that applies the operations to a source iterator of tiddler titles\n\treturn $tw.perf.measure(\"filter\",function filterFunction(source,widget) {\n\t\tif(!source) {\n\t\t\tsource = self.each;\n\t\t} else if(typeof source === \"object\") { // Array or hashmap\n\t\t\tsource = self.makeTiddlerIterator(source);\n\t\t}\n\t\tvar results = [];\n\t\t$tw.utils.each(operationFunctions,function(operationFunction) {\n\t\t\toperationFunction(results,source,widget);\n\t\t});\n\t\treturn results;\n\t});\n};\n\n})();\n",
"title": "$:/core/modules/filters.js",
"type": "application/javascript",
"module-type": "wikimethod"
},
"$:/core/modules/info/platform.js": {
"text": "/*\\\ntitle: $:/core/modules/info/platform.js\ntype: application/javascript\nmodule-type: info\n\nInitialise basic platform $:/info/ tiddlers\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.getInfoTiddlerFields = function() {\n\tvar mapBoolean = function(value) {return value ? \"yes\" : \"no\";},\n\t\tinfoTiddlerFields = [];\n\t// Basics\n\tinfoTiddlerFields.push({title: \"$:/info/browser\", text: mapBoolean(!!$tw.browser)});\n\tinfoTiddlerFields.push({title: \"$:/info/node\", text: mapBoolean(!!$tw.node)});\n\treturn infoTiddlerFields;\n};\n\n})();\n",
"title": "$:/core/modules/info/platform.js",
"type": "application/javascript",
"module-type": "info"
},
"$:/core/modules/keyboard.js": {
"text": "/*\\\ntitle: $:/core/modules/keyboard.js\ntype: application/javascript\nmodule-type: global\n\nKeyboard handling utilities\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar namedKeys = {\n\t\"cancel\": 3,\n\t\"help\": 6,\n\t\"backspace\": 8,\n\t\"tab\": 9,\n\t\"clear\": 12,\n\t\"return\": 13,\n\t\"enter\": 13,\n\t\"pause\": 19,\n\t\"escape\": 27,\n\t\"space\": 32,\n\t\"page_up\": 33,\n\t\"page_down\": 34,\n\t\"end\": 35,\n\t\"home\": 36,\n\t\"left\": 37,\n\t\"up\": 38,\n\t\"right\": 39,\n\t\"down\": 40,\n\t\"printscreen\": 44,\n\t\"insert\": 45,\n\t\"delete\": 46,\n\t\"0\": 48,\n\t\"1\": 49,\n\t\"2\": 50,\n\t\"3\": 51,\n\t\"4\": 52,\n\t\"5\": 53,\n\t\"6\": 54,\n\t\"7\": 55,\n\t\"8\": 56,\n\t\"9\": 57,\n\t\"firefoxsemicolon\": 59,\n\t\"firefoxequals\": 61,\n\t\"a\": 65,\n\t\"b\": 66,\n\t\"c\": 67,\n\t\"d\": 68,\n\t\"e\": 69,\n\t\"f\": 70,\n\t\"g\": 71,\n\t\"h\": 72,\n\t\"i\": 73,\n\t\"j\": 74,\n\t\"k\": 75,\n\t\"l\": 76,\n\t\"m\": 77,\n\t\"n\": 78,\n\t\"o\": 79,\n\t\"p\": 80,\n\t\"q\": 81,\n\t\"r\": 82,\n\t\"s\": 83,\n\t\"t\": 84,\n\t\"u\": 85,\n\t\"v\": 86,\n\t\"w\": 87,\n\t\"x\": 88,\n\t\"y\": 89,\n\t\"z\": 90,\n\t\"numpad0\": 96,\n\t\"numpad1\": 97,\n\t\"numpad2\": 98,\n\t\"numpad3\": 99,\n\t\"numpad4\": 100,\n\t\"numpad5\": 101,\n\t\"numpad6\": 102,\n\t\"numpad7\": 103,\n\t\"numpad8\": 104,\n\t\"numpad9\": 105,\n\t\"multiply\": 106,\n\t\"add\": 107,\n\t\"separator\": 108,\n\t\"subtract\": 109,\n\t\"decimal\": 110,\n\t\"divide\": 111,\n\t\"f1\": 112,\n\t\"f2\": 113,\n\t\"f3\": 114,\n\t\"f4\": 115,\n\t\"f5\": 116,\n\t\"f6\": 117,\n\t\"f7\": 118,\n\t\"f8\": 119,\n\t\"f9\": 120,\n\t\"f10\": 121,\n\t\"f11\": 122,\n\t\"f12\": 123,\n\t\"f13\": 124,\n\t\"f14\": 125,\n\t\"f15\": 126,\n\t\"f16\": 127,\n\t\"f17\": 128,\n\t\"f18\": 129,\n\t\"f19\": 130,\n\t\"f20\": 131,\n\t\"f21\": 132,\n\t\"f22\": 133,\n\t\"f23\": 134,\n\t\"f24\": 135,\n\t\"firefoxminus\": 173,\n\t\"semicolon\": 186,\n\t\"equals\": 
187,\n\t\"comma\": 188,\n\t\"dash\": 189,\n\t\"period\": 190,\n\t\"slash\": 191,\n\t\"backquote\": 192,\n\t\"openbracket\": 219,\n\t\"backslash\": 220,\n\t\"closebracket\": 221,\n\t\"quote\": 222\n};\n\nfunction KeyboardManager(options) {\n\tvar self = this;\n\toptions = options || \"\";\n\t// Save the named key hashmap\n\tthis.namedKeys = namedKeys;\n\t// Create a reverse mapping of code to keyname\n\tthis.keyNames = [];\n\t$tw.utils.each(namedKeys,function(keyCode,name) {\n\t\tself.keyNames[keyCode] = name.substr(0,1).toUpperCase() + name.substr(1);\n\t});\n\t// Save the platform-specific name of the \"meta\" key\n\tthis.metaKeyName = $tw.platform.isMac ? \"cmd-\" : \"win-\";\n}\n\n/*\nReturn an array of keycodes for the modifier keys ctrl, shift, alt, meta\n*/\nKeyboardManager.prototype.getModifierKeys = function() {\n\treturn [\n\t\t16, // Shift\n\t\t17, // Ctrl\n\t\t18, // Alt\n\t\t20, // CAPS LOCK\n\t\t91, // Meta (left)\n\t\t93, // Meta (right)\n\t\t224 // Meta (Firefox)\n\t]\n};\n\n/*\nParses a key descriptor into the structure:\n{\n\tkeyCode: numeric keycode\n\tshiftKey: boolean\n\taltKey: boolean\n\tctrlKey: boolean\n\tmetaKey: boolean\n}\nKey descriptors have the following format:\n\tctrl+enter\n\tctrl+shift+alt+A\n*/\nKeyboardManager.prototype.parseKeyDescriptor = function(keyDescriptor) {\n\tvar components = keyDescriptor.split(/\\+|\\-/),\n\t\tinfo = {\n\t\t\tkeyCode: 0,\n\t\t\tshiftKey: false,\n\t\t\taltKey: false,\n\t\t\tctrlKey: false,\n\t\t\tmetaKey: false\n\t\t};\n\tfor(var t=0; t<components.length; t++) {\n\t\tvar s = components[t].toLowerCase(),\n\t\t\tc = s.charCodeAt(0);\n\t\t// Look for modifier keys\n\t\tif(s === \"ctrl\") {\n\t\t\tinfo.ctrlKey = true;\n\t\t} else if(s === \"shift\") {\n\t\t\tinfo.shiftKey = true;\n\t\t} else if(s === \"alt\") {\n\t\t\tinfo.altKey = true;\n\t\t} else if(s === \"meta\" || s === \"cmd\" || s === \"win\") {\n\t\t\tinfo.metaKey = true;\n\t\t}\n\t\t// Replace named keys with their code\n\t\tif(this.namedKeys[s]) 
{\n\t\t\tinfo.keyCode = this.namedKeys[s];\n\t\t}\n\t}\n\tif(info.keyCode) {\n\t\treturn info;\n\t} else {\n\t\treturn null;\n\t}\n};\n\n/*\nParse a list of key descriptors into an array of keyInfo objects. The key descriptors can be passed as an array of strings or a space separated string\n*/\nKeyboardManager.prototype.parseKeyDescriptors = function(keyDescriptors,options) {\n\tvar self = this;\n\toptions = options || {};\n\toptions.stack = options.stack || [];\n\tvar wiki = options.wiki || $tw.wiki;\n\tif(typeof keyDescriptors === \"string\" && keyDescriptors === \"\") {\n\t\treturn [];\n\t}\n\tif(!$tw.utils.isArray(keyDescriptors)) {\n\t\tkeyDescriptors = keyDescriptors.split(\" \");\n\t}\n\tvar result = [];\n\t$tw.utils.each(keyDescriptors,function(keyDescriptor) {\n\t\t// Look for a named shortcut\n\t\tif(keyDescriptor.substr(0,2) === \"((\" && keyDescriptor.substr(-2,2) === \"))\") {\n\t\t\tif(options.stack.indexOf(keyDescriptor) === -1) {\n\t\t\t\toptions.stack.push(keyDescriptor);\n\t\t\t\tvar name = keyDescriptor.substring(2,keyDescriptor.length - 2),\n\t\t\t\t\tlookupName = function(configName) {\n\t\t\t\t\t\tvar keyDescriptors = wiki.getTiddlerText(\"$:/config/\" + configName + \"/\" + name);\n\t\t\t\t\t\tif(keyDescriptors) {\n\t\t\t\t\t\t\tresult.push.apply(result,self.parseKeyDescriptors(keyDescriptors,options));\n\t\t\t\t\t\t}\n\t\t\t\t\t};\n\t\t\t\tlookupName(\"shortcuts\");\n\t\t\t\tlookupName($tw.platform.isMac ? \"shortcuts-mac\" : \"shortcuts-not-mac\");\n\t\t\t\tlookupName($tw.platform.isWindows ? \"shortcuts-windows\" : \"shortcuts-not-windows\");\n\t\t\t\tlookupName($tw.platform.isLinux ? 
\"shortcuts-linux\" : \"shortcuts-not-linux\");\n\t\t\t}\n\t\t} else {\n\t\t\tresult.push(self.parseKeyDescriptor(keyDescriptor));\n\t\t}\n\t});\n\treturn result;\n};\n\nKeyboardManager.prototype.getPrintableShortcuts = function(keyInfoArray) {\n\tvar self = this,\n\t\tresult = [];\n\t$tw.utils.each(keyInfoArray,function(keyInfo) {\n\t\tif(keyInfo) {\n\t\t\tresult.push((keyInfo.ctrlKey ? \"ctrl-\" : \"\") + \n\t\t\t\t (keyInfo.shiftKey ? \"shift-\" : \"\") + \n\t\t\t\t (keyInfo.altKey ? \"alt-\" : \"\") + \n\t\t\t\t (keyInfo.metaKey ? self.metaKeyName : \"\") + \n\t\t\t\t (self.keyNames[keyInfo.keyCode]));\n\t\t}\n\t});\n\treturn result;\n}\n\nKeyboardManager.prototype.checkKeyDescriptor = function(event,keyInfo) {\n\treturn keyInfo &&\n\t\t\tevent.keyCode === keyInfo.keyCode && \n\t\t\tevent.shiftKey === keyInfo.shiftKey && \n\t\t\tevent.altKey === keyInfo.altKey && \n\t\t\tevent.ctrlKey === keyInfo.ctrlKey && \n\t\t\tevent.metaKey === keyInfo.metaKey;\n};\n\nKeyboardManager.prototype.checkKeyDescriptors = function(event,keyInfoArray) {\n\tfor(var t=0; t<keyInfoArray.length; t++) {\n\t\tif(this.checkKeyDescriptor(event,keyInfoArray[t])) {\n\t\t\treturn true;\n\t\t}\n\t}\n\treturn false;\n};\n\nexports.KeyboardManager = KeyboardManager;\n\n})();\n",
"title": "$:/core/modules/keyboard.js",
"type": "application/javascript",
"module-type": "global"
},
"$:/core/modules/language.js": {
"text": "/*\\\ntitle: $:/core/modules/language.js\ntype: application/javascript\nmodule-type: global\n\nThe $tw.Language() manages translateable strings\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nCreate an instance of the language manager. Options include:\nwiki: wiki from which to retrieve translation tiddlers\n*/\nfunction Language(options) {\n\toptions = options || \"\";\n\tthis.wiki = options.wiki || $tw.wiki;\n}\n\n/*\nReturn a wikified translateable string. The title is automatically prefixed with \"$:/language/\"\nOptions include:\nvariables: optional hashmap of variables to supply to the language wikification\n*/\nLanguage.prototype.getString = function(title,options) {\n\toptions = options || {};\n\ttitle = \"$:/language/\" + title;\n\treturn this.wiki.renderTiddler(\"text/plain\",title,{variables: options.variables});\n};\n\n/*\nReturn a raw, unwikified translateable string. The title is automatically prefixed with \"$:/language/\"\n*/\nLanguage.prototype.getRawString = function(title) {\n\ttitle = \"$:/language/\" + title;\n\treturn this.wiki.getTiddlerText(title);\n};\n\nexports.Language = Language;\n\n})();\n",
"title": "$:/core/modules/language.js",
"type": "application/javascript",
"module-type": "global"
},
"$:/core/modules/macros/changecount.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/changecount.js\ntype: application/javascript\nmodule-type: macro\n\nMacro to return the changecount for the current tiddler\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInformation about this macro\n*/\n\nexports.name = \"changecount\";\n\nexports.params = [];\n\n/*\nRun the macro\n*/\nexports.run = function() {\n\treturn this.wiki.getChangeCount(this.getVariable(\"currentTiddler\")) + \"\";\n};\n\n})();\n",
"title": "$:/core/modules/macros/changecount.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/macros/contrastcolour.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/contrastcolour.js\ntype: application/javascript\nmodule-type: macro\n\nMacro to choose which of two colours has the highest contrast with a base colour\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInformation about this macro\n*/\n\nexports.name = \"contrastcolour\";\n\nexports.params = [\n\t{name: \"target\"},\n\t{name: \"fallbackTarget\"},\n\t{name: \"colourA\"},\n\t{name: \"colourB\"}\n];\n\n/*\nRun the macro\n*/\nexports.run = function(target,fallbackTarget,colourA,colourB) {\n\tvar rgbTarget = $tw.utils.parseCSSColor(target) || $tw.utils.parseCSSColor(fallbackTarget);\n\tif(!rgbTarget) {\n\t\treturn colourA;\n\t}\n\tvar rgbColourA = $tw.utils.parseCSSColor(colourA),\n\t\trgbColourB = $tw.utils.parseCSSColor(colourB);\n\tif(rgbColourA && !rgbColourB) {\n\t\treturn rgbColourA;\n\t}\n\tif(rgbColourB && !rgbColourA) {\n\t\treturn rgbColourB;\n\t}\n\tif(!rgbColourA && !rgbColourB) {\n\t\t// If neither colour is readable, return a crude inverse of the target\n\t\treturn [255 - rgbTarget[0],255 - rgbTarget[1],255 - rgbTarget[2],rgbTarget[3]];\n\t}\n\t// Colour brightness formula derived from http://www.w3.org/WAI/ER/WD-AERT/#color-contrast\n\tvar brightnessTarget = rgbTarget[0] * 0.299 + rgbTarget[1] * 0.587 + rgbTarget[2] * 0.114,\n\t\tbrightnessA = rgbColourA[0] * 0.299 + rgbColourA[1] * 0.587 + rgbColourA[2] * 0.114,\n\t\tbrightnessB = rgbColourB[0] * 0.299 + rgbColourB[1] * 0.587 + rgbColourB[2] * 0.114;\n\treturn Math.abs(brightnessTarget - brightnessA) > Math.abs(brightnessTarget - brightnessB) ? colourA : colourB;\n};\n\n})();\n",
"title": "$:/core/modules/macros/contrastcolour.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/macros/csvtiddlers.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/csvtiddlers.js\ntype: application/javascript\nmodule-type: macro\n\nMacro to output tiddlers matching a filter to CSV\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInformation about this macro\n*/\n\nexports.name = \"csvtiddlers\";\n\nexports.params = [\n\t{name: \"filter\"},\n\t{name: \"format\"},\n];\n\n/*\nRun the macro\n*/\nexports.run = function(filter,format) {\n\tvar self = this,\n\t\ttiddlers = this.wiki.filterTiddlers(filter),\n\t\ttiddler,\n\t\tfields = [],\n\t\tt,f;\n\t// Collect all the fields\n\tfor(t=0;t<tiddlers.length; t++) {\n\t\ttiddler = this.wiki.getTiddler(tiddlers[t]);\n\t\tfor(f in tiddler.fields) {\n\t\t\tif(fields.indexOf(f) === -1) {\n\t\t\t\tfields.push(f);\n\t\t\t}\n\t\t}\n\t}\n\t// Sort the fields and bring the standard ones to the front\n\tfields.sort();\n\t\"title text modified modifier created creator\".split(\" \").reverse().forEach(function(value,index) {\n\t\tvar p = fields.indexOf(value);\n\t\tif(p !== -1) {\n\t\t\tfields.splice(p,1);\n\t\t\tfields.unshift(value)\n\t\t}\n\t});\n\t// Output the column headings\n\tvar output = [], row = [];\n\tfields.forEach(function(value) {\n\t\trow.push(quoteAndEscape(value))\n\t});\n\toutput.push(row.join(\",\"));\n\t// Output each tiddler\n\tfor(var t=0;t<tiddlers.length; t++) {\n\t\trow = [];\n\t\ttiddler = this.wiki.getTiddler(tiddlers[t]);\n\t\t\tfor(f=0; f<fields.length; f++) {\n\t\t\t\trow.push(quoteAndEscape(tiddler ? tiddler.getFieldString(fields[f]) || \"\" : \"\"));\n\t\t\t}\n\t\toutput.push(row.join(\",\"));\n\t}\n\treturn output.join(\"\\n\");\n};\n\nfunction quoteAndEscape(value) {\n\treturn \"\\\"\" + value.replace(/\"/mg,\"\\\"\\\"\") + \"\\\"\";\n}\n\n})();\n",
"title": "$:/core/modules/macros/csvtiddlers.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/macros/displayshortcuts.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/displayshortcuts.js\ntype: application/javascript\nmodule-type: macro\n\nMacro to display a list of keyboard shortcuts in human readable form. Notably, it resolves named shortcuts like `((bold))` to the underlying keystrokes.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInformation about this macro\n*/\n\nexports.name = \"displayshortcuts\";\n\nexports.params = [\n\t{name: \"shortcuts\"},\n\t{name: \"prefix\"},\n\t{name: \"separator\"},\n\t{name: \"suffix\"}\n];\n\n/*\nRun the macro\n*/\nexports.run = function(shortcuts,prefix,separator,suffix) {\n\tvar shortcutArray = $tw.keyboardManager.getPrintableShortcuts($tw.keyboardManager.parseKeyDescriptors(shortcuts,{\n\t\twiki: this.wiki\n\t}));\n\tif(shortcutArray.length > 0) {\n\t\tshortcutArray.sort(function(a,b) {\n\t\t return a.toLowerCase().localeCompare(b.toLowerCase());\n\t\t})\n\t\treturn prefix + shortcutArray.join(separator) + suffix;\n\t} else {\n\t\treturn \"\";\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/macros/displayshortcuts.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/macros/dumpvariables.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/dumpvariables.js\ntype: application/javascript\nmodule-type: macro\n\nMacro to dump all active variable values\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInformation about this macro\n*/\n\nexports.name = \"dumpvariables\";\n\nexports.params = [\n];\n\n/*\nRun the macro\n*/\nexports.run = function() {\n\tvar output = [\"|!Variable |!Value |\"],\n\t\tvariables = [], variable;\n\tfor(variable in this.variables) {\n\t\tvariables.push(variable);\n\t}\n\tvariables.sort();\n\tfor(var index=0; index<variables.length; index++) {\n\t\tvar variable = variables[index];\n\t\toutput.push(\"|\" + variable + \" |<input size=50 value=<<\" + variable + \">>/> |\")\n\t}\n\treturn output.join(\"\\n\");\n};\n\n})();\n",
"title": "$:/core/modules/macros/dumpvariables.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/macros/jsontiddlers.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/jsontiddlers.js\ntype: application/javascript\nmodule-type: macro\n\nMacro to output tiddlers matching a filter to JSON\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInformation about this macro\n*/\n\nexports.name = \"jsontiddlers\";\n\nexports.params = [\n\t{name: \"filter\"}\n];\n\n/*\nRun the macro\n*/\nexports.run = function(filter) {\n\tvar tiddlers = this.wiki.filterTiddlers(filter),\n\t\tdata = [];\n\tfor(var t=0;t<tiddlers.length; t++) {\n\t\tvar tiddler = this.wiki.getTiddler(tiddlers[t]);\n\t\tif(tiddler) {\n\t\t\tvar fields = new Object();\n\t\t\tfor(var field in tiddler.fields) {\n\t\t\t\tfields[field] = tiddler.getFieldString(field);\n\t\t\t}\n\t\t\tdata.push(fields);\n\t\t}\n\t}\n\treturn JSON.stringify(data,null,$tw.config.preferences.jsonSpaces);\n};\n\n})();\n",
"title": "$:/core/modules/macros/jsontiddlers.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/macros/makedatauri.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/makedatauri.js\ntype: application/javascript\nmodule-type: macro\n\nMacro to convert a string of text to a data URI\n\n<<makedatauri text:\"Text to be converted\" type:\"text/vnd.tiddlywiki\">>\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInformation about this macro\n*/\n\nexports.name = \"makedatauri\";\n\nexports.params = [\n\t{name: \"text\"},\n\t{name: \"type\"}\n];\n\n/*\nRun the macro\n*/\nexports.run = function(text,type) {\n\treturn $tw.utils.makeDataUri(text,type);\n};\n\n})();\n",
"title": "$:/core/modules/macros/makedatauri.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/macros/now.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/now.js\ntype: application/javascript\nmodule-type: macro\n\nMacro to return a formatted version of the current time\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInformation about this macro\n*/\n\nexports.name = \"now\";\n\nexports.params = [\n\t{name: \"format\"}\n];\n\n/*\nRun the macro\n*/\nexports.run = function(format) {\n\treturn $tw.utils.formatDateString(new Date(),format || \"0hh:0mm, DDth MMM YYYY\");\n};\n\n})();\n",
"title": "$:/core/modules/macros/now.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/macros/qualify.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/qualify.js\ntype: application/javascript\nmodule-type: macro\n\nMacro to qualify a state tiddler title according\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInformation about this macro\n*/\n\nexports.name = \"qualify\";\n\nexports.params = [\n\t{name: \"title\"}\n];\n\n/*\nRun the macro\n*/\nexports.run = function(title) {\n\treturn title + \"-\" + this.getStateQualifier();\n};\n\n})();\n",
"title": "$:/core/modules/macros/qualify.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/macros/resolvepath.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/resolvepath.js\ntype: application/javascript\nmodule-type: macro\n\nResolves a relative path for an absolute rootpath.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"resolvepath\";\n\nexports.params = [\n\t{name: \"source\"},\n\t{name: \"root\"}\n];\n\n/*\nRun the macro\n*/\nexports.run = function(source, root) {\n\treturn $tw.utils.resolvePath(source, root);\n};\n\n})();\n",
"title": "$:/core/modules/macros/resolvepath.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/macros/version.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/version.js\ntype: application/javascript\nmodule-type: macro\n\nMacro to return the TiddlyWiki core version number\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInformation about this macro\n*/\n\nexports.name = \"version\";\n\nexports.params = [];\n\n/*\nRun the macro\n*/\nexports.run = function() {\n\treturn $tw.version;\n};\n\n})();\n",
"title": "$:/core/modules/macros/version.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/parsers/audioparser.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/audioparser.js\ntype: application/javascript\nmodule-type: parser\n\nThe audio parser parses an audio tiddler into an embeddable HTML element\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar AudioParser = function(type,text,options) {\n\tvar element = {\n\t\t\ttype: \"element\",\n\t\t\ttag: \"audio\",\n\t\t\tattributes: {\n\t\t\t\tcontrols: {type: \"string\", value: \"controls\"}\n\t\t\t}\n\t\t},\n\t\tsrc;\n\tif(options._canonical_uri) {\n\t\telement.attributes.src = {type: \"string\", value: options._canonical_uri};\n\t} else if(text) {\n\t\telement.attributes.src = {type: \"string\", value: \"data:\" + type + \";base64,\" + text};\n\t}\n\tthis.tree = [element];\n};\n\nexports[\"audio/ogg\"] = AudioParser;\nexports[\"audio/mpeg\"] = AudioParser;\nexports[\"audio/mp3\"] = AudioParser;\nexports[\"audio/mp4\"] = AudioParser;\n\n})();\n\n",
"title": "$:/core/modules/parsers/audioparser.js",
"type": "application/javascript",
"module-type": "parser"
},
"$:/core/modules/parsers/csvparser.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/csvparser.js\ntype: application/javascript\nmodule-type: parser\n\nThe CSV text parser processes CSV files into a table wrapped in a scrollable widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar CsvParser = function(type,text,options) {\n\t// Table framework\n\tthis.tree = [{\n\t\t\"type\": \"scrollable\", \"children\": [{\n\t\t\t\"type\": \"element\", \"tag\": \"table\", \"children\": [{\n\t\t\t\t\"type\": \"element\", \"tag\": \"tbody\", \"children\": []\n\t\t\t}], \"attributes\": {\n\t\t\t\t\"class\": {\"type\": \"string\", \"value\": \"tc-csv-table\"}\n\t\t\t}\n\t\t}]\n\t}];\n\t// Split the text into lines\n\tvar lines = text.split(/\\r?\\n/mg),\n\t\ttag = \"th\";\n\tfor(var line=0; line<lines.length; line++) {\n\t\tvar lineText = lines[line];\n\t\tif(lineText) {\n\t\t\tvar row = {\n\t\t\t\t\t\"type\": \"element\", \"tag\": \"tr\", \"children\": []\n\t\t\t\t};\n\t\t\tvar columns = lineText.split(\",\");\n\t\t\tfor(var column=0; column<columns.length; column++) {\n\t\t\t\trow.children.push({\n\t\t\t\t\t\t\"type\": \"element\", \"tag\": tag, \"children\": [{\n\t\t\t\t\t\t\t\"type\": \"text\",\n\t\t\t\t\t\t\t\"text\": columns[column]\n\t\t\t\t\t\t}]\n\t\t\t\t\t});\n\t\t\t}\n\t\t\ttag = \"td\";\n\t\t\tthis.tree[0].children[0].children[0].children.push(row);\n\t\t}\n\t}\n};\n\nexports[\"text/csv\"] = CsvParser;\n\n})();\n\n",
"title": "$:/core/modules/parsers/csvparser.js",
"type": "application/javascript",
"module-type": "parser"
},
"$:/core/modules/parsers/htmlparser.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/htmlparser.js\ntype: application/javascript\nmodule-type: parser\n\nThe HTML parser displays text as raw HTML\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar HtmlParser = function(type,text,options) {\n\tvar src;\n\tif(options._canonical_uri) {\n\t\tsrc = options._canonical_uri;\n\t} else if(text) {\n\t\tsrc = \"data:text/html;charset=utf-8,\" + encodeURIComponent(text);\n\t}\n\tthis.tree = [{\n\t\ttype: \"element\",\n\t\ttag: \"iframe\",\n\t\tattributes: {\n\t\t\tsrc: {type: \"string\", value: src},\n\t\t\tsandbox: {type: \"string\", value: \"\"}\n\t\t}\n\t}];\n};\n\nexports[\"text/html\"] = HtmlParser;\n\n})();\n\n",
"title": "$:/core/modules/parsers/htmlparser.js",
"type": "application/javascript",
"module-type": "parser"
},
"$:/core/modules/parsers/imageparser.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/imageparser.js\ntype: application/javascript\nmodule-type: parser\n\nThe image parser parses an image into an embeddable HTML element\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar ImageParser = function(type,text,options) {\n\tvar element = {\n\t\t\ttype: \"element\",\n\t\t\ttag: \"img\",\n\t\t\tattributes: {}\n\t\t},\n\t\tsrc;\n\tif(options._canonical_uri) {\n\t\telement.attributes.src = {type: \"string\", value: options._canonical_uri};\n\t\tif(type === \"application/pdf\" || type === \".pdf\") {\n\t\t\telement.tag = \"embed\";\n\t\t}\n\t} else if(text) {\n\t\tif(type === \"application/pdf\" || type === \".pdf\") {\n\t\t\telement.attributes.src = {type: \"string\", value: \"data:application/pdf;base64,\" + text};\n\t\t\telement.tag = \"embed\";\n\t\t} else if(type === \"image/svg+xml\" || type === \".svg\") {\n\t\t\telement.attributes.src = {type: \"string\", value: \"data:image/svg+xml,\" + encodeURIComponent(text)};\n\t\t} else {\n\t\t\telement.attributes.src = {type: \"string\", value: \"data:\" + type + \";base64,\" + text};\n\t\t}\n\t}\n\tthis.tree = [element];\n};\n\nexports[\"image/svg+xml\"] = ImageParser;\nexports[\"image/jpg\"] = ImageParser;\nexports[\"image/jpeg\"] = ImageParser;\nexports[\"image/png\"] = ImageParser;\nexports[\"image/gif\"] = ImageParser;\nexports[\"application/pdf\"] = ImageParser;\nexports[\"image/x-icon\"] = ImageParser;\n\n})();\n\n",
"title": "$:/core/modules/parsers/imageparser.js",
"type": "application/javascript",
"module-type": "parser"
},
"$:/core/modules/utils/parseutils.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/parseutils.js\ntype: application/javascript\nmodule-type: utils\n\nUtility functions concerned with parsing text into tokens.\n\nMost functions have the following pattern:\n\n* The parameters are:\n** `source`: the source string being parsed\n** `pos`: the current parse position within the string\n** Any further parameters are used to identify the token that is being parsed\n* The return value is:\n** null if the token was not found at the specified position\n** an object representing the token with the following standard fields:\n*** `type`: string indicating the type of the token\n*** `start`: start position of the token in the source string\n*** `end`: end position of the token in the source string\n*** Any further fields required to describe the token\n\nThe exception is `skipWhiteSpace`, which just returns the position after the whitespace.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nLook for a whitespace token. Returns null if not found, otherwise returns {type: \"whitespace\", start:, end:,}\n*/\nexports.parseWhiteSpace = function(source,pos) {\n\tvar p = pos,c;\n\twhile(true) {\n\t\tc = source.charAt(p);\n\t\tif((c === \" \") || (c === \"\\f\") || (c === \"\\n\") || (c === \"\\r\") || (c === \"\\t\") || (c === \"\\v\") || (c === \"\\u00a0\")) { // Ignores some obscure unicode spaces\n\t\t\tp++;\n\t\t} else {\n\t\t\tbreak;\n\t\t}\n\t}\n\tif(p === pos) {\n\t\treturn null;\n\t} else {\n\t\treturn {\n\t\t\ttype: \"whitespace\",\n\t\t\tstart: pos,\n\t\t\tend: p\n\t\t}\n\t}\n};\n\n/*\nConvenience wrapper for parseWhiteSpace. 
Returns the position after the whitespace\n*/\nexports.skipWhiteSpace = function(source,pos) {\n\tvar c;\n\twhile(true) {\n\t\tc = source.charAt(pos);\n\t\tif((c === \" \") || (c === \"\\f\") || (c === \"\\n\") || (c === \"\\r\") || (c === \"\\t\") || (c === \"\\v\") || (c === \"\\u00a0\")) { // Ignores some obscure unicode spaces\n\t\t\tpos++;\n\t\t} else {\n\t\t\treturn pos;\n\t\t}\n\t}\n};\n\n/*\nLook for a given string token. Returns null if not found, otherwise returns {type: \"token\", value:, start:, end:,}\n*/\nexports.parseTokenString = function(source,pos,token) {\n\tvar match = source.indexOf(token,pos) === pos;\n\tif(match) {\n\t\treturn {\n\t\t\ttype: \"token\",\n\t\t\tvalue: token,\n\t\t\tstart: pos,\n\t\t\tend: pos + token.length\n\t\t};\n\t}\n\treturn null;\n};\n\n/*\nLook for a token matching a regex. Returns null if not found, otherwise returns {type: \"regexp\", match:, start:, end:,}\n*/\nexports.parseTokenRegExp = function(source,pos,reToken) {\n\tvar node = {\n\t\ttype: \"regexp\",\n\t\tstart: pos\n\t};\n\treToken.lastIndex = pos;\n\tnode.match = reToken.exec(source);\n\tif(node.match && node.match.index === pos) {\n\t\tnode.end = pos + node.match[0].length;\n\t\treturn node;\n\t} else {\n\t\treturn null;\n\t}\n};\n\n/*\nLook for a string literal. Returns null if not found, otherwise returns {type: \"string\", value:, start:, end:,}\n*/\nexports.parseStringLiteral = function(source,pos) {\n\tvar node = {\n\t\ttype: \"string\",\n\t\tstart: pos\n\t};\n\tvar reString = /(?:\"\"\"([\\s\\S]*?)\"\"\"|\"([^\"]*)\")|(?:'([^']*)')/g;\n\treString.lastIndex = pos;\n\tvar match = reString.exec(source);\n\tif(match && match.index === pos) {\n\t\tnode.value = match[1] !== undefined ? match[1] :(\n\t\t\tmatch[2] !== undefined ? match[2] : match[3] \n\t\t\t\t\t);\n\t\tnode.end = pos + match[0].length;\n\t\treturn node;\n\t} else {\n\t\treturn null;\n\t}\n};\n\n/*\nLook for a macro invocation parameter. 
Returns null if not found, or {type: \"macro-parameter\", name:, value:, start:, end:}\n*/\nexports.parseMacroParameter = function(source,pos) {\n\tvar node = {\n\t\ttype: \"macro-parameter\",\n\t\tstart: pos\n\t};\n\t// Define our regexp\n\tvar reMacroParameter = /(?:([A-Za-z0-9\\-_]+)\\s*:)?(?:\\s*(?:\"\"\"([\\s\\S]*?)\"\"\"|\"([^\"]*)\"|'([^']*)'|\\[\\[([^\\]]*)\\]\\]|([^\\s>\"'=]+)))/g;\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Look for the parameter\n\tvar token = $tw.utils.parseTokenRegExp(source,pos,reMacroParameter);\n\tif(!token) {\n\t\treturn null;\n\t}\n\tpos = token.end;\n\t// Get the parameter details\n\tnode.value = token.match[2] !== undefined ? token.match[2] : (\n\t\t\t\t\ttoken.match[3] !== undefined ? token.match[3] : (\n\t\t\t\t\t\ttoken.match[4] !== undefined ? token.match[4] : (\n\t\t\t\t\t\t\ttoken.match[5] !== undefined ? token.match[5] : (\n\t\t\t\t\t\t\t\ttoken.match[6] !== undefined ? token.match[6] : (\n\t\t\t\t\t\t\t\t\t\"\"\n\t\t\t\t\t\t\t\t)\n\t\t\t\t\t\t\t)\n\t\t\t\t\t\t)\n\t\t\t\t\t)\n\t\t\t\t);\n\tif(token.match[1]) {\n\t\tnode.name = token.match[1];\n\t}\n\t// Update the end position\n\tnode.end = pos;\n\treturn node;\n};\n\n/*\nLook for a macro invocation. 
Returns null if not found, or {type: \"macrocall\", name:, parameters:, start:, end:}\n*/\nexports.parseMacroInvocation = function(source,pos) {\n\tvar node = {\n\t\ttype: \"macrocall\",\n\t\tstart: pos,\n\t\tparams: []\n\t};\n\t// Define our regexps\n\tvar reMacroName = /([^\\s>\"'=]+)/g;\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Look for a double less than sign\n\tvar token = $tw.utils.parseTokenString(source,pos,\"<<\");\n\tif(!token) {\n\t\treturn null;\n\t}\n\tpos = token.end;\n\t// Get the macro name\n\tvar name = $tw.utils.parseTokenRegExp(source,pos,reMacroName);\n\tif(!name) {\n\t\treturn null;\n\t}\n\tnode.name = name.match[1];\n\tpos = name.end;\n\t// Process parameters\n\tvar parameter = $tw.utils.parseMacroParameter(source,pos);\n\twhile(parameter) {\n\t\tnode.params.push(parameter);\n\t\tpos = parameter.end;\n\t\t// Get the next parameter\n\t\tparameter = $tw.utils.parseMacroParameter(source,pos);\n\t}\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Look for a double greater than sign\n\ttoken = $tw.utils.parseTokenString(source,pos,\">>\");\n\tif(!token) {\n\t\treturn null;\n\t}\n\tpos = token.end;\n\t// Update the end position\n\tnode.end = pos;\n\treturn node;\n};\n\n/*\nLook for an HTML attribute definition. 
Returns null if not found, otherwise returns {type: \"attribute\", name:, valueType: \"string|indirect|macro\", value:, start:, end:,}\n*/\nexports.parseAttribute = function(source,pos) {\n\tvar node = {\n\t\tstart: pos\n\t};\n\t// Define our regexps\n\tvar reAttributeName = /([^\\/\\s>\"'=]+)/g,\n\t\treUnquotedAttribute = /([^\\/\\s<>\"'=]+)/g,\n\t\treIndirectValue = /\\{\\{([^\\}]+)\\}\\}/g;\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Get the attribute name\n\tvar name = $tw.utils.parseTokenRegExp(source,pos,reAttributeName);\n\tif(!name) {\n\t\treturn null;\n\t}\n\tnode.name = name.match[1];\n\tpos = name.end;\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Look for an equals sign\n\tvar token = $tw.utils.parseTokenString(source,pos,\"=\");\n\tif(token) {\n\t\tpos = token.end;\n\t\t// Skip whitespace\n\t\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t\t// Look for a string literal\n\t\tvar stringLiteral = $tw.utils.parseStringLiteral(source,pos);\n\t\tif(stringLiteral) {\n\t\t\tpos = stringLiteral.end;\n\t\t\tnode.type = \"string\";\n\t\t\tnode.value = stringLiteral.value;\n\t\t} else {\n\t\t\t// Look for an indirect value\n\t\t\tvar indirectValue = $tw.utils.parseTokenRegExp(source,pos,reIndirectValue);\n\t\t\tif(indirectValue) {\n\t\t\t\tpos = indirectValue.end;\n\t\t\t\tnode.type = \"indirect\";\n\t\t\t\tnode.textReference = indirectValue.match[1];\n\t\t\t} else {\n\t\t\t\t// Look for a unquoted value\n\t\t\t\tvar unquotedValue = $tw.utils.parseTokenRegExp(source,pos,reUnquotedAttribute);\n\t\t\t\tif(unquotedValue) {\n\t\t\t\t\tpos = unquotedValue.end;\n\t\t\t\t\tnode.type = \"string\";\n\t\t\t\t\tnode.value = unquotedValue.match[1];\n\t\t\t\t} else {\n\t\t\t\t\t// Look for a macro invocation value\n\t\t\t\t\tvar macroInvocation = $tw.utils.parseMacroInvocation(source,pos);\n\t\t\t\t\tif(macroInvocation) {\n\t\t\t\t\t\tpos = macroInvocation.end;\n\t\t\t\t\t\tnode.type = 
\"macro\";\n\t\t\t\t\t\tnode.value = macroInvocation;\n\t\t\t\t\t} else {\n\t\t\t\t\t\tnode.type = \"string\";\n\t\t\t\t\t\tnode.value = \"true\";\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t} else {\n\t\tnode.type = \"string\";\n\t\tnode.value = \"true\";\n\t}\n\t// Update the end position\n\tnode.end = pos;\n\treturn node;\n};\n\n})();\n",
"title": "$:/core/modules/utils/parseutils.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/parsers/textparser.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/textparser.js\ntype: application/javascript\nmodule-type: parser\n\nThe plain text parser processes blocks of source text into a degenerate parse tree consisting of a single text node\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar TextParser = function(type,text,options) {\n\tthis.tree = [{\n\t\ttype: \"codeblock\",\n\t\tattributes: {\n\t\t\tcode: {type: \"string\", value: text},\n\t\t\tlanguage: {type: \"string\", value: type}\n\t\t}\n\t}];\n};\n\nexports[\"text/plain\"] = TextParser;\nexports[\"text/x-tiddlywiki\"] = TextParser;\nexports[\"application/javascript\"] = TextParser;\nexports[\"application/json\"] = TextParser;\nexports[\"text/css\"] = TextParser;\nexports[\"application/x-tiddler-dictionary\"] = TextParser;\n\n})();\n\n",
"title": "$:/core/modules/parsers/textparser.js",
"type": "application/javascript",
"module-type": "parser"
},
"$:/core/modules/parsers/videoparser.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/videoparser.js\ntype: application/javascript\nmodule-type: parser\n\nThe video parser parses a video tiddler into an embeddable HTML element\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar VideoParser = function(type,text,options) {\n\tvar element = {\n\t\t\ttype: \"element\",\n\t\t\ttag: \"video\",\n\t\t\tattributes: {\n\t\t\t\tcontrols: {type: \"string\", value: \"controls\"}\n\t\t\t}\n\t\t},\n\t\tsrc;\n\tif(options._canonical_uri) {\n\t\telement.attributes.src = {type: \"string\", value: options._canonical_uri};\n\t} else if(text) {\n\t\telement.attributes.src = {type: \"string\", value: \"data:\" + type + \";base64,\" + text};\n\t}\n\tthis.tree = [element];\n};\n\nexports[\"video/mp4\"] = VideoParser;\n\n})();\n\n",
"title": "$:/core/modules/parsers/videoparser.js",
"type": "application/javascript",
"module-type": "parser"
},
"$:/core/modules/parsers/wikiparser/rules/codeblock.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/codeblock.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text rule for code blocks. For example:\n\n```\n\t```\n\tThis text will not be //wikified//\n\t```\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"codeblock\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match and get language if defined\n\tthis.matchRegExp = /```([\\w-]*)\\r?\\n/mg;\n};\n\nexports.parse = function() {\n\tvar reEnd = /(\\r?\\n```$)/mg;\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\n\t// Look for the end of the block\n\treEnd.lastIndex = this.parser.pos;\n\tvar match = reEnd.exec(this.parser.source),\n\t\ttext;\n\t// Process the block\n\tif(match) {\n\t\ttext = this.parser.source.substring(this.parser.pos,match.index);\n\t\tthis.parser.pos = match.index + match[0].length;\n\t} else {\n\t\ttext = this.parser.source.substr(this.parser.pos);\n\t\tthis.parser.pos = this.parser.sourceLength;\n\t}\n\t// Return the $codeblock widget\n\treturn [{\n\t\t\ttype: \"codeblock\",\n\t\t\tattributes: {\n\t\t\t\t\tcode: {type: \"string\", value: text},\n\t\t\t\t\tlanguage: {type: \"string\", value: this.match[1]}\n\t\t\t}\n\t}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/codeblock.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/codeinline.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/codeinline.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for code runs. For example:\n\n```\n\tThis is a `code run`.\n\tThis is another ``code run``\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"codeinline\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /(``?)/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\tvar reEnd = new RegExp(this.match[1], \"mg\");\n\t// Look for the end marker\n\treEnd.lastIndex = this.parser.pos;\n\tvar match = reEnd.exec(this.parser.source),\n\t\ttext;\n\t// Process the text\n\tif(match) {\n\t\ttext = this.parser.source.substring(this.parser.pos,match.index);\n\t\tthis.parser.pos = match.index + match[0].length;\n\t} else {\n\t\ttext = this.parser.source.substr(this.parser.pos);\n\t\tthis.parser.pos = this.parser.sourceLength;\n\t}\n\treturn [{\n\t\ttype: \"element\",\n\t\ttag: \"code\",\n\t\tchildren: [{\n\t\t\ttype: \"text\",\n\t\t\ttext: text\n\t\t}]\n\t}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/codeinline.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/commentblock.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/commentblock.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text block rule for HTML comments. For example:\n\n```\n<!-- This is a comment -->\n```\n\nNote that the syntax for comments is simplified to an opening \"<!--\" sequence and a closing \"-->\" sequence -- HTML itself implements a more complex format (see http://ostermiller.org/findhtmlcomment.html)\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"commentblock\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\tthis.matchRegExp = /<!--/mg;\n\tthis.endMatchRegExp = /-->/mg;\n};\n\nexports.findNextMatch = function(startPos) {\n\tthis.matchRegExp.lastIndex = startPos;\n\tthis.match = this.matchRegExp.exec(this.parser.source);\n\tif(this.match) {\n\t\tthis.endMatchRegExp.lastIndex = startPos + this.match[0].length;\n\t\tthis.endMatch = this.endMatchRegExp.exec(this.parser.source);\n\t\tif(this.endMatch) {\n\t\t\treturn this.match.index;\n\t\t}\n\t}\n\treturn undefined;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.endMatchRegExp.lastIndex;\n\t// Don't return any elements\n\treturn [];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/commentblock.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/commentinline.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/commentinline.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for HTML comments. For example:\n\n```\n<!-- This is a comment -->\n```\n\nNote that the syntax for comments is simplified to an opening \"<!--\" sequence and a closing \"-->\" sequence -- HTML itself implements a more complex format (see http://ostermiller.org/findhtmlcomment.html)\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"commentinline\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\tthis.matchRegExp = /<!--/mg;\n\tthis.endMatchRegExp = /-->/mg;\n};\n\nexports.findNextMatch = function(startPos) {\n\tthis.matchRegExp.lastIndex = startPos;\n\tthis.match = this.matchRegExp.exec(this.parser.source);\n\tif(this.match) {\n\t\tthis.endMatchRegExp.lastIndex = startPos + this.match[0].length;\n\t\tthis.endMatch = this.endMatchRegExp.exec(this.parser.source);\n\t\tif(this.endMatch) {\n\t\t\treturn this.match.index;\n\t\t}\n\t}\n\treturn undefined;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.endMatchRegExp.lastIndex;\n\t// Don't return any elements\n\treturn [];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/commentinline.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/dash.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/dash.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for dashes. For example:\n\n```\nThis is an en-dash: --\n\nThis is an em-dash: ---\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"dash\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /-{2,3}(?!-)/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\tvar dash = this.match[0].length === 2 ? \"&ndash;\" : \"&mdash;\";\n\treturn [{\n\t\ttype: \"entity\",\n\t\tentity: dash\n\t}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/dash.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/emphasis/bold.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/emphasis/bold.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for emphasis - bold. For example:\n\n```\n\tThis is ''bold'' text\n```\n\nThis wikiparser can be modified using the rules eg:\n\n```\n\\rules except bold \n\\rules only bold \n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"bold\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /''/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\n\t// Parse the run including the terminator\n\tvar tree = this.parser.parseInlineRun(/''/mg,{eatTerminator: true});\n\n\t// Return the classed span\n\treturn [{\n\t\ttype: \"element\",\n\t\ttag: \"strong\",\n\t\tchildren: tree\n\t}];\n};\n\n})();",
"title": "$:/core/modules/parsers/wikiparser/rules/emphasis/bold.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/emphasis/italic.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/emphasis/italic.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for emphasis - italic. For example:\n\n```\n\tThis is //italic// text\n```\n\nThis wikiparser can be modified using the rules eg:\n\n```\n\\rules except italic\n\\rules only italic\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"italic\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /\\/\\//mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\n\t// Parse the run including the terminator\n\tvar tree = this.parser.parseInlineRun(/\\/\\//mg,{eatTerminator: true});\n\n\t// Return the classed span\n\treturn [{\n\t\ttype: \"element\",\n\t\ttag: \"em\",\n\t\tchildren: tree\n\t}];\n};\n\n})();",
"title": "$:/core/modules/parsers/wikiparser/rules/emphasis/italic.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/emphasis/strikethrough.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/emphasis/strikethrough.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for emphasis - strikethrough. For example:\n\n```\n\tThis is ~~strikethrough~~ text\n```\n\nThis wikiparser can be modified using the rules eg:\n\n```\n\\rules except strikethrough \n\\rules only strikethrough \n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"strikethrough\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /~~/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\n\t// Parse the run including the terminator\n\tvar tree = this.parser.parseInlineRun(/~~/mg,{eatTerminator: true});\n\n\t// Return the classed span\n\treturn [{\n\t\ttype: \"element\",\n\t\ttag: \"strike\",\n\t\tchildren: tree\n\t}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/emphasis/strikethrough.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/emphasis/subscript.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/emphasis/subscript.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for emphasis - subscript. For example:\n\n```\n\tThis is ,,subscript,, text\n```\n\nThis wikiparser can be modified using the rules eg:\n\n```\n\\rules except subscript \n\\rules only subscript \n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"subscript\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /,,/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\n\t// Parse the run including the terminator\n\tvar tree = this.parser.parseInlineRun(/,,/mg,{eatTerminator: true});\n\n\t// Return the classed span\n\treturn [{\n\t\ttype: \"element\",\n\t\ttag: \"sub\",\n\t\tchildren: tree\n\t}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/emphasis/subscript.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/emphasis/superscript.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/emphasis/superscript.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for emphasis - superscript. For example:\n\n```\n\tThis is ^^superscript^^ text\n```\n\nThis wikiparser can be modified using the rules eg:\n\n```\n\\rules except superscript \n\\rules only superscript \n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"superscript\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /\\^\\^/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\n\t// Parse the run including the terminator\n\tvar tree = this.parser.parseInlineRun(/\\^\\^/mg,{eatTerminator: true});\n\n\t// Return the classed span\n\treturn [{\n\t\ttype: \"element\",\n\t\ttag: \"sup\",\n\t\tchildren: tree\n\t}];\n};\n\n})();",
"title": "$:/core/modules/parsers/wikiparser/rules/emphasis/superscript.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/emphasis/underscore.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/emphasis/underscore.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for emphasis - underscore. For example:\n\n```\n\tThis is __underscore__ text\n```\n\nThis wikiparser can be modified using the rules eg:\n\n```\n\\rules except underscore \n\\rules only underscore\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"underscore\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /__/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\n\t// Parse the run including the terminator\n\tvar tree = this.parser.parseInlineRun(/__/mg,{eatTerminator: true});\n\n\t// Return the classed span\n\treturn [{\n\t\ttype: \"element\",\n\t\ttag: \"u\",\n\t\tchildren: tree\n\t}];\n};\n\n})();",
"title": "$:/core/modules/parsers/wikiparser/rules/emphasis/underscore.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/entity.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/entity.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for HTML entities. For example:\n\n```\n\tThis is a copyright symbol: ©\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"entity\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /(&#?[a-zA-Z0-9]{2,8};)/mg;\n};\n\n/*\nParse the most recent match\n*/\nexports.parse = function() {\n\t// Get all the details of the match\n\tvar entityString = this.match[1];\n\t// Move past the macro call\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Return the entity\n\treturn [{type: \"entity\", entity: this.match[0]}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/entity.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/extlink.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/extlink.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for external links. For example:\n\n```\nAn external link: http://www.tiddlywiki.com/\n\nA suppressed external link: ~http://www.tiddlyspace.com/\n```\n\nExternal links can be suppressed by preceding them with `~`.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"extlink\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /~?(?:file|http|https|mailto|ftp|irc|news|data|skype):[^\\s<>{}\\[\\]`|\"\\\\^]+(?:\\/|\\b)/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Create the link unless it is suppressed\n\tif(this.match[0].substr(0,1) === \"~\") {\n\t\treturn [{type: \"text\", text: this.match[0].substr(1)}];\n\t} else {\n\t\treturn [{\n\t\t\ttype: \"element\",\n\t\t\ttag: \"a\",\n\t\t\tattributes: {\n\t\t\t\thref: {type: \"string\", value: this.match[0]},\n\t\t\t\t\"class\": {type: \"string\", value: \"tc-tiddlylink-external\"},\n\t\t\t\ttarget: {type: \"string\", value: \"_blank\"},\n\t\t\t\trel: {type: \"string\", value: \"noopener noreferrer\"}\n\t\t\t},\n\t\t\tchildren: [{\n\t\t\t\ttype: \"text\", text: this.match[0]\n\t\t\t}]\n\t\t}];\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/extlink.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/filteredtranscludeblock.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/filteredtranscludeblock.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text rule for block-level filtered transclusion. For example:\n\n```\n{{{ [tag[docs]] }}}\n{{{ [tag[docs]] |tooltip}}}\n{{{ [tag[docs]] ||TemplateTitle}}}\n{{{ [tag[docs]] |tooltip||TemplateTitle}}}\n{{{ [tag[docs]] }}width:40;height:50;}.class.class\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"filteredtranscludeblock\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /\\{\\{\\{([^\\|]+?)(?:\\|([^\\|\\{\\}]+))?(?:\\|\\|([^\\|\\{\\}]+))?\\}\\}([^\\}]*)\\}(?:\\.(\\S+))?(?:\\r?\\n|$)/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Get the match details\n\tvar filter = this.match[1],\n\t\ttooltip = this.match[2],\n\t\ttemplate = $tw.utils.trim(this.match[3]),\n\t\tstyle = this.match[4],\n\t\tclasses = this.match[5];\n\t// Return the list widget\n\tvar node = {\n\t\ttype: \"list\",\n\t\tattributes: {\n\t\t\tfilter: {type: \"string\", value: filter}\n\t\t},\n\t\tisBlock: true\n\t};\n\tif(tooltip) {\n\t\tnode.attributes.tooltip = {type: \"string\", value: tooltip};\n\t}\n\tif(template) {\n\t\tnode.attributes.template = {type: \"string\", value: template};\n\t}\n\tif(style) {\n\t\tnode.attributes.style = {type: \"string\", value: style};\n\t}\n\tif(classes) {\n\t\tnode.attributes.itemClass = {type: \"string\", value: classes.split(\".\").join(\" \")};\n\t}\n\treturn [node];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/filteredtranscludeblock.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/filteredtranscludeinline.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/filteredtranscludeinline.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text rule for inline filtered transclusion. For example:\n\n```\n{{{ [tag[docs]] }}}\n{{{ [tag[docs]] |tooltip}}}\n{{{ [tag[docs]] ||TemplateTitle}}}\n{{{ [tag[docs]] |tooltip||TemplateTitle}}}\n{{{ [tag[docs]] }}width:40;height:50;}.class.class\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"filteredtranscludeinline\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /\\{\\{\\{([^\\|]+?)(?:\\|([^\\|\\{\\}]+))?(?:\\|\\|([^\\|\\{\\}]+))?\\}\\}([^\\}]*)\\}(?:\\.(\\S+))?/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Get the match details\n\tvar filter = this.match[1],\n\t\ttooltip = this.match[2],\n\t\ttemplate = $tw.utils.trim(this.match[3]),\n\t\tstyle = this.match[4],\n\t\tclasses = this.match[5];\n\t// Return the list widget\n\tvar node = {\n\t\ttype: \"list\",\n\t\tattributes: {\n\t\t\tfilter: {type: \"string\", value: filter}\n\t\t}\n\t};\n\tif(tooltip) {\n\t\tnode.attributes.tooltip = {type: \"string\", value: tooltip};\n\t}\n\tif(template) {\n\t\tnode.attributes.template = {type: \"string\", value: template};\n\t}\n\tif(style) {\n\t\tnode.attributes.style = {type: \"string\", value: style};\n\t}\n\tif(classes) {\n\t\tnode.attributes.itemClass = {type: \"string\", value: classes.split(\".\").join(\" \")};\n\t}\n\treturn [node];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/filteredtranscludeinline.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/hardlinebreaks.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/hardlinebreaks.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for marking areas with hard line breaks. For example:\n\n```\n\"\"\"\nThis is some text\nThat is set like\nIt is a Poem\nWhen it is\nClearly\nNot\n\"\"\"\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"hardlinebreaks\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /\"\"\"(?:\\r?\\n)?/mg;\n};\n\nexports.parse = function() {\n\tvar reEnd = /(\"\"\")|(\\r?\\n)/mg,\n\t\ttree = [],\n\t\tmatch;\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\tdo {\n\t\t// Parse the run up to the terminator\n\t\ttree.push.apply(tree,this.parser.parseInlineRun(reEnd,{eatTerminator: false}));\n\t\t// Redo the terminator match\n\t\treEnd.lastIndex = this.parser.pos;\n\t\tmatch = reEnd.exec(this.parser.source);\n\t\tif(match) {\n\t\t\tthis.parser.pos = reEnd.lastIndex;\n\t\t\t// Add a line break if the terminator was a line break\n\t\t\tif(match[2]) {\n\t\t\t\ttree.push({type: \"element\", tag: \"br\"});\n\t\t\t}\n\t\t}\n\t} while(match && !match[1]);\n\t// Return the nodes\n\treturn tree;\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/hardlinebreaks.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/heading.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/heading.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text block rule for headings\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"heading\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /(!{1,6})/mg;\n};\n\n/*\nParse the most recent match\n*/\nexports.parse = function() {\n\t// Get all the details of the match\n\tvar headingLevel = this.match[1].length;\n\t// Move past the !s\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Parse any classes, whitespace and then the heading itself\n\tvar classes = this.parser.parseClasses();\n\tthis.parser.skipWhitespace({treatNewlinesAsNonWhitespace: true});\n\tvar tree = this.parser.parseInlineRun(/(\\r?\\n)/mg);\n\t// Return the heading\n\treturn [{\n\t\ttype: \"element\",\n\t\ttag: \"h\" + headingLevel, \n\t\tattributes: {\n\t\t\t\"class\": {type: \"string\", value: classes.join(\" \")}\n\t\t},\n\t\tchildren: tree\n\t}];\n};\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/heading.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/horizrule.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/horizrule.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text block rule for rules. For example:\n\n```\n---\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"horizrule\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /-{3,}\\r?(?:\\n|$)/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\treturn [{type: \"element\", tag: \"hr\"}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/horizrule.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/html.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/html.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki rule for HTML elements and widgets. For example:\n\n{{{\n<aside>\nThis is an HTML5 aside element\n</aside>\n\n<$slider target=\"MyTiddler\">\nThis is a widget invocation\n</$slider>\n\n}}}\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"html\";\nexports.types = {inline: true, block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n};\n\nexports.findNextMatch = function(startPos) {\n\t// Find the next tag\n\tthis.nextTag = this.findNextTag(this.parser.source,startPos,{\n\t\trequireLineBreak: this.is.block\n\t});\n\treturn this.nextTag ? this.nextTag.start : undefined;\n};\n\n/*\nParse the most recent match\n*/\nexports.parse = function() {\n\t// Retrieve the most recent match so that recursive calls don't overwrite it\n\tvar tag = this.nextTag;\n\tthis.nextTag = null;\n\t// Advance the parser position to past the tag\n\tthis.parser.pos = tag.end;\n\t// Check for an immediately following double linebreak\n\tvar hasLineBreak = !tag.isSelfClosing && !!$tw.utils.parseTokenRegExp(this.parser.source,this.parser.pos,/([^\\S\\n\\r]*\\r?\\n(?:[^\\S\\n\\r]*\\r?\\n|$))/g);\n\t// Set whether we're in block mode\n\ttag.isBlock = this.is.block || hasLineBreak;\n\t// Parse the body if we need to\n\tif(!tag.isSelfClosing && $tw.config.htmlVoidElements.indexOf(tag.tag) === -1) {\n\t\t\tvar reEndString = \"</\" + $tw.utils.escapeRegExp(tag.tag) + \">\",\n\t\t\t\treEnd = new RegExp(\"(\" + reEndString + \")\",\"mg\");\n\t\tif(hasLineBreak) {\n\t\t\ttag.children = this.parser.parseBlocks(reEndString);\n\t\t} else {\n\t\t\ttag.children = this.parser.parseInlineRun(reEnd);\n\t\t}\n\t\treEnd.lastIndex = this.parser.pos;\n\t\tvar endMatch = reEnd.exec(this.parser.source);\n\t\tif(endMatch && endMatch.index === this.parser.pos) {\n\t\t\tthis.parser.pos = 
endMatch.index + endMatch[0].length;\n\t\t}\n\t}\n\t// Return the tag\n\treturn [tag];\n};\n\n/*\nLook for an HTML tag. Returns null if not found, otherwise returns {type: \"element\", name:, attributes: [], isSelfClosing:, start:, end:,}\n*/\nexports.parseTag = function(source,pos,options) {\n\toptions = options || {};\n\tvar token,\n\t\tnode = {\n\t\t\ttype: \"element\",\n\t\t\tstart: pos,\n\t\t\tattributes: {}\n\t\t};\n\t// Define our regexps\n\tvar reTagName = /([a-zA-Z0-9\\-\\$]+)/g;\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Look for a less than sign\n\ttoken = $tw.utils.parseTokenString(source,pos,\"<\");\n\tif(!token) {\n\t\treturn null;\n\t}\n\tpos = token.end;\n\t// Get the tag name\n\ttoken = $tw.utils.parseTokenRegExp(source,pos,reTagName);\n\tif(!token) {\n\t\treturn null;\n\t}\n\tnode.tag = token.match[1];\n\tif(node.tag.charAt(0) === \"$\") {\n\t\tnode.type = node.tag.substr(1);\n\t}\n\tpos = token.end;\n\t// Process attributes\n\tvar attribute = $tw.utils.parseAttribute(source,pos);\n\twhile(attribute) {\n\t\tnode.attributes[attribute.name] = attribute;\n\t\tpos = attribute.end;\n\t\t// Get the next attribute\n\t\tattribute = $tw.utils.parseAttribute(source,pos);\n\t}\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Look for a closing slash\n\ttoken = $tw.utils.parseTokenString(source,pos,\"/\");\n\tif(token) {\n\t\tpos = token.end;\n\t\tnode.isSelfClosing = true;\n\t}\n\t// Look for a greater than sign\n\ttoken = $tw.utils.parseTokenString(source,pos,\">\");\n\tif(!token) {\n\t\treturn null;\n\t}\n\tpos = token.end;\n\t// Check for a required line break\n\tif(options.requireLineBreak) {\n\t\ttoken = $tw.utils.parseTokenRegExp(source,pos,/([^\\S\\n\\r]*\\r?\\n(?:[^\\S\\n\\r]*\\r?\\n|$))/g);\n\t\tif(!token) {\n\t\t\treturn null;\n\t\t}\n\t}\n\t// Update the end position\n\tnode.end = pos;\n\treturn node;\n};\n\nexports.findNextTag = function(source,pos,options) {\n\t// A regexp for finding 
candidate HTML tags\n\tvar reLookahead = /<([a-zA-Z\\-\\$]+)/g;\n\t// Find the next candidate\n\treLookahead.lastIndex = pos;\n\tvar match = reLookahead.exec(source);\n\twhile(match) {\n\t\t// Try to parse the candidate as a tag\n\t\tvar tag = this.parseTag(source,match.index,options);\n\t\t// Return success\n\t\tif(tag && this.isLegalTag(tag)) {\n\t\t\treturn tag;\n\t\t}\n\t\t// Look for the next match\n\t\treLookahead.lastIndex = match.index + 1;\n\t\tmatch = reLookahead.exec(source);\n\t}\n\t// Failed\n\treturn null;\n};\n\nexports.isLegalTag = function(tag) {\n\t// Widgets are always OK\n\tif(tag.type !== \"element\") {\n\t\treturn true;\n\t// If it's an HTML tag that starts with a dash then it's not legal\n\t} else if(tag.tag.charAt(0) === \"-\") {\n\t\treturn false;\n\t} else {\n\t\t// Otherwise it's OK\n\t\treturn true;\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/html.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/image.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/image.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for embedding images. For example:\n\n```\n[img[http://tiddlywiki.com/fractalveg.jpg]]\n[img width=23 height=24 [http://tiddlywiki.com/fractalveg.jpg]]\n[img width={{!!width}} height={{!!height}} [http://tiddlywiki.com/fractalveg.jpg]]\n[img[Description of image|http://tiddlywiki.com/fractalveg.jpg]]\n[img[TiddlerTitle]]\n[img[Description of image|TiddlerTitle]]\n```\n\nGenerates the `<$image>` widget.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"image\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n};\n\nexports.findNextMatch = function(startPos) {\n\t// Find the next tag\n\tthis.nextImage = this.findNextImage(this.parser.source,startPos);\n\treturn this.nextImage ? this.nextImage.start : undefined;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.nextImage.end;\n\tvar node = {\n\t\ttype: \"image\",\n\t\tattributes: this.nextImage.attributes\n\t};\n\treturn [node];\n};\n\n/*\nFind the next image from the current position\n*/\nexports.findNextImage = function(source,pos) {\n\t// A regexp for finding candidate HTML tags\n\tvar reLookahead = /(\\[img)/g;\n\t// Find the next candidate\n\treLookahead.lastIndex = pos;\n\tvar match = reLookahead.exec(source);\n\twhile(match) {\n\t\t// Try to parse the candidate as a tag\n\t\tvar tag = this.parseImage(source,match.index);\n\t\t// Return success\n\t\tif(tag) {\n\t\t\treturn tag;\n\t\t}\n\t\t// Look for the next match\n\t\treLookahead.lastIndex = match.index + 1;\n\t\tmatch = reLookahead.exec(source);\n\t}\n\t// Failed\n\treturn null;\n};\n\n/*\nLook for an image at the specified position. 
Returns null if not found, otherwise returns {type: \"image\", attributes: [], isSelfClosing:, start:, end:,}\n*/\nexports.parseImage = function(source,pos) {\n\tvar token,\n\t\tnode = {\n\t\t\ttype: \"image\",\n\t\t\tstart: pos,\n\t\t\tattributes: {}\n\t\t};\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Look for the `[img`\n\ttoken = $tw.utils.parseTokenString(source,pos,\"[img\");\n\tif(!token) {\n\t\treturn null;\n\t}\n\tpos = token.end;\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Process attributes\n\tif(source.charAt(pos) !== \"[\") {\n\t\tvar attribute = $tw.utils.parseAttribute(source,pos);\n\t\twhile(attribute) {\n\t\t\tnode.attributes[attribute.name] = attribute;\n\t\t\tpos = attribute.end;\n\t\t\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t\t\tif(source.charAt(pos) !== \"[\") {\n\t\t\t\t// Get the next attribute\n\t\t\t\tattribute = $tw.utils.parseAttribute(source,pos);\n\t\t\t} else {\n\t\t\t\tattribute = null;\n\t\t\t}\n\t\t}\n\t}\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Look for the `[` after the attributes\n\ttoken = $tw.utils.parseTokenString(source,pos,\"[\");\n\tif(!token) {\n\t\treturn null;\n\t}\n\tpos = token.end;\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Get the source up to the terminating `]]`\n\ttoken = $tw.utils.parseTokenRegExp(source,pos,/(?:([^|\\]]*?)\\|)?([^\\]]+?)\\]\\]/g);\n\tif(!token) {\n\t\treturn null;\n\t}\n\tpos = token.end;\n\tif(token.match[1]) {\n\t\tnode.attributes.tooltip = {type: \"string\", value: token.match[1].trim()};\n\t}\n\tnode.attributes.source = {type: \"string\", value: (token.match[2] || \"\").trim()};\n\t// Update the end position\n\tnode.end = pos;\n\treturn node;\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/image.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/list.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/list.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text block rule for lists. For example:\n\n```\n* This is an unordered list\n* It has two items\n\n# This is a numbered list\n## With a subitem\n# And a third item\n\n; This is a term that is being defined\n: This is the definition of that term\n```\n\nNote that lists can be nested arbitrarily:\n\n```\n#** One\n#* Two\n#** Three\n#**** Four\n#**# Five\n#**## Six\n## Seven\n### Eight\n## Nine\n```\n\nA CSS class can be applied to a list item as follows:\n\n```\n* List item one\n*.active List item two has the class `active`\n* List item three\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"list\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /([\\*#;:>]+)/mg;\n};\n\nvar listTypes = {\n\t\"*\": {listTag: \"ul\", itemTag: \"li\"},\n\t\"#\": {listTag: \"ol\", itemTag: \"li\"},\n\t\";\": {listTag: \"dl\", itemTag: \"dt\"},\n\t\":\": {listTag: \"dl\", itemTag: \"dd\"},\n\t\">\": {listTag: \"blockquote\", itemTag: \"p\"}\n};\n\n/*\nParse the most recent match\n*/\nexports.parse = function() {\n\t// Array of parse tree nodes for the previous row of the list\n\tvar listStack = [];\n\t// Cycle through the items in the list\n\twhile(true) {\n\t\t// Match the list marker\n\t\tvar reMatch = /([\\*#;:>]+)/mg;\n\t\treMatch.lastIndex = this.parser.pos;\n\t\tvar match = reMatch.exec(this.parser.source);\n\t\tif(!match || match.index !== this.parser.pos) {\n\t\t\tbreak;\n\t\t}\n\t\t// Check whether the list type of the top level matches\n\t\tvar listInfo = listTypes[match[0].charAt(0)];\n\t\tif(listStack.length > 0 && listStack[0].tag !== listInfo.listTag) {\n\t\t\tbreak;\n\t\t}\n\t\t// Move past the list marker\n\t\tthis.parser.pos = match.index + match[0].length;\n\t\t// Walk 
through the list markers for the current row\n\t\tfor(var t=0; t<match[0].length; t++) {\n\t\t\tlistInfo = listTypes[match[0].charAt(t)];\n\t\t\t// Remove any stacked up element if we can't re-use it because the list type doesn't match\n\t\t\tif(listStack.length > t && listStack[t].tag !== listInfo.listTag) {\n\t\t\t\tlistStack.splice(t,listStack.length - t);\n\t\t\t}\n\t\t\t// Construct the list element or reuse the previous one at this level\n\t\t\tif(listStack.length <= t) {\n\t\t\t\tvar listElement = {type: \"element\", tag: listInfo.listTag, children: [\n\t\t\t\t\t{type: \"element\", tag: listInfo.itemTag, children: []}\n\t\t\t\t]};\n\t\t\t\t// Link this list element into the last child item of the parent list item\n\t\t\t\tif(t) {\n\t\t\t\t\tvar prevListItem = listStack[t-1].children[listStack[t-1].children.length-1];\n\t\t\t\t\tprevListItem.children.push(listElement);\n\t\t\t\t}\n\t\t\t\t// Save this element in the stack\n\t\t\t\tlistStack[t] = listElement;\n\t\t\t} else if(t === (match[0].length - 1)) {\n\t\t\t\tlistStack[t].children.push({type: \"element\", tag: listInfo.itemTag, children: []});\n\t\t\t}\n\t\t}\n\t\tif(listStack.length > match[0].length) {\n\t\t\tlistStack.splice(match[0].length,listStack.length - match[0].length);\n\t\t}\n\t\t// Process the body of the list item into the last list item\n\t\tvar lastListChildren = listStack[listStack.length-1].children,\n\t\t\tlastListItem = lastListChildren[lastListChildren.length-1],\n\t\t\tclasses = this.parser.parseClasses();\n\t\tthis.parser.skipWhitespace({treatNewlinesAsNonWhitespace: true});\n\t\tvar tree = this.parser.parseInlineRun(/(\\r?\\n)/mg);\n\t\tlastListItem.children.push.apply(lastListItem.children,tree);\n\t\tif(classes.length > 0) {\n\t\t\t$tw.utils.addClassToParseTreeNode(lastListItem,classes.join(\" \"));\n\t\t}\n\t\t// Consume any whitespace following the list item\n\t\tthis.parser.skipWhitespace();\n\t}\n\t// Return the root element of the list\n\treturn 
[listStack[0]];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/list.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/macrocallblock.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/macrocallblock.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki rule for block macro calls\n\n```\n<<name value value2>>\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"macrocallblock\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /<<([^>\\s]+)(?:\\s*)((?:[^>]|(?:>(?!>)))*?)>>(?:\\r?\\n|$)/mg;\n};\n\n/*\nParse the most recent match\n*/\nexports.parse = function() {\n\t// Get all the details of the match\n\tvar macroName = this.match[1],\n\t\tparamString = this.match[2];\n\t// Move past the macro call\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\tvar params = [],\n\t\treParam = /\\s*(?:([A-Za-z0-9\\-_]+)\\s*:)?(?:\\s*(?:\"\"\"([\\s\\S]*?)\"\"\"|\"([^\"]*)\"|'([^']*)'|\\[\\[([^\\]]*)\\]\\]|([^\"'\\s]+)))/mg,\n\t\tparamMatch = reParam.exec(paramString);\n\twhile(paramMatch) {\n\t\t// Process this parameter\n\t\tvar paramInfo = {\n\t\t\tvalue: paramMatch[2] || paramMatch[3] || paramMatch[4] || paramMatch[5] || paramMatch[6]\n\t\t};\n\t\tif(paramMatch[1]) {\n\t\t\tparamInfo.name = paramMatch[1];\n\t\t}\n\t\tparams.push(paramInfo);\n\t\t// Find the next match\n\t\tparamMatch = reParam.exec(paramString);\n\t}\n\treturn [{\n\t\ttype: \"macrocall\",\n\t\tname: macroName,\n\t\tparams: params,\n\t\tisBlock: true\n\t}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/macrocallblock.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/macrocallinline.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/macrocallinline.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki rule for macro calls\n\n```\n<<name value value2>>\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"macrocallinline\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /<<([^\\s>]+)\\s*([\\s\\S]*?)>>/mg;\n};\n\n/*\nParse the most recent match\n*/\nexports.parse = function() {\n\t// Get all the details of the match\n\tvar macroName = this.match[1],\n\t\tparamString = this.match[2];\n\t// Move past the macro call\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\tvar params = [],\n\t\treParam = /\\s*(?:([A-Za-z0-9\\-_]+)\\s*:)?(?:\\s*(?:\"\"\"([\\s\\S]*?)\"\"\"|\"([^\"]*)\"|'([^']*)'|\\[\\[([^\\]]*)\\]\\]|([^\"'\\s]+)))/mg,\n\t\tparamMatch = reParam.exec(paramString);\n\twhile(paramMatch) {\n\t\t// Process this parameter\n\t\tvar paramInfo = {\n\t\t\tvalue: paramMatch[2] || paramMatch[3] || paramMatch[4] || paramMatch[5]|| paramMatch[6]\n\t\t};\n\t\tif(paramMatch[1]) {\n\t\t\tparamInfo.name = paramMatch[1];\n\t\t}\n\t\tparams.push(paramInfo);\n\t\t// Find the next match\n\t\tparamMatch = reParam.exec(paramString);\n\t}\n\treturn [{\n\t\ttype: \"macrocall\",\n\t\tname: macroName,\n\t\tparams: params\n\t}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/macrocallinline.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/macrodef.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/macrodef.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki pragma rule for macro definitions\n\n```\n\\define name(param:defaultvalue,param2:defaultvalue)\ndefinition text, including $param$ markers\n\\end\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"macrodef\";\nexports.types = {pragma: true};\n\n/*\nInstantiate parse rule\n*/\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /^\\\\define\\s+([^(\\s]+)\\(\\s*([^)]*)\\)(\\s*\\r?\\n)?/mg;\n};\n\n/*\nParse the most recent match\n*/\nexports.parse = function() {\n\t// Move past the macro name and parameters\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Parse the parameters\n\tvar paramString = this.match[2],\n\t\tparams = [];\n\tif(paramString !== \"\") {\n\t\tvar reParam = /\\s*([A-Za-z0-9\\-_]+)(?:\\s*:\\s*(?:\"\"\"([\\s\\S]*?)\"\"\"|\"([^\"]*)\"|'([^']*)'|\\[\\[([^\\]]*)\\]\\]|([^\"'\\s]+)))?/mg,\n\t\t\tparamMatch = reParam.exec(paramString);\n\t\twhile(paramMatch) {\n\t\t\t// Save the parameter details\n\t\t\tvar paramInfo = {name: paramMatch[1]},\n\t\t\t\tdefaultValue = paramMatch[2] || paramMatch[3] || paramMatch[4] || paramMatch[5] || paramMatch[6];\n\t\t\tif(defaultValue) {\n\t\t\t\tparamInfo[\"default\"] = defaultValue;\n\t\t\t}\n\t\t\tparams.push(paramInfo);\n\t\t\t// Look for the next parameter\n\t\t\tparamMatch = reParam.exec(paramString);\n\t\t}\n\t}\n\t// Is this a multiline definition?\n\tvar reEnd;\n\tif(this.match[3]) {\n\t\t// If so, the end of the body is marked with \\end\n\t\treEnd = /(\\r?\\n\\\\end[^\\S\\n\\r]*(?:$|\\r?\\n))/mg;\n\t} else {\n\t\t// Otherwise, the end of the definition is marked by the end of the line\n\t\treEnd = /(\\r?\\n)/mg;\n\t\t// Move past any whitespace\n\t\tthis.parser.pos = $tw.utils.skipWhiteSpace(this.parser.source,this.parser.pos);\n\t}\n\t// Find 
the end of the definition\n\treEnd.lastIndex = this.parser.pos;\n\tvar text,\n\t\tendMatch = reEnd.exec(this.parser.source);\n\tif(endMatch) {\n\t\ttext = this.parser.source.substring(this.parser.pos,endMatch.index);\n\t\tthis.parser.pos = endMatch.index + endMatch[0].length;\n\t} else {\n\t\t// We didn't find the end of the definition, so we'll make it blank\n\t\ttext = \"\";\n\t}\n\t// Save the macro definition\n\treturn [{\n\t\ttype: \"set\",\n\t\tattributes: {\n\t\t\tname: {type: \"string\", value: this.match[1]},\n\t\t\tvalue: {type: \"string\", value: text}\n\t\t},\n\t\tchildren: [],\n\t\tparams: params\n\t}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/macrodef.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/prettyextlink.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/prettyextlink.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for external links. For example:\n\n```\n[ext[http://tiddlywiki.com/fractalveg.jpg]]\n[ext[Tooltip|http://tiddlywiki.com/fractalveg.jpg]]\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"prettyextlink\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n};\n\nexports.findNextMatch = function(startPos) {\n\t// Find the next tag\n\tthis.nextLink = this.findNextLink(this.parser.source,startPos);\n\treturn this.nextLink ? this.nextLink.start : undefined;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.nextLink.end;\n\treturn [this.nextLink];\n};\n\n/*\nFind the next link from the current position\n*/\nexports.findNextLink = function(source,pos) {\n\t// A regexp for finding candidate links\n\tvar reLookahead = /(\\[ext\\[)/g;\n\t// Find the next candidate\n\treLookahead.lastIndex = pos;\n\tvar match = reLookahead.exec(source);\n\twhile(match) {\n\t\t// Try to parse the candidate as a link\n\t\tvar link = this.parseLink(source,match.index);\n\t\t// Return success\n\t\tif(link) {\n\t\t\treturn link;\n\t\t}\n\t\t// Look for the next match\n\t\treLookahead.lastIndex = match.index + 1;\n\t\tmatch = reLookahead.exec(source);\n\t}\n\t// Failed\n\treturn null;\n};\n\n/*\nLook for an link at the specified position. 
Returns null if not found, otherwise returns {type: \"element\", tag: \"a\", attributes: [], isSelfClosing:, start:, end:,}\n*/\nexports.parseLink = function(source,pos) {\n\tvar token,\n\t\ttextNode = {\n\t\t\ttype: \"text\"\n\t\t},\n\t\tnode = {\n\t\t\ttype: \"element\",\n\t\t\ttag: \"a\",\n\t\t\tstart: pos,\n\t\t\tattributes: {\n\t\t\t\t\"class\": {type: \"string\", value: \"tc-tiddlylink-external\"},\n\t\t\t},\n\t\t\tchildren: [textNode]\n\t\t};\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Look for the `[ext[`\n\ttoken = $tw.utils.parseTokenString(source,pos,\"[ext[\");\n\tif(!token) {\n\t\treturn null;\n\t}\n\tpos = token.end;\n\t// Look ahead for the terminating `]]`\n\tvar closePos = source.indexOf(\"]]\",pos);\n\tif(closePos === -1) {\n\t\treturn null;\n\t}\n\t// Look for a `|` separating the tooltip\n\tvar splitPos = source.indexOf(\"|\",pos);\n\tif(splitPos === -1 || splitPos > closePos) {\n\t\tsplitPos = null;\n\t}\n\t// Pull out the tooltip and URL\n\tvar tooltip, URL;\n\tif(splitPos) {\n\t\tURL = source.substring(splitPos + 1,closePos).trim();\n\t\ttextNode.text = source.substring(pos,splitPos).trim();\n\t} else {\n\t\tURL = source.substring(pos,closePos).trim();\n\t\ttextNode.text = URL;\n\t}\n\tnode.attributes.href = {type: \"string\", value: URL};\n\tnode.attributes.target = {type: \"string\", value: \"_blank\"};\n\tnode.attributes.rel = {type: \"string\", value: \"noopener noreferrer\"};\n\t// Update the end position\n\tnode.end = closePos + 2;\n\treturn node;\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/prettyextlink.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/prettylink.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/prettylink.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for pretty links. For example:\n\n```\n[[Introduction]]\n\n[[Link description|TiddlerTitle]]\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"prettylink\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /\\[\\[(.*?)(?:\\|(.*?))?\\]\\]/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Process the link\n\tvar text = this.match[1],\n\t\tlink = this.match[2] || text;\n\tif($tw.utils.isLinkExternal(link)) {\n\t\treturn [{\n\t\t\ttype: \"element\",\n\t\t\ttag: \"a\",\n\t\t\tattributes: {\n\t\t\t\thref: {type: \"string\", value: link},\n\t\t\t\t\"class\": {type: \"string\", value: \"tc-tiddlylink-external\"},\n\t\t\t\ttarget: {type: \"string\", value: \"_blank\"},\n\t\t\t\trel: {type: \"string\", value: \"noopener noreferrer\"}\n\t\t\t},\n\t\t\tchildren: [{\n\t\t\t\ttype: \"text\", text: text\n\t\t\t}]\n\t\t}];\n\t} else {\n\t\treturn [{\n\t\t\ttype: \"link\",\n\t\t\tattributes: {\n\t\t\t\tto: {type: \"string\", value: link}\n\t\t\t},\n\t\t\tchildren: [{\n\t\t\t\ttype: \"text\", text: text\n\t\t\t}]\n\t\t}];\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/prettylink.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/quoteblock.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/quoteblock.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text rule for quote blocks. For example:\n\n```\n\t<<<.optionalClass(es) optional cited from\n\ta quote\n\t<<<\n\t\n\t<<<.optionalClass(es)\n\ta quote\n\t<<< optional cited from\n```\n\nQuotes can be quoted by putting more <s\n\n```\n\t<<<\n\tQuote Level 1\n\t\n\t<<<<\n\tQuoteLevel 2\n\t<<<<\n\t\n\t<<<\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"quoteblock\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /(<<<+)/mg;\n};\n\nexports.parse = function() {\n\tvar classes = [\"tc-quote\"];\n\t// Get all the details of the match\n\tvar reEndString = \"^\" + this.match[1] + \"(?!<)\";\n\t// Move past the <s\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t\n\t// Parse any classes, whitespace and then the optional cite itself\n\tclasses.push.apply(classes, this.parser.parseClasses());\n\tthis.parser.skipWhitespace({treatNewlinesAsNonWhitespace: true});\n\tvar cite = this.parser.parseInlineRun(/(\\r?\\n)/mg);\n\t// before handling the cite, parse the body of the quote\n\tvar tree= this.parser.parseBlocks(reEndString);\n\t// If we got a cite, put it before the text\n\tif(cite.length > 0) {\n\t\ttree.unshift({\n\t\t\ttype: \"element\",\n\t\t\ttag: \"cite\",\n\t\t\tchildren: cite\n\t\t});\n\t}\n\t// Parse any optional cite\n\tthis.parser.skipWhitespace({treatNewlinesAsNonWhitespace: true});\n\tcite = this.parser.parseInlineRun(/(\\r?\\n)/mg);\n\t// If we got a cite, push it\n\tif(cite.length > 0) {\n\t\ttree.push({\n\t\t\ttype: \"element\",\n\t\t\ttag: \"cite\",\n\t\t\tchildren: cite\n\t\t});\n\t}\n\t// Return the blockquote element\n\treturn [{\n\t\ttype: \"element\",\n\t\ttag: \"blockquote\",\n\t\tattributes: {\n\t\t\tclass: { type: \"string\", value: 
classes.join(\" \") },\n\t\t},\n\t\tchildren: tree\n\t}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/quoteblock.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/rules.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/rules.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki pragma rule for rules specifications\n\n```\n\\rules except ruleone ruletwo rulethree\n\\rules only ruleone ruletwo rulethree\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"rules\";\nexports.types = {pragma: true};\n\n/*\nInstantiate parse rule\n*/\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /^\\\\rules[^\\S\\n]/mg;\n};\n\n/*\nParse the most recent match\n*/\nexports.parse = function() {\n\t// Move past the pragma invocation\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Parse whitespace delimited tokens terminated by a line break\n\tvar reMatch = /[^\\S\\n]*(\\S+)|(\\r?\\n)/mg,\n\t\ttokens = [];\n\treMatch.lastIndex = this.parser.pos;\n\tvar match = reMatch.exec(this.parser.source);\n\twhile(match && match.index === this.parser.pos) {\n\t\tthis.parser.pos = reMatch.lastIndex;\n\t\t// Exit if we've got the line break\n\t\tif(match[2]) {\n\t\t\tbreak;\n\t\t}\n\t\t// Process the token\n\t\tif(match[1]) {\n\t\t\ttokens.push(match[1]);\n\t\t}\n\t\t// Match the next token\n\t\tmatch = reMatch.exec(this.parser.source);\n\t}\n\t// Process the tokens\n\tif(tokens.length > 0) {\n\t\tthis.parser.amendRules(tokens[0],tokens.slice(1));\n\t}\n\t// No parse tree nodes to return\n\treturn [];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/rules.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/styleblock.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/styleblock.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text block rule for assigning styles and classes to paragraphs and other blocks. For example:\n\n```\n@@.myClass\n@@background-color:red;\nThis paragraph will have the CSS class `myClass`.\n\n* The `<ul>` around this list will also have the class `myClass`\n* List item 2\n\n@@\n```\n\nNote that classes and styles can be mixed subject to the rule that styles must precede classes. For example\n\n```\n@@.myFirstClass.mySecondClass\n@@width:100px;.myThirdClass\nThis is a paragraph\n@@\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"styleblock\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /@@((?:[^\\.\\r\\n\\s:]+:[^\\r\\n;]+;)+)?(?:\\.([^\\r\\n\\s]+))?\\r?\\n/mg;\n};\n\nexports.parse = function() {\n\tvar reEndString = \"^@@(?:\\\\r?\\\\n)?\";\n\tvar classes = [], styles = [];\n\tdo {\n\t\t// Get the class and style\n\t\tif(this.match[1]) {\n\t\t\tstyles.push(this.match[1]);\n\t\t}\n\t\tif(this.match[2]) {\n\t\t\tclasses.push(this.match[2].split(\".\").join(\" \"));\n\t\t}\n\t\t// Move past the match\n\t\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t\t// Look for another line of classes and styles\n\t\tthis.match = this.matchRegExp.exec(this.parser.source);\n\t} while(this.match && this.match.index === this.parser.pos);\n\t// Parse the body\n\tvar tree = this.parser.parseBlocks(reEndString);\n\tfor(var t=0; t<tree.length; t++) {\n\t\tif(classes.length > 0) {\n\t\t\t$tw.utils.addClassToParseTreeNode(tree[t],classes.join(\" \"));\n\t\t}\n\t\tif(styles.length > 0) {\n\t\t\t$tw.utils.addAttributeToParseTreeNode(tree[t],\"style\",styles.join(\"\"));\n\t\t}\n\t}\n\treturn tree;\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/styleblock.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/styleinline.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/styleinline.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for assigning styles and classes to inline runs. For example:\n\n```\n@@.myClass This is some text with a class@@\n@@background-color:red;This is some text with a background colour@@\n@@width:100px;.myClass This is some text with a class and a width@@\n```\n\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"styleinline\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /@@((?:[^\\.\\r\\n\\s:]+:[^\\r\\n;]+;)+)?(\\.(?:[^\\r\\n\\s]+)\\s+)?/mg;\n};\n\nexports.parse = function() {\n\tvar reEnd = /@@/g;\n\t// Get the styles and class\n\tvar stylesString = this.match[1],\n\t\tclassString = this.match[2] ? this.match[2].split(\".\").join(\" \") : undefined;\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Parse the run up to the terminator\n\tvar tree = this.parser.parseInlineRun(reEnd,{eatTerminator: true});\n\t// Return the classed span\n\tvar node = {\n\t\ttype: \"element\",\n\t\ttag: \"span\",\n\t\tattributes: {\n\t\t\t\"class\": {type: \"string\", value: \"tc-inline-style\"}\n\t\t},\n\t\tchildren: tree\n\t};\n\tif(classString) {\n\t\t$tw.utils.addClassToParseTreeNode(node,classString);\n\t}\n\tif(stylesString) {\n\t\t$tw.utils.addAttributeToParseTreeNode(node,\"style\",stylesString);\n\t}\n\treturn [node];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/styleinline.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/syslink.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/syslink.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for system tiddler links.\nCan be suppressed preceding them with `~`.\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"syslink\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /~?\\$:\\/[a-zA-Z0-9/.\\-_]+/mg;\n};\n\nexports.parse = function() {\n\tvar match = this.match[0];\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Create the link unless it is suppressed\n\tif(match.substr(0,1) === \"~\") {\n\t\treturn [{type: \"text\", text: match.substr(1)}];\n\t} else {\n\t\treturn [{\n\t\t\ttype: \"link\",\n\t\t\tattributes: {\n\t\t\t\tto: {type: \"string\", value: match}\n\t\t\t},\n\t\t\tchildren: [{\n\t\t\t\ttype: \"text\",\n\t\t\t\ttext: match\n\t\t\t}]\n\t\t}];\n\t}\n};\n\n})();",
"title": "$:/core/modules/parsers/wikiparser/rules/syslink.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/table.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/table.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text block rule for tables.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"table\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /^\\|(?:[^\\n]*)\\|(?:[fhck]?)\\r?(?:\\n|$)/mg;\n};\n\nvar processRow = function(prevColumns) {\n\tvar cellRegExp = /(?:\\|([^\\n\\|]*)\\|)|(\\|[fhck]?\\r?(?:\\n|$))/mg,\n\t\tcellTermRegExp = /((?:\\x20*)\\|)/mg,\n\t\ttree = [],\n\t\tcol = 0,\n\t\tcolSpanCount = 1,\n\t\tprevCell,\n\t\tvAlign;\n\t// Match a single cell\n\tcellRegExp.lastIndex = this.parser.pos;\n\tvar cellMatch = cellRegExp.exec(this.parser.source);\n\twhile(cellMatch && cellMatch.index === this.parser.pos) {\n\t\tif(cellMatch[1] === \"~\") {\n\t\t\t// Rowspan\n\t\t\tvar last = prevColumns[col];\n\t\t\tif(last) {\n\t\t\t\tlast.rowSpanCount++;\n\t\t\t\t$tw.utils.addAttributeToParseTreeNode(last.element,\"rowspan\",last.rowSpanCount);\n\t\t\t\tvAlign = $tw.utils.getAttributeValueFromParseTreeNode(last.element,\"valign\",\"center\");\n\t\t\t\t$tw.utils.addAttributeToParseTreeNode(last.element,\"valign\",vAlign);\n\t\t\t\tif(colSpanCount > 1) {\n\t\t\t\t\t$tw.utils.addAttributeToParseTreeNode(last.element,\"colspan\",colSpanCount);\n\t\t\t\t\tcolSpanCount = 1;\n\t\t\t\t}\n\t\t\t}\n\t\t\t// Move to just before the `|` terminating the cell\n\t\t\tthis.parser.pos = cellRegExp.lastIndex - 1;\n\t\t} else if(cellMatch[1] === \">\") {\n\t\t\t// Colspan\n\t\t\tcolSpanCount++;\n\t\t\t// Move to just before the `|` terminating the cell\n\t\t\tthis.parser.pos = cellRegExp.lastIndex - 1;\n\t\t} else if(cellMatch[1] === \"<\" && prevCell) {\n\t\t\tcolSpanCount = 1 + 
$tw.utils.getAttributeValueFromParseTreeNode(prevCell,\"colspan\",1);\n\t\t\t$tw.utils.addAttributeToParseTreeNode(prevCell,\"colspan\",colSpanCount);\n\t\t\tcolSpanCount = 1;\n\t\t\t// Move to just before the `|` terminating the cell\n\t\t\tthis.parser.pos = cellRegExp.lastIndex - 1;\n\t\t} else if(cellMatch[2]) {\n\t\t\t// End of row\n\t\t\tif(prevCell && colSpanCount > 1) {\n\t\t\t\tif(prevCell.attributes && prevCell.attributes && prevCell.attributes.colspan) {\n\t\t\t\t\t\tcolSpanCount += prevCell.attributes.colspan.value;\n\t\t\t\t} else {\n\t\t\t\t\tcolSpanCount -= 1;\n\t\t\t\t}\n\t\t\t\t$tw.utils.addAttributeToParseTreeNode(prevCell,\"colspan\",colSpanCount);\n\t\t\t}\n\t\t\tthis.parser.pos = cellRegExp.lastIndex - 1;\n\t\t\tbreak;\n\t\t} else {\n\t\t\t// For ordinary cells, step beyond the opening `|`\n\t\t\tthis.parser.pos++;\n\t\t\t// Look for a space at the start of the cell\n\t\t\tvar spaceLeft = false;\n\t\t\tvAlign = null;\n\t\t\tif(this.parser.source.substr(this.parser.pos).search(/^\\^([^\\^]|\\^\\^)/) === 0) {\n\t\t\t\tvAlign = \"top\";\n\t\t\t} else if(this.parser.source.substr(this.parser.pos).search(/^,([^,]|,,)/) === 0) {\n\t\t\t\tvAlign = \"bottom\";\n\t\t\t}\n\t\t\tif(vAlign) {\n\t\t\t\tthis.parser.pos++;\n\t\t\t}\n\t\t\tvar chr = this.parser.source.substr(this.parser.pos,1);\n\t\t\twhile(chr === \" \") {\n\t\t\t\tspaceLeft = true;\n\t\t\t\tthis.parser.pos++;\n\t\t\t\tchr = this.parser.source.substr(this.parser.pos,1);\n\t\t\t}\n\t\t\t// Check whether this is a heading cell\n\t\t\tvar cell;\n\t\t\tif(chr === \"!\") {\n\t\t\t\tthis.parser.pos++;\n\t\t\t\tcell = {type: \"element\", tag: \"th\", children: []};\n\t\t\t} else {\n\t\t\t\tcell = {type: \"element\", tag: \"td\", children: []};\n\t\t\t}\n\t\t\ttree.push(cell);\n\t\t\t// Record information about this cell\n\t\t\tprevCell = cell;\n\t\t\tprevColumns[col] = {rowSpanCount:1,element:cell};\n\t\t\t// Check for a colspan\n\t\t\tif(colSpanCount > 1) 
{\n\t\t\t\t$tw.utils.addAttributeToParseTreeNode(cell,\"colspan\",colSpanCount);\n\t\t\t\tcolSpanCount = 1;\n\t\t\t}\n\t\t\t// Parse the cell\n\t\t\tcell.children = this.parser.parseInlineRun(cellTermRegExp,{eatTerminator: true});\n\t\t\t// Set the alignment for the cell\n\t\t\tif(vAlign) {\n\t\t\t\t$tw.utils.addAttributeToParseTreeNode(cell,\"valign\",vAlign);\n\t\t\t}\n\t\t\tif(this.parser.source.substr(this.parser.pos - 2,1) === \" \") { // spaceRight\n\t\t\t\t$tw.utils.addAttributeToParseTreeNode(cell,\"align\",spaceLeft ? \"center\" : \"left\");\n\t\t\t} else if(spaceLeft) {\n\t\t\t\t$tw.utils.addAttributeToParseTreeNode(cell,\"align\",\"right\");\n\t\t\t}\n\t\t\t// Move back to the closing `|`\n\t\t\tthis.parser.pos--;\n\t\t}\n\t\tcol++;\n\t\tcellRegExp.lastIndex = this.parser.pos;\n\t\tcellMatch = cellRegExp.exec(this.parser.source);\n\t}\n\treturn tree;\n};\n\nexports.parse = function() {\n\tvar rowContainerTypes = {\"c\":\"caption\", \"h\":\"thead\", \"\":\"tbody\", \"f\":\"tfoot\"},\n\t\ttable = {type: \"element\", tag: \"table\", children: []},\n\t\trowRegExp = /^\\|([^\\n]*)\\|([fhck]?)\\r?(?:\\n|$)/mg,\n\t\trowTermRegExp = /(\\|(?:[fhck]?)\\r?(?:\\n|$))/mg,\n\t\tprevColumns = [],\n\t\tcurrRowType,\n\t\trowContainer,\n\t\trowCount = 0;\n\t// Match the row\n\trowRegExp.lastIndex = this.parser.pos;\n\tvar rowMatch = rowRegExp.exec(this.parser.source);\n\twhile(rowMatch && rowMatch.index === this.parser.pos) {\n\t\tvar rowType = rowMatch[2];\n\t\t// Check if it is a class assignment\n\t\tif(rowType === \"k\") {\n\t\t\t$tw.utils.addClassToParseTreeNode(table,rowMatch[1]);\n\t\t\tthis.parser.pos = rowMatch.index + rowMatch[0].length;\n\t\t} else {\n\t\t\t// Otherwise, create a new row if this one is of a different type\n\t\t\tif(rowType !== currRowType) {\n\t\t\t\trowContainer = {type: \"element\", tag: rowContainerTypes[rowType], children: []};\n\t\t\t\ttable.children.push(rowContainer);\n\t\t\t\tcurrRowType = rowType;\n\t\t\t}\n\t\t\t// Is this a caption 
row?\n\t\t\tif(currRowType === \"c\") {\n\t\t\t\t// If so, move past the opening `|` of the row\n\t\t\t\tthis.parser.pos++;\n\t\t\t\t// Move the caption to the first row if it isn't already\n\t\t\t\tif(table.children.length !== 1) {\n\t\t\t\t\ttable.children.pop(); // Take rowContainer out of the children array\n\t\t\t\t\ttable.children.splice(0,0,rowContainer); // Insert it at the bottom\t\t\t\t\t\t\n\t\t\t\t}\n\t\t\t\t// Set the alignment - TODO: figure out why TW did this\n//\t\t\t\trowContainer.attributes.align = rowCount === 0 ? \"top\" : \"bottom\";\n\t\t\t\t// Parse the caption\n\t\t\t\trowContainer.children = this.parser.parseInlineRun(rowTermRegExp,{eatTerminator: true});\n\t\t\t} else {\n\t\t\t\t// Create the row\n\t\t\t\tvar theRow = {type: \"element\", tag: \"tr\", children: []};\n\t\t\t\t$tw.utils.addClassToParseTreeNode(theRow,rowCount%2 ? \"oddRow\" : \"evenRow\");\n\t\t\t\trowContainer.children.push(theRow);\n\t\t\t\t// Process the row\n\t\t\t\ttheRow.children = processRow.call(this,prevColumns);\n\t\t\t\tthis.parser.pos = rowMatch.index + rowMatch[0].length;\n\t\t\t\t// Increment the row count\n\t\t\t\trowCount++;\n\t\t\t}\n\t\t}\n\t\trowMatch = rowRegExp.exec(this.parser.source);\n\t}\n\treturn [table];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/table.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/transcludeblock.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/transcludeblock.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text rule for block-level transclusion. For example:\n\n```\n{{MyTiddler}}\n{{MyTiddler||TemplateTitle}}\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"transcludeblock\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /\\{\\{([^\\{\\}\\|]*)(?:\\|\\|([^\\|\\{\\}]+))?\\}\\}(?:\\r?\\n|$)/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Get the match details\n\tvar template = $tw.utils.trim(this.match[2]),\n\t\ttextRef = $tw.utils.trim(this.match[1]);\n\t// Prepare the transclude widget\n\tvar transcludeNode = {\n\t\t\ttype: \"transclude\",\n\t\t\tattributes: {},\n\t\t\tisBlock: true\n\t\t};\n\t// Prepare the tiddler widget\n\tvar tr, targetTitle, targetField, targetIndex, tiddlerNode;\n\tif(textRef) {\n\t\ttr = $tw.utils.parseTextReference(textRef);\n\t\ttargetTitle = tr.title;\n\t\ttargetField = tr.field;\n\t\ttargetIndex = tr.index;\n\t\ttiddlerNode = {\n\t\t\ttype: \"tiddler\",\n\t\t\tattributes: {\n\t\t\t\ttiddler: {type: \"string\", value: targetTitle}\n\t\t\t},\n\t\t\tisBlock: true,\n\t\t\tchildren: [transcludeNode]\n\t\t};\n\t}\n\tif(template) {\n\t\ttranscludeNode.attributes.tiddler = {type: \"string\", value: template};\n\t\tif(textRef) {\n\t\t\treturn [tiddlerNode];\n\t\t} else {\n\t\t\treturn [transcludeNode];\n\t\t}\n\t} else {\n\t\tif(textRef) {\n\t\t\ttranscludeNode.attributes.tiddler = {type: \"string\", value: targetTitle};\n\t\t\tif(targetField) {\n\t\t\t\ttranscludeNode.attributes.field = {type: \"string\", value: targetField};\n\t\t\t}\n\t\t\tif(targetIndex) 
{\n\t\t\t\ttranscludeNode.attributes.index = {type: \"string\", value: targetIndex};\n\t\t\t}\n\t\t\treturn [tiddlerNode];\n\t\t} else {\n\t\t\treturn [transcludeNode];\n\t\t}\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/transcludeblock.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/transcludeinline.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/transcludeinline.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text rule for inline-level transclusion. For example:\n\n```\n{{MyTiddler}}\n{{MyTiddler||TemplateTitle}}\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"transcludeinline\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /\\{\\{([^\\{\\}\\|]*)(?:\\|\\|([^\\|\\{\\}]+))?\\}\\}/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Get the match details\n\tvar template = $tw.utils.trim(this.match[2]),\n\t\ttextRef = $tw.utils.trim(this.match[1]);\n\t// Prepare the transclude widget\n\tvar transcludeNode = {\n\t\t\ttype: \"transclude\",\n\t\t\tattributes: {}\n\t\t};\n\t// Prepare the tiddler widget\n\tvar tr, targetTitle, targetField, targetIndex, tiddlerNode;\n\tif(textRef) {\n\t\ttr = $tw.utils.parseTextReference(textRef);\n\t\ttargetTitle = tr.title;\n\t\ttargetField = tr.field;\n\t\ttargetIndex = tr.index;\n\t\ttiddlerNode = {\n\t\t\ttype: \"tiddler\",\n\t\t\tattributes: {\n\t\t\t\ttiddler: {type: \"string\", value: targetTitle}\n\t\t\t},\n\t\t\tchildren: [transcludeNode]\n\t\t};\n\t}\n\tif(template) {\n\t\ttranscludeNode.attributes.tiddler = {type: \"string\", value: template};\n\t\tif(textRef) {\n\t\t\treturn [tiddlerNode];\n\t\t} else {\n\t\t\treturn [transcludeNode];\n\t\t}\n\t} else {\n\t\tif(textRef) {\n\t\t\ttranscludeNode.attributes.tiddler = {type: \"string\", value: targetTitle};\n\t\t\tif(targetField) {\n\t\t\t\ttranscludeNode.attributes.field = {type: \"string\", value: targetField};\n\t\t\t}\n\t\t\tif(targetIndex) {\n\t\t\t\ttranscludeNode.attributes.index = {type: \"string\", value: targetIndex};\n\t\t\t}\n\t\t\treturn [tiddlerNode];\n\t\t} else {\n\t\t\treturn 
[transcludeNode];\n\t\t}\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/transcludeinline.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/typedblock.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/typedblock.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text rule for typed blocks. For example:\n\n```\n$$$.js\nThis will be rendered as JavaScript\n$$$\n\n$$$.svg\n<svg xmlns=\"http://www.w3.org/2000/svg\" width=\"150\" height=\"100\">\n <circle cx=\"100\" cy=\"50\" r=\"40\" stroke=\"black\" stroke-width=\"2\" fill=\"red\" />\n</svg>\n$$$\n\n$$$text/vnd.tiddlywiki>text/html\nThis will be rendered as an //HTML representation// of WikiText\n$$$\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar widget = require(\"$:/core/modules/widgets/widget.js\");\n\nexports.name = \"typedblock\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /\\$\\$\\$([^ >\\r\\n]*)(?: *> *([^ \\r\\n]+))?\\r?\\n/mg;\n};\n\nexports.parse = function() {\n\tvar reEnd = /\\r?\\n\\$\\$\\$\\r?(?:\\n|$)/mg;\n\t// Save the type\n\tvar parseType = this.match[1],\n\t\trenderType = this.match[2];\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Look for the end of the block\n\treEnd.lastIndex = this.parser.pos;\n\tvar match = reEnd.exec(this.parser.source),\n\t\ttext;\n\t// Process the block\n\tif(match) {\n\t\ttext = this.parser.source.substring(this.parser.pos,match.index);\n\t\tthis.parser.pos = match.index + match[0].length;\n\t} else {\n\t\ttext = this.parser.source.substr(this.parser.pos);\n\t\tthis.parser.pos = this.parser.sourceLength;\n\t}\n\t// Parse the block according to the specified type\n\tvar parser = this.parser.wiki.parseText(parseType,text,{defaultType: \"text/plain\"});\n\t// If there's no render type, just return the parse tree\n\tif(!renderType) {\n\t\treturn parser.tree;\n\t} else {\n\t\t// Otherwise, render to the rendertype and return in a <PRE> tag\n\t\tvar widgetNode = 
this.parser.wiki.makeWidget(parser),\n\t\t\tcontainer = $tw.fakeDocument.createElement(\"div\");\n\t\twidgetNode.render(container,null);\n\t\ttext = renderType === \"text/html\" ? container.innerHTML : container.textContent;\n\t\treturn [{\n\t\t\ttype: \"element\",\n\t\t\ttag: \"pre\",\n\t\t\tchildren: [{\n\t\t\t\ttype: \"text\",\n\t\t\t\ttext: text\n\t\t\t}]\n\t\t}];\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/typedblock.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/wikilink.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/wikilink.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for wiki links. For example:\n\n```\nAWikiLink\nAnotherLink\n~SuppressedLink\n```\n\nPrecede a camel case word with `~` to prevent it from being recognised as a link.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"wikilink\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = new RegExp($tw.config.textPrimitives.unWikiLink + \"?\" + $tw.config.textPrimitives.wikiLink,\"mg\");\n};\n\n/*\nParse the most recent match\n*/\nexports.parse = function() {\n\t// Get the details of the match\n\tvar linkText = this.match[0];\n\t// Move past the macro call\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// If the link starts with the unwikilink character then just output it as plain text\n\tif(linkText.substr(0,1) === $tw.config.textPrimitives.unWikiLink) {\n\t\treturn [{type: \"text\", text: linkText.substr(1)}];\n\t}\n\t// If the link has been preceded with a blocked letter then don't treat it as a link\n\tif(this.match.index > 0) {\n\t\tvar preRegExp = new RegExp($tw.config.textPrimitives.blockPrefixLetters,\"mg\");\n\t\tpreRegExp.lastIndex = this.match.index-1;\n\t\tvar preMatch = preRegExp.exec(this.parser.source);\n\t\tif(preMatch && preMatch.index === this.match.index-1) {\n\t\t\treturn [{type: \"text\", text: linkText}];\n\t\t}\n\t}\n\treturn [{\n\t\ttype: \"link\",\n\t\tattributes: {\n\t\t\tto: {type: \"string\", value: linkText}\n\t\t},\n\t\tchildren: [{\n\t\t\ttype: \"text\",\n\t\t\ttext: linkText\n\t\t}]\n\t}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/wikilink.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/wikiparser.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/wikiparser.js\ntype: application/javascript\nmodule-type: parser\n\nThe wiki text parser processes blocks of source text into a parse tree.\n\nThe parse tree is made up of nested arrays of these JavaScript objects:\n\n\t{type: \"element\", tag: <string>, attributes: {}, children: []} - an HTML element\n\t{type: \"text\", text: <string>} - a text node\n\t{type: \"entity\", value: <string>} - an entity\n\t{type: \"raw\", html: <string>} - raw HTML\n\nAttributes are stored as hashmaps of the following objects:\n\n\t{type: \"string\", value: <string>} - literal string\n\t{type: \"indirect\", textReference: <textReference>} - indirect through a text reference\n\t{type: \"macro\", macro: <TBD>} - indirect through a macro invocation\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar WikiParser = function(type,text,options) {\n\tthis.wiki = options.wiki;\n\tvar self = this;\n\t// Check for an externally linked tiddler\n\tif($tw.browser && (text || \"\") === \"\" && options._canonical_uri) {\n\t\tthis.loadRemoteTiddler(options._canonical_uri);\n\t\ttext = $tw.language.getRawString(\"LazyLoadingWarning\");\n\t}\n\t// Initialise the classes if we don't have them already\n\tif(!this.pragmaRuleClasses) {\n\t\tWikiParser.prototype.pragmaRuleClasses = $tw.modules.createClassesFromModules(\"wikirule\",\"pragma\",$tw.WikiRuleBase);\n\t\tthis.setupRules(WikiParser.prototype.pragmaRuleClasses,\"$:/config/WikiParserRules/Pragmas/\");\n\t}\n\tif(!this.blockRuleClasses) {\n\t\tWikiParser.prototype.blockRuleClasses = $tw.modules.createClassesFromModules(\"wikirule\",\"block\",$tw.WikiRuleBase);\n\t\tthis.setupRules(WikiParser.prototype.blockRuleClasses,\"$:/config/WikiParserRules/Block/\");\n\t}\n\tif(!this.inlineRuleClasses) {\n\t\tWikiParser.prototype.inlineRuleClasses = 
$tw.modules.createClassesFromModules(\"wikirule\",\"inline\",$tw.WikiRuleBase);\n\t\tthis.setupRules(WikiParser.prototype.inlineRuleClasses,\"$:/config/WikiParserRules/Inline/\");\n\t}\n\t// Save the parse text\n\tthis.type = type || \"text/vnd.tiddlywiki\";\n\tthis.source = text || \"\";\n\tthis.sourceLength = this.source.length;\n\t// Set current parse position\n\tthis.pos = 0;\n\t// Instantiate the pragma parse rules\n\tthis.pragmaRules = this.instantiateRules(this.pragmaRuleClasses,\"pragma\",0);\n\t// Instantiate the parser block and inline rules\n\tthis.blockRules = this.instantiateRules(this.blockRuleClasses,\"block\",0);\n\tthis.inlineRules = this.instantiateRules(this.inlineRuleClasses,\"inline\",0);\n\t// Parse any pragmas\n\tthis.tree = [];\n\tvar topBranch = this.parsePragmas();\n\t// Parse the text into inline runs or blocks\n\tif(options.parseAsInline) {\n\t\ttopBranch.push.apply(topBranch,this.parseInlineRun());\n\t} else {\n\t\ttopBranch.push.apply(topBranch,this.parseBlocks());\n\t}\n\t// Return the parse tree\n};\n\n/*\n*/\nWikiParser.prototype.loadRemoteTiddler = function(url) {\n\tvar self = this;\n\t$tw.utils.httpRequest({\n\t\turl: url,\n\t\ttype: \"GET\",\n\t\tcallback: function(err,data) {\n\t\t\tif(!err) {\n\t\t\t\tvar tiddlers = self.wiki.deserializeTiddlers(\".tid\",data,self.wiki.getCreationFields());\n\t\t\t\t$tw.utils.each(tiddlers,function(tiddler) {\n\t\t\t\t\ttiddler[\"_canonical_uri\"] = url;\n\t\t\t\t});\n\t\t\t\tif(tiddlers) {\n\t\t\t\t\tself.wiki.addTiddlers(tiddlers);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t});\n};\n\n/*\n*/\nWikiParser.prototype.setupRules = function(proto,configPrefix) {\n\tvar self = this;\n\tif(!$tw.safemode) {\n\t\t$tw.utils.each(proto,function(object,name) {\n\t\t\tif(self.wiki.getTiddlerText(configPrefix + name,\"enable\") !== \"enable\") {\n\t\t\t\tdelete proto[name];\n\t\t\t}\n\t\t});\n\t}\n};\n\n/*\nInstantiate an array of parse rules\n*/\nWikiParser.prototype.instantiateRules = function(classes,type,startPos) 
{\n\tvar rulesInfo = [],\n\t\tself = this;\n\t$tw.utils.each(classes,function(RuleClass) {\n\t\t// Instantiate the rule\n\t\tvar rule = new RuleClass(self);\n\t\trule.is = {};\n\t\trule.is[type] = true;\n\t\trule.init(self);\n\t\tvar matchIndex = rule.findNextMatch(startPos);\n\t\tif(matchIndex !== undefined) {\n\t\t\trulesInfo.push({\n\t\t\t\trule: rule,\n\t\t\t\tmatchIndex: matchIndex\n\t\t\t});\n\t\t}\n\t});\n\treturn rulesInfo;\n};\n\n/*\nSkip any whitespace at the current position. Options are:\n\ttreatNewlinesAsNonWhitespace: true if newlines are NOT to be treated as whitespace\n*/\nWikiParser.prototype.skipWhitespace = function(options) {\n\toptions = options || {};\n\tvar whitespaceRegExp = options.treatNewlinesAsNonWhitespace ? /([^\\S\\n]+)/mg : /(\\s+)/mg;\n\twhitespaceRegExp.lastIndex = this.pos;\n\tvar whitespaceMatch = whitespaceRegExp.exec(this.source);\n\tif(whitespaceMatch && whitespaceMatch.index === this.pos) {\n\t\tthis.pos = whitespaceRegExp.lastIndex;\n\t}\n};\n\n/*\nGet the next match out of an array of parse rule instances\n*/\nWikiParser.prototype.findNextMatch = function(rules,startPos) {\n\t// Find the best matching rule by finding the closest match position\n\tvar matchingRule,\n\t\tmatchingRulePos = this.sourceLength;\n\t// Step through each rule\n\tfor(var t=0; t<rules.length; t++) {\n\t\tvar ruleInfo = rules[t];\n\t\t// Ask the rule to get the next match if we've moved past the current one\n\t\tif(ruleInfo.matchIndex !== undefined && ruleInfo.matchIndex < startPos) {\n\t\t\truleInfo.matchIndex = ruleInfo.rule.findNextMatch(startPos);\n\t\t}\n\t\t// Adopt this match if it's closer than the current best match\n\t\tif(ruleInfo.matchIndex !== undefined && ruleInfo.matchIndex <= matchingRulePos) {\n\t\t\tmatchingRule = ruleInfo;\n\t\t\tmatchingRulePos = ruleInfo.matchIndex;\n\t\t}\n\t}\n\treturn matchingRule;\n};\n\n/*\nParse any pragmas at the beginning of a block of parse text\n*/\nWikiParser.prototype.parsePragmas = function() {\n\tvar 
currentTreeBranch = this.tree;\n\twhile(true) {\n\t\t// Skip whitespace\n\t\tthis.skipWhitespace();\n\t\t// Check for the end of the text\n\t\tif(this.pos >= this.sourceLength) {\n\t\t\tbreak;\n\t\t}\n\t\t// Check if we've arrived at a pragma rule match\n\t\tvar nextMatch = this.findNextMatch(this.pragmaRules,this.pos);\n\t\t// If not, just exit\n\t\tif(!nextMatch || nextMatch.matchIndex !== this.pos) {\n\t\t\tbreak;\n\t\t}\n\t\t// Process the pragma rule\n\t\tvar subTree = nextMatch.rule.parse();\n\t\tif(subTree.length > 0) {\n\t\t\t// Quick hack; we only cope with a single parse tree node being returned, which is true at the moment\n\t\t\tcurrentTreeBranch.push.apply(currentTreeBranch,subTree);\n\t\t\tsubTree[0].children = [];\n\t\t\tcurrentTreeBranch = subTree[0].children;\n\t\t}\n\t}\n\treturn currentTreeBranch;\n};\n\n/*\nParse a block from the current position\n\tterminatorRegExpString: optional regular expression string that identifies the end of plain paragraphs. Must not include capturing parenthesis\n*/\nWikiParser.prototype.parseBlock = function(terminatorRegExpString) {\n\tvar terminatorRegExp = terminatorRegExpString ? 
new RegExp(\"(\" + terminatorRegExpString + \"|\\\\r?\\\\n\\\\r?\\\\n)\",\"mg\") : /(\\r?\\n\\r?\\n)/mg;\n\tthis.skipWhitespace();\n\tif(this.pos >= this.sourceLength) {\n\t\treturn [];\n\t}\n\t// Look for a block rule that applies at the current position\n\tvar nextMatch = this.findNextMatch(this.blockRules,this.pos);\n\tif(nextMatch && nextMatch.matchIndex === this.pos) {\n\t\treturn nextMatch.rule.parse();\n\t}\n\t// Treat it as a paragraph if we didn't find a block rule\n\treturn [{type: \"element\", tag: \"p\", children: this.parseInlineRun(terminatorRegExp)}];\n};\n\n/*\nParse a series of blocks of text until a terminating regexp is encountered or the end of the text\n\tterminatorRegExpString: terminating regular expression\n*/\nWikiParser.prototype.parseBlocks = function(terminatorRegExpString) {\n\tif(terminatorRegExpString) {\n\t\treturn this.parseBlocksTerminated(terminatorRegExpString);\n\t} else {\n\t\treturn this.parseBlocksUnterminated();\n\t}\n};\n\n/*\nParse a block from the current position to the end of the text\n*/\nWikiParser.prototype.parseBlocksUnterminated = function() {\n\tvar tree = [];\n\twhile(this.pos < this.sourceLength) {\n\t\ttree.push.apply(tree,this.parseBlock());\n\t}\n\treturn tree;\n};\n\n/*\nParse blocks of text until a terminating regexp is encountered\n*/\nWikiParser.prototype.parseBlocksTerminated = function(terminatorRegExpString) {\n\tvar terminatorRegExp = new RegExp(\"(\" + terminatorRegExpString + \")\",\"mg\"),\n\t\ttree = [];\n\t// Skip any whitespace\n\tthis.skipWhitespace();\n\t// Check if we've got the end marker\n\tterminatorRegExp.lastIndex = this.pos;\n\tvar match = terminatorRegExp.exec(this.source);\n\t// Parse the text into blocks\n\twhile(this.pos < this.sourceLength && !(match && match.index === this.pos)) {\n\t\tvar blocks = this.parseBlock(terminatorRegExpString);\n\t\ttree.push.apply(tree,blocks);\n\t\t// Skip any whitespace\n\t\tthis.skipWhitespace();\n\t\t// Check if we've got the end 
marker\n\t\tterminatorRegExp.lastIndex = this.pos;\n\t\tmatch = terminatorRegExp.exec(this.source);\n\t}\n\tif(match && match.index === this.pos) {\n\t\tthis.pos = match.index + match[0].length;\n\t}\n\treturn tree;\n};\n\n/*\nParse a run of text at the current position\n\tterminatorRegExp: a regexp at which to stop the run\n\toptions: see below\nOptions available:\n\teatTerminator: move the parse position past any encountered terminator (default false)\n*/\nWikiParser.prototype.parseInlineRun = function(terminatorRegExp,options) {\n\tif(terminatorRegExp) {\n\t\treturn this.parseInlineRunTerminated(terminatorRegExp,options);\n\t} else {\n\t\treturn this.parseInlineRunUnterminated(options);\n\t}\n};\n\nWikiParser.prototype.parseInlineRunUnterminated = function(options) {\n\tvar tree = [];\n\t// Find the next occurrence of an inline rule\n\tvar nextMatch = this.findNextMatch(this.inlineRules,this.pos);\n\t// Loop around the matches until we've reached the end of the text\n\twhile(this.pos < this.sourceLength && nextMatch) {\n\t\t// Process the text preceding the run rule\n\t\tif(nextMatch.matchIndex > this.pos) {\n\t\t\ttree.push({type: \"text\", text: this.source.substring(this.pos,nextMatch.matchIndex)});\n\t\t\tthis.pos = nextMatch.matchIndex;\n\t\t}\n\t\t// Process the run rule\n\t\ttree.push.apply(tree,nextMatch.rule.parse());\n\t\t// Look for the next run rule\n\t\tnextMatch = this.findNextMatch(this.inlineRules,this.pos);\n\t}\n\t// Process the remaining text\n\tif(this.pos < this.sourceLength) {\n\t\ttree.push({type: \"text\", text: this.source.substr(this.pos)});\n\t}\n\tthis.pos = this.sourceLength;\n\treturn tree;\n};\n\nWikiParser.prototype.parseInlineRunTerminated = function(terminatorRegExp,options) {\n\toptions = options || {};\n\tvar tree = [];\n\t// Find the next occurrence of the terminator\n\tterminatorRegExp.lastIndex = this.pos;\n\tvar terminatorMatch = terminatorRegExp.exec(this.source);\n\t// Find the next occurrence of a inlinerule\n\tvar 
inlineRuleMatch = this.findNextMatch(this.inlineRules,this.pos);\n\t// Loop around until we've reached the end of the text\n\twhile(this.pos < this.sourceLength && (terminatorMatch || inlineRuleMatch)) {\n\t\t// Return if we've found the terminator, and it precedes any inline rule match\n\t\tif(terminatorMatch) {\n\t\t\tif(!inlineRuleMatch || inlineRuleMatch.matchIndex >= terminatorMatch.index) {\n\t\t\t\tif(terminatorMatch.index > this.pos) {\n\t\t\t\t\ttree.push({type: \"text\", text: this.source.substring(this.pos,terminatorMatch.index)});\n\t\t\t\t}\n\t\t\t\tthis.pos = terminatorMatch.index;\n\t\t\t\tif(options.eatTerminator) {\n\t\t\t\t\tthis.pos += terminatorMatch[0].length;\n\t\t\t\t}\n\t\t\t\treturn tree;\n\t\t\t}\n\t\t}\n\t\t// Process any inline rule, along with the text preceding it\n\t\tif(inlineRuleMatch) {\n\t\t\t// Preceding text\n\t\t\tif(inlineRuleMatch.matchIndex > this.pos) {\n\t\t\t\ttree.push({type: \"text\", text: this.source.substring(this.pos,inlineRuleMatch.matchIndex)});\n\t\t\t\tthis.pos = inlineRuleMatch.matchIndex;\n\t\t\t}\n\t\t\t// Process the inline rule\n\t\t\ttree.push.apply(tree,inlineRuleMatch.rule.parse());\n\t\t\t// Look for the next inline rule\n\t\t\tinlineRuleMatch = this.findNextMatch(this.inlineRules,this.pos);\n\t\t\t// Look for the next terminator match\n\t\t\tterminatorRegExp.lastIndex = this.pos;\n\t\t\tterminatorMatch = terminatorRegExp.exec(this.source);\n\t\t}\n\t}\n\t// Process the remaining text\n\tif(this.pos < this.sourceLength) {\n\t\ttree.push({type: \"text\", text: this.source.substr(this.pos)});\n\t}\n\tthis.pos = this.sourceLength;\n\treturn tree;\n};\n\n/*\nParse zero or more class specifiers `.classname`\n*/\nWikiParser.prototype.parseClasses = function() {\n\tvar classRegExp = /\\.([^\\s\\.]+)/mg,\n\t\tclassNames = [];\n\tclassRegExp.lastIndex = this.pos;\n\tvar match = classRegExp.exec(this.source);\n\twhile(match && match.index === this.pos) {\n\t\tthis.pos = match.index + 
match[0].length;\n\t\tclassNames.push(match[1]);\n\t\tmatch = classRegExp.exec(this.source);\n\t}\n\treturn classNames;\n};\n\n/*\nAmend the rules used by this instance of the parser\n\ttype: `only` keeps just the named rules, `except` keeps all but the named rules\n\tnames: array of rule names\n*/\nWikiParser.prototype.amendRules = function(type,names) {\n\tnames = names || [];\n\t// Define the filter function\n\tvar keepFilter;\n\tif(type === \"only\") {\n\t\tkeepFilter = function(name) {\n\t\t\treturn names.indexOf(name) !== -1;\n\t\t};\n\t} else if(type === \"except\") {\n\t\tkeepFilter = function(name) {\n\t\t\treturn names.indexOf(name) === -1;\n\t\t};\n\t} else {\n\t\treturn;\n\t}\n\t// Define a function to process each of our rule arrays\n\tvar processRuleArray = function(ruleArray) {\n\t\tfor(var t=ruleArray.length-1; t>=0; t--) {\n\t\t\tif(!keepFilter(ruleArray[t].rule.name)) {\n\t\t\t\truleArray.splice(t,1);\n\t\t\t}\n\t\t}\n\t};\n\t// Process each rule array\n\tprocessRuleArray(this.pragmaRules);\n\tprocessRuleArray(this.blockRules);\n\tprocessRuleArray(this.inlineRules);\n};\n\nexports[\"text/vnd.tiddlywiki\"] = WikiParser;\n\n})();\n\n",
"title": "$:/core/modules/parsers/wikiparser/wikiparser.js",
"type": "application/javascript",
"module-type": "parser"
},
"$:/core/modules/parsers/wikiparser/rules/wikirulebase.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/wikirulebase.js\ntype: application/javascript\nmodule-type: global\n\nBase class for wiki parser rules\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nThis constructor is always overridden with a blank constructor, and so shouldn't be used\n*/\nvar WikiRuleBase = function() {\n};\n\n/*\nTo be overridden by individual rules\n*/\nWikiRuleBase.prototype.init = function(parser) {\n\tthis.parser = parser;\n};\n\n/*\nDefault implementation of findNextMatch uses RegExp matching\n*/\nWikiRuleBase.prototype.findNextMatch = function(startPos) {\n\tthis.matchRegExp.lastIndex = startPos;\n\tthis.match = this.matchRegExp.exec(this.parser.source);\n\treturn this.match ? this.match.index : undefined;\n};\n\nexports.WikiRuleBase = WikiRuleBase;\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/wikirulebase.js",
"type": "application/javascript",
"module-type": "global"
},
"$:/core/modules/pluginswitcher.js": {
"text": "/*\\\ntitle: $:/core/modules/pluginswitcher.js\ntype: application/javascript\nmodule-type: global\n\nManages switching plugins for themes and languages.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\noptions:\nwiki: wiki store to be used\npluginType: type of plugin to be switched\ncontrollerTitle: title of tiddler used to control switching of this resource\ndefaultPlugins: array of default plugins to be used if nominated plugin isn't found\n*/\nfunction PluginSwitcher(options) {\n\tthis.wiki = options.wiki;\n\tthis.pluginType = options.pluginType;\n\tthis.controllerTitle = options.controllerTitle;\n\tthis.defaultPlugins = options.defaultPlugins || [];\n\t// Switch to the current plugin\n\tthis.switchPlugins();\n\t// Listen for changes to the selected plugin\n\tvar self = this;\n\tthis.wiki.addEventListener(\"change\",function(changes) {\n\t\tif($tw.utils.hop(changes,self.controllerTitle)) {\n\t\t\tself.switchPlugins();\n\t\t}\n\t});\n}\n\nPluginSwitcher.prototype.switchPlugins = function() {\n\t// Get the name of the current theme\n\tvar selectedPluginTitle = this.wiki.getTiddlerText(this.controllerTitle);\n\t// If it doesn't exist, then fallback to one of the default themes\n\tvar index = 0;\n\twhile(!this.wiki.getTiddler(selectedPluginTitle) && index < this.defaultPlugins.length) {\n\t\tselectedPluginTitle = this.defaultPlugins[index++];\n\t}\n\t// Accumulate the titles of the plugins that we need to load\n\tvar plugins = [],\n\t\tself = this,\n\t\taccumulatePlugin = function(title) {\n\t\t\tvar tiddler = self.wiki.getTiddler(title);\n\t\t\tif(tiddler && tiddler.isPlugin() && plugins.indexOf(title) === -1) {\n\t\t\t\tplugins.push(title);\n\t\t\t\tvar pluginInfo = JSON.parse(self.wiki.getTiddlerText(title)),\n\t\t\t\t\tdependents = $tw.utils.parseStringArray(tiddler.fields.dependents || \"\");\n\t\t\t\t$tw.utils.each(dependents,function(title) 
{\n\t\t\t\t\taccumulatePlugin(title);\n\t\t\t\t});\n\t\t\t}\n\t\t};\n\taccumulatePlugin(selectedPluginTitle);\n\t// Unregister any existing theme tiddlers\n\tvar unregisteredTiddlers = $tw.wiki.unregisterPluginTiddlers(this.pluginType);\n\t// Register any new theme tiddlers\n\tvar registeredTiddlers = $tw.wiki.registerPluginTiddlers(this.pluginType,plugins);\n\t// Unpack the current theme tiddlers\n\t$tw.wiki.unpackPluginTiddlers();\n};\n\nexports.PluginSwitcher = PluginSwitcher;\n\n})();\n",
"title": "$:/core/modules/pluginswitcher.js",
"type": "application/javascript",
"module-type": "global"
},
"$:/core/modules/saver-handler.js": {
"text": "/*\\\ntitle: $:/core/modules/saver-handler.js\ntype: application/javascript\nmodule-type: global\n\nThe saver handler tracks changes to the store and handles saving the entire wiki via saver modules.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInstantiate the saver handler with the following options:\nwiki: wiki to be synced\ndirtyTracking: true if dirty tracking should be performed\n*/\nfunction SaverHandler(options) {\n\tvar self = this;\n\tthis.wiki = options.wiki;\n\tthis.dirtyTracking = options.dirtyTracking;\n\tthis.pendingAutoSave = false;\n\t// Make a logger\n\tthis.logger = new $tw.utils.Logger(\"saver-handler\");\n\t// Initialise our savers\n\tif($tw.browser) {\n\t\tthis.initSavers();\n\t}\n\t// Only do dirty tracking if required\n\tif($tw.browser && this.dirtyTracking) {\n\t\t// Compile the dirty tiddler filter\n\t\tthis.filterFn = this.wiki.compileFilter(this.wiki.getTiddlerText(this.titleSyncFilter));\n\t\t// Count of changes that have not yet been saved\n\t\tthis.numChanges = 0;\n\t\t// Listen out for changes to tiddlers\n\t\tthis.wiki.addEventListener(\"change\",function(changes) {\n\t\t\t// Filter the changes so that we only count changes to tiddlers that we care about\n\t\t\tvar filteredChanges = self.filterFn.call(self.wiki,function(callback) {\n\t\t\t\t$tw.utils.each(changes,function(change,title) {\n\t\t\t\t\tvar tiddler = self.wiki.getTiddler(title);\n\t\t\t\t\tcallback(tiddler,title);\n\t\t\t\t});\n\t\t\t});\n\t\t\t// Adjust the number of changes\n\t\t\tself.numChanges += filteredChanges.length;\n\t\t\tself.updateDirtyStatus();\n\t\t\t// Do any autosave if one is pending and there's no more change events\n\t\t\tif(self.pendingAutoSave && self.wiki.getSizeOfTiddlerEventQueue() === 0) {\n\t\t\t\t// Check if we're dirty\n\t\t\t\tif(self.numChanges > 0) {\n\t\t\t\t\tself.saveWiki({\n\t\t\t\t\t\tmethod: \"autosave\",\n\t\t\t\t\t\tdownloadType: 
\"text/plain\"\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\tself.pendingAutoSave = false;\n\t\t\t}\n\t\t});\n\t\t// Listen for the autosave event\n\t\t$tw.rootWidget.addEventListener(\"tm-auto-save-wiki\",function(event) {\n\t\t\t// Do the autosave unless there are outstanding tiddler change events\n\t\t\tif(self.wiki.getSizeOfTiddlerEventQueue() === 0) {\n\t\t\t\t// Check if we're dirty\n\t\t\t\tif(self.numChanges > 0) {\n\t\t\t\t\tself.saveWiki({\n\t\t\t\t\t\tmethod: \"autosave\",\n\t\t\t\t\t\tdownloadType: \"text/plain\"\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// Otherwise put ourselves in the \"pending autosave\" state and wait for the change event before we do the autosave\n\t\t\t\tself.pendingAutoSave = true;\n\t\t\t}\n\t\t});\n\t\t// Set up our beforeunload handler\n\t\t$tw.addUnloadTask(function(event) {\n\t\t\tvar confirmationMessage;\n\t\t\tif(self.isDirty()) {\n\t\t\t\tconfirmationMessage = $tw.language.getString(\"UnsavedChangesWarning\");\n\t\t\t\tevent.returnValue = confirmationMessage; // Gecko\n\t\t\t}\n\t\t\treturn confirmationMessage;\n\t\t});\n\t}\n\t// Install the save action handlers\n\tif($tw.browser) {\n\t\t$tw.rootWidget.addEventListener(\"tm-save-wiki\",function(event) {\n\t\t\tself.saveWiki({\n\t\t\t\ttemplate: event.param,\n\t\t\t\tdownloadType: \"text/plain\",\n\t\t\t\tvariables: event.paramObject\n\t\t\t});\n\t\t});\n\t\t$tw.rootWidget.addEventListener(\"tm-download-file\",function(event) {\n\t\t\tself.saveWiki({\n\t\t\t\tmethod: \"download\",\n\t\t\t\ttemplate: event.param,\n\t\t\t\tdownloadType: \"text/plain\",\n\t\t\t\tvariables: event.paramObject\n\t\t\t});\n\t\t});\n\t}\n}\n\nSaverHandler.prototype.titleSyncFilter = \"$:/config/SaverFilter\";\nSaverHandler.prototype.titleAutoSave = \"$:/config/AutoSave\";\nSaverHandler.prototype.titleSavedNotification = \"$:/language/Notifications/Save/Done\";\n\n/*\nSelect the appropriate saver modules and set them up\n*/\nSaverHandler.prototype.initSavers = function(moduleType) {\n\tmoduleType = 
moduleType || \"saver\";\n\t// Instantiate the available savers\n\tthis.savers = [];\n\tvar self = this;\n\t$tw.modules.forEachModuleOfType(moduleType,function(title,module) {\n\t\tif(module.canSave(self)) {\n\t\t\tself.savers.push(module.create(self.wiki));\n\t\t}\n\t});\n\t// Sort the savers into priority order\n\tthis.savers.sort(function(a,b) {\n\t\tif(a.info.priority < b.info.priority) {\n\t\t\treturn -1;\n\t\t} else {\n\t\t\tif(a.info.priority > b.info.priority) {\n\t\t\t\treturn +1;\n\t\t\t} else {\n\t\t\t\treturn 0;\n\t\t\t}\n\t\t}\n\t});\n};\n\n/*\nSave the wiki contents. Options are:\n\tmethod: \"save\", \"autosave\" or \"download\"\n\ttemplate: the tiddler containing the template to save\n\tdownloadType: the content type for the saved file\n*/\nSaverHandler.prototype.saveWiki = function(options) {\n\toptions = options || {};\n\tvar self = this,\n\t\tmethod = options.method || \"save\",\n\t\tvariables = options.variables || {},\n\t\ttemplate = options.template || \"$:/core/save/all\",\n\t\tdownloadType = options.downloadType || \"text/plain\",\n\t\ttext = this.wiki.renderTiddler(downloadType,template,options),\n\t\tcallback = function(err) {\n\t\t\tif(err) {\n\t\t\t\talert($tw.language.getString(\"Error/WhileSaving\") + \":\\n\\n\" + err);\n\t\t\t} else {\n\t\t\t\t// Clear the task queue if we're saving (rather than downloading)\n\t\t\t\tif(method !== \"download\") {\n\t\t\t\t\tself.numChanges = 0;\n\t\t\t\t\tself.updateDirtyStatus();\n\t\t\t\t}\n\t\t\t\t$tw.notifier.display(self.titleSavedNotification);\n\t\t\t\tif(options.callback) {\n\t\t\t\t\toptions.callback();\n\t\t\t\t}\n\t\t\t}\n\t\t};\n\t// Ignore autosave if disabled\n\tif(method === \"autosave\" && this.wiki.getTiddlerText(this.titleAutoSave,\"yes\") !== \"yes\") {\n\t\treturn false;\n\t}\n\t// Call the highest priority saver that supports this method\n\tfor(var t=this.savers.length-1; t>=0; t--) {\n\t\tvar saver = this.savers[t];\n\t\tif(saver.info.capabilities.indexOf(method) !== -1 && 
saver.save(text,method,callback,{variables: {filename: variables.filename}})) {\n\t\t\tthis.logger.log(\"Saving wiki with method\",method,\"through saver\",saver.info.name);\n\t\t\treturn true;\n\t\t}\n\t}\n\treturn false;\n};\n\n/*\nChecks whether the wiki is dirty (ie the window shouldn't be closed)\n*/\nSaverHandler.prototype.isDirty = function() {\n\treturn this.numChanges > 0;\n};\n\n/*\nUpdate the document body with the class \"tc-dirty\" if the wiki has unsaved/unsynced changes\n*/\nSaverHandler.prototype.updateDirtyStatus = function() {\n\tif($tw.browser) {\n\t\t$tw.utils.toggleClass(document.body,\"tc-dirty\",this.isDirty());\n\t}\n};\n\nexports.SaverHandler = SaverHandler;\n\n})();\n",
"title": "$:/core/modules/saver-handler.js",
"type": "application/javascript",
"module-type": "global"
},
"$:/core/modules/savers/andtidwiki.js": {
"text": "/*\\\ntitle: $:/core/modules/savers/andtidwiki.js\ntype: application/javascript\nmodule-type: saver\n\nHandles saving changes via the AndTidWiki Android app\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false, netscape: false, Components: false */\n\"use strict\";\n\nvar AndTidWiki = function(wiki) {\n};\n\nAndTidWiki.prototype.save = function(text,method,callback) {\n\t// Get the pathname of this document\n\tvar pathname = decodeURIComponent(document.location.toString().split(\"#\")[0]);\n\t// Strip the file://\n\tif(pathname.indexOf(\"file://\") === 0) {\n\t\tpathname = pathname.substr(7);\n\t}\n\t// Strip any query or location part\n\tvar p = pathname.indexOf(\"?\");\n\tif(p !== -1) {\n\t\tpathname = pathname.substr(0,p);\n\t}\n\tp = pathname.indexOf(\"#\");\n\tif(p !== -1) {\n\t\tpathname = pathname.substr(0,p);\n\t}\n\t// Save the file\n\twindow.twi.saveFile(pathname,text);\n\t// Call the callback\n\tcallback(null);\n\treturn true;\n};\n\n/*\nInformation about this saver\n*/\nAndTidWiki.prototype.info = {\n\tname: \"andtidwiki\",\n\tpriority: 1600,\n\tcapabilities: [\"save\", \"autosave\"]\n};\n\n/*\nStatic method that returns true if this saver is capable of working\n*/\nexports.canSave = function(wiki) {\n\treturn !!window.twi && !!window.twi.saveFile;\n};\n\n/*\nCreate an instance of this saver\n*/\nexports.create = function(wiki) {\n\treturn new AndTidWiki(wiki);\n};\n\n})();\n",
"title": "$:/core/modules/savers/andtidwiki.js",
"type": "application/javascript",
"module-type": "saver"
},
"$:/core/modules/savers/download.js": {
"text": "/*\\\ntitle: $:/core/modules/savers/download.js\ntype: application/javascript\nmodule-type: saver\n\nHandles saving changes via HTML5's download APIs\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nSelect the appropriate saver module and set it up\n*/\nvar DownloadSaver = function(wiki) {\n};\n\nDownloadSaver.prototype.save = function(text,method,callback,options) {\n\toptions = options || {};\n\t// Get the current filename\n\tvar filename = options.variables.filename;\n\tif(!filename) {\n\t\tvar p = document.location.pathname.lastIndexOf(\"/\");\n\t\tif(p !== -1) {\n\t\t\tfilename = document.location.pathname.substr(p+1);\n\t\t}\n\t}\n\tif(!filename) {\n\t\tfilename = \"tiddlywiki.html\";\n\t}\n\t// Set up the link\n\tvar link = document.createElement(\"a\");\n\tlink.setAttribute(\"target\",\"_blank\");\n\tlink.setAttribute(\"rel\",\"noopener noreferrer\");\n\tif(Blob !== undefined) {\n\t\tvar blob = new Blob([text], {type: \"text/html\"});\n\t\tlink.setAttribute(\"href\", URL.createObjectURL(blob));\n\t} else {\n\t\tlink.setAttribute(\"href\",\"data:text/html,\" + encodeURIComponent(text));\n\t}\n\tlink.setAttribute(\"download\",filename);\n\tdocument.body.appendChild(link);\n\tlink.click();\n\tdocument.body.removeChild(link);\n\t// Callback that we succeeded\n\tcallback(null);\n\treturn true;\n};\n\n/*\nInformation about this saver\n*/\nDownloadSaver.prototype.info = {\n\tname: \"download\",\n\tpriority: 100,\n\tcapabilities: [\"save\", \"download\"]\n};\n\n/*\nStatic method that returns true if this saver is capable of working\n*/\nexports.canSave = function(wiki) {\n\treturn document.createElement(\"a\").download !== undefined;\n};\n\n/*\nCreate an instance of this saver\n*/\nexports.create = function(wiki) {\n\treturn new DownloadSaver(wiki);\n};\n\n})();\n",
"title": "$:/core/modules/savers/download.js",
"type": "application/javascript",
"module-type": "saver"
},
"$:/core/modules/savers/fsosaver.js": {
"text": "/*\\\ntitle: $:/core/modules/savers/fsosaver.js\ntype: application/javascript\nmodule-type: saver\n\nHandles saving changes via MS FileSystemObject ActiveXObject\n\nNote: Since TiddlyWiki's markup contains the MOTW, the FileSystemObject normally won't be available. \nHowever, if the wiki is loaded as an .HTA file (Windows HTML Applications) then the FSO can be used.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nSelect the appropriate saver module and set it up\n*/\nvar FSOSaver = function(wiki) {\n};\n\nFSOSaver.prototype.save = function(text,method,callback) {\n\t// Get the pathname of this document\n\tvar pathname = unescape(document.location.pathname);\n\t// Test for a Windows path of the form /x:\\blah...\n\tif(/^\\/[A-Z]\\:\\\\[^\\\\]+/i.test(pathname)) {\t// ie: ^/[a-z]:/[^/]+\n\t\t// Remove the leading slash\n\t\tpathname = pathname.substr(1);\n\t} else if(document.location.hostname !== \"\" && /^\\/\\\\[^\\\\]+\\\\[^\\\\]+/i.test(pathname)) {\t// test for \\\\server\\share\\blah... 
- ^/[^/]+/[^/]+\n\t\t// Remove the leading slash\n\t\tpathname = pathname.substr(1);\n\t\t// reconstruct UNC path\n\t\tpathname = \"\\\\\\\\\" + document.location.hostname + pathname;\n\t} else {\n\t\treturn false;\n\t}\n\t// Save the file (as UTF-16)\n\tvar fso = new ActiveXObject(\"Scripting.FileSystemObject\");\n\tvar file = fso.OpenTextFile(pathname,2,-1,-1);\n\tfile.Write(text);\n\tfile.Close();\n\t// Callback that we succeeded\n\tcallback(null);\n\treturn true;\n};\n\n/*\nInformation about this saver\n*/\nFSOSaver.prototype.info = {\n\tname: \"FSOSaver\",\n\tpriority: 120,\n\tcapabilities: [\"save\", \"autosave\"]\n};\n\n/*\nStatic method that returns true if this saver is capable of working\n*/\nexports.canSave = function(wiki) {\n\ttry {\n\t\treturn (window.location.protocol === \"file:\") && !!(new ActiveXObject(\"Scripting.FileSystemObject\"));\n\t} catch(e) { return false; }\n};\n\n/*\nCreate an instance of this saver\n*/\nexports.create = function(wiki) {\n\treturn new FSOSaver(wiki);\n};\n\n})();\n",
"title": "$:/core/modules/savers/fsosaver.js",
"type": "application/javascript",
"module-type": "saver"
},
"$:/core/modules/savers/manualdownload.js": {
"text": "/*\\\ntitle: $:/core/modules/savers/manualdownload.js\ntype: application/javascript\nmodule-type: saver\n\nHandles saving changes via HTML5's download APIs\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Title of the tiddler containing the download message\nvar downloadInstructionsTitle = \"$:/language/Modals/Download\";\n\n/*\nSelect the appropriate saver module and set it up\n*/\nvar ManualDownloadSaver = function(wiki) {\n};\n\nManualDownloadSaver.prototype.save = function(text,method,callback) {\n\t$tw.modal.display(downloadInstructionsTitle,{\n\t\tdownloadLink: \"data:text/html,\" + encodeURIComponent(text)\n\t});\n\t// Callback that we succeeded\n\tcallback(null);\n\treturn true;\n};\n\n/*\nInformation about this saver\n*/\nManualDownloadSaver.prototype.info = {\n\tname: \"manualdownload\",\n\tpriority: 0,\n\tcapabilities: [\"save\", \"download\"]\n};\n\n/*\nStatic method that returns true if this saver is capable of working\n*/\nexports.canSave = function(wiki) {\n\treturn true;\n};\n\n/*\nCreate an instance of this saver\n*/\nexports.create = function(wiki) {\n\treturn new ManualDownloadSaver(wiki);\n};\n\n})();\n",
"title": "$:/core/modules/savers/manualdownload.js",
"type": "application/javascript",
"module-type": "saver"
},
"$:/core/modules/savers/msdownload.js": {
"text": "/*\\\ntitle: $:/core/modules/savers/msdownload.js\ntype: application/javascript\nmodule-type: saver\n\nHandles saving changes via window.navigator.msSaveBlob()\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nSelect the appropriate saver module and set it up\n*/\nvar MsDownloadSaver = function(wiki) {\n};\n\nMsDownloadSaver.prototype.save = function(text,method,callback) {\n\t// Get the current filename\n\tvar filename = \"tiddlywiki.html\",\n\t\tp = document.location.pathname.lastIndexOf(\"/\");\n\tif(p !== -1) {\n\t\tfilename = document.location.pathname.substr(p+1);\n\t}\n\t// Set up the link\n\tvar blob = new Blob([text], {type: \"text/html\"});\n\twindow.navigator.msSaveBlob(blob,filename);\n\t// Callback that we succeeded\n\tcallback(null);\n\treturn true;\n};\n\n/*\nInformation about this saver\n*/\nMsDownloadSaver.prototype.info = {\n\tname: \"msdownload\",\n\tpriority: 110,\n\tcapabilities: [\"save\", \"download\"]\n};\n\n/*\nStatic method that returns true if this saver is capable of working\n*/\nexports.canSave = function(wiki) {\n\treturn !!window.navigator.msSaveBlob;\n};\n\n/*\nCreate an instance of this saver\n*/\nexports.create = function(wiki) {\n\treturn new MsDownloadSaver(wiki);\n};\n\n})();\n",
"title": "$:/core/modules/savers/msdownload.js",
"type": "application/javascript",
"module-type": "saver"
},
"$:/core/modules/savers/put.js": {
"text": "/*\\\ntitle: $:/core/modules/savers/put.js\ntype: application/javascript\nmodule-type: saver\n\nSaves wiki by performing a PUT request to the server\n\nWorks with any server which accepts a PUT request\nto the current URL, such as a WebDAV server.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nSelect the appropriate saver module and set it up\n*/\nvar PutSaver = function(wiki) {\n\tthis.wiki = wiki;\n\tvar self = this;\n\t// Async server probe. Until probe finishes, save will fail fast\n\t// See also https://github.com/Jermolene/TiddlyWiki5/issues/2276\n\tvar req = new XMLHttpRequest();\n\treq.open(\"OPTIONS\",encodeURI(document.location.protocol + \"//\" + document.location.hostname + \":\" + document.location.port + document.location.pathname));\n\treq.onload = function() {\n\t\t// Check DAV header http://www.webdav.org/specs/rfc2518.html#rfc.section.9.1\n\t\tself.serverAcceptsPuts = (this.status === 200 && !!this.getResponseHeader('dav'));\n\t};\n\treq.send();\n};\n\nPutSaver.prototype.save = function(text,method,callback) {\n\tif (!this.serverAcceptsPuts) {\n\t\treturn false;\n\t}\n\tvar req = new XMLHttpRequest();\n\t// TODO: store/check ETags if supported by server, to protect against overwrites\n\t// Prompt: Do you want to save over this? 
Y/N\n\t// Merging would be ideal, and may be possible using future generic merge flow\n\treq.onload = function() {\n\t\tif (this.status === 200 || this.status === 201) {\n\t\t\tcallback(null); // success\n\t\t}\n\t\telse {\n\t\t\tcallback(this.responseText); // fail\n\t\t}\n\t};\n\treq.open(\"PUT\", encodeURI(window.location.href));\n\treq.setRequestHeader(\"Content-Type\", \"text/html;charset=UTF-8\");\n\treq.send(text);\n\treturn true;\n};\n\n/*\nInformation about this saver\n*/\nPutSaver.prototype.info = {\n\tname: \"put\",\n\tpriority: 2000,\n\tcapabilities: [\"save\", \"autosave\"]\n};\n\n/*\nStatic method that returns true if this saver is capable of working\n*/\nexports.canSave = function(wiki) {\n\treturn /^https?:/.test(location.protocol);\n};\n\n/*\nCreate an instance of this saver\n*/\nexports.create = function(wiki) {\n\treturn new PutSaver(wiki);\n};\n\n})();\n",
"title": "$:/core/modules/savers/put.js",
"type": "application/javascript",
"module-type": "saver"
},
"$:/core/modules/savers/tiddlyfox.js": {
"text": "/*\\\ntitle: $:/core/modules/savers/tiddlyfox.js\ntype: application/javascript\nmodule-type: saver\n\nHandles saving changes via the TiddlyFox file extension\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false, netscape: false, Components: false */\n\"use strict\";\n\nvar TiddlyFoxSaver = function(wiki) {\n};\n\nTiddlyFoxSaver.prototype.save = function(text,method,callback) {\n\tvar messageBox = document.getElementById(\"tiddlyfox-message-box\");\n\tif(messageBox) {\n\t\t// Get the pathname of this document\n\t\tvar pathname = document.location.toString().split(\"#\")[0];\n\t\t// Replace file://localhost/ with file:///\n\t\tif(pathname.indexOf(\"file://localhost/\") === 0) {\n\t\t\tpathname = \"file://\" + pathname.substr(16);\n\t\t}\n\t\t// Windows path file:///x:/blah/blah --> x:\\blah\\blah\n\t\tif(/^file\\:\\/\\/\\/[A-Z]\\:\\//i.test(pathname)) {\n\t\t\t// Remove the leading slash and convert slashes to backslashes\n\t\t\tpathname = pathname.substr(8).replace(/\\//g,\"\\\\\");\n\t\t// Firefox Windows network path file://///server/share/blah/blah --> //server/share/blah/blah\n\t\t} else if(pathname.indexOf(\"file://///\") === 0) {\n\t\t\tpathname = \"\\\\\\\\\" + unescape(pathname.substr(10)).replace(/\\//g,\"\\\\\");\n\t\t// Mac/Unix local path file:///path/path --> /path/path\n\t\t} else if(pathname.indexOf(\"file:///\") === 0) {\n\t\t\tpathname = unescape(pathname.substr(7));\n\t\t// Mac/Unix local path file:/path/path --> /path/path\n\t\t} else if(pathname.indexOf(\"file:/\") === 0) {\n\t\t\tpathname = unescape(pathname.substr(5));\n\t\t// Otherwise Windows networth path file://server/share/path/path --> \\\\server\\share\\path\\path\n\t\t} else {\n\t\t\tpathname = \"\\\\\\\\\" + unescape(pathname.substr(7)).replace(new RegExp(\"/\",\"g\"),\"\\\\\");\n\t\t}\n\t\t// Create the message element and put it in the message box\n\t\tvar message = 
document.createElement(\"div\");\n\t\tmessage.setAttribute(\"data-tiddlyfox-path\",decodeURIComponent(pathname));\n\t\tmessage.setAttribute(\"data-tiddlyfox-content\",text);\n\t\tmessageBox.appendChild(message);\n\t\t// Add an event handler for when the file has been saved\n\t\tmessage.addEventListener(\"tiddlyfox-have-saved-file\",function(event) {\n\t\t\tcallback(null);\n\t\t}, false);\n\t\t// Create and dispatch the custom event to the extension\n\t\tvar event = document.createEvent(\"Events\");\n\t\tevent.initEvent(\"tiddlyfox-save-file\",true,false);\n\t\tmessage.dispatchEvent(event);\n\t\treturn true;\n\t} else {\n\t\treturn false;\n\t}\n};\n\n/*\nInformation about this saver\n*/\nTiddlyFoxSaver.prototype.info = {\n\tname: \"tiddlyfox\",\n\tpriority: 1500,\n\tcapabilities: [\"save\", \"autosave\"]\n};\n\n/*\nStatic method that returns true if this saver is capable of working\n*/\nexports.canSave = function(wiki) {\n\treturn (window.location.protocol === \"file:\");\n};\n\n/*\nCreate an instance of this saver\n*/\nexports.create = function(wiki) {\n\treturn new TiddlyFoxSaver(wiki);\n};\n\n})();\n",
"title": "$:/core/modules/savers/tiddlyfox.js",
"type": "application/javascript",
"module-type": "saver"
},
"$:/core/modules/savers/tiddlyie.js": {
"text": "/*\\\ntitle: $:/core/modules/savers/tiddlyie.js\ntype: application/javascript\nmodule-type: saver\n\nHandles saving changes via Internet Explorer BHO extenion (TiddlyIE)\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nSelect the appropriate saver module and set it up\n*/\nvar TiddlyIESaver = function(wiki) {\n};\n\nTiddlyIESaver.prototype.save = function(text,method,callback) {\n\t// Check existence of TiddlyIE BHO extension (note: only works after document is complete)\n\tif(typeof(window.TiddlyIE) != \"undefined\") {\n\t\t// Get the pathname of this document\n\t\tvar pathname = unescape(document.location.pathname);\n\t\t// Test for a Windows path of the form /x:/blah...\n\t\tif(/^\\/[A-Z]\\:\\/[^\\/]+/i.test(pathname)) {\t// ie: ^/[a-z]:/[^/]+ (is this better?: ^/[a-z]:/[^/]+(/[^/]+)*\\.[^/]+ )\n\t\t\t// Remove the leading slash\n\t\t\tpathname = pathname.substr(1);\n\t\t\t// Convert slashes to backslashes\n\t\t\tpathname = pathname.replace(/\\//g,\"\\\\\");\n\t\t} else if(document.hostname !== \"\" && /^\\/[^\\/]+\\/[^\\/]+/i.test(pathname)) {\t// test for \\\\server\\share\\blah... 
- ^/[^/]+/[^/]+\n\t\t\t// Convert slashes to backslashes\n\t\t\tpathname = pathname.replace(/\\//g,\"\\\\\");\n\t\t\t// reconstruct UNC path\n\t\t\tpathname = \"\\\\\\\\\" + document.location.hostname + pathname;\n\t\t} else return false;\n\t\t// Prompt the user to save the file\n\t\twindow.TiddlyIE.save(pathname, text);\n\t\t// Callback that we succeeded\n\t\tcallback(null);\n\t\treturn true;\n\t} else {\n\t\treturn false;\n\t}\n};\n\n/*\nInformation about this saver\n*/\nTiddlyIESaver.prototype.info = {\n\tname: \"tiddlyiesaver\",\n\tpriority: 1500,\n\tcapabilities: [\"save\"]\n};\n\n/*\nStatic method that returns true if this saver is capable of working\n*/\nexports.canSave = function(wiki) {\n\treturn (window.location.protocol === \"file:\");\n};\n\n/*\nCreate an instance of this saver\n*/\nexports.create = function(wiki) {\n\treturn new TiddlyIESaver(wiki);\n};\n\n})();\n",
"title": "$:/core/modules/savers/tiddlyie.js",
"type": "application/javascript",
"module-type": "saver"
},
"$:/core/modules/savers/twedit.js": {
"text": "/*\\\ntitle: $:/core/modules/savers/twedit.js\ntype: application/javascript\nmodule-type: saver\n\nHandles saving changes via the TWEdit iOS app\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false, netscape: false, Components: false */\n\"use strict\";\n\nvar TWEditSaver = function(wiki) {\n};\n\nTWEditSaver.prototype.save = function(text,method,callback) {\n\t// Bail if we're not running under TWEdit\n\tif(typeof DeviceInfo !== \"object\") {\n\t\treturn false;\n\t}\n\t// Get the pathname of this document\n\tvar pathname = decodeURIComponent(document.location.pathname);\n\t// Strip any query or location part\n\tvar p = pathname.indexOf(\"?\");\n\tif(p !== -1) {\n\t\tpathname = pathname.substr(0,p);\n\t}\n\tp = pathname.indexOf(\"#\");\n\tif(p !== -1) {\n\t\tpathname = pathname.substr(0,p);\n\t}\n\t// Remove the leading \"/Documents\" from path\n\tvar prefix = \"/Documents\";\n\tif(pathname.indexOf(prefix) === 0) {\n\t\tpathname = pathname.substr(prefix.length);\n\t}\n\t// Error handler\n\tvar errorHandler = function(event) {\n\t\t// Error\n\t\tcallback($tw.language.getString(\"Error/SavingToTWEdit\") + \": \" + event.target.error.code);\n\t};\n\t// Get the file system\n\twindow.requestFileSystem(LocalFileSystem.PERSISTENT,0,function(fileSystem) {\n\t\t// Now we've got the filesystem, get the fileEntry\n\t\tfileSystem.root.getFile(pathname, {create: true}, function(fileEntry) {\n\t\t\t// Now we've got the fileEntry, create the writer\n\t\t\tfileEntry.createWriter(function(writer) {\n\t\t\t\twriter.onerror = errorHandler;\n\t\t\t\twriter.onwrite = function() {\n\t\t\t\t\tcallback(null);\n\t\t\t\t};\n\t\t\t\twriter.position = 0;\n\t\t\t\twriter.write(text);\n\t\t\t},errorHandler);\n\t\t}, errorHandler);\n\t}, errorHandler);\n\treturn true;\n};\n\n/*\nInformation about this saver\n*/\nTWEditSaver.prototype.info = {\n\tname: \"twedit\",\n\tpriority: 1600,\n\tcapabilities: [\"save\", \"autosave\"]\n};\n\n/*\nStatic method that 
returns true if this saver is capable of working\n*/\nexports.canSave = function(wiki) {\n\treturn true;\n};\n\n/*\nCreate an instance of this saver\n*/\nexports.create = function(wiki) {\n\treturn new TWEditSaver(wiki);\n};\n\n/////////////////////////// Hack\n// HACK: This ensures that TWEdit recognises us as a TiddlyWiki document\nif($tw.browser) {\n\twindow.version = {title: \"TiddlyWiki\"};\n}\n\n})();\n",
"title": "$:/core/modules/savers/twedit.js",
"type": "application/javascript",
"module-type": "saver"
},
"$:/core/modules/savers/upload.js": {
"text": "/*\\\ntitle: $:/core/modules/savers/upload.js\ntype: application/javascript\nmodule-type: saver\n\nHandles saving changes via upload to a server.\n\nDesigned to be compatible with BidiX's UploadPlugin at http://tiddlywiki.bidix.info/#UploadPlugin\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nSelect the appropriate saver module and set it up\n*/\nvar UploadSaver = function(wiki) {\n\tthis.wiki = wiki;\n};\n\nUploadSaver.prototype.save = function(text,method,callback) {\n\t// Get the various parameters we need\n\tvar backupDir = this.wiki.getTextReference(\"$:/UploadBackupDir\") || \".\",\n\t\tusername = this.wiki.getTextReference(\"$:/UploadName\"),\n\t\tpassword = $tw.utils.getPassword(\"upload\"),\n\t\tuploadDir = this.wiki.getTextReference(\"$:/UploadDir\") || \".\",\n\t\tuploadFilename = this.wiki.getTextReference(\"$:/UploadFilename\") || \"index.html\",\n\t\turl = this.wiki.getTextReference(\"$:/UploadURL\");\n\t// Bail out if we don't have the bits we need\n\tif(!username || username.toString().trim() === \"\" || !password || password.toString().trim() === \"\") {\n\t\treturn false;\n\t}\n\t// Construct the url if not provided\n\tif(!url) {\n\t\turl = \"http://\" + username + \".tiddlyspot.com/store.cgi\";\n\t}\n\t// Assemble the header\n\tvar boundary = \"---------------------------\" + \"AaB03x\";\t\n\tvar uploadFormName = \"UploadPlugin\";\n\tvar head = [];\n\thead.push(\"--\" + boundary + \"\\r\\nContent-disposition: form-data; name=\\\"UploadPlugin\\\"\\r\\n\");\n\thead.push(\"backupDir=\" + backupDir + \";user=\" + username + \";password=\" + password + \";uploaddir=\" + uploadDir + \";;\"); \n\thead.push(\"\\r\\n\" + \"--\" + boundary);\n\thead.push(\"Content-disposition: form-data; name=\\\"userfile\\\"; filename=\\\"\" + uploadFilename + \"\\\"\");\n\thead.push(\"Content-Type: text/html;charset=UTF-8\");\n\thead.push(\"Content-Length: \" + text.length + 
\"\\r\\n\");\n\thead.push(\"\");\n\t// Assemble the tail and the data itself\n\tvar tail = \"\\r\\n--\" + boundary + \"--\\r\\n\",\n\t\tdata = head.join(\"\\r\\n\") + text + tail;\n\t// Do the HTTP post\n\tvar http = new XMLHttpRequest();\n\thttp.open(\"POST\",url,true,username,password);\n\thttp.setRequestHeader(\"Content-Type\",\"multipart/form-data; charset=UTF-8; boundary=\" + boundary);\n\thttp.onreadystatechange = function() {\n\t\tif(http.readyState == 4 && http.status == 200) {\n\t\t\tif(http.responseText.substr(0,4) === \"0 - \") {\n\t\t\t\tcallback(null);\n\t\t\t} else {\n\t\t\t\tcallback(http.responseText);\n\t\t\t}\n\t\t}\n\t};\n\ttry {\n\t\thttp.send(data);\n\t} catch(ex) {\n\t\treturn callback($tw.language.getString(\"Error/Caption\") + \":\" + ex);\n\t}\n\t$tw.notifier.display(\"$:/language/Notifications/Save/Starting\");\n\treturn true;\n};\n\n/*\nInformation about this saver\n*/\nUploadSaver.prototype.info = {\n\tname: \"upload\",\n\tpriority: 2000,\n\tcapabilities: [\"save\", \"autosave\"]\n};\n\n/*\nStatic method that returns true if this saver is capable of working\n*/\nexports.canSave = function(wiki) {\n\treturn true;\n};\n\n/*\nCreate an instance of this saver\n*/\nexports.create = function(wiki) {\n\treturn new UploadSaver(wiki);\n};\n\n})();\n",
"title": "$:/core/modules/savers/upload.js",
"type": "application/javascript",
"module-type": "saver"
},
"$:/core/modules/browser-messaging.js": {
"text": "/*\\\ntitle: $:/core/modules/browser-messaging.js\ntype: application/javascript\nmodule-type: startup\n\nBrowser message handling\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"browser-messaging\";\nexports.platforms = [\"browser\"];\nexports.after = [\"startup\"];\nexports.synchronous = true;\n\n/*\nLoad a specified url as an iframe and call the callback when it is loaded. If the url is already loaded then the existing iframe instance is used\n*/\nfunction loadIFrame(url,callback) {\n\t// Check if iframe already exists\n\tvar iframeInfo = $tw.browserMessaging.iframeInfoMap[url];\n\tif(iframeInfo) {\n\t\t// We've already got the iframe\n\t\tcallback(null,iframeInfo);\n\t} else {\n\t\t// Create the iframe and save it in the list\n\t\tvar iframe = document.createElement(\"iframe\"),\n\t\t\tiframeInfo = {\n\t\t\t\turl: url,\n\t\t\t\tstatus: \"loading\",\n\t\t\t\tdomNode: iframe\n\t\t\t};\n\t\t$tw.browserMessaging.iframeInfoMap[url] = iframeInfo;\n\t\tsaveIFrameInfoTiddler(iframeInfo);\n\t\t// Add the iframe to the DOM and hide it\n\t\tiframe.style.display = \"none\";\n\t\tdocument.body.appendChild(iframe);\n\t\t// Set up onload\n\t\tiframe.onload = function() {\n\t\t\tiframeInfo.status = \"loaded\";\n\t\t\tsaveIFrameInfoTiddler(iframeInfo);\n\t\t\tcallback(null,iframeInfo);\n\t\t};\n\t\tiframe.onerror = function() {\n\t\t\tcallback(\"Cannot load iframe\");\n\t\t};\n\t\ttry {\n\t\t\tiframe.src = url;\n\t\t} catch(ex) {\n\t\t\tcallback(ex);\n\t\t}\n\t}\n}\n\nfunction saveIFrameInfoTiddler(iframeInfo) {\n\t$tw.wiki.addTiddler(new $tw.Tiddler($tw.wiki.getCreationFields(),{\n\t\ttitle: \"$:/temp/ServerConnection/\" + iframeInfo.url,\n\t\ttext: iframeInfo.status,\n\t\ttags: [\"$:/tags/ServerConnection\"],\n\t\turl: iframeInfo.url\n\t},$tw.wiki.getModificationFields()));\n}\n\nexports.startup = function() {\n\t// Initialise the store of iframes we've 
created\n\t$tw.browserMessaging = {\n\t\tiframeInfoMap: {} // Hashmap by URL of {url:,status:\"loading/loaded\",domNode:}\n\t};\n\t// Listen for widget messages to control loading the plugin library\n\t$tw.rootWidget.addEventListener(\"tm-load-plugin-library\",function(event) {\n\t\tvar paramObject = event.paramObject || {},\n\t\t\turl = paramObject.url;\n\t\tif(url) {\n\t\t\tloadIFrame(url,function(err,iframeInfo) {\n\t\t\t\tif(err) {\n\t\t\t\t\talert($tw.language.getString(\"Error/LoadingPluginLibrary\") + \": \" + url);\n\t\t\t\t} else {\n\t\t\t\t\tiframeInfo.domNode.contentWindow.postMessage({\n\t\t\t\t\t\tverb: \"GET\",\n\t\t\t\t\t\turl: \"recipes/library/tiddlers.json\",\n\t\t\t\t\t\tcookies: {\n\t\t\t\t\t\t\ttype: \"save-info\",\n\t\t\t\t\t\t\tinfoTitlePrefix: paramObject.infoTitlePrefix || \"$:/temp/RemoteAssetInfo/\",\n\t\t\t\t\t\t\turl: url\n\t\t\t\t\t\t}\n\t\t\t\t\t},\"*\");\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\t});\n\t$tw.rootWidget.addEventListener(\"tm-load-plugin-from-library\",function(event) {\n\t\tvar paramObject = event.paramObject || {},\n\t\t\turl = paramObject.url,\n\t\t\ttitle = paramObject.title;\n\t\tif(url && title) {\n\t\t\tloadIFrame(url,function(err,iframeInfo) {\n\t\t\t\tif(err) {\n\t\t\t\t\talert($tw.language.getString(\"Error/LoadingPluginLibrary\") + \": \" + url);\n\t\t\t\t} else {\n\t\t\t\t\tiframeInfo.domNode.contentWindow.postMessage({\n\t\t\t\t\t\tverb: \"GET\",\n\t\t\t\t\t\turl: \"recipes/library/tiddlers/\" + encodeURIComponent(title) + \".json\",\n\t\t\t\t\t\tcookies: {\n\t\t\t\t\t\t\ttype: \"save-tiddler\",\n\t\t\t\t\t\t\turl: url\n\t\t\t\t\t\t}\n\t\t\t\t\t},\"*\");\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\t});\n\t// Listen for window messages from other windows\n\twindow.addEventListener(\"message\",function listener(event){\n\t\tconsole.log(\"browser-messaging: \",document.location.toString())\n\t\tconsole.log(\"browser-messaging: Received message from\",event.origin);\n\t\tconsole.log(\"browser-messaging: Message 
content\",event.data);\n\t\tswitch(event.data.verb) {\n\t\t\tcase \"GET-RESPONSE\":\n\t\t\t\tif(event.data.status.charAt(0) === \"2\") {\n\t\t\t\t\tif(event.data.cookies) {\n\t\t\t\t\t\tif(event.data.cookies.type === \"save-info\") {\n\t\t\t\t\t\t\tvar tiddlers = JSON.parse(event.data.body);\n\t\t\t\t\t\t\t$tw.utils.each(tiddlers,function(tiddler) {\n\t\t\t\t\t\t\t\t$tw.wiki.addTiddler(new $tw.Tiddler($tw.wiki.getCreationFields(),tiddler,{\n\t\t\t\t\t\t\t\t\ttitle: event.data.cookies.infoTitlePrefix + event.data.cookies.url + \"/\" + tiddler.title,\n\t\t\t\t\t\t\t\t\t\"original-title\": tiddler.title,\n\t\t\t\t\t\t\t\t\ttext: \"\",\n\t\t\t\t\t\t\t\t\ttype: \"text/vnd.tiddlywiki\",\n\t\t\t\t\t\t\t\t\t\"original-type\": tiddler.type,\n\t\t\t\t\t\t\t\t\t\"plugin-type\": undefined,\n\t\t\t\t\t\t\t\t\t\"original-plugin-type\": tiddler[\"plugin-type\"],\n\t\t\t\t\t\t\t\t\t\"module-type\": undefined,\n\t\t\t\t\t\t\t\t\t\"original-module-type\": tiddler[\"module-type\"],\n\t\t\t\t\t\t\t\t\ttags: [\"$:/tags/RemoteAssetInfo\"],\n\t\t\t\t\t\t\t\t\t\"original-tags\": $tw.utils.stringifyList(tiddler.tags || []),\n\t\t\t\t\t\t\t\t\t\"server-url\": event.data.cookies.url\n\t\t\t\t\t\t\t\t},$tw.wiki.getModificationFields()));\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t} else if(event.data.cookies.type === \"save-tiddler\") {\n\t\t\t\t\t\t\tvar tiddler = JSON.parse(event.data.body);\n\t\t\t\t\t\t\t$tw.wiki.addTiddler(new $tw.Tiddler(tiddler));\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t}\n\t},false);\n};\n\n})();\n",
"title": "$:/core/modules/browser-messaging.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/startup/commands.js": {
"text": "/*\\\ntitle: $:/core/modules/startup/commands.js\ntype: application/javascript\nmodule-type: startup\n\nCommand processing\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"commands\";\nexports.platforms = [\"node\"];\nexports.after = [\"story\"];\nexports.synchronous = false;\n\nexports.startup = function(callback) {\n\t// On the server, start a commander with the command line arguments\n\tvar commander = new $tw.Commander(\n\t\t$tw.boot.argv,\n\t\tfunction(err) {\n\t\t\tif(err) {\n\t\t\t\treturn $tw.utils.error(\"Error: \" + err);\n\t\t\t}\n\t\t\tcallback();\n\t\t},\n\t\t$tw.wiki,\n\t\t{output: process.stdout, error: process.stderr}\n\t);\n\tcommander.execute();\n};\n\n})();\n",
"title": "$:/core/modules/startup/commands.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/startup/favicon.js": {
"text": "/*\\\ntitle: $:/core/modules/startup/favicon.js\ntype: application/javascript\nmodule-type: startup\n\nFavicon handling\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"favicon\";\nexports.platforms = [\"browser\"];\nexports.after = [\"startup\"];\nexports.synchronous = true;\n\t\t\n// Favicon tiddler\nvar FAVICON_TITLE = \"$:/favicon.ico\";\n\nexports.startup = function() {\n\t// Set up the favicon\n\tsetFavicon();\n\t// Reset the favicon when the tiddler changes\n\t$tw.wiki.addEventListener(\"change\",function(changes) {\n\t\tif($tw.utils.hop(changes,FAVICON_TITLE)) {\n\t\t\tsetFavicon();\n\t\t}\n\t});\n};\n\nfunction setFavicon() {\n\tvar tiddler = $tw.wiki.getTiddler(FAVICON_TITLE);\n\tif(tiddler) {\n\t\tvar faviconLink = document.getElementById(\"faviconLink\");\n\t\tfaviconLink.setAttribute(\"href\",\"data:\" + tiddler.fields.type + \";base64,\" + tiddler.fields.text);\n\t}\n}\n\n})();\n",
"title": "$:/core/modules/startup/favicon.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/startup/info.js": {
"text": "/*\\\ntitle: $:/core/modules/startup/info.js\ntype: application/javascript\nmodule-type: startup\n\nInitialise $:/info tiddlers via $:/temp/info-plugin pseudo-plugin\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"info\";\nexports.before = [\"startup\"];\nexports.after = [\"load-modules\"];\nexports.synchronous = true;\n\nexports.startup = function() {\n\t// Collect up the info tiddlers\n\tvar infoTiddlerFields = {};\n\t// Give each info module a chance to fill in as many info tiddlers as they want\n\t$tw.modules.forEachModuleOfType(\"info\",function(title,moduleExports) {\n\t\tif(moduleExports && moduleExports.getInfoTiddlerFields) {\n\t\t\tvar tiddlerFieldsArray = moduleExports.getInfoTiddlerFields(infoTiddlerFields);\n\t\t\t$tw.utils.each(tiddlerFieldsArray,function(fields) {\n\t\t\t\tif(fields) {\n\t\t\t\t\tinfoTiddlerFields[fields.title] = fields;\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\t});\n\t// Bake the info tiddlers into a plugin\n\tvar fields = {\n\t\ttitle: \"$:/temp/info-plugin\",\n\t\ttype: \"application/json\",\n\t\t\"plugin-type\": \"info\",\n\t\ttext: JSON.stringify({tiddlers: infoTiddlerFields},null,$tw.config.preferences.jsonSpaces)\n\t};\n\t$tw.wiki.addTiddler(new $tw.Tiddler(fields));\n\t$tw.wiki.readPluginInfo();\n\t$tw.wiki.registerPluginTiddlers(\"info\");\n\t$tw.wiki.unpackPluginTiddlers();\n};\n\n})();\n",
"title": "$:/core/modules/startup/info.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/startup/load-modules.js": {
"text": "/*\\\ntitle: $:/core/modules/startup/load-modules.js\ntype: application/javascript\nmodule-type: startup\n\nLoad core modules\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"load-modules\";\nexports.synchronous = true;\n\nexports.startup = function() {\n\t// Load modules\n\t$tw.modules.applyMethods(\"utils\",$tw.utils);\n\tif($tw.node) {\n\t\t$tw.modules.applyMethods(\"utils-node\",$tw.utils);\n\t}\n\t$tw.modules.applyMethods(\"global\",$tw);\n\t$tw.modules.applyMethods(\"config\",$tw.config);\n\t$tw.Tiddler.fieldModules = $tw.modules.getModulesByTypeAsHashmap(\"tiddlerfield\");\n\t$tw.modules.applyMethods(\"tiddlermethod\",$tw.Tiddler.prototype);\n\t$tw.modules.applyMethods(\"wikimethod\",$tw.Wiki.prototype);\n\t$tw.modules.applyMethods(\"tiddlerdeserializer\",$tw.Wiki.tiddlerDeserializerModules);\n\t$tw.macros = $tw.modules.getModulesByTypeAsHashmap(\"macro\");\n\t$tw.wiki.initParsers();\n\t$tw.Commander.initCommands();\n};\n\n})();\n",
"title": "$:/core/modules/startup/load-modules.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/startup/password.js": {
"text": "/*\\\ntitle: $:/core/modules/startup/password.js\ntype: application/javascript\nmodule-type: startup\n\nPassword handling\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"password\";\nexports.platforms = [\"browser\"];\nexports.after = [\"startup\"];\nexports.synchronous = true;\n\nexports.startup = function() {\n\t$tw.rootWidget.addEventListener(\"tm-set-password\",function(event) {\n\t\t$tw.passwordPrompt.createPrompt({\n\t\t\tserviceName: $tw.language.getString(\"Encryption/PromptSetPassword\"),\n\t\t\tnoUserName: true,\n\t\t\tsubmitText: $tw.language.getString(\"Encryption/SetPassword\"),\n\t\t\tcanCancel: true,\n\t\t\trepeatPassword: true,\n\t\t\tcallback: function(data) {\n\t\t\t\tif(data) {\n\t\t\t\t\t$tw.crypto.setPassword(data.password);\n\t\t\t\t}\n\t\t\t\treturn true; // Get rid of the password prompt\n\t\t\t}\n\t\t});\n\t});\n\t$tw.rootWidget.addEventListener(\"tm-clear-password\",function(event) {\n\t\tif($tw.browser) {\n\t\t\tif(!confirm($tw.language.getString(\"Encryption/ConfirmClearPassword\"))) {\n\t\t\t\treturn;\n\t\t\t}\n\t\t}\n\t\t$tw.crypto.setPassword(null);\n\t});\n\t// Ensure that $:/isEncrypted is maintained properly\n\t$tw.wiki.addEventListener(\"change\",function(changes) {\n\t\tif($tw.utils.hop(changes,\"$:/isEncrypted\")) {\n\t\t\t$tw.crypto.updateCryptoStateTiddler();\n\t\t}\n\t});\n};\n\n})();\n",
"title": "$:/core/modules/startup/password.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/startup/render.js": {
"text": "/*\\\ntitle: $:/core/modules/startup/render.js\ntype: application/javascript\nmodule-type: startup\n\nTitle, stylesheet and page rendering\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"render\";\nexports.platforms = [\"browser\"];\nexports.after = [\"story\"];\nexports.synchronous = true;\n\n// Default story and history lists\nvar PAGE_TITLE_TITLE = \"$:/core/wiki/title\";\nvar PAGE_STYLESHEET_TITLE = \"$:/core/ui/PageStylesheet\";\nvar PAGE_TEMPLATE_TITLE = \"$:/core/ui/PageTemplate\";\n\n// Time (in ms) that we defer refreshing changes to draft tiddlers\nvar DRAFT_TIDDLER_TIMEOUT_TITLE = \"$:/config/Drafts/TypingTimeout\";\nvar DRAFT_TIDDLER_TIMEOUT = 400;\n\nexports.startup = function() {\n\t// Set up the title\n\t$tw.titleWidgetNode = $tw.wiki.makeTranscludeWidget(PAGE_TITLE_TITLE,{document: $tw.fakeDocument, parseAsInline: true});\n\t$tw.titleContainer = $tw.fakeDocument.createElement(\"div\");\n\t$tw.titleWidgetNode.render($tw.titleContainer,null);\n\tdocument.title = $tw.titleContainer.textContent;\n\t$tw.wiki.addEventListener(\"change\",function(changes) {\n\t\tif($tw.titleWidgetNode.refresh(changes,$tw.titleContainer,null)) {\n\t\t\tdocument.title = $tw.titleContainer.textContent;\n\t\t}\n\t});\n\t// Set up the styles\n\t$tw.styleWidgetNode = $tw.wiki.makeTranscludeWidget(PAGE_STYLESHEET_TITLE,{document: $tw.fakeDocument});\n\t$tw.styleContainer = $tw.fakeDocument.createElement(\"style\");\n\t$tw.styleWidgetNode.render($tw.styleContainer,null);\n\t$tw.styleElement = document.createElement(\"style\");\n\t$tw.styleElement.innerHTML = $tw.styleContainer.textContent;\n\tdocument.head.insertBefore($tw.styleElement,document.head.firstChild);\n\t$tw.wiki.addEventListener(\"change\",$tw.perf.report(\"styleRefresh\",function(changes) {\n\t\tif($tw.styleWidgetNode.refresh(changes,$tw.styleContainer,null)) {\n\t\t\t$tw.styleElement.innerHTML = 
$tw.styleContainer.textContent;\n\t\t}\n\t}));\n\t// Display the $:/core/ui/PageTemplate tiddler to kick off the display\n\t$tw.perf.report(\"mainRender\",function() {\n\t\t$tw.pageWidgetNode = $tw.wiki.makeTranscludeWidget(PAGE_TEMPLATE_TITLE,{document: document, parentWidget: $tw.rootWidget});\n\t\t$tw.pageContainer = document.createElement(\"div\");\n\t\t$tw.utils.addClass($tw.pageContainer,\"tc-page-container-wrapper\");\n\t\tdocument.body.insertBefore($tw.pageContainer,document.body.firstChild);\n\t\t$tw.pageWidgetNode.render($tw.pageContainer,null);\n\t})();\n\t// Prepare refresh mechanism\n\tvar deferredChanges = Object.create(null),\n\t\ttimerId;\n\tfunction refresh() {\n\t\t// Process the refresh\n\t\t$tw.pageWidgetNode.refresh(deferredChanges);\n\t\tdeferredChanges = Object.create(null);\n\t}\n\t// Add the change event handler\n\t$tw.wiki.addEventListener(\"change\",$tw.perf.report(\"mainRefresh\",function(changes) {\n\t\t// Check if only drafts have changed\n\t\tvar onlyDraftsHaveChanged = true;\n\t\tfor(var title in changes) {\n\t\t\tvar tiddler = $tw.wiki.getTiddler(title);\n\t\t\tif(!tiddler || !tiddler.hasField(\"draft.of\")) {\n\t\t\t\tonlyDraftsHaveChanged = false;\n\t\t\t}\n\t\t}\n\t\t// Defer the change if only drafts have changed\n\t\tif(timerId) {\n\t\t\tclearTimeout(timerId);\n\t\t}\n\t\ttimerId = null;\n\t\tif(onlyDraftsHaveChanged) {\n\t\t\tvar timeout = parseInt($tw.wiki.getTiddlerText(DRAFT_TIDDLER_TIMEOUT_TITLE,\"\"),10);\n\t\t\tif(isNaN(timeout)) {\n\t\t\t\ttimeout = DRAFT_TIDDLER_TIMEOUT;\n\t\t\t}\n\t\t\ttimerId = setTimeout(refresh,timeout);\n\t\t\t$tw.utils.extend(deferredChanges,changes);\n\t\t} else {\n\t\t\t$tw.utils.extend(deferredChanges,changes);\n\t\t\trefresh();\n\t\t}\n\t}));\n\t// Fix up the link between the root widget and the page container\n\t$tw.rootWidget.domNodes = [$tw.pageContainer];\n\t$tw.rootWidget.children = [$tw.pageWidgetNode];\n};\n\n})();\n",
"title": "$:/core/modules/startup/render.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/startup/rootwidget.js": {
"text": "/*\\\ntitle: $:/core/modules/startup/rootwidget.js\ntype: application/javascript\nmodule-type: startup\n\nSetup the root widget and the core root widget handlers\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"rootwidget\";\nexports.platforms = [\"browser\"];\nexports.after = [\"startup\"];\nexports.before = [\"story\"];\nexports.synchronous = true;\n\nexports.startup = function() {\n\t// Install the modal message mechanism\n\t$tw.modal = new $tw.utils.Modal($tw.wiki);\n\t$tw.rootWidget.addEventListener(\"tm-modal\",function(event) {\n\t\t$tw.modal.display(event.param,{variables: event.paramObject});\n\t});\n\t// Install the notification mechanism\n\t$tw.notifier = new $tw.utils.Notifier($tw.wiki);\n\t$tw.rootWidget.addEventListener(\"tm-notify\",function(event) {\n\t\t$tw.notifier.display(event.param,{variables: event.paramObject});\n\t});\n\t// Install the scroller\n\t$tw.pageScroller = new $tw.utils.PageScroller();\n\t$tw.rootWidget.addEventListener(\"tm-scroll\",function(event) {\n\t\t$tw.pageScroller.handleEvent(event);\n\t});\n\tvar fullscreen = $tw.utils.getFullScreenApis();\n\tif(fullscreen) {\n\t\t$tw.rootWidget.addEventListener(\"tm-full-screen\",function(event) {\n\t\t\tif(document[fullscreen._fullscreenElement]) {\n\t\t\t\tdocument[fullscreen._exitFullscreen]();\n\t\t\t} else {\n\t\t\t\tdocument.documentElement[fullscreen._requestFullscreen](Element.ALLOW_KEYBOARD_INPUT);\n\t\t\t}\n\t\t});\n\t}\n\t// If we're being viewed on a data: URI then give instructions for how to save\n\tif(document.location.protocol === \"data:\") {\n\t\t$tw.rootWidget.dispatchEvent({\n\t\t\ttype: \"tm-modal\",\n\t\t\tparam: \"$:/language/Modals/SaveInstructions\"\n\t\t});\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/startup/rootwidget.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/startup.js": {
"text": "/*\\\ntitle: $:/core/modules/startup.js\ntype: application/javascript\nmodule-type: startup\n\nMiscellaneous startup logic for both the client and server.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"startup\";\nexports.after = [\"load-modules\"];\nexports.synchronous = true;\n\n// Set to `true` to enable performance instrumentation\nvar PERFORMANCE_INSTRUMENTATION_CONFIG_TITLE = \"$:/config/Performance/Instrumentation\";\n\nvar widget = require(\"$:/core/modules/widgets/widget.js\");\n\nexports.startup = function() {\n\tvar modules,n,m,f;\n\t// Minimal browser detection\n\tif($tw.browser) {\n\t\t$tw.browser.isIE = (/msie|trident/i.test(navigator.userAgent));\n\t\t$tw.browser.isFirefox = !!document.mozFullScreenEnabled;\n\t}\n\t// Platform detection\n\t$tw.platform = {};\n\tif($tw.browser) {\n\t\t$tw.platform.isMac = /Mac/.test(navigator.platform);\n\t\t$tw.platform.isWindows = /win/i.test(navigator.platform);\n\t\t$tw.platform.isLinux = /Linux/i.test(navigator.appVersion);\n\t} else {\n\t\tswitch(require(\"os\").platform()) {\n\t\t\tcase \"darwin\":\n\t\t\t\t$tw.platform.isMac = true;\n\t\t\t\tbreak;\n\t\t\tcase \"win32\":\n\t\t\t\t$tw.platform.isWindows = true;\n\t\t\t\tbreak;\n\t\t\tcase \"freebsd\":\n\t\t\t\t$tw.platform.isLinux = true;\n\t\t\t\tbreak;\n\t\t\tcase \"linux\":\n\t\t\t\t$tw.platform.isLinux = true;\n\t\t\t\tbreak;\n\t\t}\n\t}\n\t// Initialise version\n\t$tw.version = $tw.utils.extractVersionInfo();\n\t// Set up the performance framework\n\t$tw.perf = new $tw.Performance($tw.wiki.getTiddlerText(PERFORMANCE_INSTRUMENTATION_CONFIG_TITLE,\"no\") === \"yes\");\n\t// Kick off the language manager and switcher\n\t$tw.language = new $tw.Language();\n\t$tw.languageSwitcher = new $tw.PluginSwitcher({\n\t\twiki: $tw.wiki,\n\t\tpluginType: \"language\",\n\t\tcontrollerTitle: \"$:/language\",\n\t\tdefaultPlugins: 
[\n\t\t\t\"$:/languages/en-US\"\n\t\t]\n\t});\n\t// Kick off the theme manager\n\t$tw.themeManager = new $tw.PluginSwitcher({\n\t\twiki: $tw.wiki,\n\t\tpluginType: \"theme\",\n\t\tcontrollerTitle: \"$:/theme\",\n\t\tdefaultPlugins: [\n\t\t\t\"$:/themes/tiddlywiki/snowwhite\",\n\t\t\t\"$:/themes/tiddlywiki/vanilla\"\n\t\t]\n\t});\n\t// Kick off the keyboard manager\n\t$tw.keyboardManager = new $tw.KeyboardManager();\n\t// Clear outstanding tiddler store change events to avoid an unnecessary refresh cycle at startup\n\t$tw.wiki.clearTiddlerEventQueue();\n\t// Create a root widget for attaching event handlers. By using it as the parentWidget for another widget tree, one can reuse the event handlers\n\tif($tw.browser) {\n\t\t$tw.rootWidget = new widget.widget({\n\t\t\ttype: \"widget\",\n\t\t\tchildren: []\n\t\t},{\n\t\t\twiki: $tw.wiki,\n\t\t\tdocument: document\n\t\t});\n\t}\n\t// Find a working syncadaptor\n\t$tw.syncadaptor = undefined;\n\t$tw.modules.forEachModuleOfType(\"syncadaptor\",function(title,module) {\n\t\tif(!$tw.syncadaptor && module.adaptorClass) {\n\t\t\t$tw.syncadaptor = new module.adaptorClass({wiki: $tw.wiki});\n\t\t}\n\t});\n\t// Set up the syncer object if we've got a syncadaptor\n\tif($tw.syncadaptor) {\n\t\t$tw.syncer = new $tw.Syncer({wiki: $tw.wiki, syncadaptor: $tw.syncadaptor});\n\t} \n\t// Setup the saver handler\n\t$tw.saverHandler = new $tw.SaverHandler({wiki: $tw.wiki, dirtyTracking: !$tw.syncadaptor});\n\t// Host-specific startup\n\tif($tw.browser) {\n\t\t// Install the popup manager\n\t\t$tw.popup = new $tw.utils.Popup();\n\t\t// Install the animator\n\t\t$tw.anim = new $tw.utils.Animator();\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/startup.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/startup/story.js": {
"text": "/*\\\ntitle: $:/core/modules/startup/story.js\ntype: application/javascript\nmodule-type: startup\n\nLoad core modules\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"story\";\nexports.after = [\"startup\"];\nexports.synchronous = true;\n\n// Default story and history lists\nvar DEFAULT_STORY_TITLE = \"$:/StoryList\";\nvar DEFAULT_HISTORY_TITLE = \"$:/HistoryList\";\n\n// Default tiddlers\nvar DEFAULT_TIDDLERS_TITLE = \"$:/DefaultTiddlers\";\n\n// Config\nvar CONFIG_UPDATE_ADDRESS_BAR = \"$:/config/Navigation/UpdateAddressBar\"; // Can be \"no\", \"permalink\", \"permaview\"\nvar CONFIG_UPDATE_HISTORY = \"$:/config/Navigation/UpdateHistory\"; // Can be \"yes\" or \"no\"\n\nexports.startup = function() {\n\t// Open startup tiddlers\n\topenStartupTiddlers();\n\tif($tw.browser) {\n\t\t// Set up location hash update\n\t\t$tw.wiki.addEventListener(\"change\",function(changes) {\n\t\t\tif($tw.utils.hop(changes,DEFAULT_STORY_TITLE) || $tw.utils.hop(changes,DEFAULT_HISTORY_TITLE)) {\n\t\t\t\tupdateLocationHash({\n\t\t\t\t\tupdateAddressBar: $tw.wiki.getTiddlerText(CONFIG_UPDATE_ADDRESS_BAR,\"permaview\").trim(),\n\t\t\t\t\tupdateHistory: $tw.wiki.getTiddlerText(CONFIG_UPDATE_HISTORY,\"no\").trim()\n\t\t\t\t});\n\t\t\t}\n\t\t});\n\t\t// Listen for changes to the browser location hash\n\t\twindow.addEventListener(\"hashchange\",function() {\n\t\t\tvar hash = $tw.utils.getLocationHash();\n\t\t\tif(hash !== $tw.locationHash) {\n\t\t\t\t$tw.locationHash = hash;\n\t\t\t\topenStartupTiddlers({defaultToCurrentStory: true});\n\t\t\t}\n\t\t},false);\n\t\t// Listen for the tm-browser-refresh message\n\t\t$tw.rootWidget.addEventListener(\"tm-browser-refresh\",function(event) {\n\t\t\twindow.location.reload(true);\n\t\t});\n\t\t// Listen for the tm-home message\n\t\t$tw.rootWidget.addEventListener(\"tm-home\",function(event) {\n\t\t\twindow.location.hash = 
\"\";\n\t\t\tvar storyFilter = $tw.wiki.getTiddlerText(DEFAULT_TIDDLERS_TITLE),\n\t\t\t\tstoryList = $tw.wiki.filterTiddlers(storyFilter);\n\t\t\t//invoke any hooks that might change the default story list\n\t\t\tstoryList = $tw.hooks.invokeHook(\"th-opening-default-tiddlers-list\",storyList);\n\t\t\t$tw.wiki.addTiddler({title: DEFAULT_STORY_TITLE, text: \"\", list: storyList},$tw.wiki.getModificationFields());\n\t\t\tif(storyList[0]) {\n\t\t\t\t$tw.wiki.addToHistory(storyList[0]);\t\t\t\t\n\t\t\t}\n\t\t});\n\t\t// Listen for the tm-permalink message\n\t\t$tw.rootWidget.addEventListener(\"tm-permalink\",function(event) {\n\t\t\tupdateLocationHash({\n\t\t\t\tupdateAddressBar: \"permalink\",\n\t\t\t\tupdateHistory: $tw.wiki.getTiddlerText(CONFIG_UPDATE_HISTORY,\"no\").trim(),\n\t\t\t\ttargetTiddler: event.param || event.tiddlerTitle\n\t\t\t});\n\t\t});\n\t\t// Listen for the tm-permaview message\n\t\t$tw.rootWidget.addEventListener(\"tm-permaview\",function(event) {\n\t\t\tupdateLocationHash({\n\t\t\t\tupdateAddressBar: \"permaview\",\n\t\t\t\tupdateHistory: $tw.wiki.getTiddlerText(CONFIG_UPDATE_HISTORY,\"no\").trim(),\n\t\t\t\ttargetTiddler: event.param || event.tiddlerTitle\n\t\t\t});\n\t\t});\n\t}\n};\n\n/*\nProcess the location hash to open the specified tiddlers. Options:\ndefaultToCurrentStory: If true, the current story is retained as the default, instead of opening the default tiddlers\n*/\nfunction openStartupTiddlers(options) {\n\toptions = options || {};\n\t// Work out the target tiddler and the story filter. 
\"null\" means \"unspecified\"\n\tvar target = null,\n\t\tstoryFilter = null;\n\tif($tw.locationHash.length > 1) {\n\t\tvar hash = $tw.locationHash.substr(1),\n\t\t\tsplit = hash.indexOf(\":\");\n\t\tif(split === -1) {\n\t\t\ttarget = decodeURIComponent(hash.trim());\n\t\t} else {\n\t\t\ttarget = decodeURIComponent(hash.substr(0,split).trim());\n\t\t\tstoryFilter = decodeURIComponent(hash.substr(split + 1).trim());\n\t\t}\n\t}\n\t// If the story wasn't specified use the current tiddlers or a blank story\n\tif(storyFilter === null) {\n\t\tif(options.defaultToCurrentStory) {\n\t\t\tvar currStoryList = $tw.wiki.getTiddlerList(DEFAULT_STORY_TITLE);\n\t\t\tstoryFilter = $tw.utils.stringifyList(currStoryList);\n\t\t} else {\n\t\t\tif(target && target !== \"\") {\n\t\t\t\tstoryFilter = \"\";\n\t\t\t} else {\n\t\t\t\tstoryFilter = $tw.wiki.getTiddlerText(DEFAULT_TIDDLERS_TITLE);\n\t\t\t}\n\t\t}\n\t}\n\t// Process the story filter to get the story list\n\tvar storyList = $tw.wiki.filterTiddlers(storyFilter);\n\t// Invoke any hooks that want to change the default story list\n\tstoryList = $tw.hooks.invokeHook(\"th-opening-default-tiddlers-list\",storyList);\n\t// If the target tiddler isn't included then splice it in at the top\n\tif(target && storyList.indexOf(target) === -1) {\n\t\tstoryList.unshift(target);\n\t}\n\t// Save the story list\n\t$tw.wiki.addTiddler({title: DEFAULT_STORY_TITLE, text: \"\", list: storyList},$tw.wiki.getModificationFields());\n\t// If a target tiddler was specified add it to the history stack\n\tif(target && target !== \"\") {\n\t\t// The target tiddler doesn't need double square brackets, but we'll silently remove them if they're present\n\t\tif(target.indexOf(\"[[\") === 0 && target.substr(-2) === \"]]\") {\n\t\t\ttarget = target.substr(2,target.length - 4);\n\t\t}\n\t\t$tw.wiki.addToHistory(target);\n\t} else if(storyList.length > 0) {\n\t\t$tw.wiki.addToHistory(storyList[0]);\n\t}\n}\n\n/*\noptions: See below\noptions.updateAddressBar: 
\"permalink\", \"permaview\" or \"no\" (defaults to \"permaview\")\noptions.updateHistory: \"yes\" or \"no\" (defaults to \"no\")\noptions.targetTiddler: optional title of target tiddler for permalink\n*/\nfunction updateLocationHash(options) {\n\tif(options.updateAddressBar !== \"no\") {\n\t\t// Get the story and the history stack\n\t\tvar storyList = $tw.wiki.getTiddlerList(DEFAULT_STORY_TITLE),\n\t\t\thistoryList = $tw.wiki.getTiddlerData(DEFAULT_HISTORY_TITLE,[]),\n\t\t\ttargetTiddler = \"\";\n\t\tif(options.targetTiddler) {\n\t\t\ttargetTiddler = options.targetTiddler;\n\t\t} else {\n\t\t\t// The target tiddler is the one at the top of the stack\n\t\t\tif(historyList.length > 0) {\n\t\t\t\ttargetTiddler = historyList[historyList.length-1].title;\n\t\t\t}\n\t\t\t// Blank the target tiddler if it isn't present in the story\n\t\t\tif(storyList.indexOf(targetTiddler) === -1) {\n\t\t\t\ttargetTiddler = \"\";\n\t\t\t}\n\t\t}\n\t\t// Assemble the location hash\n\t\tif(options.updateAddressBar === \"permalink\") {\n\t\t\t$tw.locationHash = \"#\" + encodeURIComponent(targetTiddler);\n\t\t} else {\n\t\t\t$tw.locationHash = \"#\" + encodeURIComponent(targetTiddler) + \":\" + encodeURIComponent($tw.utils.stringifyList(storyList));\n\t\t}\n\t\t// Only change the location hash if we must, thus avoiding unnecessary onhashchange events\n\t\tif($tw.utils.getLocationHash() !== $tw.locationHash) {\n\t\t\tif(options.updateHistory === \"yes\") {\n\t\t\t\t// Assign the location hash so that history is updated\n\t\t\t\twindow.location.hash = $tw.locationHash;\n\t\t\t} else {\n\t\t\t\t// We use replace so that browser history isn't affected\n\t\t\t\twindow.location.replace(window.location.toString().split(\"#\")[0] + $tw.locationHash);\n\t\t\t}\n\t\t}\n\t}\n}\n\n})();\n",
"title": "$:/core/modules/startup/story.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/startup/windows.js": {
"text": "/*\\\ntitle: $:/core/modules/startup/windows.js\ntype: application/javascript\nmodule-type: startup\n\nSetup root widget handlers for the messages concerned with opening external browser windows\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"windows\";\nexports.platforms = [\"browser\"];\nexports.after = [\"startup\"];\nexports.synchronous = true;\n\n// Global to keep track of open windows (hashmap by title)\nvar windows = {};\n\nexports.startup = function() {\n\t// Handle open window message\n\t$tw.rootWidget.addEventListener(\"tm-open-window\",function(event) {\n\t\t// Get the parameters\n\t\tvar refreshHandler,\n\t\t\ttitle = event.param || event.tiddlerTitle,\n\t\t\tparamObject = event.paramObject || {},\n\t\t\ttemplate = paramObject.template || \"$:/core/templates/single.tiddler.window\",\n\t\t\twidth = paramObject.width || \"700\",\n\t\t\theight = paramObject.height || \"600\",\n\t\t\tvariables = $tw.utils.extend({},paramObject,{currentTiddler: title});\n\t\t// Open the window\n\t\tvar srcWindow = window.open(\"\",\"external-\" + title,\"scrollbars,width=\" + width + \",height=\" + height),\n\t\t\tsrcDocument = srcWindow.document;\n\t\twindows[title] = srcWindow;\n\t\t// Check for reopening the same window\n\t\tif(srcWindow.haveInitialisedWindow) {\n\t\t\treturn;\n\t\t}\n\t\t// Initialise the document\n\t\tsrcDocument.write(\"<html><head></head><body class='tc-body tc-single-tiddler-window'></body></html>\");\n\t\tsrcDocument.close();\n\t\tsrcDocument.title = title;\n\t\tsrcWindow.addEventListener(\"beforeunload\",function(event) {\n\t\t\tdelete windows[title];\n\t\t\t$tw.wiki.removeEventListener(\"change\",refreshHandler);\n\t\t},false);\n\t\t// Set up the styles\n\t\tvar styleWidgetNode = $tw.wiki.makeTranscludeWidget(\"$:/core/ui/PageStylesheet\",{document: $tw.fakeDocument, variables: variables}),\n\t\t\tstyleContainer = 
$tw.fakeDocument.createElement(\"style\");\n\t\tstyleWidgetNode.render(styleContainer,null);\n\t\tvar styleElement = srcDocument.createElement(\"style\");\n\t\tstyleElement.innerHTML = styleContainer.textContent;\n\t\tsrcDocument.head.insertBefore(styleElement,srcDocument.head.firstChild);\n\t\t// Render the text of the tiddler\n\t\tvar parser = $tw.wiki.parseTiddler(template),\n\t\t\twidgetNode = $tw.wiki.makeWidget(parser,{document: srcDocument, parentWidget: $tw.rootWidget, variables: variables});\n\t\twidgetNode.render(srcDocument.body,srcDocument.body.firstChild);\n\t\t// Function to handle refreshes\n\t\trefreshHandler = function(changes) {\n\t\t\tif(styleWidgetNode.refresh(changes,styleContainer,null)) {\n\t\t\t\tstyleElement.innerHTML = styleContainer.textContent;\n\t\t\t}\n\t\t\twidgetNode.refresh(changes);\n\t\t};\n\t\t$tw.wiki.addEventListener(\"change\",refreshHandler);\n\t\tsrcWindow.haveInitialisedWindow = true;\n\t});\n\t// Close open windows when unloading main window\n\t$tw.addUnloadTask(function() {\n\t\t$tw.utils.each(windows,function(win) {\n\t\t\twin.close();\n\t\t});\n\t});\n\n};\n\n})();\n",
"title": "$:/core/modules/startup/windows.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/story.js": {
"text": "/*\\\ntitle: $:/core/modules/story.js\ntype: application/javascript\nmodule-type: global\n\nLightweight object for managing interactions with the story and history lists.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nConstruct Story object with options:\nwiki: reference to wiki object to use to resolve tiddler titles\nstoryTitle: title of story list tiddler\nhistoryTitle: title of history list tiddler\n*/\nfunction Story(options) {\n\toptions = options || {};\n\tthis.wiki = options.wiki || $tw.wiki;\n\tthis.storyTitle = options.storyTitle || \"$:/StoryList\";\n\tthis.historyTitle = options.historyTitle || \"$:/HistoryList\";\n};\n\nStory.prototype.navigateTiddler = function(navigateTo,navigateFromTitle,navigateFromClientRect) {\n\tthis.addToStory(navigateTo,navigateFromTitle);\n\tthis.addToHistory(navigateTo,navigateFromClientRect);\n};\n\nStory.prototype.getStoryList = function() {\n\treturn this.wiki.getTiddlerList(this.storyTitle) || [];\n};\n\nStory.prototype.addToStory = function(navigateTo,navigateFromTitle,options) {\n\toptions = options || {};\n\tvar storyList = this.getStoryList();\n\t// See if the tiddler is already there\n\tvar slot = storyList.indexOf(navigateTo);\n\t// Quit if it already exists in the story river\n\tif(slot >= 0) {\n\t\treturn;\n\t}\n\t// First we try to find the position of the story element we navigated from\n\tvar fromIndex = storyList.indexOf(navigateFromTitle);\n\tif(fromIndex >= 0) {\n\t\t// The tiddler is added from inside the river\n\t\t// Determine where to insert the tiddler; Fallback is \"below\"\n\t\tswitch(options.openLinkFromInsideRiver) {\n\t\t\tcase \"top\":\n\t\t\t\tslot = 0;\n\t\t\t\tbreak;\n\t\t\tcase \"bottom\":\n\t\t\t\tslot = storyList.length;\n\t\t\t\tbreak;\n\t\t\tcase \"above\":\n\t\t\t\tslot = fromIndex;\n\t\t\t\tbreak;\n\t\t\tcase \"below\": // Intentional fall-through\n\t\t\tdefault:\n\t\t\t\tslot = fromIndex + 
1;\n\t\t\t\tbreak;\n\t\t}\n\t} else {\n\t\t// The tiddler is opened from outside the river. Determine where to insert the tiddler; default is \"top\"\n\t\tif(options.openLinkFromOutsideRiver === \"bottom\") {\n\t\t\t// Insert at bottom\n\t\t\tslot = storyList.length;\n\t\t} else {\n\t\t\t// Insert at top\n\t\t\tslot = 0;\n\t\t}\n\t}\n\t// Add the tiddler\n\tstoryList.splice(slot,0,navigateTo);\n\t// Save the story\n\tthis.saveStoryList(storyList);\n};\n\nStory.prototype.saveStoryList = function(storyList) {\n\tvar storyTiddler = this.wiki.getTiddler(this.storyTitle);\n\tthis.wiki.addTiddler(new $tw.Tiddler(\n\t\tthis.wiki.getCreationFields(),\n\t\t{title: this.storyTitle},\n\t\tstoryTiddler,\n\t\t{list: storyList},\n\t\tthis.wiki.getModificationFields()\n\t));\n};\n\nStory.prototype.addToHistory = function(navigateTo,navigateFromClientRect) {\n\tvar titles = $tw.utils.isArray(navigateTo) ? navigateTo : [navigateTo];\n\t// Add a new record to the top of the history stack\n\tvar historyList = this.wiki.getTiddlerData(this.historyTitle,[]);\n\t$tw.utils.each(titles,function(title) {\n\t\thistoryList.push({title: title, fromPageRect: navigateFromClientRect});\n\t});\n\tthis.wiki.setTiddlerData(this.historyTitle,historyList,{\"current-tiddler\": titles[titles.length-1]});\n};\n\nStory.prototype.storyCloseTiddler = function(targetTitle) {\n// TBD\n};\n\nStory.prototype.storyCloseAllTiddlers = function() {\n// TBD\n};\n\nStory.prototype.storyCloseOtherTiddlers = function(targetTitle) {\n// TBD\n};\n\nStory.prototype.storyEditTiddler = function(targetTitle) {\n// TBD\n};\n\nStory.prototype.storyDeleteTiddler = function(targetTitle) {\n// TBD\n};\n\nStory.prototype.storySaveTiddler = function(targetTitle) {\n// TBD\n};\n\nStory.prototype.storyCancelTiddler = function(targetTitle) {\n// TBD\n};\n\nStory.prototype.storyNewTiddler = function(targetTitle) {\n// TBD\n};\n\nexports.Story = Story;\n\n\n})();\n",
"title": "$:/core/modules/story.js",
"type": "application/javascript",
"module-type": "global"
},
"$:/core/modules/storyviews/classic.js": {
"text": "/*\\\ntitle: $:/core/modules/storyviews/classic.js\ntype: application/javascript\nmodule-type: storyview\n\nViews the story as a linear sequence\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar easing = \"cubic-bezier(0.645, 0.045, 0.355, 1)\"; // From http://easings.net/#easeInOutCubic\n\nvar ClassicStoryView = function(listWidget) {\n\tthis.listWidget = listWidget;\n};\n\nClassicStoryView.prototype.navigateTo = function(historyInfo) {\n\tvar listElementIndex = this.listWidget.findListItem(0,historyInfo.title);\n\tif(listElementIndex === undefined) {\n\t\treturn;\n\t}\n\tvar listItemWidget = this.listWidget.children[listElementIndex],\n\t\ttargetElement = listItemWidget.findFirstDomNode();\n\t// Abandon if the list entry isn't a DOM element (it might be a text node)\n\tif(!(targetElement instanceof Element)) {\n\t\treturn;\n\t}\n\t// Scroll the node into view\n\tthis.listWidget.dispatchEvent({type: \"tm-scroll\", target: targetElement});\n};\n\nClassicStoryView.prototype.insert = function(widget) {\n\tvar targetElement = widget.findFirstDomNode(),\n\t\tduration = $tw.utils.getAnimationDuration();\n\t// Abandon if the list entry isn't a DOM element (it might be a text node)\n\tif(!(targetElement instanceof Element)) {\n\t\treturn;\n\t}\n\t// Get the current height of the tiddler\n\tvar computedStyle = window.getComputedStyle(targetElement),\n\t\tcurrMarginBottom = parseInt(computedStyle.marginBottom,10),\n\t\tcurrMarginTop = parseInt(computedStyle.marginTop,10),\n\t\tcurrHeight = targetElement.offsetHeight + currMarginTop;\n\t// Reset the margin once the transition is over\n\tsetTimeout(function() {\n\t\t$tw.utils.setStyle(targetElement,[\n\t\t\t{transition: \"none\"},\n\t\t\t{marginBottom: \"\"}\n\t\t]);\n\t},duration);\n\t// Set up the initial position of the element\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transition: \"none\"},\n\t\t{marginBottom: (-currHeight) + \"px\"},\n\t\t{opacity: 
\"0.0\"}\n\t]);\n\t$tw.utils.forceLayout(targetElement);\n\t// Transition to the final position\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transition: \"opacity \" + duration + \"ms \" + easing + \", \" +\n\t\t\t\t\t\"margin-bottom \" + duration + \"ms \" + easing},\n\t\t{marginBottom: currMarginBottom + \"px\"},\n\t\t{opacity: \"1.0\"}\n\t]);\n};\n\nClassicStoryView.prototype.remove = function(widget) {\n\tvar targetElement = widget.findFirstDomNode(),\n\t\tduration = $tw.utils.getAnimationDuration(),\n\t\tremoveElement = function() {\n\t\t\twidget.removeChildDomNodes();\n\t\t};\n\t// Abandon if the list entry isn't a DOM element (it might be a text node)\n\tif(!(targetElement instanceof Element)) {\n\t\tremoveElement();\n\t\treturn;\n\t}\n\t// Get the current height of the tiddler\n\tvar currWidth = targetElement.offsetWidth,\n\t\tcomputedStyle = window.getComputedStyle(targetElement),\n\t\tcurrMarginBottom = parseInt(computedStyle.marginBottom,10),\n\t\tcurrMarginTop = parseInt(computedStyle.marginTop,10),\n\t\tcurrHeight = targetElement.offsetHeight + currMarginTop;\n\t// Remove the dom nodes of the widget at the end of the transition\n\tsetTimeout(removeElement,duration);\n\t// Animate the closure\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transition: \"none\"},\n\t\t{transform: \"translateX(0px)\"},\n\t\t{marginBottom: currMarginBottom + \"px\"},\n\t\t{opacity: \"1.0\"}\n\t]);\n\t$tw.utils.forceLayout(targetElement);\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transition: $tw.utils.roundTripPropertyName(\"transform\") + \" \" + duration + \"ms \" + easing + \", \" +\n\t\t\t\t\t\"opacity \" + duration + \"ms \" + easing + \", \" +\n\t\t\t\t\t\"margin-bottom \" + duration + \"ms \" + easing},\n\t\t{transform: \"translateX(-\" + currWidth + \"px)\"},\n\t\t{marginBottom: (-currHeight) + \"px\"},\n\t\t{opacity: \"0.0\"}\n\t]);\n};\n\nexports.classic = ClassicStoryView;\n\n})();",
"title": "$:/core/modules/storyviews/classic.js",
"type": "application/javascript",
"module-type": "storyview"
},
"$:/core/modules/storyviews/pop.js": {
"text": "/*\\\ntitle: $:/core/modules/storyviews/pop.js\ntype: application/javascript\nmodule-type: storyview\n\nAnimates list insertions and removals\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar PopStoryView = function(listWidget) {\n\tthis.listWidget = listWidget;\n};\n\nPopStoryView.prototype.navigateTo = function(historyInfo) {\n\tvar listElementIndex = this.listWidget.findListItem(0,historyInfo.title);\n\tif(listElementIndex === undefined) {\n\t\treturn;\n\t}\n\tvar listItemWidget = this.listWidget.children[listElementIndex],\n\t\ttargetElement = listItemWidget.findFirstDomNode();\n\t// Abandon if the list entry isn't a DOM element (it might be a text node)\n\tif(!(targetElement instanceof Element)) {\n\t\treturn;\n\t}\n\t// Scroll the node into view\n\tthis.listWidget.dispatchEvent({type: \"tm-scroll\", target: targetElement});\n};\n\nPopStoryView.prototype.insert = function(widget) {\n\tvar targetElement = widget.findFirstDomNode(),\n\t\tduration = $tw.utils.getAnimationDuration();\n\t// Abandon if the list entry isn't a DOM element (it might be a text node)\n\tif(!(targetElement instanceof Element)) {\n\t\treturn;\n\t}\n\t// Reset once the transition is over\n\tsetTimeout(function() {\n\t\t$tw.utils.setStyle(targetElement,[\n\t\t\t{transition: \"none\"},\n\t\t\t{transform: \"none\"}\n\t\t]);\n\t},duration);\n\t// Set up the initial position of the element\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transition: \"none\"},\n\t\t{transform: \"scale(2)\"},\n\t\t{opacity: \"0.0\"}\n\t]);\n\t$tw.utils.forceLayout(targetElement);\n\t// Transition to the final position\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transition: $tw.utils.roundTripPropertyName(\"transform\") + \" \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"opacity \" + duration + \"ms ease-in-out\"},\n\t\t{transform: \"scale(1)\"},\n\t\t{opacity: \"1.0\"}\n\t]);\n};\n\nPopStoryView.prototype.remove = function(widget) {\n\tvar 
targetElement = widget.findFirstDomNode(),\n\t\tduration = $tw.utils.getAnimationDuration(),\n\t\tremoveElement = function() {\n\t\t\tif(targetElement.parentNode) {\n\t\t\t\twidget.removeChildDomNodes();\n\t\t\t}\n\t\t};\n\t// Abandon if the list entry isn't a DOM element (it might be a text node)\n\tif(!(targetElement instanceof Element)) {\n\t\tremoveElement();\n\t\treturn;\n\t}\n\t// Remove the element at the end of the transition\n\tsetTimeout(removeElement,duration);\n\t// Animate the closure\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transition: \"none\"},\n\t\t{transform: \"scale(1)\"},\n\t\t{opacity: \"1.0\"}\n\t]);\n\t$tw.utils.forceLayout(targetElement);\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transition: $tw.utils.roundTripPropertyName(\"transform\") + \" \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"opacity \" + duration + \"ms ease-in-out\"},\n\t\t{transform: \"scale(0.1)\"},\n\t\t{opacity: \"0.0\"}\n\t]);\n};\n\nexports.pop = PopStoryView;\n\n})();\n",
"title": "$:/core/modules/storyviews/pop.js",
"type": "application/javascript",
"module-type": "storyview"
},
"$:/core/modules/storyviews/zoomin.js": {
"text": "/*\\\ntitle: $:/core/modules/storyviews/zoomin.js\ntype: application/javascript\nmodule-type: storyview\n\nZooms between individual tiddlers\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar easing = \"cubic-bezier(0.645, 0.045, 0.355, 1)\"; // From http://easings.net/#easeInOutCubic\n\nvar ZoominListView = function(listWidget) {\n\tvar self = this;\n\tthis.listWidget = listWidget;\n\t// Get the index of the tiddler that is at the top of the history\n\tvar history = this.listWidget.wiki.getTiddlerDataCached(this.listWidget.historyTitle,[]),\n\t\ttargetTiddler;\n\tif(history.length > 0) {\n\t\ttargetTiddler = history[history.length-1].title;\n\t}\n\t// Make all the tiddlers position absolute, and hide all but the top (or first) one\n\t$tw.utils.each(this.listWidget.children,function(itemWidget,index) {\n\t\tvar domNode = itemWidget.findFirstDomNode();\n\t\t// Abandon if the list entry isn't a DOM element (it might be a text node)\n\t\tif(!(domNode instanceof Element)) {\n\t\t\treturn;\n\t\t}\n\t\tif((targetTiddler && targetTiddler !== itemWidget.parseTreeNode.itemTitle) || (!targetTiddler && index)) {\n\t\t\tdomNode.style.display = \"none\";\n\t\t} else {\n\t\t\tself.currentTiddlerDomNode = domNode;\n\t\t}\n\t\t$tw.utils.addClass(domNode,\"tc-storyview-zoomin-tiddler\");\n\t});\n};\n\nZoominListView.prototype.navigateTo = function(historyInfo) {\n\tvar duration = $tw.utils.getAnimationDuration(),\n\t\tlistElementIndex = this.listWidget.findListItem(0,historyInfo.title);\n\tif(listElementIndex === undefined) {\n\t\treturn;\n\t}\n\tvar listItemWidget = this.listWidget.children[listElementIndex],\n\t\ttargetElement = listItemWidget.findFirstDomNode();\n\t// Abandon if the list entry isn't a DOM element (it might be a text node)\n\tif(!(targetElement instanceof Element)) {\n\t\treturn;\n\t}\n\t// Make the new tiddler be position absolute and visible so that we can measure 
it\n\t$tw.utils.addClass(targetElement,\"tc-storyview-zoomin-tiddler\");\n\t$tw.utils.setStyle(targetElement,[\n\t\t{display: \"block\"},\n\t\t{transformOrigin: \"0 0\"},\n\t\t{transform: \"translateX(0px) translateY(0px) scale(1)\"},\n\t\t{transition: \"none\"},\n\t\t{opacity: \"0.0\"}\n\t]);\n\t// Get the position of the source node, or use the centre of the window as the source position\n\tvar sourceBounds = historyInfo.fromPageRect || {\n\t\t\tleft: window.innerWidth/2 - 2,\n\t\t\ttop: window.innerHeight/2 - 2,\n\t\t\twidth: window.innerWidth/8,\n\t\t\theight: window.innerHeight/8\n\t\t};\n\t// Try to find the title node in the target tiddler\n\tvar titleDomNode = findTitleDomNode(listItemWidget) || listItemWidget.findFirstDomNode(),\n\t\tzoomBounds = titleDomNode.getBoundingClientRect();\n\t// Compute the transform for the target tiddler to make the title lie over the source rectange\n\tvar targetBounds = targetElement.getBoundingClientRect(),\n\t\tscale = sourceBounds.width / zoomBounds.width,\n\t\tx = sourceBounds.left - targetBounds.left - (zoomBounds.left - targetBounds.left) * scale,\n\t\ty = sourceBounds.top - targetBounds.top - (zoomBounds.top - targetBounds.top) * scale;\n\t// Transform the target tiddler to its starting position\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transform: \"translateX(\" + x + \"px) translateY(\" + y + \"px) scale(\" + scale + \")\"}\n\t]);\n\t// Force layout\n\t$tw.utils.forceLayout(targetElement);\n\t// Apply the ending transitions with a timeout to ensure that the previously applied transformations are applied first\n\tvar self = this,\n\t\tprevCurrentTiddler = this.currentTiddlerDomNode;\n\tthis.currentTiddlerDomNode = targetElement;\n\t// Transform the target tiddler to its natural size\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transition: $tw.utils.roundTripPropertyName(\"transform\") + \" \" + duration + \"ms \" + easing + \", opacity \" + duration + \"ms \" + easing},\n\t\t{opacity: \"1.0\"},\n\t\t{transform: 
\"translateX(0px) translateY(0px) scale(1)\"},\n\t\t{zIndex: \"500\"},\n\t]);\n\t// Transform the previous tiddler out of the way and then hide it\n\tif(prevCurrentTiddler && prevCurrentTiddler !== targetElement) {\n\t\tscale = zoomBounds.width / sourceBounds.width;\n\t\tx = zoomBounds.left - targetBounds.left - (sourceBounds.left - targetBounds.left) * scale;\n\t\ty = zoomBounds.top - targetBounds.top - (sourceBounds.top - targetBounds.top) * scale;\n\t\t$tw.utils.setStyle(prevCurrentTiddler,[\n\t\t\t{transition: $tw.utils.roundTripPropertyName(\"transform\") + \" \" + duration + \"ms \" + easing + \", opacity \" + duration + \"ms \" + easing},\n\t\t\t{opacity: \"0.0\"},\n\t\t\t{transformOrigin: \"0 0\"},\n\t\t\t{transform: \"translateX(\" + x + \"px) translateY(\" + y + \"px) scale(\" + scale + \")\"},\n\t\t\t{zIndex: \"0\"}\n\t\t]);\n\t\t// Hide the tiddler when the transition has finished\n\t\tsetTimeout(function() {\n\t\t\tif(self.currentTiddlerDomNode !== prevCurrentTiddler) {\n\t\t\t\tprevCurrentTiddler.style.display = \"none\";\n\t\t\t}\n\t\t},duration);\n\t}\n\t// Scroll the target into view\n//\t$tw.pageScroller.scrollIntoView(targetElement);\n};\n\n/*\nFind the first child DOM node of a widget that has the class \"tc-title\"\n*/\nfunction findTitleDomNode(widget,targetClass) {\n\ttargetClass = targetClass || \"tc-title\";\n\tvar domNode = widget.findFirstDomNode();\n\tif(domNode && domNode.querySelector) {\n\t\treturn domNode.querySelector(\".\" + targetClass);\n\t}\n\treturn null;\n}\n\nZoominListView.prototype.insert = function(widget) {\n\tvar targetElement = widget.findFirstDomNode();\n\t// Abandon if the list entry isn't a DOM element (it might be a text node)\n\tif(!(targetElement instanceof Element)) {\n\t\treturn;\n\t}\n\t// Make the newly inserted node position absolute and hidden\n\t$tw.utils.addClass(targetElement,\"tc-storyview-zoomin-tiddler\");\n\t$tw.utils.setStyle(targetElement,[\n\t\t{display: 
\"none\"}\n\t]);\n};\n\nZoominListView.prototype.remove = function(widget) {\n\tvar targetElement = widget.findFirstDomNode(),\n\t\tduration = $tw.utils.getAnimationDuration(),\n\t\tremoveElement = function() {\n\t\t\twidget.removeChildDomNodes();\n\t\t};\n\t// Abandon if the list entry isn't a DOM element (it might be a text node)\n\tif(!(targetElement instanceof Element)) {\n\t\tremoveElement();\n\t\treturn;\n\t}\n\t// Abandon if hidden\n\tif(targetElement.style.display != \"block\" ) {\n\t\tremoveElement();\n\t\treturn;\n\t}\n\t// Set up the tiddler that is being closed\n\t$tw.utils.addClass(targetElement,\"tc-storyview-zoomin-tiddler\");\n\t$tw.utils.setStyle(targetElement,[\n\t\t{display: \"block\"},\n\t\t{transformOrigin: \"50% 50%\"},\n\t\t{transform: \"translateX(0px) translateY(0px) scale(1)\"},\n\t\t{transition: \"none\"},\n\t\t{zIndex: \"0\"}\n\t]);\n\t// We'll move back to the previous or next element in the story\n\tvar toWidget = widget.previousSibling();\n\tif(!toWidget) {\n\t\ttoWidget = widget.nextSibling();\n\t}\n\tvar toWidgetDomNode = toWidget && toWidget.findFirstDomNode();\n\t// Set up the tiddler we're moving back in\n\tif(toWidgetDomNode) {\n\t\t$tw.utils.addClass(toWidgetDomNode,\"tc-storyview-zoomin-tiddler\");\n\t\t$tw.utils.setStyle(toWidgetDomNode,[\n\t\t\t{display: \"block\"},\n\t\t\t{transformOrigin: \"50% 50%\"},\n\t\t\t{transform: \"translateX(0px) translateY(0px) scale(10)\"},\n\t\t\t{transition: $tw.utils.roundTripPropertyName(\"transform\") + \" \" + duration + \"ms \" + easing + \", opacity \" + duration + \"ms \" + easing},\n\t\t\t{opacity: \"0\"},\n\t\t\t{zIndex: \"500\"}\n\t\t]);\n\t\tthis.currentTiddlerDomNode = toWidgetDomNode;\n\t}\n\t// Animate them both\n\t// Force layout\n\t$tw.utils.forceLayout(this.listWidget.parentDomNode);\n\t// First, the tiddler we're closing\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transformOrigin: \"50% 50%\"},\n\t\t{transform: \"translateX(0px) translateY(0px) 
scale(0.1)\"},\n\t\t{transition: $tw.utils.roundTripPropertyName(\"transform\") + \" \" + duration + \"ms \" + easing + \", opacity \" + duration + \"ms \" + easing},\n\t\t{opacity: \"0\"},\n\t\t{zIndex: \"0\"}\n\t]);\n\tsetTimeout(removeElement,duration);\n\t// Now the tiddler we're going back to\n\tif(toWidgetDomNode) {\n\t\t$tw.utils.setStyle(toWidgetDomNode,[\n\t\t\t{transform: \"translateX(0px) translateY(0px) scale(1)\"},\n\t\t\t{opacity: \"1\"}\n\t\t]);\n\t}\n\treturn true; // Indicate that we'll delete the DOM node\n};\n\nexports.zoomin = ZoominListView;\n\n})();\n",
"title": "$:/core/modules/storyviews/zoomin.js",
"type": "application/javascript",
"module-type": "storyview"
},
"$:/core/modules/syncer.js": {
"text": "/*\\\ntitle: $:/core/modules/syncer.js\ntype: application/javascript\nmodule-type: global\n\nThe syncer tracks changes to the store. If a syncadaptor is used then individual tiddlers are synchronised through it. If there is no syncadaptor then the entire wiki is saved via saver modules.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInstantiate the syncer with the following options:\nsyncadaptor: reference to syncadaptor to be used\nwiki: wiki to be synced\n*/\nfunction Syncer(options) {\n\tvar self = this;\n\tthis.wiki = options.wiki;\n\tthis.syncadaptor = options.syncadaptor;\n\t// Make a logger\n\tthis.logger = new $tw.utils.Logger(\"syncer\" + ($tw.browser ? \"-browser\" : \"\") + ($tw.node ? \"-server\" : \"\"));\n\t// Compile the dirty tiddler filter\n\tthis.filterFn = this.wiki.compileFilter(this.wiki.getTiddlerText(this.titleSyncFilter));\n\t// Record information for known tiddlers\n\tthis.readTiddlerInfo();\n\t// Tasks are {type: \"load\"/\"save\"/\"delete\", title:, queueTime:, lastModificationTime:}\n\tthis.taskQueue = {}; // Hashmap of tasks yet to be performed\n\tthis.taskInProgress = {}; // Hash of tasks in progress\n\tthis.taskTimerId = null; // Timer for task dispatch\n\tthis.pollTimerId = null; // Timer for polling server\n\t// Listen out for changes to tiddlers\n\tthis.wiki.addEventListener(\"change\",function(changes) {\n\t\tself.syncToServer(changes);\n\t});\n\t// Browser event handlers\n\tif($tw.browser) {\n\t\t// Set up our beforeunload handler\n\t\t$tw.addUnloadTask(function(event) {\n\t\t\tvar confirmationMessage;\n\t\t\tif(self.isDirty()) {\n\t\t\t\tconfirmationMessage = $tw.language.getString(\"UnsavedChangesWarning\");\n\t\t\t\tevent.returnValue = confirmationMessage; // Gecko\n\t\t\t}\n\t\t\treturn confirmationMessage;\n\t\t});\n\t\t// Listen out for login/logout/refresh events in the browser\n\t\t$tw.rootWidget.addEventListener(\"tm-login\",function() 
{\n\t\t\tself.handleLoginEvent();\n\t\t});\n\t\t$tw.rootWidget.addEventListener(\"tm-logout\",function() {\n\t\t\tself.handleLogoutEvent();\n\t\t});\n\t\t$tw.rootWidget.addEventListener(\"tm-server-refresh\",function() {\n\t\t\tself.handleRefreshEvent();\n\t\t});\n\t}\n\t// Listen out for lazyLoad events\n\tthis.wiki.addEventListener(\"lazyLoad\",function(title) {\n\t\tself.handleLazyLoadEvent(title);\n\t});\n\t// Get the login status\n\tthis.getStatus(function(err,isLoggedIn) {\n\t\t// Do a sync from the server\n\t\tself.syncFromServer();\n\t});\n}\n\n/*\nConstants\n*/\nSyncer.prototype.titleIsLoggedIn = \"$:/status/IsLoggedIn\";\nSyncer.prototype.titleUserName = \"$:/status/UserName\";\nSyncer.prototype.titleSyncFilter = \"$:/config/SyncFilter\";\nSyncer.prototype.titleSavedNotification = \"$:/language/Notifications/Save/Done\";\nSyncer.prototype.taskTimerInterval = 1 * 1000; // Interval for sync timer\nSyncer.prototype.throttleInterval = 1 * 1000; // Defer saving tiddlers if they've changed in the last 1s...\nSyncer.prototype.fallbackInterval = 10 * 1000; // Unless the task is older than 10s\nSyncer.prototype.pollTimerInterval = 60 * 1000; // Interval for polling for changes from the adaptor\n\n\n/*\nRead (or re-read) the latest tiddler info from the store\n*/\nSyncer.prototype.readTiddlerInfo = function() {\n\t// Hashmap by title of {revision:,changeCount:,adaptorInfo:}\n\tthis.tiddlerInfo = {};\n\t// Record information for known tiddlers\n\tvar self = this,\n\t\ttiddlers = this.filterFn.call(this.wiki);\n\t$tw.utils.each(tiddlers,function(title) {\n\t\tvar tiddler = self.wiki.getTiddler(title);\n\t\tself.tiddlerInfo[title] = {\n\t\t\trevision: tiddler.fields.revision,\n\t\t\tadaptorInfo: self.syncadaptor && self.syncadaptor.getTiddlerInfo(tiddler),\n\t\t\tchangeCount: self.wiki.getChangeCount(title),\n\t\t\thasBeenLazyLoaded: false\n\t\t};\n\t});\n};\n\n/*\nCreate an tiddlerInfo structure if it doesn't already exist\n*/\nSyncer.prototype.createTiddlerInfo = 
function(title) {\n\tif(!$tw.utils.hop(this.tiddlerInfo,title)) {\n\t\tthis.tiddlerInfo[title] = {\n\t\t\trevision: null,\n\t\t\tadaptorInfo: {},\n\t\t\tchangeCount: -1,\n\t\t\thasBeenLazyLoaded: false\n\t\t};\n\t}\n};\n\n/*\nChecks whether the wiki is dirty (ie the window shouldn't be closed)\n*/\nSyncer.prototype.isDirty = function() {\n\treturn (this.numTasksInQueue() > 0) || (this.numTasksInProgress() > 0);\n};\n\n/*\nUpdate the document body with the class \"tc-dirty\" if the wiki has unsaved/unsynced changes\n*/\nSyncer.prototype.updateDirtyStatus = function() {\n\tif($tw.browser) {\n\t\t$tw.utils.toggleClass(document.body,\"tc-dirty\",this.isDirty());\n\t}\n};\n\n/*\nSave an incoming tiddler in the store, and updates the associated tiddlerInfo\n*/\nSyncer.prototype.storeTiddler = function(tiddlerFields) {\n\t// Save the tiddler\n\tvar tiddler = new $tw.Tiddler(this.wiki.getTiddler(tiddlerFields.title),tiddlerFields);\n\tthis.wiki.addTiddler(tiddler);\n\t// Save the tiddler revision and changeCount details\n\tthis.tiddlerInfo[tiddlerFields.title] = {\n\t\trevision: tiddlerFields.revision,\n\t\tadaptorInfo: this.syncadaptor.getTiddlerInfo(tiddler),\n\t\tchangeCount: this.wiki.getChangeCount(tiddlerFields.title),\n\t\thasBeenLazyLoaded: true\n\t};\n};\n\nSyncer.prototype.getStatus = function(callback) {\n\tvar self = this;\n\t// Check if the adaptor supports getStatus()\n\tif(this.syncadaptor && this.syncadaptor.getStatus) {\n\t\t// Mark us as not logged in\n\t\tthis.wiki.addTiddler({title: this.titleIsLoggedIn,text: \"no\"});\n\t\t// Get login status\n\t\tthis.syncadaptor.getStatus(function(err,isLoggedIn,username) {\n\t\t\tif(err) {\n\t\t\t\tself.logger.alert(err);\n\t\t\t\treturn;\n\t\t\t}\n\t\t\t// Set the various status tiddlers\n\t\t\tself.wiki.addTiddler({title: self.titleIsLoggedIn,text: isLoggedIn ? 
\"yes\" : \"no\"});\n\t\t\tif(isLoggedIn) {\n\t\t\t\tself.wiki.addTiddler({title: self.titleUserName,text: username || \"\"});\n\t\t\t} else {\n\t\t\t\tself.wiki.deleteTiddler(self.titleUserName);\n\t\t\t}\n\t\t\t// Invoke the callback\n\t\t\tif(callback) {\n\t\t\t\tcallback(err,isLoggedIn,username);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tcallback(null,true,\"UNAUTHENTICATED\");\n\t}\n};\n\n/*\nSynchronise from the server by reading the skinny tiddler list and queuing up loads for any tiddlers that we don't already have up to date\n*/\nSyncer.prototype.syncFromServer = function() {\n\tif(this.syncadaptor && this.syncadaptor.getSkinnyTiddlers) {\n\t\tthis.logger.log(\"Retrieving skinny tiddler list\");\n\t\tvar self = this;\n\t\tif(this.pollTimerId) {\n\t\t\tclearTimeout(this.pollTimerId);\n\t\t\tthis.pollTimerId = null;\n\t\t}\n\t\tthis.syncadaptor.getSkinnyTiddlers(function(err,tiddlers) {\n\t\t\t// Trigger the next sync\n\t\t\tself.pollTimerId = setTimeout(function() {\n\t\t\t\tself.pollTimerId = null;\n\t\t\t\tself.syncFromServer.call(self);\n\t\t\t},self.pollTimerInterval);\n\t\t\t// Check for errors\n\t\t\tif(err) {\n\t\t\t\tself.logger.alert($tw.language.getString(\"Error/RetrievingSkinny\") + \":\",err);\n\t\t\t\treturn;\n\t\t\t}\n\t\t\t// Process each incoming tiddler\n\t\t\tfor(var t=0; t<tiddlers.length; t++) {\n\t\t\t\t// Get the incoming tiddler fields, and the existing tiddler\n\t\t\t\tvar tiddlerFields = tiddlers[t],\n\t\t\t\t\tincomingRevision = tiddlerFields.revision + \"\",\n\t\t\t\t\ttiddler = self.wiki.getTiddler(tiddlerFields.title),\n\t\t\t\t\ttiddlerInfo = self.tiddlerInfo[tiddlerFields.title],\n\t\t\t\t\tcurrRevision = tiddlerInfo ? 
tiddlerInfo.revision : null;\n\t\t\t\t// Ignore the incoming tiddler if it's the same as the revision we've already got\n\t\t\t\tif(currRevision !== incomingRevision) {\n\t\t\t\t\t// Do a full load if we've already got a fat version of the tiddler\n\t\t\t\t\tif(tiddler && tiddler.fields.text !== undefined) {\n\t\t\t\t\t\t// Do a full load of this tiddler\n\t\t\t\t\t\tself.enqueueSyncTask({\n\t\t\t\t\t\t\ttype: \"load\",\n\t\t\t\t\t\t\ttitle: tiddlerFields.title\n\t\t\t\t\t\t});\n\t\t\t\t\t} else {\n\t\t\t\t\t\t// Load the skinny version of the tiddler\n\t\t\t\t\t\tself.storeTiddler(tiddlerFields);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\t}\n};\n\n/*\nSynchronise a set of changes to the server\n*/\nSyncer.prototype.syncToServer = function(changes) {\n\tvar self = this,\n\t\tnow = Date.now(),\n\t\tfilteredChanges = this.filterFn.call(this.wiki,function(callback) {\n\t\t\t$tw.utils.each(changes,function(change,title) {\n\t\t\t\tvar tiddler = self.wiki.getTiddler(title);\n\t\t\t\tcallback(tiddler,title);\n\t\t\t});\n\t\t});\n\t$tw.utils.each(changes,function(change,title,object) {\n\t\t// Process the change if it is a deletion of a tiddler we're already syncing, or is on the filtered change list\n\t\tif((change.deleted && $tw.utils.hop(self.tiddlerInfo,title)) || filteredChanges.indexOf(title) !== -1) {\n\t\t\t// Queue a task to sync this tiddler\n\t\t\tself.enqueueSyncTask({\n\t\t\t\ttype: change.deleted ? 
\"delete\" : \"save\",\n\t\t\t\ttitle: title\n\t\t\t});\n\t\t}\n\t});\n};\n\n/*\nLazily load a skinny tiddler if we can\n*/\nSyncer.prototype.handleLazyLoadEvent = function(title) {\n\t// Don't lazy load the same tiddler twice\n\tvar info = this.tiddlerInfo[title];\n\tif(!info || !info.hasBeenLazyLoaded) {\n\t\tthis.createTiddlerInfo(title);\n\t\tthis.tiddlerInfo[title].hasBeenLazyLoaded = true;\n\t\t// Queue up a sync task to load this tiddler\n\t\tthis.enqueueSyncTask({\n\t\t\ttype: \"load\",\n\t\t\ttitle: title\n\t\t});\t\t\n\t}\n};\n\n/*\nDispay a password prompt and allow the user to login\n*/\nSyncer.prototype.handleLoginEvent = function() {\n\tvar self = this;\n\tthis.getStatus(function(err,isLoggedIn,username) {\n\t\tif(!isLoggedIn) {\n\t\t\t$tw.passwordPrompt.createPrompt({\n\t\t\t\tserviceName: $tw.language.getString(\"LoginToTiddlySpace\"),\n\t\t\t\tcallback: function(data) {\n\t\t\t\t\tself.login(data.username,data.password,function(err,isLoggedIn) {\n\t\t\t\t\t\tself.syncFromServer();\n\t\t\t\t\t});\n\t\t\t\t\treturn true; // Get rid of the password prompt\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\t});\n};\n\n/*\nAttempt to login to TiddlyWeb.\n\tusername: username\n\tpassword: password\n\tcallback: invoked with arguments (err,isLoggedIn)\n*/\nSyncer.prototype.login = function(username,password,callback) {\n\tthis.logger.log(\"Attempting to login as\",username);\n\tvar self = this;\n\tif(this.syncadaptor.login) {\n\t\tthis.syncadaptor.login(username,password,function(err) {\n\t\t\tif(err) {\n\t\t\t\treturn callback(err);\n\t\t\t}\n\t\t\tself.getStatus(function(err,isLoggedIn,username) {\n\t\t\t\tif(callback) {\n\t\t\t\t\tcallback(null,isLoggedIn);\n\t\t\t\t}\n\t\t\t});\n\t\t});\n\t} else {\n\t\tcallback(null,true);\n\t}\n};\n\n/*\nAttempt to log out of TiddlyWeb\n*/\nSyncer.prototype.handleLogoutEvent = function() {\n\tthis.logger.log(\"Attempting to logout\");\n\tvar self = this;\n\tif(this.syncadaptor.logout) {\n\t\tthis.syncadaptor.logout(function(err) 
{\n\t\t\tif(err) {\n\t\t\t\tself.logger.alert(err);\n\t\t\t} else {\n\t\t\t\tself.getStatus();\n\t\t\t}\n\t\t});\n\t}\n};\n\n/*\nImmediately refresh from the server\n*/\nSyncer.prototype.handleRefreshEvent = function() {\n\tthis.syncFromServer();\n};\n\n/*\nQueue up a sync task. If there is already a pending task for the tiddler, just update the last modification time\n*/\nSyncer.prototype.enqueueSyncTask = function(task) {\n\tvar self = this,\n\t\tnow = Date.now();\n\t// Set the timestamps on this task\n\ttask.queueTime = now;\n\ttask.lastModificationTime = now;\n\t// Fill in some tiddlerInfo if the tiddler is one we haven't seen before\n\tthis.createTiddlerInfo(task.title);\n\t// Bail if this is a save and the tiddler is already at the changeCount that the server has\n\tif(task.type === \"save\" && this.wiki.getChangeCount(task.title) <= this.tiddlerInfo[task.title].changeCount) {\n\t\treturn;\n\t}\n\t// Check if this tiddler is already in the queue\n\tif($tw.utils.hop(this.taskQueue,task.title)) {\n\t\t// this.logger.log(\"Re-queueing up sync task with type:\",task.type,\"title:\",task.title);\n\t\tvar existingTask = this.taskQueue[task.title];\n\t\t// If so, just update the last modification time\n\t\texistingTask.lastModificationTime = task.lastModificationTime;\n\t\t// If the new task is a save then we upgrade the existing task to a save. Thus a pending load is turned into a save if the tiddler changes locally in the meantime. 
But a pending save is not modified to become a load\n\t\tif(task.type === \"save\" || task.type === \"delete\") {\n\t\t\texistingTask.type = task.type;\n\t\t}\n\t} else {\n\t\t// this.logger.log(\"Queuing up sync task with type:\",task.type,\"title:\",task.title);\n\t\t// If it is not in the queue, insert it\n\t\tthis.taskQueue[task.title] = task;\n\t\tthis.updateDirtyStatus();\n\t}\n\t// Process the queue\n\t$tw.utils.nextTick(function() {self.processTaskQueue.call(self);});\n};\n\n/*\nReturn the number of tasks in progress\n*/\nSyncer.prototype.numTasksInProgress = function() {\n\treturn $tw.utils.count(this.taskInProgress);\n};\n\n/*\nReturn the number of tasks in the queue\n*/\nSyncer.prototype.numTasksInQueue = function() {\n\treturn $tw.utils.count(this.taskQueue);\n};\n\n/*\nTrigger a timeout if one isn't already outstanding\n*/\nSyncer.prototype.triggerTimeout = function() {\n\tvar self = this;\n\tif(!this.taskTimerId) {\n\t\tthis.taskTimerId = setTimeout(function() {\n\t\t\tself.taskTimerId = null;\n\t\t\tself.processTaskQueue.call(self);\n\t\t},self.taskTimerInterval);\n\t}\n};\n\n/*\nProcess the task queue, performing the next task if appropriate\n*/\nSyncer.prototype.processTaskQueue = function() {\n\tvar self = this;\n\t// Only process a task if the sync adaptor is fully initialised and we're not already performing a task. 
If we are already performing a task then we'll dispatch the next one when it completes\n\tif(this.syncadaptor.isReady() && this.numTasksInProgress() === 0) {\n\t\t// Choose the next task to perform\n\t\tvar task = this.chooseNextTask();\n\t\t// Perform the task if we had one\n\t\tif(task) {\n\t\t\t// Remove the task from the queue and add it to the in progress list\n\t\t\tdelete this.taskQueue[task.title];\n\t\t\tthis.taskInProgress[task.title] = task;\n\t\t\tthis.updateDirtyStatus();\n\t\t\t// Dispatch the task\n\t\t\tthis.dispatchTask(task,function(err) {\n\t\t\t\tif(err) {\n\t\t\t\t\tself.logger.alert(\"Sync error while processing '\" + task.title + \"':\\n\" + err);\n\t\t\t\t}\n\t\t\t\t// Mark that this task is no longer in progress\n\t\t\t\tdelete self.taskInProgress[task.title];\n\t\t\t\tself.updateDirtyStatus();\n\t\t\t\t// Process the next task\n\t\t\t\tself.processTaskQueue.call(self);\n\t\t\t});\n\t\t} else {\n\t\t\t// Make sure we've set a time if there wasn't a task to perform, but we've still got tasks in the queue\n\t\t\tif(this.numTasksInQueue() > 0) {\n\t\t\t\tthis.triggerTimeout();\n\t\t\t}\n\t\t}\n\t}\n};\n\n/*\nChoose the next applicable task\n*/\nSyncer.prototype.chooseNextTask = function() {\n\tvar self = this,\n\t\tcandidateTask = null,\n\t\tnow = Date.now();\n\t// Select the best candidate task\n\t$tw.utils.each(this.taskQueue,function(task,title) {\n\t\t// Exclude the task if there's one of the same name in progress\n\t\tif($tw.utils.hop(self.taskInProgress,title)) {\n\t\t\treturn;\n\t\t}\n\t\t// Exclude the task if it is a save and the tiddler has been modified recently, but not hit the fallback time\n\t\tif(task.type === \"save\" && (now - task.lastModificationTime) < self.throttleInterval &&\n\t\t\t(now - task.queueTime) < self.fallbackInterval) {\n\t\t\treturn;\n\t\t}\n\t\t// Exclude the task if it is newer than the current best candidate\n\t\tif(candidateTask && candidateTask.queueTime < task.queueTime) {\n\t\t\treturn;\n\t\t}\n\t\t// 
Now this is our best candidate\n\t\tcandidateTask = task;\n\t});\n\treturn candidateTask;\n};\n\n/*\nDispatch a task and invoke the callback\n*/\nSyncer.prototype.dispatchTask = function(task,callback) {\n\tvar self = this;\n\tif(task.type === \"save\") {\n\t\tvar changeCount = this.wiki.getChangeCount(task.title),\n\t\t\ttiddler = this.wiki.getTiddler(task.title);\n\t\tthis.logger.log(\"Dispatching 'save' task:\",task.title);\n\t\tif(tiddler) {\n\t\t\tthis.syncadaptor.saveTiddler(tiddler,function(err,adaptorInfo,revision) {\n\t\t\t\tif(err) {\n\t\t\t\t\treturn callback(err);\n\t\t\t\t}\n\t\t\t\t// Adjust the info stored about this tiddler\n\t\t\t\tself.tiddlerInfo[task.title] = {\n\t\t\t\t\tchangeCount: changeCount,\n\t\t\t\t\tadaptorInfo: adaptorInfo,\n\t\t\t\t\trevision: revision\n\t\t\t\t};\n\t\t\t\t// Invoke the callback\n\t\t\t\tcallback(null);\n\t\t\t},{\n\t\t\t\ttiddlerInfo: self.tiddlerInfo[task.title]\n\t\t\t});\n\t\t} else {\n\t\t\tthis.logger.log(\" Not Dispatching 'save' task:\",task.title,\"tiddler does not exist\");\n\t\t\treturn callback(null);\n\t\t}\n\t} else if(task.type === \"load\") {\n\t\t// Load the tiddler\n\t\tthis.logger.log(\"Dispatching 'load' task:\",task.title);\n\t\tthis.syncadaptor.loadTiddler(task.title,function(err,tiddlerFields) {\n\t\t\tif(err) {\n\t\t\t\treturn callback(err);\n\t\t\t}\n\t\t\t// Store the tiddler\n\t\t\tif(tiddlerFields) {\n\t\t\t\tself.storeTiddler(tiddlerFields);\n\t\t\t}\n\t\t\t// Invoke the callback\n\t\t\tcallback(null);\n\t\t});\n\t} else if(task.type === \"delete\") {\n\t\t// Delete the tiddler\n\t\tthis.logger.log(\"Dispatching 'delete' task:\",task.title);\n\t\tthis.syncadaptor.deleteTiddler(task.title,function(err) {\n\t\t\tif(err) {\n\t\t\t\treturn callback(err);\n\t\t\t}\n\t\t\tdelete self.tiddlerInfo[task.title];\n\t\t\t// Invoke the callback\n\t\t\tcallback(null);\n\t\t},{\n\t\t\ttiddlerInfo: self.tiddlerInfo[task.title]\n\t\t});\n\t}\n};\n\nexports.Syncer = Syncer;\n\n})();\n",
"title": "$:/core/modules/syncer.js",
"type": "application/javascript",
"module-type": "global"
},
"$:/core/modules/tiddler.js": {
"text": "/*\\\ntitle: $:/core/modules/tiddler.js\ntype: application/javascript\nmodule-type: tiddlermethod\n\nExtension methods for the $tw.Tiddler object (constructor and methods required at boot time are in boot/boot.js)\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.hasTag = function(tag) {\n\treturn this.fields.tags && this.fields.tags.indexOf(tag) !== -1;\n};\n\nexports.isPlugin = function() {\n\treturn this.fields.type === \"application/json\" && this.hasField(\"plugin-type\");\n};\n\nexports.isDraft = function() {\n\treturn this.hasField(\"draft.of\");\n};\n\nexports.getFieldString = function(field) {\n\tvar value = this.fields[field];\n\t// Check for a missing field\n\tif(value === undefined || value === null) {\n\t\treturn \"\";\n\t}\n\t// Parse the field with the associated module (if any)\n\tvar fieldModule = $tw.Tiddler.fieldModules[field];\n\tif(fieldModule && fieldModule.stringify) {\n\t\treturn fieldModule.stringify.call(this,value);\n\t} else {\n\t\treturn value.toString();\n\t}\n};\n\n/*\nGet all the fields as a name:value block. 
Options:\n\texclude: an array of field names to exclude\n*/\nexports.getFieldStringBlock = function(options) {\n\toptions = options || {};\n\tvar exclude = options.exclude || [];\n\tvar fields = [];\n\tfor(var field in this.fields) {\n\t\tif($tw.utils.hop(this.fields,field)) {\n\t\t\tif(exclude.indexOf(field) === -1) {\n\t\t\t\tfields.push(field + \": \" + this.getFieldString(field));\n\t\t\t}\n\t\t}\n\t}\n\treturn fields.join(\"\\n\");\n};\n\n/*\nCompare two tiddlers for equality\ntiddler: the tiddler to compare\nexcludeFields: array of field names to exclude from the comparison\n*/\nexports.isEqual = function(tiddler,excludeFields) {\n\tif(!(tiddler instanceof $tw.Tiddler)) {\n\t\treturn false;\n\t}\n\texcludeFields = excludeFields || [];\n\tvar self = this,\n\t\tdifferences = []; // Fields that have differences\n\t// Add to the differences array\n\tfunction addDifference(fieldName) {\n\t\t// Check for this field being excluded\n\t\tif(excludeFields.indexOf(fieldName) === -1) {\n\t\t\t// Save the field as a difference\n\t\t\t$tw.utils.pushTop(differences,fieldName);\n\t\t}\n\t}\n\t// Returns true if the two values of this field are equal\n\tfunction isFieldValueEqual(fieldName) {\n\t\tvar valueA = self.fields[fieldName],\n\t\t\tvalueB = tiddler.fields[fieldName];\n\t\t// Check for identical string values\n\t\tif(typeof(valueA) === \"string\" && typeof(valueB) === \"string\" && valueA === valueB) {\n\t\t\treturn true;\n\t\t}\n\t\t// Check for identical array values\n\t\tif($tw.utils.isArray(valueA) && $tw.utils.isArray(valueB) && $tw.utils.isArrayEqual(valueA,valueB)) {\n\t\t\treturn true;\n\t\t}\n\t\t// Otherwise the fields must be different\n\t\treturn false;\n\t}\n\t// Compare our fields\n\tfor(var fieldName in this.fields) {\n\t\tif(!isFieldValueEqual(fieldName)) {\n\t\t\taddDifference(fieldName);\n\t\t}\n\t}\n\t// There's a difference for every field in the other tiddler that we don't have\n\tfor(fieldName in tiddler.fields) {\n\t\tif(!(fieldName in 
this.fields)) {\n\t\t\taddDifference(fieldName);\n\t\t}\n\t}\n\t// Return whether there were any differences\n\treturn differences.length === 0;\n};\n\n})();\n",
"title": "$:/core/modules/tiddler.js",
"type": "application/javascript",
"module-type": "tiddlermethod"
},
"$:/core/modules/upgraders/plugins.js": {
"text": "/*\\\ntitle: $:/core/modules/upgraders/plugins.js\ntype: application/javascript\nmodule-type: upgrader\n\nUpgrader module that checks that plugins are newer than any already installed version\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar UPGRADE_LIBRARY_TITLE = \"$:/UpgradeLibrary\";\n\nvar BLOCKED_PLUGINS = {\n\t\"$:/themes/tiddlywiki/stickytitles\": {\n\t\tversions: [\"*\"]\n\t},\n\t\"$:/plugins/tiddlywiki/fullscreen\": {\n\t\tversions: [\"*\"]\n\t}\n};\n\nexports.upgrade = function(wiki,titles,tiddlers) {\n\tvar self = this,\n\t\tmessages = {},\n\t\tupgradeLibrary,\n\t\tgetLibraryTiddler = function(title) {\n\t\t\tif(!upgradeLibrary) {\n\t\t\t\tupgradeLibrary = wiki.getTiddlerData(UPGRADE_LIBRARY_TITLE,{});\n\t\t\t\tupgradeLibrary.tiddlers = upgradeLibrary.tiddlers || {};\n\t\t\t}\n\t\t\treturn upgradeLibrary.tiddlers[title];\n\t\t};\n\n\t// Go through all the incoming tiddlers\n\t$tw.utils.each(titles,function(title) {\n\t\tvar incomingTiddler = tiddlers[title];\n\t\t// Check if we're dealing with a plugin\n\t\tif(incomingTiddler && incomingTiddler[\"plugin-type\"] && incomingTiddler.version) {\n\t\t\t// Upgrade the incoming plugin if it is in the upgrade library\n\t\t\tvar libraryTiddler = getLibraryTiddler(title);\n\t\t\tif(libraryTiddler && libraryTiddler[\"plugin-type\"] && libraryTiddler.version) {\n\t\t\t\ttiddlers[title] = libraryTiddler;\n\t\t\t\tmessages[title] = $tw.language.getString(\"Import/Upgrader/Plugins/Upgraded\",{variables: {incoming: incomingTiddler.version, upgraded: libraryTiddler.version}});\n\t\t\t\treturn;\n\t\t\t}\n\t\t\t// Suppress the incoming plugin if it is older than the currently installed one\n\t\t\tvar existingTiddler = wiki.getTiddler(title);\n\t\t\tif(existingTiddler && existingTiddler.hasField(\"plugin-type\") && existingTiddler.hasField(\"version\")) {\n\t\t\t\t// Reject the incoming plugin by blanking all its 
fields\n\t\t\t\tif($tw.utils.checkVersions(existingTiddler.fields.version,incomingTiddler.version)) {\n\t\t\t\t\ttiddlers[title] = Object.create(null);\n\t\t\t\t\tmessages[title] = $tw.language.getString(\"Import/Upgrader/Plugins/Suppressed/Version\",{variables: {incoming: incomingTiddler.version, existing: existingTiddler.fields.version}});\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif(incomingTiddler && incomingTiddler[\"plugin-type\"]) {\n\t\t\t// Check whether the plugin is on the blocked list\n\t\t\tvar blockInfo = BLOCKED_PLUGINS[title];\n\t\t\tif(blockInfo) {\n\t\t\t\tif(blockInfo.versions.indexOf(\"*\") !== -1 || (incomingTiddler.version && blockInfo.versions.indexOf(incomingTiddler.version) !== -1)) {\n\t\t\t\t\ttiddlers[title] = Object.create(null);\n\t\t\t\t\tmessages[title] = $tw.language.getString(\"Import/Upgrader/Plugins/Suppressed/Incompatible\");\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t});\n\treturn messages;\n};\n\n})();\n",
"title": "$:/core/modules/upgraders/plugins.js",
"type": "application/javascript",
"module-type": "upgrader"
},
"$:/core/modules/upgraders/system.js": {
"text": "/*\\\ntitle: $:/core/modules/upgraders/system.js\ntype: application/javascript\nmodule-type: upgrader\n\nUpgrader module that suppresses certain system tiddlers that shouldn't be imported\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar DONT_IMPORT_LIST = [\"$:/StoryList\",\"$:/HistoryList\"],\n\tDONT_IMPORT_PREFIX_LIST = [\"$:/temp/\",\"$:/state/\"];\n\nexports.upgrade = function(wiki,titles,tiddlers) {\n\tvar self = this,\n\t\tmessages = {};\n\t// Check for tiddlers on our list\n\t$tw.utils.each(titles,function(title) {\n\t\tif(DONT_IMPORT_LIST.indexOf(title) !== -1) {\n\t\t\ttiddlers[title] = Object.create(null);\n\t\t\tmessages[title] = $tw.language.getString(\"Import/Upgrader/System/Suppressed\");\n\t\t} else {\n\t\t\tfor(var t=0; t<DONT_IMPORT_PREFIX_LIST.length; t++) {\n\t\t\t\tvar prefix = DONT_IMPORT_PREFIX_LIST[t];\n\t\t\t\tif(title.substr(0,prefix.length) === prefix) {\n\t\t\t\t\ttiddlers[title] = Object.create(null);\n\t\t\t\t\tmessages[title] = $tw.language.getString(\"Import/Upgrader/State/Suppressed\");\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t});\n\treturn messages;\n};\n\n})();\n",
"title": "$:/core/modules/upgraders/system.js",
"type": "application/javascript",
"module-type": "upgrader"
},
"$:/core/modules/upgraders/themetweaks.js": {
"text": "/*\\\ntitle: $:/core/modules/upgraders/themetweaks.js\ntype: application/javascript\nmodule-type: upgrader\n\nUpgrader module that handles the change in theme tweak storage introduced in 5.0.14-beta.\n\nPreviously, theme tweaks were stored in two data tiddlers:\n\n* $:/themes/tiddlywiki/vanilla/metrics\n* $:/themes/tiddlywiki/vanilla/settings\n\nNow, each tweak is stored in its own separate tiddler.\n\nThis upgrader copies any values from the old format to the new. The old data tiddlers are not deleted in case they have been used to store additional indexes.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar MAPPINGS = {\n\t\"$:/themes/tiddlywiki/vanilla/metrics\": {\n\t\t\"fontsize\": \"$:/themes/tiddlywiki/vanilla/metrics/fontsize\",\n\t\t\"lineheight\": \"$:/themes/tiddlywiki/vanilla/metrics/lineheight\",\n\t\t\"storyleft\": \"$:/themes/tiddlywiki/vanilla/metrics/storyleft\",\n\t\t\"storytop\": \"$:/themes/tiddlywiki/vanilla/metrics/storytop\",\n\t\t\"storyright\": \"$:/themes/tiddlywiki/vanilla/metrics/storyright\",\n\t\t\"storywidth\": \"$:/themes/tiddlywiki/vanilla/metrics/storywidth\",\n\t\t\"tiddlerwidth\": \"$:/themes/tiddlywiki/vanilla/metrics/tiddlerwidth\"\n\t},\n\t\"$:/themes/tiddlywiki/vanilla/settings\": {\n\t\t\"fontfamily\": \"$:/themes/tiddlywiki/vanilla/settings/fontfamily\"\n\t}\n};\n\nexports.upgrade = function(wiki,titles,tiddlers) {\n\tvar self = this,\n\t\tmessages = {};\n\t// Check for tiddlers on our list\n\t$tw.utils.each(titles,function(title) {\n\t\tvar mapping = MAPPINGS[title];\n\t\tif(mapping) {\n\t\t\tvar tiddler = new $tw.Tiddler(tiddlers[title]),\n\t\t\t\ttiddlerData = wiki.getTiddlerDataCached(tiddler,{});\n\t\t\tfor(var index in mapping) {\n\t\t\t\tvar mappedTitle = mapping[index];\n\t\t\t\tif(!tiddlers[mappedTitle] || tiddlers[mappedTitle].title !== mappedTitle) {\n\t\t\t\t\ttiddlers[mappedTitle] = {\n\t\t\t\t\t\ttitle: mappedTitle,\n\t\t\t\t\t\ttext: 
tiddlerData[index]\n\t\t\t\t\t};\n\t\t\t\t\tmessages[mappedTitle] = $tw.language.getString(\"Import/Upgrader/ThemeTweaks/Created\",{variables: {\n\t\t\t\t\t\tfrom: title + \"##\" + index\n\t\t\t\t\t}});\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t});\n\treturn messages;\n};\n\n})();\n",
"title": "$:/core/modules/upgraders/themetweaks.js",
"type": "application/javascript",
"module-type": "upgrader"
},
"$:/core/modules/utils/crypto.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/crypto.js\ntype: application/javascript\nmodule-type: utils\n\nUtility functions related to crypto.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nLook for an encrypted store area in the text of a TiddlyWiki file\n*/\nexports.extractEncryptedStoreArea = function(text) {\n\tvar encryptedStoreAreaStartMarker = \"<pre id=\\\"encryptedStoreArea\\\" type=\\\"text/plain\\\" style=\\\"display:none;\\\">\",\n\t\tencryptedStoreAreaStart = text.indexOf(encryptedStoreAreaStartMarker);\n\tif(encryptedStoreAreaStart !== -1) {\n\t\tvar encryptedStoreAreaEnd = text.indexOf(\"</pre>\",encryptedStoreAreaStart);\n\t\tif(encryptedStoreAreaEnd !== -1) {\n\t\t\treturn $tw.utils.htmlDecode(text.substring(encryptedStoreAreaStart + encryptedStoreAreaStartMarker.length,encryptedStoreAreaEnd-1));\n\t\t}\n\t}\n\treturn null;\n};\n\n/*\nAttempt to extract the tiddlers from an encrypted store area using the current password. If the password is not provided then the password in the password store will be used\n*/\nexports.decryptStoreArea = function(encryptedStoreArea,password) {\n\tvar decryptedText = $tw.crypto.decrypt(encryptedStoreArea,password);\n\tif(decryptedText) {\n\t\tvar json = JSON.parse(decryptedText),\n\t\t\ttiddlers = [];\n\t\tfor(var title in json) {\n\t\t\tif(title !== \"$:/isEncrypted\") {\n\t\t\t\ttiddlers.push(json[title]);\n\t\t\t}\n\t\t}\n\t\treturn tiddlers;\n\t} else {\n\t\treturn null;\n\t}\n};\n\n\n/*\nAttempt to extract the tiddlers from an encrypted store area using the current password. 
If that fails, the user is prompted for a password.\nencryptedStoreArea: text of the TiddlyWiki encrypted store area\ncallback: function(tiddlers) called with the array of decrypted tiddlers\n\nThe following configuration settings are supported:\n\n$tw.config.usePasswordVault: causes any password entered by the user to also be put into the system password vault\n*/\nexports.decryptStoreAreaInteractive = function(encryptedStoreArea,callback,options) {\n\t// Try to decrypt with the current password\n\tvar tiddlers = $tw.utils.decryptStoreArea(encryptedStoreArea);\n\tif(tiddlers) {\n\t\tcallback(tiddlers);\n\t} else {\n\t\t// Prompt for a new password and keep trying\n\t\t$tw.passwordPrompt.createPrompt({\n\t\t\tserviceName: \"Enter a password to decrypt the imported TiddlyWiki\",\n\t\t\tnoUserName: true,\n\t\t\tcanCancel: true,\n\t\t\tsubmitText: \"Decrypt\",\n\t\t\tcallback: function(data) {\n\t\t\t\t// Exit if the user cancelled\n\t\t\t\tif(!data) {\n\t\t\t\t\treturn false;\n\t\t\t\t}\n\t\t\t\t// Attempt to decrypt the tiddlers\n\t\t\t\tvar tiddlers = $tw.utils.decryptStoreArea(encryptedStoreArea,data.password);\n\t\t\t\tif(tiddlers) {\n\t\t\t\t\tif($tw.config.usePasswordVault) {\n\t\t\t\t\t\t$tw.crypto.setPassword(data.password);\n\t\t\t\t\t}\n\t\t\t\t\tcallback(tiddlers);\n\t\t\t\t\t// Exit and remove the password prompt\n\t\t\t\t\treturn true;\n\t\t\t\t} else {\n\t\t\t\t\t// We didn't decrypt everything, so continue to prompt for password\n\t\t\t\t\treturn false;\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/utils/crypto.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/dom/animations/slide.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/dom/animations/slide.js\ntype: application/javascript\nmodule-type: animation\n\nA simple slide animation that varies the height of the element\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nfunction slideOpen(domNode,options) {\n\toptions = options || {};\n\tvar duration = options.duration || $tw.utils.getAnimationDuration();\n\t// Get the current height of the domNode\n\tvar computedStyle = window.getComputedStyle(domNode),\n\t\tcurrMarginBottom = parseInt(computedStyle.marginBottom,10),\n\t\tcurrMarginTop = parseInt(computedStyle.marginTop,10),\n\t\tcurrPaddingBottom = parseInt(computedStyle.paddingBottom,10),\n\t\tcurrPaddingTop = parseInt(computedStyle.paddingTop,10),\n\t\tcurrHeight = domNode.offsetHeight;\n\t// Reset the margin once the transition is over\n\tsetTimeout(function() {\n\t\t$tw.utils.setStyle(domNode,[\n\t\t\t{transition: \"none\"},\n\t\t\t{marginBottom: \"\"},\n\t\t\t{marginTop: \"\"},\n\t\t\t{paddingBottom: \"\"},\n\t\t\t{paddingTop: \"\"},\n\t\t\t{height: \"auto\"},\n\t\t\t{opacity: \"\"}\n\t\t]);\n\t\tif(options.callback) {\n\t\t\toptions.callback();\n\t\t}\n\t},duration);\n\t// Set up the initial position of the element\n\t$tw.utils.setStyle(domNode,[\n\t\t{transition: \"none\"},\n\t\t{marginTop: \"0px\"},\n\t\t{marginBottom: \"0px\"},\n\t\t{paddingTop: \"0px\"},\n\t\t{paddingBottom: \"0px\"},\n\t\t{height: \"0px\"},\n\t\t{opacity: \"0\"}\n\t]);\n\t$tw.utils.forceLayout(domNode);\n\t// Transition to the final position\n\t$tw.utils.setStyle(domNode,[\n\t\t{transition: \"margin-top \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"margin-bottom \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"padding-top \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"padding-bottom \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"height \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"opacity \" + duration + \"ms 
ease-in-out\"},\n\t\t{marginBottom: currMarginBottom + \"px\"},\n\t\t{marginTop: currMarginTop + \"px\"},\n\t\t{paddingBottom: currPaddingBottom + \"px\"},\n\t\t{paddingTop: currPaddingTop + \"px\"},\n\t\t{height: currHeight + \"px\"},\n\t\t{opacity: \"1\"}\n\t]);\n}\n\nfunction slideClosed(domNode,options) {\n\toptions = options || {};\n\tvar duration = options.duration || $tw.utils.getAnimationDuration(),\n\t\tcurrHeight = domNode.offsetHeight;\n\t// Clear the properties we've set when the animation is over\n\tsetTimeout(function() {\n\t\t$tw.utils.setStyle(domNode,[\n\t\t\t{transition: \"none\"},\n\t\t\t{marginBottom: \"\"},\n\t\t\t{marginTop: \"\"},\n\t\t\t{paddingBottom: \"\"},\n\t\t\t{paddingTop: \"\"},\n\t\t\t{height: \"auto\"},\n\t\t\t{opacity: \"\"}\n\t\t]);\n\t\tif(options.callback) {\n\t\t\toptions.callback();\n\t\t}\n\t},duration);\n\t// Set up the initial position of the element\n\t$tw.utils.setStyle(domNode,[\n\t\t{height: currHeight + \"px\"},\n\t\t{opacity: \"1\"}\n\t]);\n\t$tw.utils.forceLayout(domNode);\n\t// Transition to the final position\n\t$tw.utils.setStyle(domNode,[\n\t\t{transition: \"margin-top \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"margin-bottom \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"padding-top \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"padding-bottom \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"height \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"opacity \" + duration + \"ms ease-in-out\"},\n\t\t{marginTop: \"0px\"},\n\t\t{marginBottom: \"0px\"},\n\t\t{paddingTop: \"0px\"},\n\t\t{paddingBottom: \"0px\"},\n\t\t{height: \"0px\"},\n\t\t{opacity: \"0\"}\n\t]);\n}\n\nexports.slide = {\n\topen: slideOpen,\n\tclose: slideClosed\n};\n\n})();\n",
"title": "$:/core/modules/utils/dom/animations/slide.js",
"type": "application/javascript",
"module-type": "animation"
},
"$:/core/modules/utils/dom/animator.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/dom/animator.js\ntype: application/javascript\nmodule-type: utils\n\nOrchestrates animations and transitions\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nfunction Animator() {\n\t// Get the registered animation modules\n\tthis.animations = {};\n\t$tw.modules.applyMethods(\"animation\",this.animations);\n}\n\nAnimator.prototype.perform = function(type,domNode,options) {\n\toptions = options || {};\n\t// Find an animation that can handle this type\n\tvar chosenAnimation;\n\t$tw.utils.each(this.animations,function(animation,name) {\n\t\tif($tw.utils.hop(animation,type)) {\n\t\t\tchosenAnimation = animation[type];\n\t\t}\n\t});\n\tif(!chosenAnimation) {\n\t\tchosenAnimation = function(domNode,options) {\n\t\t\tif(options.callback) {\n\t\t\t\toptions.callback();\n\t\t\t}\n\t\t};\n\t}\n\t// Call the animation\n\tchosenAnimation(domNode,options);\n};\n\nexports.Animator = Animator;\n\n})();\n",
"title": "$:/core/modules/utils/dom/animator.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/dom/browser.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/dom/browser.js\ntype: application/javascript\nmodule-type: utils\n\nBrowser feature detection\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nSet style properties of an element\n\telement: dom node\n\tstyles: ordered array of {name: value} pairs\n*/\nexports.setStyle = function(element,styles) {\n\tif(element.nodeType === 1) { // Element.ELEMENT_NODE\n\t\tfor(var t=0; t<styles.length; t++) {\n\t\t\tfor(var styleName in styles[t]) {\n\t\t\t\telement.style[$tw.utils.convertStyleNameToPropertyName(styleName)] = styles[t][styleName];\n\t\t\t}\n\t\t}\n\t}\n};\n\n/*\nConverts a standard CSS property name into the local browser-specific equivalent. For example:\n\t\"background-color\" --> \"backgroundColor\"\n\t\"transition\" --> \"webkitTransition\"\n*/\n\nvar styleNameCache = {}; // We'll cache the style name conversions\n\nexports.convertStyleNameToPropertyName = function(styleName) {\n\t// Return from the cache if we can\n\tif(styleNameCache[styleName]) {\n\t\treturn styleNameCache[styleName];\n\t}\n\t// Convert it by first removing any hyphens\n\tvar propertyName = $tw.utils.unHyphenateCss(styleName);\n\t// Then check if it needs a prefix\n\tif($tw.browser && document.body.style[propertyName] === undefined) {\n\t\tvar prefixes = [\"O\",\"MS\",\"Moz\",\"webkit\"];\n\t\tfor(var t=0; t<prefixes.length; t++) {\n\t\t\tvar prefixedName = prefixes[t] + propertyName.substr(0,1).toUpperCase() + propertyName.substr(1);\n\t\t\tif(document.body.style[prefixedName] !== undefined) {\n\t\t\t\tpropertyName = prefixedName;\n\t\t\t\tbreak;\n\t\t\t}\n\t\t}\n\t}\n\t// Put it in the cache too\n\tstyleNameCache[styleName] = propertyName;\n\treturn propertyName;\n};\n\n/*\nConverts a JS format CSS property name back into the dashed form used in CSS declarations. 
For example:\n\t\"backgroundColor\" --> \"background-color\"\n\t\"webkitTransform\" --> \"-webkit-transform\"\n*/\nexports.convertPropertyNameToStyleName = function(propertyName) {\n\t// Rehyphenate the name\n\tvar styleName = $tw.utils.hyphenateCss(propertyName);\n\t// If there's a webkit prefix, add a dash (other browsers have uppercase prefixes, and so get the dash automatically)\n\tif(styleName.indexOf(\"webkit\") === 0) {\n\t\tstyleName = \"-\" + styleName;\n\t} else if(styleName.indexOf(\"-m-s\") === 0) {\n\t\tstyleName = \"-ms\" + styleName.substr(4);\n\t}\n\treturn styleName;\n};\n\n/*\nRound trip a stylename to a property name and back again. For example:\n\t\"transform\" --> \"webkitTransform\" --> \"-webkit-transform\"\n*/\nexports.roundTripPropertyName = function(propertyName) {\n\treturn $tw.utils.convertPropertyNameToStyleName($tw.utils.convertStyleNameToPropertyName(propertyName));\n};\n\n/*\nConverts a standard event name into the local browser specific equivalent. For example:\n\t\"animationEnd\" --> \"webkitAnimationEnd\"\n*/\n\nvar eventNameCache = {}; // We'll cache the conversions\n\nvar eventNameMappings = {\n\t\"transitionEnd\": {\n\t\tcorrespondingCssProperty: \"transition\",\n\t\tmappings: {\n\t\t\ttransition: \"transitionend\",\n\t\t\tOTransition: \"oTransitionEnd\",\n\t\t\tMSTransition: \"msTransitionEnd\",\n\t\t\tMozTransition: \"transitionend\",\n\t\t\twebkitTransition: \"webkitTransitionEnd\"\n\t\t}\n\t},\n\t\"animationEnd\": {\n\t\tcorrespondingCssProperty: \"animation\",\n\t\tmappings: {\n\t\t\tanimation: \"animationend\",\n\t\t\tOAnimation: \"oAnimationEnd\",\n\t\t\tMSAnimation: \"msAnimationEnd\",\n\t\t\tMozAnimation: \"animationend\",\n\t\t\twebkitAnimation: \"webkitAnimationEnd\"\n\t\t}\n\t}\n};\n\nexports.convertEventName = function(eventName) {\n\tif(eventNameCache[eventName]) {\n\t\treturn eventNameCache[eventName];\n\t}\n\tvar newEventName = eventName,\n\t\tmappings = eventNameMappings[eventName];\n\tif(mappings) {\n\t\tvar 
convertedProperty = $tw.utils.convertStyleNameToPropertyName(mappings.correspondingCssProperty);\n\t\tif(mappings.mappings[convertedProperty]) {\n\t\t\tnewEventName = mappings.mappings[convertedProperty];\n\t\t}\n\t}\n\t// Put it in the cache too\n\teventNameCache[eventName] = newEventName;\n\treturn newEventName;\n};\n\n/*\nReturn the names of the fullscreen APIs\n*/\nexports.getFullScreenApis = function() {\n\tvar d = document,\n\t\tdb = d.body,\n\t\tresult = {\n\t\t\"_requestFullscreen\": db.webkitRequestFullscreen !== undefined ? \"webkitRequestFullscreen\" :\n\t\t\t\t\t\t\tdb.mozRequestFullScreen !== undefined ? \"mozRequestFullScreen\" :\n\t\t\t\t\t\t\tdb.msRequestFullscreen !== undefined ? \"msRequestFullscreen\" :\n\t\t\t\t\t\t\tdb.requestFullscreen !== undefined ? \"requestFullscreen\" : \"\",\n\t\t\"_exitFullscreen\": d.webkitExitFullscreen !== undefined ? \"webkitExitFullscreen\" :\n\t\t\t\t\t\t\td.mozCancelFullScreen !== undefined ? \"mozCancelFullScreen\" :\n\t\t\t\t\t\t\td.msExitFullscreen !== undefined ? \"msExitFullscreen\" :\n\t\t\t\t\t\t\td.exitFullscreen !== undefined ? \"exitFullscreen\" : \"\",\n\t\t\"_fullscreenElement\": d.webkitFullscreenElement !== undefined ? \"webkitFullscreenElement\" :\n\t\t\t\t\t\t\td.mozFullScreenElement !== undefined ? \"mozFullScreenElement\" :\n\t\t\t\t\t\t\td.msFullscreenElement !== undefined ? \"msFullscreenElement\" :\n\t\t\t\t\t\t\td.fullscreenElement !== undefined ? \"fullscreenElement\" : \"\",\n\t\t\"_fullscreenChange\": d.webkitFullscreenElement !== undefined ? \"webkitfullscreenchange\" :\n\t\t\t\t\t\t\td.mozFullScreenElement !== undefined ? \"mozfullscreenchange\" :\n\t\t\t\t\t\t\td.msFullscreenElement !== undefined ? \"MSFullscreenChange\" :\n\t\t\t\t\t\t\td.fullscreenElement !== undefined ? 
\"fullscreenchange\" : \"\"\n\t};\n\tif(!result._requestFullscreen || !result._exitFullscreen || !result._fullscreenElement || !result._fullscreenChange) {\n\t\treturn null;\n\t} else {\n\t\treturn result;\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/utils/dom/browser.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/dom/csscolorparser.js": {
"text": "// (c) Dean McNamee <dean@gmail.com>, 2012.\n//\n// https://github.com/deanm/css-color-parser-js\n//\n// Permission is hereby granted, free of charge, to any person obtaining a copy\n// of this software and associated documentation files (the \"Software\"), to\n// deal in the Software without restriction, including without limitation the\n// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n// sell copies of the Software, and to permit persons to whom the Software is\n// furnished to do so, subject to the following conditions:\n//\n// The above copyright notice and this permission notice shall be included in\n// all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n// IN THE SOFTWARE.\n\n// http://www.w3.org/TR/css3-color/\nvar kCSSColorTable = {\n \"transparent\": [0,0,0,0], \"aliceblue\": [240,248,255,1],\n \"antiquewhite\": [250,235,215,1], \"aqua\": [0,255,255,1],\n \"aquamarine\": [127,255,212,1], \"azure\": [240,255,255,1],\n \"beige\": [245,245,220,1], \"bisque\": [255,228,196,1],\n \"black\": [0,0,0,1], \"blanchedalmond\": [255,235,205,1],\n \"blue\": [0,0,255,1], \"blueviolet\": [138,43,226,1],\n \"brown\": [165,42,42,1], \"burlywood\": [222,184,135,1],\n \"cadetblue\": [95,158,160,1], \"chartreuse\": [127,255,0,1],\n \"chocolate\": [210,105,30,1], \"coral\": [255,127,80,1],\n \"cornflowerblue\": [100,149,237,1], \"cornsilk\": [255,248,220,1],\n \"crimson\": [220,20,60,1], \"cyan\": [0,255,255,1],\n \"darkblue\": [0,0,139,1], \"darkcyan\": [0,139,139,1],\n 
\"darkgoldenrod\": [184,134,11,1], \"darkgray\": [169,169,169,1],\n \"darkgreen\": [0,100,0,1], \"darkgrey\": [169,169,169,1],\n \"darkkhaki\": [189,183,107,1], \"darkmagenta\": [139,0,139,1],\n \"darkolivegreen\": [85,107,47,1], \"darkorange\": [255,140,0,1],\n \"darkorchid\": [153,50,204,1], \"darkred\": [139,0,0,1],\n \"darksalmon\": [233,150,122,1], \"darkseagreen\": [143,188,143,1],\n \"darkslateblue\": [72,61,139,1], \"darkslategray\": [47,79,79,1],\n \"darkslategrey\": [47,79,79,1], \"darkturquoise\": [0,206,209,1],\n \"darkviolet\": [148,0,211,1], \"deeppink\": [255,20,147,1],\n \"deepskyblue\": [0,191,255,1], \"dimgray\": [105,105,105,1],\n \"dimgrey\": [105,105,105,1], \"dodgerblue\": [30,144,255,1],\n \"firebrick\": [178,34,34,1], \"floralwhite\": [255,250,240,1],\n \"forestgreen\": [34,139,34,1], \"fuchsia\": [255,0,255,1],\n \"gainsboro\": [220,220,220,1], \"ghostwhite\": [248,248,255,1],\n \"gold\": [255,215,0,1], \"goldenrod\": [218,165,32,1],\n \"gray\": [128,128,128,1], \"green\": [0,128,0,1],\n \"greenyellow\": [173,255,47,1], \"grey\": [128,128,128,1],\n \"honeydew\": [240,255,240,1], \"hotpink\": [255,105,180,1],\n \"indianred\": [205,92,92,1], \"indigo\": [75,0,130,1],\n \"ivory\": [255,255,240,1], \"khaki\": [240,230,140,1],\n \"lavender\": [230,230,250,1], \"lavenderblush\": [255,240,245,1],\n \"lawngreen\": [124,252,0,1], \"lemonchiffon\": [255,250,205,1],\n \"lightblue\": [173,216,230,1], \"lightcoral\": [240,128,128,1],\n \"lightcyan\": [224,255,255,1], \"lightgoldenrodyellow\": [250,250,210,1],\n \"lightgray\": [211,211,211,1], \"lightgreen\": [144,238,144,1],\n \"lightgrey\": [211,211,211,1], \"lightpink\": [255,182,193,1],\n \"lightsalmon\": [255,160,122,1], \"lightseagreen\": [32,178,170,1],\n \"lightskyblue\": [135,206,250,1], \"lightslategray\": [119,136,153,1],\n \"lightslategrey\": [119,136,153,1], \"lightsteelblue\": [176,196,222,1],\n \"lightyellow\": [255,255,224,1], \"lime\": [0,255,0,1],\n \"limegreen\": [50,205,50,1], 
\"linen\": [250,240,230,1],\n \"magenta\": [255,0,255,1], \"maroon\": [128,0,0,1],\n \"mediumaquamarine\": [102,205,170,1], \"mediumblue\": [0,0,205,1],\n \"mediumorchid\": [186,85,211,1], \"mediumpurple\": [147,112,219,1],\n \"mediumseagreen\": [60,179,113,1], \"mediumslateblue\": [123,104,238,1],\n \"mediumspringgreen\": [0,250,154,1], \"mediumturquoise\": [72,209,204,1],\n \"mediumvioletred\": [199,21,133,1], \"midnightblue\": [25,25,112,1],\n \"mintcream\": [245,255,250,1], \"mistyrose\": [255,228,225,1],\n \"moccasin\": [255,228,181,1], \"navajowhite\": [255,222,173,1],\n \"navy\": [0,0,128,1], \"oldlace\": [253,245,230,1],\n \"olive\": [128,128,0,1], \"olivedrab\": [107,142,35,1],\n \"orange\": [255,165,0,1], \"orangered\": [255,69,0,1],\n \"orchid\": [218,112,214,1], \"palegoldenrod\": [238,232,170,1],\n \"palegreen\": [152,251,152,1], \"paleturquoise\": [175,238,238,1],\n \"palevioletred\": [219,112,147,1], \"papayawhip\": [255,239,213,1],\n \"peachpuff\": [255,218,185,1], \"peru\": [205,133,63,1],\n \"pink\": [255,192,203,1], \"plum\": [221,160,221,1],\n \"powderblue\": [176,224,230,1], \"purple\": [128,0,128,1],\n \"red\": [255,0,0,1], \"rosybrown\": [188,143,143,1],\n \"royalblue\": [65,105,225,1], \"saddlebrown\": [139,69,19,1],\n \"salmon\": [250,128,114,1], \"sandybrown\": [244,164,96,1],\n \"seagreen\": [46,139,87,1], \"seashell\": [255,245,238,1],\n \"sienna\": [160,82,45,1], \"silver\": [192,192,192,1],\n \"skyblue\": [135,206,235,1], \"slateblue\": [106,90,205,1],\n \"slategray\": [112,128,144,1], \"slategrey\": [112,128,144,1],\n \"snow\": [255,250,250,1], \"springgreen\": [0,255,127,1],\n \"steelblue\": [70,130,180,1], \"tan\": [210,180,140,1],\n \"teal\": [0,128,128,1], \"thistle\": [216,191,216,1],\n \"tomato\": [255,99,71,1], \"turquoise\": [64,224,208,1],\n \"violet\": [238,130,238,1], \"wheat\": [245,222,179,1],\n \"white\": [255,255,255,1], \"whitesmoke\": [245,245,245,1],\n \"yellow\": [255,255,0,1], \"yellowgreen\": 
[154,205,50,1]}\n\nfunction clamp_css_byte(i) { // Clamp to integer 0 .. 255.\n i = Math.round(i); // Seems to be what Chrome does (vs truncation).\n return i < 0 ? 0 : i > 255 ? 255 : i;\n}\n\nfunction clamp_css_float(f) { // Clamp to float 0.0 .. 1.0.\n return f < 0 ? 0 : f > 1 ? 1 : f;\n}\n\nfunction parse_css_int(str) { // int or percentage.\n if (str[str.length - 1] === '%')\n return clamp_css_byte(parseFloat(str) / 100 * 255);\n return clamp_css_byte(parseInt(str));\n}\n\nfunction parse_css_float(str) { // float or percentage.\n if (str[str.length - 1] === '%')\n return clamp_css_float(parseFloat(str) / 100);\n return clamp_css_float(parseFloat(str));\n}\n\nfunction css_hue_to_rgb(m1, m2, h) {\n if (h < 0) h += 1;\n else if (h > 1) h -= 1;\n\n if (h * 6 < 1) return m1 + (m2 - m1) * h * 6;\n if (h * 2 < 1) return m2;\n if (h * 3 < 2) return m1 + (m2 - m1) * (2/3 - h) * 6;\n return m1;\n}\n\nfunction parseCSSColor(css_str) {\n // Remove all whitespace, not compliant, but should just be more accepting.\n var str = css_str.replace(/ /g, '').toLowerCase();\n\n // Color keywords (and transparent) lookup.\n if (str in kCSSColorTable) return kCSSColorTable[str].slice(); // dup.\n\n // #abc and #abc123 syntax.\n if (str[0] === '#') {\n if (str.length === 4) {\n var iv = parseInt(str.substr(1), 16); // TODO(deanm): Stricter parsing.\n if (!(iv >= 0 && iv <= 0xfff)) return null; // Covers NaN.\n return [((iv & 0xf00) >> 4) | ((iv & 0xf00) >> 8),\n (iv & 0xf0) | ((iv & 0xf0) >> 4),\n (iv & 0xf) | ((iv & 0xf) << 4),\n 1];\n } else if (str.length === 7) {\n var iv = parseInt(str.substr(1), 16); // TODO(deanm): Stricter parsing.\n if (!(iv >= 0 && iv <= 0xffffff)) return null; // Covers NaN.\n return [(iv & 0xff0000) >> 16,\n (iv & 0xff00) >> 8,\n iv & 0xff,\n 1];\n }\n\n return null;\n }\n\n var op = str.indexOf('('), ep = str.indexOf(')');\n if (op !== -1 && ep + 1 === str.length) {\n var fname = str.substr(0, op);\n var params = str.substr(op+1, ep-(op+1)).split(',');\n 
var alpha = 1; // To allow case fallthrough.\n switch (fname) {\n case 'rgba':\n if (params.length !== 4) return null;\n alpha = parse_css_float(params.pop());\n // Fall through.\n case 'rgb':\n if (params.length !== 3) return null;\n return [parse_css_int(params[0]),\n parse_css_int(params[1]),\n parse_css_int(params[2]),\n alpha];\n case 'hsla':\n if (params.length !== 4) return null;\n alpha = parse_css_float(params.pop());\n // Fall through.\n case 'hsl':\n if (params.length !== 3) return null;\n var h = (((parseFloat(params[0]) % 360) + 360) % 360) / 360; // 0 .. 1\n // NOTE(deanm): According to the CSS spec s/l should only be\n // percentages, but we don't bother and let float or percentage.\n var s = parse_css_float(params[1]);\n var l = parse_css_float(params[2]);\n var m2 = l <= 0.5 ? l * (s + 1) : l + s - l * s;\n var m1 = l * 2 - m2;\n return [clamp_css_byte(css_hue_to_rgb(m1, m2, h+1/3) * 255),\n clamp_css_byte(css_hue_to_rgb(m1, m2, h) * 255),\n clamp_css_byte(css_hue_to_rgb(m1, m2, h-1/3) * 255),\n alpha];\n default:\n return null;\n }\n }\n\n return null;\n}\n\ntry { exports.parseCSSColor = parseCSSColor } catch(e) { }\n",
"title": "$:/core/modules/utils/dom/csscolorparser.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/dom.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/dom.js\ntype: application/javascript\nmodule-type: utils\n\nVarious static DOM-related utility functions.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nDetermines whether element 'a' contains element 'b'\nCode thanks to John Resig, http://ejohn.org/blog/comparing-document-position/\n*/\nexports.domContains = function(a,b) {\n\treturn a.contains ?\n\t\ta !== b && a.contains(b) :\n\t\t!!(a.compareDocumentPosition(b) & 16);\n};\n\nexports.removeChildren = function(node) {\n\twhile(node.hasChildNodes()) {\n\t\tnode.removeChild(node.firstChild);\n\t}\n};\n\nexports.hasClass = function(el,className) {\n\treturn el && el.className && el.className.toString().split(\" \").indexOf(className) !== -1;\n};\n\nexports.addClass = function(el,className) {\n\tvar c = el.className.split(\" \");\n\tif(c.indexOf(className) === -1) {\n\t\tc.push(className);\n\t}\n\tel.className = c.join(\" \");\n};\n\nexports.removeClass = function(el,className) {\n\tvar c = el.className.split(\" \"),\n\t\tp = c.indexOf(className);\n\tif(p !== -1) {\n\t\tc.splice(p,1);\n\t\tel.className = c.join(\" \");\n\t}\n};\n\nexports.toggleClass = function(el,className,status) {\n\tif(status === undefined) {\n\t\tstatus = !exports.hasClass(el,className);\n\t}\n\tif(status) {\n\t\texports.addClass(el,className);\n\t} else {\n\t\texports.removeClass(el,className);\n\t}\n};\n\n/*\nGet the first parent element that has scrollbars or use the body as fallback.\n*/\nexports.getScrollContainer = function(el) {\n\tvar doc = el.ownerDocument;\n\twhile(el.parentNode) {\t\n\t\tel = el.parentNode;\n\t\tif(el.scrollTop) {\n\t\t\treturn el;\n\t\t}\n\t}\n\treturn doc.body;\n};\n\n/*\nGet the scroll position of the viewport\nReturns:\n\t{\n\t\tx: horizontal scroll position in pixels,\n\t\ty: vertical scroll position in pixels\n\t}\n*/\nexports.getScrollPosition = function() {\n\tif(\"scrollX\" in window) {\n\t\treturn {x: 
window.scrollX, y: window.scrollY};\n\t} else {\n\t\treturn {x: document.documentElement.scrollLeft, y: document.documentElement.scrollTop};\n\t}\n};\n\n/*\nAdjust the height of a textarea to fit its content, preserving scroll position, and return the height\n*/\nexports.resizeTextAreaToFit = function(domNode,minHeight) {\n\t// Get the scroll container and register the current scroll position\n\tvar container = $tw.utils.getScrollContainer(domNode),\n\t\tscrollTop = container.scrollTop;\n // Measure the specified minimum height\n\tdomNode.style.height = minHeight;\n\tvar measuredHeight = domNode.offsetHeight;\n\t// Set its height to auto so that it snaps to the correct height\n\tdomNode.style.height = \"auto\";\n\t// Calculate the revised height\n\tvar newHeight = Math.max(domNode.scrollHeight + domNode.offsetHeight - domNode.clientHeight,measuredHeight);\n\t// Only try to change the height if it has changed\n\tif(newHeight !== domNode.offsetHeight) {\n\t\tdomNode.style.height = newHeight + \"px\";\n\t\t// Make sure that the dimensions of the textarea are recalculated\n\t\t$tw.utils.forceLayout(domNode);\n\t\t// Set the container to the position we registered at the beginning\n\t\tcontainer.scrollTop = scrollTop;\n\t}\n\treturn newHeight;\n};\n\n/*\nGets the bounding rectangle of an element in absolute page coordinates\n*/\nexports.getBoundingPageRect = function(element) {\n\tvar scrollPos = $tw.utils.getScrollPosition(),\n\t\tclientRect = element.getBoundingClientRect();\n\treturn {\n\t\tleft: clientRect.left + scrollPos.x,\n\t\twidth: clientRect.width,\n\t\tright: clientRect.right + scrollPos.x,\n\t\ttop: clientRect.top + scrollPos.y,\n\t\theight: clientRect.height,\n\t\tbottom: clientRect.bottom + scrollPos.y\n\t};\n};\n\n/*\nSaves a named password in the browser\n*/\nexports.savePassword = function(name,password) {\n\ttry {\n\t\tif(window.localStorage) {\n\t\t\tlocalStorage.setItem(\"tw5-password-\" + name,password);\n\t\t}\n\t} catch(e) 
{\n\t}\n};\n\n/*\nRetrieve a named password from the browser\n*/\nexports.getPassword = function(name) {\n\ttry {\n\t\treturn window.localStorage ? localStorage.getItem(\"tw5-password-\" + name) : \"\";\n\t} catch(e) {\n\t\treturn \"\";\n\t}\n};\n\n/*\nForce layout of a dom node and its descendents\n*/\nexports.forceLayout = function(element) {\n\tvar dummy = element.offsetWidth;\n};\n\n/*\nPulse an element for debugging purposes\n*/\nexports.pulseElement = function(element) {\n\t// Event handler to remove the class at the end\n\telement.addEventListener($tw.browser.animationEnd,function handler(event) {\n\t\telement.removeEventListener($tw.browser.animationEnd,handler,false);\n\t\t$tw.utils.removeClass(element,\"pulse\");\n\t},false);\n\t// Apply the pulse class\n\t$tw.utils.removeClass(element,\"pulse\");\n\t$tw.utils.forceLayout(element);\n\t$tw.utils.addClass(element,\"pulse\");\n};\n\n/*\nAttach specified event handlers to a DOM node\ndomNode: where to attach the event handlers\nevents: array of event handlers to be added (see below)\nEach entry in the events array is an object with these properties:\nhandlerFunction: optional event handler function\nhandlerObject: optional event handler object\nhandlerMethod: optionally specifies object handler method name (defaults to `handleEvent`)\n*/\nexports.addEventListeners = function(domNode,events) {\n\t$tw.utils.each(events,function(eventInfo) {\n\t\tvar handler;\n\t\tif(eventInfo.handlerFunction) {\n\t\t\thandler = eventInfo.handlerFunction;\n\t\t} else if(eventInfo.handlerObject) {\n\t\t\tif(eventInfo.handlerMethod) {\n\t\t\t\thandler = function(event) {\n\t\t\t\t\teventInfo.handlerObject[eventInfo.handlerMethod].call(eventInfo.handlerObject,event);\n\t\t\t\t};\t\n\t\t\t} else {\n\t\t\t\thandler = eventInfo.handlerObject;\n\t\t\t}\n\t\t}\n\t\tdomNode.addEventListener(eventInfo.name,handler,false);\n\t});\n};\n\n/*\nGet the computed styles applied to an element as an array of strings of individual CSS 
properties\n*/\nexports.getComputedStyles = function(domNode) {\n\tvar textAreaStyles = window.getComputedStyle(domNode,null),\n\t\tstyleDefs = [],\n\t\tname;\n\tfor(var t=0; t<textAreaStyles.length; t++) {\n\t\tname = textAreaStyles[t];\n\t\tstyleDefs.push(name + \": \" + textAreaStyles.getPropertyValue(name) + \";\");\n\t}\n\treturn styleDefs;\n};\n\n/*\nApply a set of styles passed as an array of strings of individual CSS properties\n*/\nexports.setStyles = function(domNode,styleDefs) {\n\tdomNode.style.cssText = styleDefs.join(\"\");\n};\n\n/*\nCopy the computed styles from a source element to a destination element\n*/\nexports.copyStyles = function(srcDomNode,dstDomNode) {\n\t$tw.utils.setStyles(dstDomNode,$tw.utils.getComputedStyles(srcDomNode));\n};\n\n})();\n",
"title": "$:/core/modules/utils/dom.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/dom/http.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/dom/http.js\ntype: application/javascript\nmodule-type: utils\n\nBrowser HTTP support\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nA quick and dirty HTTP function; to be refactored later. Options are:\n\turl: URL to retrieve\n\ttype: GET, PUT, POST etc\n\tcallback: function invoked with (err,data)\n*/\nexports.httpRequest = function(options) {\n\tvar type = options.type || \"GET\",\n\t\theaders = options.headers || {accept: \"application/json\"},\n\t\trequest = new XMLHttpRequest(),\n\t\tdata = \"\",\n\t\tf,results;\n\t// Massage the data hashmap into a string\n\tif(options.data) {\n\t\tif(typeof options.data === \"string\") { // Already a string\n\t\t\tdata = options.data;\n\t\t} else { // A hashmap of strings\n\t\t\tresults = [];\n\t\t\t$tw.utils.each(options.data,function(dataItem,dataItemTitle) {\n\t\t\t\tresults.push(dataItemTitle + \"=\" + encodeURIComponent(dataItem));\n\t\t\t});\n\t\t\tdata = results.join(\"&\");\n\t\t}\n\t}\n\t// Set up the state change handler\n\trequest.onreadystatechange = function() {\n\t\tif(this.readyState === 4) {\n\t\t\tif(this.status === 200 || this.status === 201 || this.status === 204) {\n\t\t\t\t// Success!\n\t\t\t\toptions.callback(null,this.responseText,this);\n\t\t\t\treturn;\n\t\t\t}\n\t\t// Something went wrong\n\t\toptions.callback($tw.language.getString(\"Error/XMLHttpRequest\") + \": \" + this.status);\n\t\t}\n\t};\n\t// Make the request\n\trequest.open(type,options.url,true);\n\tif(headers) {\n\t\t$tw.utils.each(headers,function(header,headerTitle,object) {\n\t\t\trequest.setRequestHeader(headerTitle,header);\n\t\t});\n\t}\n\tif(data && !$tw.utils.hop(headers,\"Content-type\")) {\n\t\trequest.setRequestHeader(\"Content-type\",\"application/x-www-form-urlencoded; charset=UTF-8\");\n\t}\n\ttry {\n\t\trequest.send(data);\n\t} catch(e) {\n\t\toptions.callback(e);\n\t}\n\treturn request;\n};\n\n})();\n",
"title": "$:/core/modules/utils/dom/http.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/dom/keyboard.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/dom/keyboard.js\ntype: application/javascript\nmodule-type: utils\n\nKeyboard utilities; now deprecated. Instead, use $tw.keyboardManager\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n[\"parseKeyDescriptor\",\"checkKeyDescriptor\"].forEach(function(method) {\n\texports[method] = function() {\n\t\tif($tw.keyboardManager) {\n\t\t\treturn $tw.keyboardManager[method].apply($tw.keyboardManager,Array.prototype.slice.call(arguments,0));\n\t\t} else {\n\t\t\treturn null\n\t\t}\n\t};\n});\n\n})();\n",
"title": "$:/core/modules/utils/dom/keyboard.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/dom/modal.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/dom/modal.js\ntype: application/javascript\nmodule-type: utils\n\nModal message mechanism\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar widget = require(\"$:/core/modules/widgets/widget.js\");\n\nvar Modal = function(wiki) {\n\tthis.wiki = wiki;\n\tthis.modalCount = 0;\n};\n\n/*\nDisplay a modal dialogue\n\ttitle: Title of tiddler to display\n\toptions: see below\nOptions include:\n\tdownloadLink: Text of a big download link to include\n*/\nModal.prototype.display = function(title,options) {\n\toptions = options || {};\n\tvar self = this,\n\t\trefreshHandler,\n\t\tduration = $tw.utils.getAnimationDuration(),\n\t\ttiddler = this.wiki.getTiddler(title);\n\t// Don't do anything if the tiddler doesn't exist\n\tif(!tiddler) {\n\t\treturn;\n\t}\n\t// Create the variables\n\tvar variables = $tw.utils.extend({currentTiddler: title},options.variables);\n\t// Create the wrapper divs\n\tvar wrapper = document.createElement(\"div\"),\n\t\tmodalBackdrop = document.createElement(\"div\"),\n\t\tmodalWrapper = document.createElement(\"div\"),\n\t\tmodalHeader = document.createElement(\"div\"),\n\t\theaderTitle = document.createElement(\"h3\"),\n\t\tmodalBody = document.createElement(\"div\"),\n\t\tmodalLink = document.createElement(\"a\"),\n\t\tmodalFooter = document.createElement(\"div\"),\n\t\tmodalFooterHelp = document.createElement(\"span\"),\n\t\tmodalFooterButtons = document.createElement(\"span\");\n\t// Up the modal count and adjust the body class\n\tthis.modalCount++;\n\tthis.adjustPageClass();\n\t// Add classes\n\t$tw.utils.addClass(wrapper,\"tc-modal-wrapper\");\n\t$tw.utils.addClass(modalBackdrop,\"tc-modal-backdrop\");\n\t$tw.utils.addClass(modalWrapper,\"tc-modal\");\n\t$tw.utils.addClass(modalHeader,\"tc-modal-header\");\n\t$tw.utils.addClass(modalBody,\"tc-modal-body\");\n\t$tw.utils.addClass(modalFooter,\"tc-modal-footer\");\n\t// Join them 
together\n\twrapper.appendChild(modalBackdrop);\n\twrapper.appendChild(modalWrapper);\n\tmodalHeader.appendChild(headerTitle);\n\tmodalWrapper.appendChild(modalHeader);\n\tmodalWrapper.appendChild(modalBody);\n\tmodalFooter.appendChild(modalFooterHelp);\n\tmodalFooter.appendChild(modalFooterButtons);\n\tmodalWrapper.appendChild(modalFooter);\n\t// Render the title of the message\n\tvar headerWidgetNode = this.wiki.makeTranscludeWidget(title,{\n\t\tfield: \"subtitle\",\n\t\tmode: \"inline\",\n\t\tchildren: [{\n\t\t\ttype: \"text\",\n\t\t\tattributes: {\n\t\t\t\ttext: {\n\t\t\t\t\ttype: \"string\",\n\t\t\t\t\tvalue: title\n\t\t}}}],\n\t\tparentWidget: $tw.rootWidget,\n\t\tdocument: document,\n\t\tvariables: variables\n\t});\n\theaderWidgetNode.render(headerTitle,null);\n\t// Render the body of the message\n\tvar bodyWidgetNode = this.wiki.makeTranscludeWidget(title,{\n\t\tparentWidget: $tw.rootWidget,\n\t\tdocument: document,\n\t\tvariables: variables\n\t});\n\tbodyWidgetNode.render(modalBody,null);\n\t// Setup the link if present\n\tif(options.downloadLink) {\n\t\tmodalLink.href = options.downloadLink;\n\t\tmodalLink.appendChild(document.createTextNode(\"Right-click to save changes\"));\n\t\tmodalBody.appendChild(modalLink);\n\t}\n\t// Render the footer of the message\n\tif(tiddler && tiddler.fields && tiddler.fields.help) {\n\t\tvar link = document.createElement(\"a\");\n\t\tlink.setAttribute(\"href\",tiddler.fields.help);\n\t\tlink.setAttribute(\"target\",\"_blank\");\n\t\tlink.setAttribute(\"rel\",\"noopener noreferrer\");\n\t\tlink.appendChild(document.createTextNode(\"Help\"));\n\t\tmodalFooterHelp.appendChild(link);\n\t\tmodalFooterHelp.style.float = \"left\";\n\t}\n\tvar footerWidgetNode = this.wiki.makeTranscludeWidget(title,{\n\t\tfield: \"footer\",\n\t\tmode: \"inline\",\n\t\tchildren: [{\n\t\t\ttype: \"button\",\n\t\t\tattributes: {\n\t\t\t\tmessage: {\n\t\t\t\t\ttype: \"string\",\n\t\t\t\t\tvalue: 
\"tm-close-tiddler\"\n\t\t\t\t}\n\t\t\t},\n\t\t\tchildren: [{\n\t\t\t\ttype: \"text\",\n\t\t\t\tattributes: {\n\t\t\t\t\ttext: {\n\t\t\t\t\t\ttype: \"string\",\n\t\t\t\t\t\tvalue: $tw.language.getString(\"Buttons/Close/Caption\")\n\t\t\t}}}\n\t\t]}],\n\t\tparentWidget: $tw.rootWidget,\n\t\tdocument: document,\n\t\tvariables: variables\n\t});\n\tfooterWidgetNode.render(modalFooterButtons,null);\n\t// Set up the refresh handler\n\trefreshHandler = function(changes) {\n\t\theaderWidgetNode.refresh(changes,modalHeader,null);\n\t\tbodyWidgetNode.refresh(changes,modalBody,null);\n\t\tfooterWidgetNode.refresh(changes,modalFooterButtons,null);\n\t};\n\tthis.wiki.addEventListener(\"change\",refreshHandler);\n\t// Add the close event handler\n\tvar closeHandler = function(event) {\n\t\t// Remove our refresh handler\n\t\tself.wiki.removeEventListener(\"change\",refreshHandler);\n\t\t// Decrease the modal count and adjust the body class\n\t\tself.modalCount--;\n\t\tself.adjustPageClass();\n\t\t// Force layout and animate the modal message away\n\t\t$tw.utils.forceLayout(modalBackdrop);\n\t\t$tw.utils.forceLayout(modalWrapper);\n\t\t$tw.utils.setStyle(modalBackdrop,[\n\t\t\t{opacity: \"0\"}\n\t\t]);\n\t\t$tw.utils.setStyle(modalWrapper,[\n\t\t\t{transform: \"translateY(\" + window.innerHeight + \"px)\"}\n\t\t]);\n\t\t// Set up an event for the transition end\n\t\twindow.setTimeout(function() {\n\t\t\tif(wrapper.parentNode) {\n\t\t\t\t// Remove the modal message from the DOM\n\t\t\t\tdocument.body.removeChild(wrapper);\n\t\t\t}\n\t\t},duration);\n\t\t// Don't let anyone else handle the tm-close-tiddler message\n\t\treturn false;\n\t};\n\theaderWidgetNode.addEventListener(\"tm-close-tiddler\",closeHandler,false);\n\tbodyWidgetNode.addEventListener(\"tm-close-tiddler\",closeHandler,false);\n\tfooterWidgetNode.addEventListener(\"tm-close-tiddler\",closeHandler,false);\n\t// Set the initial styles for the message\n\t$tw.utils.setStyle(modalBackdrop,[\n\t\t{opacity: 
\"0\"}\n\t]);\n\t$tw.utils.setStyle(modalWrapper,[\n\t\t{transformOrigin: \"0% 0%\"},\n\t\t{transform: \"translateY(\" + (-window.innerHeight) + \"px)\"}\n\t]);\n\t// Put the message into the document\n\tdocument.body.appendChild(wrapper);\n\t// Set up animation for the styles\n\t$tw.utils.setStyle(modalBackdrop,[\n\t\t{transition: \"opacity \" + duration + \"ms ease-out\"}\n\t]);\n\t$tw.utils.setStyle(modalWrapper,[\n\t\t{transition: $tw.utils.roundTripPropertyName(\"transform\") + \" \" + duration + \"ms ease-in-out\"}\n\t]);\n\t// Force layout\n\t$tw.utils.forceLayout(modalBackdrop);\n\t$tw.utils.forceLayout(modalWrapper);\n\t// Set final animated styles\n\t$tw.utils.setStyle(modalBackdrop,[\n\t\t{opacity: \"0.7\"}\n\t]);\n\t$tw.utils.setStyle(modalWrapper,[\n\t\t{transform: \"translateY(0px)\"}\n\t]);\n};\n\nModal.prototype.adjustPageClass = function() {\n\tif($tw.pageContainer) {\n\t\t$tw.utils.toggleClass($tw.pageContainer,\"tc-modal-displayed\",this.modalCount > 0);\n\t}\n};\n\nexports.Modal = Modal;\n\n})();\n",
"title": "$:/core/modules/utils/dom/modal.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/dom/notifier.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/dom/notifier.js\ntype: application/javascript\nmodule-type: utils\n\nNotifier mechanism\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar widget = require(\"$:/core/modules/widgets/widget.js\");\n\nvar Notifier = function(wiki) {\n\tthis.wiki = wiki;\n};\n\n/*\nDisplay a notification\n\ttitle: Title of tiddler containing the notification text\n\toptions: see below\nOptions include:\n*/\nNotifier.prototype.display = function(title,options) {\n\toptions = options || {};\n\t// Create the wrapper divs\n\tvar self = this,\n\t\tnotification = document.createElement(\"div\"),\n\t\ttiddler = this.wiki.getTiddler(title),\n\t\tduration = $tw.utils.getAnimationDuration(),\n\t\trefreshHandler;\n\t// Don't do anything if the tiddler doesn't exist\n\tif(!tiddler) {\n\t\treturn;\n\t}\n\t// Add classes\n\t$tw.utils.addClass(notification,\"tc-notification\");\n\t// Create the variables\n\tvar variables = $tw.utils.extend({currentTiddler: title},options.variables);\n\t// Render the body of the notification\n\tvar widgetNode = this.wiki.makeTranscludeWidget(title,{parentWidget: $tw.rootWidget, document: document, variables: variables});\n\twidgetNode.render(notification,null);\n\trefreshHandler = function(changes) {\n\t\twidgetNode.refresh(changes,notification,null);\n\t};\n\tthis.wiki.addEventListener(\"change\",refreshHandler);\n\t// Set the initial styles for the notification\n\t$tw.utils.setStyle(notification,[\n\t\t{opacity: \"0\"},\n\t\t{transformOrigin: \"0% 0%\"},\n\t\t{transform: \"translateY(\" + (-window.innerHeight) + \"px)\"},\n\t\t{transition: \"opacity \" + duration + \"ms ease-out, \" + $tw.utils.roundTripPropertyName(\"transform\") + \" \" + duration + \"ms ease-in-out\"}\n\t]);\n\t// Add the notification to the DOM\n\tdocument.body.appendChild(notification);\n\t// Force layout\n\t$tw.utils.forceLayout(notification);\n\t// Set final animated 
styles\n\t$tw.utils.setStyle(notification,[\n\t\t{opacity: \"1.0\"},\n\t\t{transform: \"translateY(0px)\"}\n\t]);\n\t// Set a timer to remove the notification\n\twindow.setTimeout(function() {\n\t\t// Remove our change event handler\n\t\tself.wiki.removeEventListener(\"change\",refreshHandler);\n\t\t// Force layout and animate the notification away\n\t\t$tw.utils.forceLayout(notification);\n\t\t$tw.utils.setStyle(notification,[\n\t\t\t{opacity: \"0.0\"},\n\t\t\t{transform: \"translateX(\" + (notification.offsetWidth) + \"px)\"}\n\t\t]);\n\t\t// Remove the modal message from the DOM once the transition ends\n\t\tsetTimeout(function() {\n\t\t\tif(notification.parentNode) {\n\t\t\t\tdocument.body.removeChild(notification);\n\t\t\t}\n\t\t},duration);\n\t},$tw.config.preferences.notificationDuration);\n};\n\nexports.Notifier = Notifier;\n\n})();\n",
"title": "$:/core/modules/utils/dom/notifier.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/dom/popup.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/dom/popup.js\ntype: application/javascript\nmodule-type: utils\n\nModule that creates a $tw.utils.Popup object prototype that manages popups in the browser\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nCreates a Popup object with these options:\n\trootElement: the DOM element to which the popup zapper should be attached\n*/\nvar Popup = function(options) {\n\toptions = options || {};\n\tthis.rootElement = options.rootElement || document.documentElement;\n\tthis.popups = []; // Array of {title:,wiki:,domNode:} objects\n};\n\n/*\nTrigger a popup open or closed. Parameters are in a hashmap:\n\ttitle: title of the tiddler where the popup details are stored\n\tdomNode: dom node to which the popup will be positioned\n\twiki: wiki\n\tforce: if specified, forces the popup state to true or false (instead of toggling it)\n*/\nPopup.prototype.triggerPopup = function(options) {\n\t// Check if this popup is already active\n\tvar index = this.findPopup(options.title);\n\t// Compute the new state\n\tvar state = index === -1;\n\tif(options.force !== undefined) {\n\t\tstate = options.force;\n\t}\n\t// Show or cancel the popup according to the new state\n\tif(state) {\n\t\tthis.show(options);\n\t} else {\n\t\tthis.cancel(index);\n\t}\n};\n\nPopup.prototype.findPopup = function(title) {\n\tvar index = -1;\n\tfor(var t=0; t<this.popups.length; t++) {\n\t\tif(this.popups[t].title === title) {\n\t\t\tindex = t;\n\t\t}\n\t}\n\treturn index;\n};\n\nPopup.prototype.handleEvent = function(event) {\n\tif(event.type === \"click\") {\n\t\t// Find out what was clicked on\n\t\tvar info = this.popupInfo(event.target),\n\t\t\tcancelLevel = info.popupLevel - 1;\n\t\t// Don't remove the level that was clicked on if we clicked on a handle\n\t\tif(info.isHandle) {\n\t\t\tcancelLevel++;\n\t\t}\n\t\t// Cancel\n\t\tthis.cancel(cancelLevel);\n\t}\n};\n\n/*\nFind the popup level containing a DOM 
node. Returns:\npopupLevel: count of the number of nested popups containing the specified element\nisHandle: true if the specified element is within a popup handle\n*/\nPopup.prototype.popupInfo = function(domNode) {\n\tvar isHandle = false,\n\t\tpopupCount = 0,\n\t\tnode = domNode;\n\t// First check ancestors to see if we're within a popup handle\n\twhile(node) {\n\t\tif($tw.utils.hasClass(node,\"tc-popup-handle\")) {\n\t\t\tisHandle = true;\n\t\t\tpopupCount++;\n\t\t}\n\t\tif($tw.utils.hasClass(node,\"tc-popup-keep\")) {\n\t\t\tisHandle = true;\n\t\t}\n\t\tnode = node.parentNode;\n\t}\n\t// Then count the number of ancestor popups\n\tnode = domNode;\n\twhile(node) {\n\t\tif($tw.utils.hasClass(node,\"tc-popup\")) {\n\t\t\tpopupCount++;\n\t\t}\n\t\tnode = node.parentNode;\n\t}\n\tvar info = {\n\t\tpopupLevel: popupCount,\n\t\tisHandle: isHandle\n\t};\n\treturn info;\n};\n\n/*\nDisplay a popup by adding it to the stack\n*/\nPopup.prototype.show = function(options) {\n\t// Find out what was clicked on\n\tvar info = this.popupInfo(options.domNode);\n\t// Cancel any higher level popups\n\tthis.cancel(info.popupLevel);\n\t// Store the popup details if not already there\n\tif(this.findPopup(options.title) === -1) {\n\t\tthis.popups.push({\n\t\t\ttitle: options.title,\n\t\t\twiki: options.wiki,\n\t\t\tdomNode: options.domNode\n\t\t});\n\t}\n\t// Set the state tiddler\n\toptions.wiki.setTextReference(options.title,\n\t\t\t\"(\" + options.domNode.offsetLeft + \",\" + options.domNode.offsetTop + \",\" + \n\t\t\t\toptions.domNode.offsetWidth + \",\" + options.domNode.offsetHeight + \")\");\n\t// Add the click handler if we have any popups\n\tif(this.popups.length > 0) {\n\t\tthis.rootElement.addEventListener(\"click\",this,true);\t\t\n\t}\n};\n\n/*\nCancel all popups at or above a specified level or DOM node\nlevel: popup level to cancel (0 cancels all popups)\n*/\nPopup.prototype.cancel = function(level) {\n\tvar numPopups = this.popups.length;\n\tlevel = 
Math.max(0,Math.min(level,numPopups));\n\tfor(var t=level; t<numPopups; t++) {\n\t\tvar popup = this.popups.pop();\n\t\tif(popup.title) {\n\t\t\tpopup.wiki.deleteTiddler(popup.title);\n\t\t}\n\t}\n\tif(this.popups.length === 0) {\n\t\tthis.rootElement.removeEventListener(\"click\",this,false);\n\t}\n};\n\n/*\nReturns true if the specified title and text identifies an active popup\n*/\nPopup.prototype.readPopupState = function(text) {\n\tvar popupLocationRegExp = /^\\((-?[0-9\\.E]+),(-?[0-9\\.E]+),(-?[0-9\\.E]+),(-?[0-9\\.E]+)\\)$/;\n\treturn popupLocationRegExp.test(text);\n};\n\nexports.Popup = Popup;\n\n})();\n",
"title": "$:/core/modules/utils/dom/popup.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/dom/scroller.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/dom/scroller.js\ntype: application/javascript\nmodule-type: utils\n\nModule that creates a $tw.utils.Scroller object prototype that manages scrolling in the browser\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nEvent handler for when the `tm-scroll` event hits the document body\n*/\nvar PageScroller = function() {\n\tthis.idRequestFrame = null;\n\tthis.requestAnimationFrame = window.requestAnimationFrame ||\n\t\twindow.webkitRequestAnimationFrame ||\n\t\twindow.mozRequestAnimationFrame ||\n\t\tfunction(callback) {\n\t\t\treturn window.setTimeout(callback, 1000/60);\n\t\t};\n\tthis.cancelAnimationFrame = window.cancelAnimationFrame ||\n\t\twindow.webkitCancelAnimationFrame ||\n\t\twindow.webkitCancelRequestAnimationFrame ||\n\t\twindow.mozCancelAnimationFrame ||\n\t\twindow.mozCancelRequestAnimationFrame ||\n\t\tfunction(id) {\n\t\t\twindow.clearTimeout(id);\n\t\t};\n};\n\nPageScroller.prototype.cancelScroll = function() {\n\tif(this.idRequestFrame) {\n\t\tthis.cancelAnimationFrame.call(window,this.idRequestFrame);\n\t\tthis.idRequestFrame = null;\n\t}\n};\n\n/*\nHandle an event\n*/\nPageScroller.prototype.handleEvent = function(event) {\n\tif(event.type === \"tm-scroll\") {\n\t\treturn this.scrollIntoView(event.target);\n\t}\n\treturn true;\n};\n\n/*\nHandle a scroll event hitting the page document\n*/\nPageScroller.prototype.scrollIntoView = function(element) {\n\tvar duration = $tw.utils.getAnimationDuration();\n\t// Now get ready to scroll the body\n\tthis.cancelScroll();\n\tthis.startTime = Date.now();\n\tvar scrollPosition = $tw.utils.getScrollPosition();\n\t// Get the client bounds of the element and adjust by the scroll position\n\tvar clientBounds = element.getBoundingClientRect(),\n\t\tbounds = {\n\t\t\tleft: clientBounds.left + scrollPosition.x,\n\t\t\ttop: clientBounds.top + scrollPosition.y,\n\t\t\twidth: clientBounds.width,\n\t\t\theight: 
clientBounds.height\n\t\t};\n\t// We'll consider the horizontal and vertical scroll directions separately via this function\n\t// targetPos/targetSize - position and size of the target element\n\t// currentPos/currentSize - position and size of the current scroll viewport\n\t// returns: new position of the scroll viewport\n\tvar getEndPos = function(targetPos,targetSize,currentPos,currentSize) {\n\t\t\tvar newPos = currentPos;\n\t\t\t// If the target is above/left of the current view, then scroll to it's top/left\n\t\t\tif(targetPos <= currentPos) {\n\t\t\t\tnewPos = targetPos;\n\t\t\t// If the target is smaller than the window and the scroll position is too far up, then scroll till the target is at the bottom of the window\n\t\t\t} else if(targetSize < currentSize && currentPos < (targetPos + targetSize - currentSize)) {\n\t\t\t\tnewPos = targetPos + targetSize - currentSize;\n\t\t\t// If the target is big, then just scroll to the top\n\t\t\t} else if(currentPos < targetPos) {\n\t\t\t\tnewPos = targetPos;\n\t\t\t// Otherwise, stay where we are\n\t\t\t} else {\n\t\t\t\tnewPos = currentPos;\n\t\t\t}\n\t\t\t// If we are scrolling within 50 pixels of the top/left then snap to zero\n\t\t\tif(newPos < 50) {\n\t\t\t\tnewPos = 0;\n\t\t\t}\n\t\t\treturn newPos;\n\t\t},\n\t\tendX = getEndPos(bounds.left,bounds.width,scrollPosition.x,window.innerWidth),\n\t\tendY = getEndPos(bounds.top,bounds.height,scrollPosition.y,window.innerHeight);\n\t// Only scroll if the position has changed\n\tif(endX !== scrollPosition.x || endY !== scrollPosition.y) {\n\t\tvar self = this,\n\t\t\tdrawFrame;\n\t\tdrawFrame = function () {\n\t\t\tvar t;\n\t\t\tif(duration <= 0) {\n\t\t\t\tt = 1;\n\t\t\t} else {\n\t\t\t\tt = ((Date.now()) - self.startTime) / duration;\t\n\t\t\t}\n\t\t\tif(t >= 1) {\n\t\t\t\tself.cancelScroll();\n\t\t\t\tt = 1;\n\t\t\t}\n\t\t\tt = $tw.utils.slowInSlowOut(t);\n\t\t\twindow.scrollTo(scrollPosition.x + (endX - scrollPosition.x) * t,scrollPosition.y + (endY - 
scrollPosition.y) * t);\n\t\t\tif(t < 1) {\n\t\t\t\tself.idRequestFrame = self.requestAnimationFrame.call(window,drawFrame);\n\t\t\t}\n\t\t};\n\t\tdrawFrame();\n\t}\n};\n\nexports.PageScroller = PageScroller;\n\n})();\n",
"title": "$:/core/modules/utils/dom/scroller.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/edition-info.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/edition-info.js\ntype: application/javascript\nmodule-type: utils-node\n\nInformation about the available editions\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar fs = require(\"fs\"),\n\tpath = require(\"path\");\n\nvar editionInfo;\n\nexports.getEditionInfo = function() {\n\tif(!editionInfo) {\n\t\t// Enumerate the edition paths\n\t\tvar editionPaths = $tw.getLibraryItemSearchPaths($tw.config.editionsPath,$tw.config.editionsEnvVar);\n\t\teditionInfo = {};\n\t\tfor(var editionIndex=0; editionIndex<editionPaths.length; editionIndex++) {\n\t\t\tvar editionPath = editionPaths[editionIndex];\n\t\t\t// Enumerate the folders\n\t\t\tvar entries = fs.readdirSync(editionPath);\n\t\t\tfor(var entryIndex=0; entryIndex<entries.length; entryIndex++) {\n\t\t\t\tvar entry = entries[entryIndex];\n\t\t\t\t// Check if directories have a valid tiddlywiki.info\n\t\t\t\tif(!editionInfo[entry] && $tw.utils.isDirectory(path.resolve(editionPath,entry))) {\n\t\t\t\t\tvar info;\n\t\t\t\t\ttry {\n\t\t\t\t\t\tinfo = JSON.parse(fs.readFileSync(path.resolve(editionPath,entry,\"tiddlywiki.info\"),\"utf8\"));\n\t\t\t\t\t} catch(ex) {\n\t\t\t\t\t}\n\t\t\t\t\tif(info) {\n\t\t\t\t\t\teditionInfo[entry] = info;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn editionInfo;\n};\n\n})();\n",
"title": "$:/core/modules/utils/edition-info.js",
"type": "application/javascript",
"module-type": "utils-node"
},
"$:/core/modules/utils/fakedom.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/fakedom.js\ntype: application/javascript\nmodule-type: global\n\nA barebones implementation of DOM interfaces needed by the rendering mechanism.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Sequence number used to enable us to track objects for testing\nvar sequenceNumber = null;\n\nvar bumpSequenceNumber = function(object) {\n\tif(sequenceNumber !== null) {\n\t\tobject.sequenceNumber = sequenceNumber++;\n\t}\n};\n\nvar TW_TextNode = function(text) {\n\tbumpSequenceNumber(this);\n\tthis.textContent = text;\n};\n\nObject.defineProperty(TW_TextNode.prototype, \"nodeType\", {\n\tget: function() {\n\t\treturn 3;\n\t}\n});\n\nObject.defineProperty(TW_TextNode.prototype, \"formattedTextContent\", {\n\tget: function() {\n\t\treturn this.textContent.replace(/(\\r?\\n)/g,\"\");\n\t}\n});\n\nvar TW_Element = function(tag,namespace) {\n\tbumpSequenceNumber(this);\n\tthis.isTiddlyWikiFakeDom = true;\n\tthis.tag = tag;\n\tthis.attributes = {};\n\tthis.isRaw = false;\n\tthis.children = [];\n\tthis.style = {};\n\tthis.namespaceURI = namespace || \"http://www.w3.org/1999/xhtml\";\n};\n\nObject.defineProperty(TW_Element.prototype, \"nodeType\", {\n\tget: function() {\n\t\treturn 1;\n\t}\n});\n\nTW_Element.prototype.getAttribute = function(name) {\n\tif(this.isRaw) {\n\t\tthrow \"Cannot getAttribute on a raw TW_Element\";\n\t}\n\treturn this.attributes[name];\n};\n\nTW_Element.prototype.setAttribute = function(name,value) {\n\tif(this.isRaw) {\n\t\tthrow \"Cannot setAttribute on a raw TW_Element\";\n\t}\n\tthis.attributes[name] = value;\n};\n\nTW_Element.prototype.setAttributeNS = function(namespace,name,value) {\n\tthis.setAttribute(name,value);\n};\n\nTW_Element.prototype.removeAttribute = function(name) {\n\tif(this.isRaw) {\n\t\tthrow \"Cannot removeAttribute on a raw TW_Element\";\n\t}\n\tif($tw.utils.hop(this.attributes,name)) {\n\t\tdelete 
this.attributes[name];\n\t}\n};\n\nTW_Element.prototype.appendChild = function(node) {\n\tthis.children.push(node);\n\tnode.parentNode = this;\n};\n\nTW_Element.prototype.insertBefore = function(node,nextSibling) {\n\tif(nextSibling) {\n\t\tvar p = this.children.indexOf(nextSibling);\n\t\tif(p !== -1) {\n\t\t\tthis.children.splice(p,0,node);\n\t\t\tnode.parentNode = this;\n\t\t} else {\n\t\t\tthis.appendChild(node);\n\t\t}\n\t} else {\n\t\tthis.appendChild(node);\n\t}\n};\n\nTW_Element.prototype.removeChild = function(node) {\n\tvar p = this.children.indexOf(node);\n\tif(p !== -1) {\n\t\tthis.children.splice(p,1);\n\t}\n};\n\nTW_Element.prototype.hasChildNodes = function() {\n\treturn !!this.children.length;\n};\n\nObject.defineProperty(TW_Element.prototype, \"childNodes\", {\n\tget: function() {\n\t\treturn this.children;\n\t}\n});\n\nObject.defineProperty(TW_Element.prototype, \"firstChild\", {\n\tget: function() {\n\t\treturn this.children[0];\n\t}\n});\n\nTW_Element.prototype.addEventListener = function(type,listener,useCapture) {\n\t// Do nothing\n};\n\nObject.defineProperty(TW_Element.prototype, \"tagName\", {\n\tget: function() {\n\t\treturn this.tag || \"\";\n\t}\n});\n\nObject.defineProperty(TW_Element.prototype, \"className\", {\n\tget: function() {\n\t\treturn this.attributes[\"class\"] || \"\";\n\t},\n\tset: function(value) {\n\t\tthis.attributes[\"class\"] = value;\n\t}\n});\n\nObject.defineProperty(TW_Element.prototype, \"value\", {\n\tget: function() {\n\t\treturn this.attributes.value || \"\";\n\t},\n\tset: function(value) {\n\t\tthis.attributes.value = value;\n\t}\n});\n\nObject.defineProperty(TW_Element.prototype, \"outerHTML\", {\n\tget: function() {\n\t\tvar output = [],attr,a,v;\n\t\toutput.push(\"<\",this.tag);\n\t\tif(this.attributes) {\n\t\t\tattr = [];\n\t\t\tfor(a in this.attributes) {\n\t\t\t\tattr.push(a);\n\t\t\t}\n\t\t\tattr.sort();\n\t\t\tfor(a=0; a<attr.length; a++) {\n\t\t\t\tv = this.attributes[attr[a]];\n\t\t\t\tif(v !== 
undefined) {\n\t\t\t\t\toutput.push(\" \",attr[a],\"=\\\"\",$tw.utils.htmlEncode(v),\"\\\"\");\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif(this.style) {\n\t\t\tvar style = [];\n\t\t\tfor(var s in this.style) {\n\t\t\t\tstyle.push(s + \":\" + this.style[s] + \";\");\n\t\t\t}\n\t\t\tif(style.length > 0) {\n\t\t\t\toutput.push(\" style=\\\"\",style.join(\"\"),\"\\\"\")\n\t\t\t}\n\t\t}\n\t\toutput.push(\">\");\n\t\tif($tw.config.htmlVoidElements.indexOf(this.tag) === -1) {\n\t\t\toutput.push(this.innerHTML);\n\t\t\toutput.push(\"</\",this.tag,\">\");\n\t\t}\n\t\treturn output.join(\"\");\n\t}\n});\n\nObject.defineProperty(TW_Element.prototype, \"innerHTML\", {\n\tget: function() {\n\t\tif(this.isRaw) {\n\t\t\treturn this.rawHTML;\n\t\t} else {\n\t\t\tvar b = [];\n\t\t\t$tw.utils.each(this.children,function(node) {\n\t\t\t\tif(node instanceof TW_Element) {\n\t\t\t\t\tb.push(node.outerHTML);\n\t\t\t\t} else if(node instanceof TW_TextNode) {\n\t\t\t\t\tb.push($tw.utils.htmlEncode(node.textContent));\n\t\t\t\t}\n\t\t\t});\n\t\t\treturn b.join(\"\");\n\t\t}\n\t},\n\tset: function(value) {\n\t\tthis.isRaw = true;\n\t\tthis.rawHTML = value;\n\t}\n});\n\nObject.defineProperty(TW_Element.prototype, \"textContent\", {\n\tget: function() {\n\t\tif(this.isRaw) {\n\t\t\tthrow \"Cannot get textContent on a raw TW_Element\";\n\t\t} else {\n\t\t\tvar b = [];\n\t\t\t$tw.utils.each(this.children,function(node) {\n\t\t\t\tb.push(node.textContent);\n\t\t\t});\n\t\t\treturn b.join(\"\");\n\t\t}\n\t},\n\tset: function(value) {\n\t\tthis.children = [new TW_TextNode(value)];\n\t}\n});\n\nObject.defineProperty(TW_Element.prototype, \"formattedTextContent\", {\n\tget: function() {\n\t\tif(this.isRaw) {\n\t\t\tthrow \"Cannot get formattedTextContent on a raw TW_Element\";\n\t\t} else {\n\t\t\tvar b = [],\n\t\t\t\tisBlock = $tw.config.htmlBlockElements.indexOf(this.tag) !== -1;\n\t\t\tif(isBlock) {\n\t\t\t\tb.push(\"\\n\");\n\t\t\t}\n\t\t\tif(this.tag === \"li\") {\n\t\t\t\tb.push(\"* 
\");\n\t\t\t}\n\t\t\t$tw.utils.each(this.children,function(node) {\n\t\t\t\tb.push(node.formattedTextContent);\n\t\t\t});\n\t\t\tif(isBlock) {\n\t\t\t\tb.push(\"\\n\");\n\t\t\t}\n\t\t\treturn b.join(\"\");\n\t\t}\n\t}\n});\n\nvar document = {\n\tsetSequenceNumber: function(value) {\n\t\tsequenceNumber = value;\n\t},\n\tcreateElementNS: function(namespace,tag) {\n\t\treturn new TW_Element(tag,namespace);\n\t},\n\tcreateElement: function(tag) {\n\t\treturn new TW_Element(tag);\n\t},\n\tcreateTextNode: function(text) {\n\t\treturn new TW_TextNode(text);\n\t},\n\tcompatMode: \"CSS1Compat\", // For KaTeX to know that we're not a browser in quirks mode\n\tisTiddlyWikiFakeDom: true\n};\n\nexports.fakeDocument = document;\n\n})();\n",
"title": "$:/core/modules/utils/fakedom.js",
"type": "application/javascript",
"module-type": "global"
},
"$:/core/modules/utils/filesystem.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/filesystem.js\ntype: application/javascript\nmodule-type: utils-node\n\nFile system utilities\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar fs = require(\"fs\"),\n\tpath = require(\"path\");\n\n/*\nRecursively (and synchronously) copy a directory and all its content\n*/\nexports.copyDirectory = function(srcPath,dstPath) {\n\t// Remove any trailing path separators\n\tsrcPath = $tw.utils.removeTrailingSeparator(srcPath);\n\tdstPath = $tw.utils.removeTrailingSeparator(dstPath);\n\t// Create the destination directory\n\tvar err = $tw.utils.createDirectory(dstPath);\n\tif(err) {\n\t\treturn err;\n\t}\n\t// Function to copy a folder full of files\n\tvar copy = function(srcPath,dstPath) {\n\t\tvar srcStats = fs.lstatSync(srcPath),\n\t\t\tdstExists = fs.existsSync(dstPath);\n\t\tif(srcStats.isFile()) {\n\t\t\t$tw.utils.copyFile(srcPath,dstPath);\n\t\t} else if(srcStats.isDirectory()) {\n\t\t\tvar items = fs.readdirSync(srcPath);\n\t\t\tfor(var t=0; t<items.length; t++) {\n\t\t\t\tvar item = items[t],\n\t\t\t\t\terr = copy(srcPath + path.sep + item,dstPath + path.sep + item);\n\t\t\t\tif(err) {\n\t\t\t\t\treturn err;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t};\n\tcopy(srcPath,dstPath);\n\treturn null;\n};\n\n/*\nCopy a file\n*/\nvar FILE_BUFFER_LENGTH = 64 * 1024,\n\tfileBuffer;\n\nexports.copyFile = function(srcPath,dstPath) {\n\t// Create buffer if required\n\tif(!fileBuffer) {\n\t\tfileBuffer = new Buffer(FILE_BUFFER_LENGTH);\n\t}\n\t// Create any directories in the destination\n\t$tw.utils.createDirectory(path.dirname(dstPath));\n\t// Copy the file\n\tvar srcFile = fs.openSync(srcPath,\"r\"),\n\t\tdstFile = fs.openSync(dstPath,\"w\"),\n\t\tbytesRead = 1,\n\t\tpos = 0;\n\twhile (bytesRead > 0) {\n\t\tbytesRead = fs.readSync(srcFile,fileBuffer,0,FILE_BUFFER_LENGTH,pos);\n\t\tfs.writeSync(dstFile,fileBuffer,0,bytesRead);\n\t\tpos += 
bytesRead;\n\t}\n\tfs.closeSync(srcFile);\n\tfs.closeSync(dstFile);\n\treturn null;\n};\n\n/*\nRemove trailing path separator\n*/\nexports.removeTrailingSeparator = function(dirPath) {\n\tvar len = dirPath.length;\n\tif(dirPath.charAt(len-1) === path.sep) {\n\t\tdirPath = dirPath.substr(0,len-1);\n\t}\n\treturn dirPath;\n};\n\n/*\nRecursively create a directory\n*/\nexports.createDirectory = function(dirPath) {\n\tif(dirPath.substr(dirPath.length-1,1) !== path.sep) {\n\t\tdirPath = dirPath + path.sep;\n\t}\n\tvar pos = 1;\n\tpos = dirPath.indexOf(path.sep,pos);\n\twhile(pos !== -1) {\n\t\tvar subDirPath = dirPath.substr(0,pos);\n\t\tif(!$tw.utils.isDirectory(subDirPath)) {\n\t\t\ttry {\n\t\t\t\tfs.mkdirSync(subDirPath);\n\t\t\t} catch(e) {\n\t\t\t\treturn \"Error creating directory '\" + subDirPath + \"'\";\n\t\t\t}\n\t\t}\n\t\tpos = dirPath.indexOf(path.sep,pos + 1);\n\t}\n\treturn null;\n};\n\n/*\nRecursively create directories needed to contain a specified file\n*/\nexports.createFileDirectories = function(filePath) {\n\treturn $tw.utils.createDirectory(path.dirname(filePath));\n};\n\n/*\nRecursively delete a directory\n*/\nexports.deleteDirectory = function(dirPath) {\n\tif(fs.existsSync(dirPath)) {\n\t\tvar entries = fs.readdirSync(dirPath);\n\t\tfor(var entryIndex=0; entryIndex<entries.length; entryIndex++) {\n\t\t\tvar currPath = dirPath + path.sep + entries[entryIndex];\n\t\t\tif(fs.lstatSync(currPath).isDirectory()) {\n\t\t\t\t$tw.utils.deleteDirectory(currPath);\n\t\t\t} else {\n\t\t\t\tfs.unlinkSync(currPath);\n\t\t\t}\n\t\t}\n\tfs.rmdirSync(dirPath);\n\t}\n\treturn null;\n};\n\n/*\nCheck if a path identifies a directory\n*/\nexports.isDirectory = function(dirPath) {\n\treturn fs.existsSync(dirPath) && fs.statSync(dirPath).isDirectory();\n};\n\n/*\nCheck if a path identifies a directory that is empty\n*/\nexports.isDirectoryEmpty = function(dirPath) {\n\tif(!$tw.utils.isDirectory(dirPath)) {\n\t\treturn false;\n\t}\n\tvar files = 
fs.readdirSync(dirPath),\n\t\tempty = true;\n\t$tw.utils.each(files,function(file,index) {\n\t\tif(file.charAt(0) !== \".\") {\n\t\t\tempty = false;\n\t\t}\n\t});\n\treturn empty;\n};\n\n/*\nRecursively delete a tree of empty directories\n*/\nexports.deleteEmptyDirs = function(dirpath,callback) {\n\tvar self = this;\n\tfs.readdir(dirpath,function(err,files) {\n\t\tif(err) {\n\t\t\treturn callback(err);\n\t\t}\n\t\tif(files.length > 0) {\n\t\t\treturn callback(null);\n\t\t}\n\t\tfs.rmdir(dirpath,function(err) {\n\t\t\tif(err) {\n\t\t\t\treturn callback(err);\n\t\t\t}\n\t\t\tself.deleteEmptyDirs(path.dirname(dirpath),callback);\n\t\t});\n\t});\n};\n\n})();\n",
"title": "$:/core/modules/utils/filesystem.js",
"type": "application/javascript",
"module-type": "utils-node"
},
"$:/core/modules/utils/logger.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/logger.js\ntype: application/javascript\nmodule-type: utils\n\nA basic logging implementation\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar ALERT_TAG = \"$:/tags/Alert\";\n\n/*\nMake a new logger\n*/\nfunction Logger(componentName) {\n\tthis.componentName = componentName || \"\";\n}\n\n/*\nLog a message\n*/\nLogger.prototype.log = function(/* args */) {\n\tif(console !== undefined && console.log !== undefined) {\n\t\treturn Function.apply.call(console.log, console, [this.componentName + \":\"].concat(Array.prototype.slice.call(arguments,0)));\n\t}\n};\n\n/*\nAlert a message\n*/\nLogger.prototype.alert = function(/* args */) {\n\t// Prepare the text of the alert\n\tvar text = Array.prototype.join.call(arguments,\" \");\n\t// Create alert tiddlers in the browser\n\tif($tw.browser) {\n\t\t// Check if there is an existing alert with the same text and the same component\n\t\tvar existingAlerts = $tw.wiki.getTiddlersWithTag(ALERT_TAG),\n\t\t\talertFields,\n\t\t\texistingCount,\n\t\t\tself = this;\n\t\t$tw.utils.each(existingAlerts,function(title) {\n\t\t\tvar tiddler = $tw.wiki.getTiddler(title);\n\t\t\tif(tiddler.fields.text === text && tiddler.fields.component === self.componentName && tiddler.fields.modified && (!alertFields || tiddler.fields.modified < alertFields.modified)) {\n\t\t\t\t\talertFields = $tw.utils.extend({},tiddler.fields);\n\t\t\t}\n\t\t});\n\t\tif(alertFields) {\n\t\t\texistingCount = alertFields.count || 1;\n\t\t} else {\n\t\t\talertFields = {\n\t\t\t\ttitle: $tw.wiki.generateNewTitle(\"$:/temp/alerts/alert\",{prefix: \"\"}),\n\t\t\t\ttext: text,\n\t\t\t\ttags: [ALERT_TAG],\n\t\t\t\tcomponent: this.componentName\n\t\t\t};\n\t\t\texistingCount = 0;\n\t\t}\n\t\talertFields.modified = new Date();\n\t\tif(++existingCount > 1) {\n\t\t\talertFields.count = existingCount;\n\t\t} else {\n\t\t\talertFields.count = 
undefined;\n\t\t}\n\t\t$tw.wiki.addTiddler(new $tw.Tiddler(alertFields));\n\t\t// Log the alert as well\n\t\tthis.log.apply(this,Array.prototype.slice.call(arguments,0));\n\t} else {\n\t\t// Print an orange message to the console if not in the browser\n\t\tconsole.error(\"\\x1b[1;33m\" + text + \"\\x1b[0m\");\n\t}\n};\n\nexports.Logger = Logger;\n\n})();\n",
"title": "$:/core/modules/utils/logger.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/parsetree.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/parsetree.js\ntype: application/javascript\nmodule-type: utils\n\nParse tree utility functions.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.addAttributeToParseTreeNode = function(node,name,value) {\n\tnode.attributes = node.attributes || {};\n\tnode.attributes[name] = {type: \"string\", value: value};\n};\n\nexports.getAttributeValueFromParseTreeNode = function(node,name,defaultValue) {\n\tif(node.attributes && node.attributes[name] && node.attributes[name].value !== undefined) {\n\t\treturn node.attributes[name].value;\n\t}\n\treturn defaultValue;\n};\n\nexports.addClassToParseTreeNode = function(node,classString) {\n\tvar classes = [];\n\tnode.attributes = node.attributes || {};\n\tnode.attributes[\"class\"] = node.attributes[\"class\"] || {type: \"string\", value: \"\"};\n\tif(node.attributes[\"class\"].type === \"string\") {\n\t\tif(node.attributes[\"class\"].value !== \"\") {\n\t\t\tclasses = node.attributes[\"class\"].value.split(\" \");\n\t\t}\n\t\tif(classString !== \"\") {\n\t\t\t$tw.utils.pushTop(classes,classString.split(\" \"));\n\t\t}\n\t\tnode.attributes[\"class\"].value = classes.join(\" \");\n\t}\n};\n\nexports.addStyleToParseTreeNode = function(node,name,value) {\n\t\tnode.attributes = node.attributes || {};\n\t\tnode.attributes.style = node.attributes.style || {type: \"string\", value: \"\"};\n\t\tif(node.attributes.style.type === \"string\") {\n\t\t\tnode.attributes.style.value += name + \":\" + value + \";\";\n\t\t}\n};\n\nexports.findParseTreeNode = function(nodeArray,search) {\n\tfor(var t=0; t<nodeArray.length; t++) {\n\t\tif(nodeArray[t].type === search.type && nodeArray[t].tag === search.tag) {\n\t\t\treturn nodeArray[t];\n\t\t}\n\t}\n\treturn undefined;\n};\n\n/*\nHelper to get the text of a parse tree node or array of nodes\n*/\nexports.getParseTreeText = function getParseTreeText(tree) {\n\tvar output = 
[];\n\tif($tw.utils.isArray(tree)) {\n\t\t$tw.utils.each(tree,function(node) {\n\t\t\toutput.push(getParseTreeText(node));\n\t\t});\n\t} else {\n\t\tif(tree.type === \"text\") {\n\t\t\toutput.push(tree.text);\n\t\t}\n\t\tif(tree.children) {\n\t\t\treturn getParseTreeText(tree.children);\n\t\t}\n\t}\n\treturn output.join(\"\");\n};\n\n})();\n",
"title": "$:/core/modules/utils/parsetree.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/performance.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/performance.js\ntype: application/javascript\nmodule-type: global\n\nPerformance measurement.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nfunction Performance(enabled) {\n\tthis.enabled = !!enabled;\n\tthis.measures = {}; // Hashmap of current values of measurements\n\tthis.logger = new $tw.utils.Logger(\"performance\");\n}\n\n/*\nWrap performance reporting around a top level function\n*/\nPerformance.prototype.report = function(name,fn) {\n\tvar self = this;\n\tif(this.enabled) {\n\t\treturn function() {\n\t\t\tself.measures = {};\n\t\t\tvar startTime = $tw.utils.timer(),\n\t\t\t\tresult = fn.apply(this,arguments);\n\t\t\tself.logger.log(name + \": \" + $tw.utils.timer(startTime).toFixed(2) + \"ms\");\n\t\t\tfor(var m in self.measures) {\n\t\t\t\tself.logger.log(\"+\" + m + \": \" + self.measures[m].toFixed(2) + \"ms\");\n\t\t\t}\n\t\t\treturn result;\n\t\t};\n\t} else {\n\t\treturn fn;\n\t}\n};\n\n/*\nWrap performance measurements around a subfunction\n*/\nPerformance.prototype.measure = function(name,fn) {\n\tvar self = this;\n\tif(this.enabled) {\n\t\treturn function() {\n\t\t\tvar startTime = $tw.utils.timer(),\n\t\t\t\tresult = fn.apply(this,arguments),\n\t\t\t\tvalue = self.measures[name] || 0;\n\t\t\tself.measures[name] = value + $tw.utils.timer(startTime);\n\t\t\treturn result;\n\t\t};\n\t} else {\n\t\treturn fn;\n\t}\n};\n\nexports.Performance = Performance;\n\n})();\n",
"title": "$:/core/modules/utils/performance.js",
"type": "application/javascript",
"module-type": "global"
},
"$:/core/modules/utils/pluginmaker.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/pluginmaker.js\ntype: application/javascript\nmodule-type: utils\n\nA quick and dirty way to pack up plugins within the browser.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nRepack a plugin, and then delete any non-shadow payload tiddlers\n*/\nexports.repackPlugin = function(title,additionalTiddlers,excludeTiddlers) {\n\tadditionalTiddlers = additionalTiddlers || [];\n\texcludeTiddlers = excludeTiddlers || [];\n\t// Get the plugin tiddler\n\tvar pluginTiddler = $tw.wiki.getTiddler(title);\n\tif(!pluginTiddler) {\n\t\tthrow \"No such tiddler as \" + title;\n\t}\n\t// Extract the JSON\n\tvar jsonPluginTiddler;\n\ttry {\n\t\tjsonPluginTiddler = JSON.parse(pluginTiddler.fields.text);\n\t} catch(e) {\n\t\tthrow \"Cannot parse plugin tiddler \" + title + \"\\n\" + $tw.language.getString(\"Error/Caption\") + \": \" + e;\n\t}\n\t// Get the list of tiddlers\n\tvar tiddlers = Object.keys(jsonPluginTiddler.tiddlers);\n\t// Add the additional tiddlers\n\t$tw.utils.pushTop(tiddlers,additionalTiddlers);\n\t// Remove any excluded tiddlers\n\tfor(var t=tiddlers.length-1; t>=0; t--) {\n\t\tif(excludeTiddlers.indexOf(tiddlers[t]) !== -1) {\n\t\t\ttiddlers.splice(t,1);\n\t\t}\n\t}\n\t// Pack up the tiddlers into a block of JSON\n\tvar plugins = {};\n\t$tw.utils.each(tiddlers,function(title) {\n\t\tvar tiddler = $tw.wiki.getTiddler(title),\n\t\t\tfields = {};\n\t\t$tw.utils.each(tiddler.fields,function (value,name) {\n\t\t\tfields[name] = tiddler.getFieldString(name);\n\t\t});\n\t\tplugins[title] = fields;\n\t});\n\t// Retrieve and bump the version number\n\tvar pluginVersion = $tw.utils.parseVersion(pluginTiddler.getFieldString(\"version\") || \"0.0.0\") || {\n\t\t\tmajor: \"0\",\n\t\t\tminor: \"0\",\n\t\t\tpatch: \"0\"\n\t\t};\n\tpluginVersion.patch++;\n\tvar version = pluginVersion.major + \".\" + pluginVersion.minor + \".\" + 
pluginVersion.patch;\n\tif(pluginVersion.prerelease) {\n\t\tversion += \"-\" + pluginVersion.prerelease;\n\t}\n\tif(pluginVersion.build) {\n\t\tversion += \"+\" + pluginVersion.build;\n\t}\n\t// Save the tiddler\n\t$tw.wiki.addTiddler(new $tw.Tiddler(pluginTiddler,{text: JSON.stringify({tiddlers: plugins},null,4), version: version}));\n\t// Delete any non-shadow constituent tiddlers\n\t$tw.utils.each(tiddlers,function(title) {\n\t\tif($tw.wiki.tiddlerExists(title)) {\n\t\t\t$tw.wiki.deleteTiddler(title);\n\t\t}\n\t});\n\t// Trigger an autosave\n\t$tw.rootWidget.dispatchEvent({type: \"tm-auto-save-wiki\"});\n\t// Return a heartwarming confirmation\n\treturn \"Plugin \" + title + \" successfully saved\";\n};\n\n})();\n",
"title": "$:/core/modules/utils/pluginmaker.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/utils.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/utils.js\ntype: application/javascript\nmodule-type: utils\n\nVarious static utility functions.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nDisplay a warning, in colour if we're on a terminal\n*/\nexports.warning = function(text) {\n\tconsole.log($tw.node ? \"\\x1b[1;33m\" + text + \"\\x1b[0m\" : text);\n};\n\n/*\nRepeats a string\n*/\nexports.repeat = function(str,count) {\n\tvar result = \"\";\n\tfor(var t=0;t<count;t++) {\n\t\tresult += str;\n\t}\n\treturn result;\n};\n\n/*\nTrim whitespace from the start and end of a string\nThanks to Steven Levithan, http://blog.stevenlevithan.com/archives/faster-trim-javascript\n*/\nexports.trim = function(str) {\n\tif(typeof str === \"string\") {\n\t\treturn str.replace(/^\\s\\s*/, '').replace(/\\s\\s*$/, '');\n\t} else {\n\t\treturn str;\n\t}\n};\n\n/*\nFind the line break preceding a given position in a string\nReturns position immediately after that line break, or the start of the string\n*/\nexports.findPrecedingLineBreak = function(text,pos) {\n\tvar result = text.lastIndexOf(\"\\n\",pos - 1);\n\tif(result === -1) {\n\t\tresult = 0;\n\t} else {\n\t\tresult++;\n\t\tif(text.charAt(result) === \"\\r\") {\n\t\t\tresult++;\n\t\t}\n\t}\n\treturn result;\n};\n\n/*\nFind the line break following a given position in a string\n*/\nexports.findFollowingLineBreak = function(text,pos) {\n\t// Cut to just past the following line break, or to the end of the text\n\tvar result = text.indexOf(\"\\n\",pos);\n\tif(result === -1) {\n\t\tresult = text.length;\n\t} else {\n\t\tif(text.charAt(result) === \"\\r\") {\n\t\t\tresult++;\n\t\t}\n\t}\n\treturn result;\n};\n\n/*\nReturn the number of keys in an object\n*/\nexports.count = function(object) {\n\treturn Object.keys(object || {}).length;\n};\n\n/*\nCheck if an array is equal by value and by reference.\n*/\nexports.isArrayEqual = function(array1,array2) {\n\tif(array1 === array2) 
{\n\t\treturn true;\n\t}\n\tarray1 = array1 || [];\n\tarray2 = array2 || [];\n\tif(array1.length !== array2.length) {\n\t\treturn false;\n\t}\n\treturn array1.every(function(value,index) {\n\t\treturn value === array2[index];\n\t});\n};\n\n/*\nPush entries onto an array, removing them first if they already exist in the array\n\tarray: array to modify (assumed to be free of duplicates)\n\tvalue: a single value to push or an array of values to push\n*/\nexports.pushTop = function(array,value) {\n\tvar t,p;\n\tif($tw.utils.isArray(value)) {\n\t\t// Remove any array entries that are duplicated in the new values\n\t\tif(value.length !== 0) {\n\t\t\tif(array.length !== 0) {\n\t\t\t\tif(value.length < array.length) {\n\t\t\t\t\tfor(t=0; t<value.length; t++) {\n\t\t\t\t\t\tp = array.indexOf(value[t]);\n\t\t\t\t\t\tif(p !== -1) {\n\t\t\t\t\t\t\tarray.splice(p,1);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\tfor(t=array.length-1; t>=0; t--) {\n\t\t\t\t\t\tp = value.indexOf(array[t]);\n\t\t\t\t\t\tif(p !== -1) {\n\t\t\t\t\t\t\tarray.splice(t,1);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\t// Push the values on top of the main array\n\t\t\tarray.push.apply(array,value);\n\t\t}\n\t} else {\n\t\tp = array.indexOf(value);\n\t\tif(p !== -1) {\n\t\t\tarray.splice(p,1);\n\t\t}\n\t\tarray.push(value);\n\t}\n\treturn array;\n};\n\n/*\nRemove entries from an array\n\tarray: array to modify\n\tvalue: a single value to remove, or an array of values to remove\n*/\nexports.removeArrayEntries = function(array,value) {\n\tvar t,p;\n\tif($tw.utils.isArray(value)) {\n\t\tfor(t=0; t<value.length; t++) {\n\t\t\tp = array.indexOf(value[t]);\n\t\t\tif(p !== -1) {\n\t\t\t\tarray.splice(p,1);\n\t\t\t}\n\t\t}\n\t} else {\n\t\tp = array.indexOf(value);\n\t\tif(p !== -1) {\n\t\t\tarray.splice(p,1);\n\t\t}\n\t}\n};\n\n/*\nCheck whether any members of a hashmap are present in another hashmap\n*/\nexports.checkDependencies = function(dependencies,changes) {\n\tvar hit = 
false;\n\t$tw.utils.each(changes,function(change,title) {\n\t\tif($tw.utils.hop(dependencies,title)) {\n\t\t\thit = true;\n\t\t}\n\t});\n\treturn hit;\n};\n\nexports.extend = function(object /* [, src] */) {\n\t$tw.utils.each(Array.prototype.slice.call(arguments, 1), function(source) {\n\t\tif(source) {\n\t\t\tfor(var property in source) {\n\t\t\t\tobject[property] = source[property];\n\t\t\t}\n\t\t}\n\t});\n\treturn object;\n};\n\nexports.deepCopy = function(object) {\n\tvar result,t;\n\tif($tw.utils.isArray(object)) {\n\t\t// Copy arrays\n\t\tresult = object.slice(0);\n\t} else if(typeof object === \"object\") {\n\t\tresult = {};\n\t\tfor(t in object) {\n\t\t\tif(object[t] !== undefined) {\n\t\t\t\tresult[t] = $tw.utils.deepCopy(object[t]);\n\t\t\t}\n\t\t}\n\t} else {\n\t\tresult = object;\n\t}\n\treturn result;\n};\n\nexports.extendDeepCopy = function(object,extendedProperties) {\n\tvar result = $tw.utils.deepCopy(object),t;\n\tfor(t in extendedProperties) {\n\t\tif(extendedProperties[t] !== undefined) {\n\t\t\tresult[t] = $tw.utils.deepCopy(extendedProperties[t]);\n\t\t}\n\t}\n\treturn result;\n};\n\nexports.deepFreeze = function deepFreeze(object) {\n\tvar property, key;\n\tObject.freeze(object);\n\tfor(key in object) {\n\t\tproperty = object[key];\n\t\tif($tw.utils.hop(object,key) && (typeof property === \"object\") && !Object.isFrozen(property)) {\n\t\t\tdeepFreeze(property);\n\t\t}\n\t}\n};\n\nexports.slowInSlowOut = function(t) {\n\treturn (1 - ((Math.cos(t * Math.PI) + 1) / 2));\n};\n\nexports.formatDateString = function(date,template) {\n\tvar result = \"\",\n\t\tt = template,\n\t\tmatches = [\n\t\t\t[/^0hh12/, function() {\n\t\t\t\treturn $tw.utils.pad($tw.utils.getHours12(date));\n\t\t\t}],\n\t\t\t[/^wYYYY/, function() {\n\t\t\t\treturn $tw.utils.getYearForWeekNo(date);\n\t\t\t}],\n\t\t\t[/^hh12/, function() {\n\t\t\t\treturn $tw.utils.getHours12(date);\n\t\t\t}],\n\t\t\t[/^DDth/, function() {\n\t\t\t\treturn date.getDate() + 
$tw.utils.getDaySuffix(date);\n\t\t\t}],\n\t\t\t[/^YYYY/, function() {\n\t\t\t\treturn date.getFullYear();\n\t\t\t}],\n\t\t\t[/^0hh/, function() {\n\t\t\t\treturn $tw.utils.pad(date.getHours());\n\t\t\t}],\n\t\t\t[/^0mm/, function() {\n\t\t\t\treturn $tw.utils.pad(date.getMinutes());\n\t\t\t}],\n\t\t\t[/^0ss/, function() {\n\t\t\t\treturn $tw.utils.pad(date.getSeconds());\n\t\t\t}],\n\t\t\t[/^0DD/, function() {\n\t\t\t\treturn $tw.utils.pad(date.getDate());\n\t\t\t}],\n\t\t\t[/^0MM/, function() {\n\t\t\t\treturn $tw.utils.pad(date.getMonth()+1);\n\t\t\t}],\n\t\t\t[/^0WW/, function() {\n\t\t\t\treturn $tw.utils.pad($tw.utils.getWeek(date));\n\t\t\t}],\n\t\t\t[/^ddd/, function() {\n\t\t\t\treturn $tw.language.getString(\"Date/Short/Day/\" + date.getDay());\n\t\t\t}],\n\t\t\t[/^mmm/, function() {\n\t\t\t\treturn $tw.language.getString(\"Date/Short/Month/\" + (date.getMonth() + 1));\n\t\t\t}],\n\t\t\t[/^DDD/, function() {\n\t\t\t\treturn $tw.language.getString(\"Date/Long/Day/\" + date.getDay());\n\t\t\t}],\n\t\t\t[/^MMM/, function() {\n\t\t\t\treturn $tw.language.getString(\"Date/Long/Month/\" + (date.getMonth() + 1));\n\t\t\t}],\n\t\t\t[/^TZD/, function() {\n\t\t\t\tvar tz = date.getTimezoneOffset(),\n\t\t\t\tatz = Math.abs(tz);\n\t\t\t\treturn (tz < 0 ? 
'+' : '-') + $tw.utils.pad(Math.floor(atz / 60)) + ':' + $tw.utils.pad(atz % 60);\n\t\t\t}],\n\t\t\t[/^wYY/, function() {\n\t\t\t\treturn $tw.utils.pad($tw.utils.getYearForWeekNo(date) - 2000);\n\t\t\t}],\n\t\t\t[/^[ap]m/, function() {\n\t\t\t\treturn $tw.utils.getAmPm(date).toLowerCase();\n\t\t\t}],\n\t\t\t[/^hh/, function() {\n\t\t\t\treturn date.getHours();\n\t\t\t}],\n\t\t\t[/^mm/, function() {\n\t\t\t\treturn date.getMinutes();\n\t\t\t}],\n\t\t\t[/^ss/, function() {\n\t\t\t\treturn date.getSeconds();\n\t\t\t}],\n\t\t\t[/^[AP]M/, function() {\n\t\t\t\treturn $tw.utils.getAmPm(date).toUpperCase();\n\t\t\t}],\n\t\t\t[/^DD/, function() {\n\t\t\t\treturn date.getDate();\n\t\t\t}],\n\t\t\t[/^MM/, function() {\n\t\t\t\treturn date.getMonth() + 1;\n\t\t\t}],\n\t\t\t[/^WW/, function() {\n\t\t\t\treturn $tw.utils.getWeek(date);\n\t\t\t}],\n\t\t\t[/^YY/, function() {\n\t\t\t\treturn $tw.utils.pad(date.getFullYear() - 2000);\n\t\t\t}]\n\t\t];\n\twhile(t.length){\n\t\tvar matchString = \"\";\n\t\t$tw.utils.each(matches, function(m) {\n\t\t\tvar match = m[0].exec(t);\n\t\t\tif(match) {\n\t\t\t\tmatchString = m[1].call();\n\t\t\t\tt = t.substr(match[0].length);\n\t\t\t\treturn false;\n\t\t\t}\n\t\t});\n\t\tif(matchString) {\n\t\t\tresult += matchString;\n\t\t} else {\n\t\t\tresult += t.charAt(0);\n\t\t\tt = t.substr(1);\n\t\t}\n\t}\n\tresult = result.replace(/\\\\(.)/g,\"$1\");\n\treturn result;\n};\n\nexports.getAmPm = function(date) {\n\treturn $tw.language.getString(\"Date/Period/\" + (date.getHours() >= 12 ? 
\"pm\" : \"am\"));\n};\n\nexports.getDaySuffix = function(date) {\n\treturn $tw.language.getString(\"Date/DaySuffix/\" + date.getDate());\n};\n\nexports.getWeek = function(date) {\n\tvar dt = new Date(date.getTime());\n\tvar d = dt.getDay();\n\tif(d === 0) {\n\t\td = 7; // JavaScript Sun=0, ISO Sun=7\n\t}\n\tdt.setTime(dt.getTime() + (4 - d) * 86400000);// shift day to Thurs of same week to calculate weekNo\n\tvar n = Math.floor((dt.getTime()-new Date(dt.getFullYear(),0,1) + 3600000) / 86400000);\n\treturn Math.floor(n / 7) + 1;\n};\n\nexports.getYearForWeekNo = function(date) {\n\tvar dt = new Date(date.getTime());\n\tvar d = dt.getDay();\n\tif(d === 0) {\n\t\td = 7; // JavaScript Sun=0, ISO Sun=7\n\t}\n\tdt.setTime(dt.getTime() + (4 - d) * 86400000);// shift day to Thurs of same week\n\treturn dt.getFullYear();\n};\n\nexports.getHours12 = function(date) {\n\tvar h = date.getHours();\n\treturn h > 12 ? h-12 : ( h > 0 ? h : 12 );\n};\n\n/*\nConvert a date delta in milliseconds into a string representation of \"23 seconds ago\", \"27 minutes ago\" etc.\n\tdelta: delta in milliseconds\nReturns an object with these members:\n\tdescription: string describing the delta period\n\tupdatePeriod: time in millisecond until the string will be inaccurate\n*/\nexports.getRelativeDate = function(delta) {\n\tvar futurep = false;\n\tif(delta < 0) {\n\t\tdelta = -1 * delta;\n\t\tfuturep = true;\n\t}\n\tvar units = [\n\t\t{name: \"Years\", duration: 365 * 24 * 60 * 60 * 1000},\n\t\t{name: \"Months\", duration: (365/12) * 24 * 60 * 60 * 1000},\n\t\t{name: \"Days\", duration: 24 * 60 * 60 * 1000},\n\t\t{name: \"Hours\", duration: 60 * 60 * 1000},\n\t\t{name: \"Minutes\", duration: 60 * 1000},\n\t\t{name: \"Seconds\", duration: 1000}\n\t];\n\tfor(var t=0; t<units.length; t++) {\n\t\tvar result = Math.floor(delta / units[t].duration);\n\t\tif(result >= 2) {\n\t\t\treturn {\n\t\t\t\tdelta: delta,\n\t\t\t\tdescription: $tw.language.getString(\n\t\t\t\t\t\"RelativeDate/\" + (futurep ? 
\"Future\" : \"Past\") + \"/\" + units[t].name,\n\t\t\t\t\t{variables:\n\t\t\t\t\t\t{period: result.toString()}\n\t\t\t\t\t}\n\t\t\t\t),\n\t\t\t\tupdatePeriod: units[t].duration\n\t\t\t};\n\t\t}\n\t}\n\treturn {\n\t\tdelta: delta,\n\t\tdescription: $tw.language.getString(\n\t\t\t\"RelativeDate/\" + (futurep ? \"Future\" : \"Past\") + \"/Second\",\n\t\t\t{variables:\n\t\t\t\t{period: \"1\"}\n\t\t\t}\n\t\t),\n\t\tupdatePeriod: 1000\n\t};\n};\n\n// Convert & to \"&\", < to \"<\", > to \">\", \" to \""\"\nexports.htmlEncode = function(s) {\n\tif(s) {\n\t\treturn s.toString().replace(/&/mg,\"&\").replace(/</mg,\"<\").replace(/>/mg,\">\").replace(/\\\"/mg,\""\");\n\t} else {\n\t\treturn \"\";\n\t}\n};\n\n// Converts all HTML entities to their character equivalents\nexports.entityDecode = function(s) {\n\tvar converter = String.fromCodePoint || String.fromCharCode,\n\t\te = s.substr(1,s.length-2); // Strip the & and the ;\n\tif(e.charAt(0) === \"#\") {\n\t\tif(e.charAt(1) === \"x\" || e.charAt(1) === \"X\") {\n\t\t\treturn converter(parseInt(e.substr(2),16));\t\n\t\t} else {\n\t\t\treturn converter(parseInt(e.substr(1),10));\n\t\t}\n\t} else {\n\t\tvar c = $tw.config.htmlEntities[e];\n\t\tif(c) {\n\t\t\treturn converter(c);\n\t\t} else {\n\t\t\treturn s; // Couldn't convert it as an entity, just return it raw\n\t\t}\n\t}\n};\n\nexports.unescapeLineBreaks = function(s) {\n\treturn s.replace(/\\\\n/mg,\"\\n\").replace(/\\\\b/mg,\" \").replace(/\\\\s/mg,\"\\\\\").replace(/\\r/mg,\"\");\n};\n\n/*\n * Returns an escape sequence for given character. 
Uses \\x for characters <=\n * 0xFF to save space, \\u for the rest.\n *\n * The code needs to be in sync with th code template in the compilation\n * function for \"action\" nodes.\n */\n// Copied from peg.js, thanks to David Majda\nexports.escape = function(ch) {\n\tvar charCode = ch.charCodeAt(0);\n\tif(charCode <= 0xFF) {\n\t\treturn '\\\\x' + $tw.utils.pad(charCode.toString(16).toUpperCase());\n\t} else {\n\t\treturn '\\\\u' + $tw.utils.pad(charCode.toString(16).toUpperCase(),4);\n\t}\n};\n\n// Turns a string into a legal JavaScript string\n// Copied from peg.js, thanks to David Majda\nexports.stringify = function(s) {\n\t/*\n\t* ECMA-262, 5th ed., 7.8.4: All characters may appear literally in a string\n\t* literal except for the closing quote character, backslash, carriage return,\n\t* line separator, paragraph separator, and line feed. Any character may\n\t* appear in the form of an escape sequence.\n\t*\n\t* For portability, we also escape all non-ASCII characters.\n\t*/\n\treturn (s || \"\")\n\t\t.replace(/\\\\/g, '\\\\\\\\') // backslash\n\t\t.replace(/\"/g, '\\\\\"') // double quote character\n\t\t.replace(/'/g, \"\\\\'\") // single quote character\n\t\t.replace(/\\r/g, '\\\\r') // carriage return\n\t\t.replace(/\\n/g, '\\\\n') // line feed\n\t\t.replace(/[\\x80-\\uFFFF]/g, exports.escape); // non-ASCII characters\n};\n\n/*\nEscape the RegExp special characters with a preceding backslash\n*/\nexports.escapeRegExp = function(s) {\n return s.replace(/[\\-\\/\\\\\\^\\$\\*\\+\\?\\.\\(\\)\\|\\[\\]\\{\\}]/g, '\\\\$&');\n};\n\n// Checks whether a link target is external, i.e. 
not a tiddler title\nexports.isLinkExternal = function(to) {\n\tvar externalRegExp = /^(?:file|http|https|mailto|ftp|irc|news|data|skype):[^\\s<>{}\\[\\]`|\"\\\\^]+(?:\\/|\\b)/i;\n\treturn externalRegExp.test(to);\n};\n\nexports.nextTick = function(fn) {\n/*global window: false */\n\tif(typeof process === \"undefined\") {\n\t\t// Apparently it would be faster to use postMessage - http://dbaron.org/log/20100309-faster-timeouts\n\t\twindow.setTimeout(fn,4);\n\t} else {\n\t\tprocess.nextTick(fn);\n\t}\n};\n\n/*\nConvert a hyphenated CSS property name into a camel case one\n*/\nexports.unHyphenateCss = function(propName) {\n\treturn propName.replace(/-([a-z])/gi, function(match0,match1) {\n\t\treturn match1.toUpperCase();\n\t});\n};\n\n/*\nConvert a camelcase CSS property name into a dashed one (\"backgroundColor\" --> \"background-color\")\n*/\nexports.hyphenateCss = function(propName) {\n\treturn propName.replace(/([A-Z])/g, function(match0,match1) {\n\t\treturn \"-\" + match1.toLowerCase();\n\t});\n};\n\n/*\nParse a text reference of one of these forms:\n* title\n* !!field\n* title!!field\n* title##index\n* etc\nReturns an object with the following fields, all optional:\n* title: tiddler title\n* field: tiddler field name\n* index: JSON property index\n*/\nexports.parseTextReference = function(textRef) {\n\t// Separate out the title, field name and/or JSON indices\n\tvar reTextRef = /(?:(.*?)!!(.+))|(?:(.*?)##(.+))|(.*)/mg,\n\t\tmatch = reTextRef.exec(textRef),\n\t\tresult = {};\n\tif(match && reTextRef.lastIndex === textRef.length) {\n\t\t// Return the parts\n\t\tif(match[1]) {\n\t\t\tresult.title = match[1];\n\t\t}\n\t\tif(match[2]) {\n\t\t\tresult.field = match[2];\n\t\t}\n\t\tif(match[3]) {\n\t\t\tresult.title = match[3];\n\t\t}\n\t\tif(match[4]) {\n\t\t\tresult.index = match[4];\n\t\t}\n\t\tif(match[5]) {\n\t\t\tresult.title = match[5];\n\t\t}\n\t} else {\n\t\t// If we couldn't parse it\n\t\tresult.title = textRef\n\t}\n\treturn result;\n};\n\n/*\nChecks 
whether a string is a valid fieldname\n*/\nexports.isValidFieldName = function(name) {\n\tif(!name || typeof name !== \"string\") {\n\t\treturn false;\n\t}\n\tname = name.toLowerCase().trim();\n\tvar fieldValidatorRegEx = /^[a-z0-9\\-\\._]+$/mg;\n\treturn fieldValidatorRegEx.test(name);\n};\n\n/*\nExtract the version number from the meta tag or from the boot file\n*/\n\n// Browser version\nexports.extractVersionInfo = function() {\n\tif($tw.packageInfo) {\n\t\treturn $tw.packageInfo.version;\n\t} else {\n\t\tvar metatags = document.getElementsByTagName(\"meta\");\n\t\tfor(var t=0; t<metatags.length; t++) {\n\t\t\tvar m = metatags[t];\n\t\t\tif(m.name === \"tiddlywiki-version\") {\n\t\t\t\treturn m.content;\n\t\t\t}\n\t\t}\n\t}\n\treturn null;\n};\n\n/*\nGet the animation duration in ms\n*/\nexports.getAnimationDuration = function() {\n\treturn parseInt($tw.wiki.getTiddlerText(\"$:/config/AnimationDuration\",\"400\"),10);\n};\n\n/*\nHash a string to a number\nDerived from http://stackoverflow.com/a/15710692\n*/\nexports.hashString = function(str) {\n\treturn str.split(\"\").reduce(function(a,b) {\n\t\ta = ((a << 5) - a) + b.charCodeAt(0);\n\t\treturn a & a;\n\t},0);\n};\n\n/*\nDecode a base64 string\n*/\nexports.base64Decode = function(string64) {\n\tif($tw.browser) {\n\t\t// TODO\n\t\tthrow \"$tw.utils.base64Decode() doesn't work in the browser\";\n\t} else {\n\t\treturn (new Buffer(string64,\"base64\")).toString();\n\t}\n};\n\n/*\nConvert a hashmap into a tiddler dictionary format sequence of name:value pairs\n*/\nexports.makeTiddlerDictionary = function(data) {\n\tvar output = [];\n\tfor(var name in data) {\n\t\toutput.push(name + \": \" + data[name]);\n\t}\n\treturn output.join(\"\\n\");\n};\n\n/*\nHigh resolution microsecond timer for profiling\n*/\nexports.timer = function(base) {\n\tvar m;\n\tif($tw.node) {\n\t\tvar r = process.hrtime();\t\t\n\t\tm = r[0] * 1e3 + (r[1] / 1e6);\n\t} else if(window.performance) {\n\t\tm = performance.now();\n\t} else {\n\t\tm = 
Date.now();\n\t}\n\tif(typeof base !== \"undefined\") {\n\t\tm = m - base;\n\t}\n\treturn m;\n};\n\n/*\nConvert text and content type to a data URI\n*/\nexports.makeDataUri = function(text,type) {\n\ttype = type || \"text/vnd.tiddlywiki\";\n\tvar typeInfo = $tw.config.contentTypeInfo[type] || $tw.config.contentTypeInfo[\"text/plain\"],\n\t\tisBase64 = typeInfo.encoding === \"base64\",\n\t\tparts = [];\n\tparts.push(\"data:\");\n\tparts.push(type);\n\tparts.push(isBase64 ? \";base64\" : \"\");\n\tparts.push(\",\");\n\tparts.push(isBase64 ? text : encodeURIComponent(text));\n\treturn parts.join(\"\");\n};\n\n/*\nUseful for finding out the fully escaped CSS selector equivalent to a given tag. For example:\n\n$tw.utils.tagToCssSelector(\"$:/tags/Stylesheet\") --> tc-tagged-\\%24\\%3A\\%2Ftags\\%2FStylesheet\n*/\nexports.tagToCssSelector = function(tagName) {\n\treturn \"tc-tagged-\" + encodeURIComponent(tagName).replace(/[!\"#$%&'()*+,\\-./:;<=>?@[\\\\\\]^`{\\|}~,]/mg,function(c) {\n\t\treturn \"\\\\\" + c;\n\t});\n};\n\n\n/*\nIE does not have sign function\n*/\nexports.sign = Math.sign || function(x) {\n\tx = +x; // convert to a number\n\tif (x === 0 || isNaN(x)) {\n\t\treturn x;\n\t}\n\treturn x > 0 ? 1 : -1;\n};\n\n/*\nIE does not have an endsWith function\n*/\nexports.strEndsWith = function(str,ending,position) {\n\tif(str.endsWith) {\n\t\treturn str.endsWith(ending,position);\n\t} else {\n\t\tif (typeof position !== 'number' || !isFinite(position) || Math.floor(position) !== position || position > str.length) {\n\t\t\tposition = str.length;\n\t\t}\n\t\tposition -= str.length;\n\t\tvar lastIndex = str.indexOf(ending, position);\n\t\treturn lastIndex !== -1 && lastIndex === position;\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/utils/utils.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/widgets/action-deletefield.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/action-deletefield.js\ntype: application/javascript\nmodule-type: widget\n\nAction widget to delete fields of a tiddler.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar DeleteFieldWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nDeleteFieldWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nDeleteFieldWidget.prototype.render = function(parent,nextSibling) {\n\tthis.computeAttributes();\n\tthis.execute();\n};\n\n/*\nCompute the internal state of the widget\n*/\nDeleteFieldWidget.prototype.execute = function() {\n\tthis.actionTiddler = this.getAttribute(\"$tiddler\",this.getVariable(\"currentTiddler\"));\n\tthis.actionField = this.getAttribute(\"$field\");\n};\n\n/*\nRefresh the widget by ensuring our attributes are up to date\n*/\nDeleteFieldWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes[\"$tiddler\"]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t}\n\treturn this.refreshChildren(changedTiddlers);\n};\n\n/*\nInvoke the action associated with this widget\n*/\nDeleteFieldWidget.prototype.invokeAction = function(triggeringWidget,event) {\n\tvar self = this,\n\t\ttiddler = this.wiki.getTiddler(self.actionTiddler),\n\t\tremoveFields = {};\n\tif(this.actionField) {\n\t\tremoveFields[this.actionField] = undefined;\n\t}\n\tif(tiddler) {\n\t\t$tw.utils.each(this.attributes,function(attribute,name) {\n\t\t\tif(name.charAt(0) !== \"$\" && name !== \"title\") {\n\t\t\t\tremoveFields[name] = undefined;\n\t\t\t}\n\t\t});\n\t\tthis.wiki.addTiddler(new $tw.Tiddler(this.wiki.getModificationFields(),tiddler,removeFields,this.wiki.getCreationFields()));\n\t}\n\treturn true; // Action was 
invoked\n};\n\nexports[\"action-deletefield\"] = DeleteFieldWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/action-deletefield.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/action-deletetiddler.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/action-deletetiddler.js\ntype: application/javascript\nmodule-type: widget\n\nAction widget to delete a tiddler.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar DeleteTiddlerWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nDeleteTiddlerWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nDeleteTiddlerWidget.prototype.render = function(parent,nextSibling) {\n\tthis.computeAttributes();\n\tthis.execute();\n};\n\n/*\nCompute the internal state of the widget\n*/\nDeleteTiddlerWidget.prototype.execute = function() {\n\tthis.actionFilter = this.getAttribute(\"$filter\");\n\tthis.actionTiddler = this.getAttribute(\"$tiddler\");\n};\n\n/*\nRefresh the widget by ensuring our attributes are up to date\n*/\nDeleteTiddlerWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes[\"$filter\"] || changedAttributes[\"$tiddler\"]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t}\n\treturn this.refreshChildren(changedTiddlers);\n};\n\n/*\nInvoke the action associated with this widget\n*/\nDeleteTiddlerWidget.prototype.invokeAction = function(triggeringWidget,event) {\n\tvar tiddlers = [];\n\tif(this.actionFilter) {\n\t\ttiddlers = this.wiki.filterTiddlers(this.actionFilter,this);\n\t}\n\tif(this.actionTiddler) {\n\t\ttiddlers.push(this.actionTiddler);\n\t}\n\tfor(var t=0; t<tiddlers.length; t++) {\n\t\tthis.wiki.deleteTiddler(tiddlers[t]);\n\t}\n\treturn true; // Action was invoked\n};\n\nexports[\"action-deletetiddler\"] = DeleteTiddlerWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/action-deletetiddler.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/action-listops.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/action-listops.js\ntype: application/javascript\nmodule-type: widget\n\nAction widget to apply list operations to any tiddler field (defaults to the 'list' field of the current tiddler)\n\n\\*/\n(function() {\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\nvar ActionListopsWidget = function(parseTreeNode, options) {\n\tthis.initialise(parseTreeNode, options);\n};\n/**\n * Inherit from the base widget class\n */\nActionListopsWidget.prototype = new Widget();\n/**\n * Render this widget into the DOM\n */\nActionListopsWidget.prototype.render = function(parent, nextSibling) {\n\tthis.computeAttributes();\n\tthis.execute();\n};\n/**\n * Compute the internal state of the widget\n */\nActionListopsWidget.prototype.execute = function() {\n\t// Get our parameters\n\tthis.target = this.getAttribute(\"$tiddler\", this.getVariable(\n\t\t\"currentTiddler\"));\n\tthis.filter = this.getAttribute(\"$filter\");\n\tthis.subfilter = this.getAttribute(\"$subfilter\");\n\tthis.listField = this.getAttribute(\"$field\", \"list\");\n\tthis.listIndex = this.getAttribute(\"$index\");\n\tthis.filtertags = this.getAttribute(\"$tags\");\n};\n/**\n * \tRefresh the widget by ensuring our attributes are up to date\n */\nActionListopsWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.$tiddler || changedAttributes.$filter ||\n\t\tchangedAttributes.$subfilter || changedAttributes.$field ||\n\t\tchangedAttributes.$index || changedAttributes.$tags) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t}\n\treturn this.refreshChildren(changedTiddlers);\n};\n/**\n * \tInvoke the action associated with this widget\n */\nActionListopsWidget.prototype.invokeAction = function(triggeringWidget,\n\tevent) {\n\t//Apply the specified filters to the lists\n\tvar field = 
this.listField,\n\t\tindex,\n\t\ttype = \"!!\",\n\t\tlist = this.listField;\n\tif(this.listIndex) {\n\t\tfield = undefined;\n\t\tindex = this.listIndex;\n\t\ttype = \"##\";\n\t\tlist = this.listIndex;\n\t}\n\tif(this.filter) {\n\t\tthis.wiki.setText(this.target, field, index, $tw.utils.stringifyList(\n\t\t\tthis.wiki\n\t\t\t.filterTiddlers(this.filter, this)));\n\t}\n\tif(this.subfilter) {\n\t\tvar subfilter = \"[list[\" + this.target + type + list + \"]] \" + this.subfilter;\n\t\tthis.wiki.setText(this.target, field, index, $tw.utils.stringifyList(\n\t\t\tthis.wiki\n\t\t\t.filterTiddlers(subfilter, this)));\n\t}\n\tif(this.filtertags) {\n\t\tvar tagfilter = \"[list[\" + this.target + \"!!tags]] \" + this.filtertags;\n\t\tthis.wiki.setText(this.target, \"tags\", undefined, $tw.utils.stringifyList(\n\t\t\tthis.wiki.filterTiddlers(tagfilter, this)));\n\t}\n\treturn true; // Action was invoked\n};\n\nexports[\"action-listops\"] = ActionListopsWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/action-listops.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/action-navigate.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/action-navigate.js\ntype: application/javascript\nmodule-type: widget\n\nAction widget to navigate to a tiddler\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar NavigateWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nNavigateWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nNavigateWidget.prototype.render = function(parent,nextSibling) {\n\tthis.computeAttributes();\n\tthis.execute();\n};\n\n/*\nCompute the internal state of the widget\n*/\nNavigateWidget.prototype.execute = function() {\n\tthis.actionTo = this.getAttribute(\"$to\");\n\tthis.actionScroll = this.getAttribute(\"$scroll\");\n};\n\n/*\nRefresh the widget by ensuring our attributes are up to date\n*/\nNavigateWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes[\"$to\"] || changedAttributes[\"$scroll\"]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t}\n\treturn this.refreshChildren(changedTiddlers);\n};\n\n/*\nInvoke the action associated with this widget\n*/\nNavigateWidget.prototype.invokeAction = function(triggeringWidget,event) {\n\tvar bounds = triggeringWidget && triggeringWidget.getBoundingClientRect && triggeringWidget.getBoundingClientRect(),\n\t\tsuppressNavigation = event.metaKey || event.ctrlKey || (event.button === 1);\n\tif(this.actionScroll === \"yes\") {\n\t\tsuppressNavigation = false;\n\t} else if(this.actionScroll === \"no\") {\n\t\tsuppressNavigation = true;\n\t}\n\tthis.dispatchEvent({\n\t\ttype: \"tm-navigate\",\n\t\tnavigateTo: this.actionTo === undefined ? 
this.getVariable(\"currentTiddler\") : this.actionTo,\n\t\tnavigateFromTitle: this.getVariable(\"storyTiddler\"),\n\t\tnavigateFromNode: triggeringWidget,\n\t\tnavigateFromClientRect: bounds && { top: bounds.top, left: bounds.left, width: bounds.width, right: bounds.right, bottom: bounds.bottom, height: bounds.height\n\t\t},\n\t\tnavigateSuppressNavigation: suppressNavigation\n\t});\n\treturn true; // Action was invoked\n};\n\nexports[\"action-navigate\"] = NavigateWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/action-navigate.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/action-sendmessage.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/action-sendmessage.js\ntype: application/javascript\nmodule-type: widget\n\nAction widget to send a message\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar SendMessageWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nSendMessageWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nSendMessageWidget.prototype.render = function(parent,nextSibling) {\n\tthis.computeAttributes();\n\tthis.execute();\n};\n\n/*\nCompute the internal state of the widget\n*/\nSendMessageWidget.prototype.execute = function() {\n\tthis.actionMessage = this.getAttribute(\"$message\");\n\tthis.actionParam = this.getAttribute(\"$param\");\n\tthis.actionName = this.getAttribute(\"$name\");\n\tthis.actionValue = this.getAttribute(\"$value\",\"\");\n};\n\n/*\nRefresh the widget by ensuring our attributes are up to date\n*/\nSendMessageWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(Object.keys(changedAttributes).length) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t}\n\treturn this.refreshChildren(changedTiddlers);\n};\n\n/*\nInvoke the action associated with this widget\n*/\nSendMessageWidget.prototype.invokeAction = function(triggeringWidget,event) {\n\t// Get the string parameter\n\tvar param = this.actionParam;\n\t// Assemble the attributes as a hashmap\n\tvar paramObject = Object.create(null);\n\tvar count = 0;\n\t$tw.utils.each(this.attributes,function(attribute,name) {\n\t\tif(name.charAt(0) !== \"$\") {\n\t\t\tparamObject[name] = attribute;\n\t\t\tcount++;\n\t\t}\n\t});\n\t// Add name/value pair if present\n\tif(this.actionName) {\n\t\tparamObject[this.actionName] = this.actionValue;\n\t}\n\t// Dispatch the 
message\n\tthis.dispatchEvent({\n\t\ttype: this.actionMessage,\n\t\tparam: param,\n\t\tparamObject: paramObject,\n\t\ttiddlerTitle: this.getVariable(\"currentTiddler\"),\n\t\tnavigateFromTitle: this.getVariable(\"storyTiddler\")\n\t});\n\treturn true; // Action was invoked\n};\n\nexports[\"action-sendmessage\"] = SendMessageWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/action-sendmessage.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/action-setfield.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/action-setfield.js\ntype: application/javascript\nmodule-type: widget\n\nAction widget to set a single field or index on a tiddler.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar SetFieldWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nSetFieldWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nSetFieldWidget.prototype.render = function(parent,nextSibling) {\n\tthis.computeAttributes();\n\tthis.execute();\n};\n\n/*\nCompute the internal state of the widget\n*/\nSetFieldWidget.prototype.execute = function() {\n\tthis.actionTiddler = this.getAttribute(\"$tiddler\",this.getVariable(\"currentTiddler\"));\n\tthis.actionField = this.getAttribute(\"$field\");\n\tthis.actionIndex = this.getAttribute(\"$index\");\n\tthis.actionValue = this.getAttribute(\"$value\");\n\tthis.actionTimestamp = this.getAttribute(\"$timestamp\",\"yes\") === \"yes\";\n};\n\n/*\nRefresh the widget by ensuring our attributes are up to date\n*/\nSetFieldWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes[\"$tiddler\"] || changedAttributes[\"$field\"] || changedAttributes[\"$index\"] || changedAttributes[\"$value\"]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t}\n\treturn this.refreshChildren(changedTiddlers);\n};\n\n/*\nInvoke the action associated with this widget\n*/\nSetFieldWidget.prototype.invokeAction = function(triggeringWidget,event) {\n\tvar self = this,\n\t\toptions = {};\n\toptions.suppressTimestamp = !this.actionTimestamp;\n\tif((typeof this.actionField == \"string\") || (typeof this.actionIndex == \"string\") || (typeof this.actionValue == \"string\")) 
{\n\t\tthis.wiki.setText(this.actionTiddler,this.actionField,this.actionIndex,this.actionValue,options);\n\t}\n\t$tw.utils.each(this.attributes,function(attribute,name) {\n\t\tif(name.charAt(0) !== \"$\") {\n\t\t\tself.wiki.setText(self.actionTiddler,name,undefined,attribute,options);\n\t\t}\n\t});\n\treturn true; // Action was invoked\n};\n\nexports[\"action-setfield\"] = SetFieldWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/action-setfield.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/browse.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/browse.js\ntype: application/javascript\nmodule-type: widget\n\nBrowse widget for browsing for files to import\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar BrowseWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nBrowseWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nBrowseWidget.prototype.render = function(parent,nextSibling) {\n\tvar self = this;\n\t// Remember parent\n\tthis.parentDomNode = parent;\n\t// Compute attributes and execute state\n\tthis.computeAttributes();\n\tthis.execute();\n\t// Create element\n\tvar domNode = this.document.createElement(\"input\");\n\tdomNode.setAttribute(\"type\",\"file\");\n\tif(this.browseMultiple) {\n\t\tdomNode.setAttribute(\"multiple\",\"multiple\");\n\t}\n\tif(this.tooltip) {\n\t\tdomNode.setAttribute(\"title\",this.tooltip);\n\t}\n\t// Nw.js supports \"nwsaveas\" to force a \"save as\" dialogue that allows a new or existing file to be selected\n\tif(this.nwsaveas) {\n\t\tdomNode.setAttribute(\"nwsaveas\",this.nwsaveas);\n\t}\n\t// Nw.js supports \"webkitdirectory\" to allow a directory to be selected\n\tif(this.webkitdirectory) {\n\t\tdomNode.setAttribute(\"webkitdirectory\",this.webkitdirectory);\n\t}\n\t// Add a click event handler\n\tdomNode.addEventListener(\"change\",function (event) {\n\t\tif(self.message) {\n\t\t\tself.dispatchEvent({type: self.message, param: self.param, files: event.target.files});\n\t\t} else {\n\t\t\tself.wiki.readFiles(event.target.files,function(tiddlerFieldsArray) {\n\t\t\t\tself.dispatchEvent({type: \"tm-import-tiddlers\", param: JSON.stringify(tiddlerFieldsArray)});\n\t\t\t});\n\t\t}\n\t\treturn false;\n\t},false);\n\t// Insert 
element\n\tparent.insertBefore(domNode,nextSibling);\n\tthis.renderChildren(domNode,null);\n\tthis.domNodes.push(domNode);\n};\n\n/*\nCompute the internal state of the widget\n*/\nBrowseWidget.prototype.execute = function() {\n\tthis.browseMultiple = this.getAttribute(\"multiple\");\n\tthis.message = this.getAttribute(\"message\");\n\tthis.param = this.getAttribute(\"param\");\n\tthis.tooltip = this.getAttribute(\"tooltip\");\n\tthis.nwsaveas = this.getAttribute(\"nwsaveas\");\n\tthis.webkitdirectory = this.getAttribute(\"webkitdirectory\");\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nBrowseWidget.prototype.refresh = function(changedTiddlers) {\n\treturn false;\n};\n\nexports.browse = BrowseWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/browse.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/button.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/button.js\ntype: application/javascript\nmodule-type: widget\n\nButton widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar ButtonWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nButtonWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nButtonWidget.prototype.render = function(parent,nextSibling) {\n\tvar self = this;\n\t// Remember parent\n\tthis.parentDomNode = parent;\n\t// Compute attributes and execute state\n\tthis.computeAttributes();\n\tthis.execute();\n\t// Create element\n\tvar tag = \"button\";\n\tif(this.buttonTag && $tw.config.htmlUnsafeElements.indexOf(this.buttonTag) === -1) {\n\t\ttag = this.buttonTag;\n\t}\n\tvar domNode = this.document.createElement(tag);\n\t// Assign classes\n\tvar classes = this[\"class\"].split(\" \") || [],\n\t\tisPoppedUp = this.popup && this.isPoppedUp();\n\tif(this.selectedClass) {\n\t\tif(this.set && this.setTo && this.isSelected()) {\n\t\t\t$tw.utils.pushTop(classes,this.selectedClass.split(\" \"));\n\t\t}\n\t\tif(isPoppedUp) {\n\t\t\t$tw.utils.pushTop(classes,this.selectedClass.split(\" \"));\n\t\t}\n\t}\n\tif(isPoppedUp) {\n\t\t$tw.utils.pushTop(classes,\"tc-popup-handle\");\n\t}\n\tdomNode.className = classes.join(\" \");\n\t// Assign other attributes\n\tif(this.style) {\n\t\tdomNode.setAttribute(\"style\",this.style);\n\t}\n\tif(this.tooltip) {\n\t\tdomNode.setAttribute(\"title\",this.tooltip);\n\t}\n\tif(this[\"aria-label\"]) {\n\t\tdomNode.setAttribute(\"aria-label\",this[\"aria-label\"]);\n\t}\n\t// Add a click event handler\n\tdomNode.addEventListener(\"click\",function (event) {\n\t\tvar handled = false;\n\t\tif(self.invokeActions(this,event)) {\n\t\t\thandled = true;\n\t\t}\n\t\tif(self.to) 
{\n\t\t\tself.navigateTo(event);\n\t\t\thandled = true;\n\t\t}\n\t\tif(self.message) {\n\t\t\tself.dispatchMessage(event);\n\t\t\thandled = true;\n\t\t}\n\t\tif(self.popup) {\n\t\t\tself.triggerPopup(event);\n\t\t\thandled = true;\n\t\t}\n\t\tif(self.set) {\n\t\t\tself.setTiddler();\n\t\t\thandled = true;\n\t\t}\n\t\tif(self.actions) {\n\t\t\tself.invokeActionString(self.actions,self,event);\n\t\t}\n\t\tif(handled) {\n\t\t\tevent.preventDefault();\n\t\t\tevent.stopPropagation();\n\t\t}\n\t\treturn handled;\n\t},false);\n\t// Insert element\n\tparent.insertBefore(domNode,nextSibling);\n\tthis.renderChildren(domNode,null);\n\tthis.domNodes.push(domNode);\n};\n\n/*\nWe don't allow actions to propagate because we trigger actions ourselves\n*/\nButtonWidget.prototype.allowActionPropagation = function() {\n\treturn false;\n};\n\nButtonWidget.prototype.getBoundingClientRect = function() {\n\treturn this.domNodes[0].getBoundingClientRect();\n};\n\nButtonWidget.prototype.isSelected = function() {\n return this.wiki.getTextReference(this.set,this.defaultSetValue,this.getVariable(\"currentTiddler\")) === this.setTo;\n};\n\nButtonWidget.prototype.isPoppedUp = function() {\n\tvar tiddler = this.wiki.getTiddler(this.popup);\n\tvar result = tiddler && tiddler.fields.text ? 
$tw.popup.readPopupState(tiddler.fields.text) : false;\n\treturn result;\n};\n\nButtonWidget.prototype.navigateTo = function(event) {\n\tvar bounds = this.getBoundingClientRect();\n\tthis.dispatchEvent({\n\t\ttype: \"tm-navigate\",\n\t\tnavigateTo: this.to,\n\t\tnavigateFromTitle: this.getVariable(\"storyTiddler\"),\n\t\tnavigateFromNode: this,\n\t\tnavigateFromClientRect: { top: bounds.top, left: bounds.left, width: bounds.width, right: bounds.right, bottom: bounds.bottom, height: bounds.height\n\t\t},\n\t\tnavigateSuppressNavigation: event.metaKey || event.ctrlKey || (event.button === 1)\n\t});\n};\n\nButtonWidget.prototype.dispatchMessage = function(event) {\n\tthis.dispatchEvent({type: this.message, param: this.param, tiddlerTitle: this.getVariable(\"currentTiddler\")});\n};\n\nButtonWidget.prototype.triggerPopup = function(event) {\n\t$tw.popup.triggerPopup({\n\t\tdomNode: this.domNodes[0],\n\t\ttitle: this.popup,\n\t\twiki: this.wiki\n\t});\n};\n\nButtonWidget.prototype.setTiddler = function() {\n\tthis.wiki.setTextReference(this.set,this.setTo,this.getVariable(\"currentTiddler\"));\n};\n\n/*\nCompute the internal state of the widget\n*/\nButtonWidget.prototype.execute = function() {\n\t// Get attributes\n\tthis.actions = this.getAttribute(\"actions\");\n\tthis.to = this.getAttribute(\"to\");\n\tthis.message = this.getAttribute(\"message\");\n\tthis.param = this.getAttribute(\"param\");\n\tthis.set = this.getAttribute(\"set\");\n\tthis.setTo = this.getAttribute(\"setTo\");\n\tthis.popup = this.getAttribute(\"popup\");\n\tthis.hover = this.getAttribute(\"hover\");\n\tthis[\"class\"] = this.getAttribute(\"class\",\"\");\n\tthis[\"aria-label\"] = this.getAttribute(\"aria-label\");\n\tthis.tooltip = this.getAttribute(\"tooltip\");\n\tthis.style = this.getAttribute(\"style\");\n\tthis.selectedClass = this.getAttribute(\"selectedClass\");\n\tthis.defaultSetValue = this.getAttribute(\"default\",\"\");\n\tthis.buttonTag = this.getAttribute(\"tag\");\n\t// Make child 
widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nButtonWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.to || changedAttributes.message || changedAttributes.param || changedAttributes.set || changedAttributes.setTo || changedAttributes.popup || changedAttributes.hover || changedAttributes[\"class\"] || changedAttributes.selectedClass || changedAttributes.style || (this.set && changedTiddlers[this.set]) || (this.popup && changedTiddlers[this.popup])) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t}\n\treturn this.refreshChildren(changedTiddlers);\n};\n\nexports.button = ButtonWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/button.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/checkbox.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/checkbox.js\ntype: application/javascript\nmodule-type: widget\n\nCheckbox widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar CheckboxWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nCheckboxWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nCheckboxWidget.prototype.render = function(parent,nextSibling) {\n\t// Save the parent dom node\n\tthis.parentDomNode = parent;\n\t// Compute our attributes\n\tthis.computeAttributes();\n\t// Execute our logic\n\tthis.execute();\n\t// Create our elements\n\tthis.labelDomNode = this.document.createElement(\"label\");\n\tthis.labelDomNode.setAttribute(\"class\",this.checkboxClass);\n\tthis.inputDomNode = this.document.createElement(\"input\");\n\tthis.inputDomNode.setAttribute(\"type\",\"checkbox\");\n\tif(this.getValue()) {\n\t\tthis.inputDomNode.setAttribute(\"checked\",\"true\");\n\t}\n\tthis.labelDomNode.appendChild(this.inputDomNode);\n\tthis.spanDomNode = this.document.createElement(\"span\");\n\tthis.labelDomNode.appendChild(this.spanDomNode);\n\t// Add a click event handler\n\t$tw.utils.addEventListeners(this.inputDomNode,[\n\t\t{name: \"change\", handlerObject: this, handlerMethod: \"handleChangeEvent\"}\n\t]);\n\t// Insert the label into the DOM and render any children\n\tparent.insertBefore(this.labelDomNode,nextSibling);\n\tthis.renderChildren(this.spanDomNode,null);\n\tthis.domNodes.push(this.labelDomNode);\n};\n\nCheckboxWidget.prototype.getValue = function() {\n\tvar tiddler = this.wiki.getTiddler(this.checkboxTitle);\n\tif(tiddler) {\n\t\tif(this.checkboxTag) {\n\t\t\tif(this.checkboxInvertTag) {\n\t\t\t\treturn !tiddler.hasTag(this.checkboxTag);\n\t\t\t} else {\n\t\t\t\treturn 
tiddler.hasTag(this.checkboxTag);\n\t\t\t}\n\t\t}\n\t\tif(this.checkboxField) {\n\t\t\tvar value = tiddler.fields[this.checkboxField] || this.checkboxDefault || \"\";\n\t\t\tif(value === this.checkboxChecked) {\n\t\t\t\treturn true;\n\t\t\t}\n\t\t\tif(value === this.checkboxUnchecked) {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t}\n\t} else {\n\t\tif(this.checkboxTag) {\n\t\t\treturn false;\n\t\t}\n\t\tif(this.checkboxField) {\n\t\t\tif(this.checkboxDefault === this.checkboxChecked) {\n\t\t\t\treturn true;\n\t\t\t}\n\t\t\tif(this.checkboxDefault === this.checkboxUnchecked) {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t}\n\t}\n\treturn false;\n};\n\nCheckboxWidget.prototype.handleChangeEvent = function(event) {\n\tvar checked = this.inputDomNode.checked,\n\t\ttiddler = this.wiki.getTiddler(this.checkboxTitle),\n\t\tfallbackFields = {text: \"\"},\n\t\tnewFields = {title: this.checkboxTitle},\n\t\thasChanged = false,\n\t\ttagCheck = false,\n\t\thasTag = tiddler && tiddler.hasTag(this.checkboxTag);\n\tif(this.checkboxTag && this.checkboxInvertTag === \"yes\") {\n\t\ttagCheck = hasTag === checked;\n\t} else {\n\t\ttagCheck = hasTag !== checked;\n\t}\n\t// Set the tag if specified\n\tif(this.checkboxTag && (!tiddler || tagCheck)) {\n\t\tnewFields.tags = tiddler ? (tiddler.fields.tags || []).slice(0) : [];\n\t\tvar pos = newFields.tags.indexOf(this.checkboxTag);\n\t\tif(pos !== -1) {\n\t\t\tnewFields.tags.splice(pos,1);\n\t\t}\n\t\tif(this.checkboxInvertTag === \"yes\" && !checked) {\n\t\t\tnewFields.tags.push(this.checkboxTag);\n\t\t} else if(this.checkboxInvertTag !== \"yes\" && checked) {\n\t\t\tnewFields.tags.push(this.checkboxTag);\n\t\t}\n\t\thasChanged = true;\n\t}\n\t// Set the field if specified\n\tif(this.checkboxField) {\n\t\tvar value = checked ? 
this.checkboxChecked : this.checkboxUnchecked;\n\t\tif(!tiddler || tiddler.fields[this.checkboxField] !== value) {\n\t\t\tnewFields[this.checkboxField] = value;\n\t\t\thasChanged = true;\n\t\t}\n\t}\n\tif(hasChanged) {\n\t\tthis.wiki.addTiddler(new $tw.Tiddler(this.wiki.getCreationFields(),fallbackFields,tiddler,newFields,this.wiki.getModificationFields()));\n\t}\n};\n\n/*\nCompute the internal state of the widget\n*/\nCheckboxWidget.prototype.execute = function() {\n\t// Get the parameters from the attributes\n\tthis.checkboxTitle = this.getAttribute(\"tiddler\",this.getVariable(\"currentTiddler\"));\n\tthis.checkboxTag = this.getAttribute(\"tag\");\n\tthis.checkboxField = this.getAttribute(\"field\");\n\tthis.checkboxChecked = this.getAttribute(\"checked\");\n\tthis.checkboxUnchecked = this.getAttribute(\"unchecked\");\n\tthis.checkboxDefault = this.getAttribute(\"default\");\n\tthis.checkboxClass = this.getAttribute(\"class\",\"\");\n\tthis.checkboxInvertTag = this.getAttribute(\"invertTag\",\"\");\n\t// Make the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nCheckboxWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.tiddler || changedAttributes.tag || changedAttributes.invertTag || changedAttributes.field || changedAttributes.checked || changedAttributes.unchecked || changedAttributes[\"default\"] || changedAttributes[\"class\"]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\tvar refreshed = false;\n\t\tif(changedTiddlers[this.checkboxTitle]) {\n\t\t\tthis.inputDomNode.checked = this.getValue();\n\t\t\trefreshed = true;\n\t\t}\n\t\treturn this.refreshChildren(changedTiddlers) || refreshed;\n\t}\n};\n\nexports.checkbox = CheckboxWidget;\n\n})();",
"title": "$:/core/modules/widgets/checkbox.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/codeblock.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/codeblock.js\ntype: application/javascript\nmodule-type: widget\n\nCode block node widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar CodeBlockWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nCodeBlockWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nCodeBlockWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tvar codeNode = this.document.createElement(\"code\"),\n\t\tdomNode = this.document.createElement(\"pre\");\n\tcodeNode.appendChild(this.document.createTextNode(this.getAttribute(\"code\")));\n\tdomNode.appendChild(codeNode);\n\tparent.insertBefore(domNode,nextSibling);\n\tthis.domNodes.push(domNode);\n\tif(this.postRender) {\n\t\tthis.postRender();\n\t}\n};\n\n/*\nCompute the internal state of the widget\n*/\nCodeBlockWidget.prototype.execute = function() {\n\tthis.language = this.getAttribute(\"language\");\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nCodeBlockWidget.prototype.refresh = function(changedTiddlers) {\n\treturn false;\n};\n\nexports.codeblock = CodeBlockWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/codeblock.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/count.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/count.js\ntype: application/javascript\nmodule-type: widget\n\nCount widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar CountWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nCountWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nCountWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tvar textNode = this.document.createTextNode(this.currentCount);\n\tparent.insertBefore(textNode,nextSibling);\n\tthis.domNodes.push(textNode);\n};\n\n/*\nCompute the internal state of the widget\n*/\nCountWidget.prototype.execute = function() {\n\t// Get parameters from our attributes\n\tthis.filter = this.getAttribute(\"filter\");\n\t// Execute the filter\n\tif(this.filter) {\n\t\tthis.currentCount = this.wiki.filterTiddlers(this.filter,this).length;\n\t} else {\n\t\tthis.currentCount = undefined;\n\t}\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nCountWidget.prototype.refresh = function(changedTiddlers) {\n\t// Re-execute the filter to get the count\n\tthis.computeAttributes();\n\tvar oldCount = this.currentCount;\n\tthis.execute();\n\tif(this.currentCount !== oldCount) {\n\t\t// Regenerate and rerender the widget and replace the existing DOM node\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn false;\n\t}\n\n};\n\nexports.count = CountWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/count.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/dropzone.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/dropzone.js\ntype: application/javascript\nmodule-type: widget\n\nDropzone widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar DropZoneWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nDropZoneWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nDropZoneWidget.prototype.render = function(parent,nextSibling) {\n\tvar self = this;\n\t// Remember parent\n\tthis.parentDomNode = parent;\n\t// Compute attributes and execute state\n\tthis.computeAttributes();\n\tthis.execute();\n\t// Create element\n\tvar domNode = this.document.createElement(\"div\");\n\tdomNode.className = \"tc-dropzone\";\n\t// Add event handlers\n\t$tw.utils.addEventListeners(domNode,[\n\t\t{name: \"dragenter\", handlerObject: this, handlerMethod: \"handleDragEnterEvent\"},\n\t\t{name: \"dragover\", handlerObject: this, handlerMethod: \"handleDragOverEvent\"},\n\t\t{name: \"dragleave\", handlerObject: this, handlerMethod: \"handleDragLeaveEvent\"},\n\t\t{name: \"drop\", handlerObject: this, handlerMethod: \"handleDropEvent\"},\n\t\t{name: \"paste\", handlerObject: this, handlerMethod: \"handlePasteEvent\"}\n\t]);\n\tdomNode.addEventListener(\"click\",function (event) {\n\t},false);\n\t// Insert element\n\tparent.insertBefore(domNode,nextSibling);\n\tthis.renderChildren(domNode,null);\n\tthis.domNodes.push(domNode);\n};\n\nDropZoneWidget.prototype.enterDrag = function() {\n\t// Check for this window being the source of the drag\n\tif($tw.dragInProgress) {\n\t\treturn false;\n\t}\n\t// We count enter/leave events\n\tthis.dragEnterCount = (this.dragEnterCount || 0) + 1;\n\t// If we're entering for the first time we need to apply highlighting\n\tif(this.dragEnterCount === 1) 
{\n\t\t$tw.utils.addClass(this.domNodes[0],\"tc-dragover\");\n\t}\n};\n\nDropZoneWidget.prototype.leaveDrag = function() {\n\t// Reduce the enter count\n\tthis.dragEnterCount = (this.dragEnterCount || 0) - 1;\n\t// Remove highlighting if we're leaving externally\n\tif(this.dragEnterCount <= 0) {\n\t\t$tw.utils.removeClass(this.domNodes[0],\"tc-dragover\");\n\t}\n};\n\nDropZoneWidget.prototype.handleDragEnterEvent = function(event) {\n\tthis.enterDrag();\n\t// Tell the browser that we're ready to handle the drop\n\tevent.preventDefault();\n\t// Tell the browser not to ripple the drag up to any parent drop handlers\n\tevent.stopPropagation();\n};\n\nDropZoneWidget.prototype.handleDragOverEvent = function(event) {\n\t// Check for being over a TEXTAREA or INPUT\n\tif([\"TEXTAREA\",\"INPUT\"].indexOf(event.target.tagName) !== -1) {\n\t\treturn false;\n\t}\n\t// Check for this window being the source of the drag\n\tif($tw.dragInProgress) {\n\t\treturn false;\n\t}\n\t// Tell the browser that we're still interested in the drop\n\tevent.preventDefault();\n\tevent.dataTransfer.dropEffect = \"copy\"; // Explicitly show this is a copy\n};\n\nDropZoneWidget.prototype.handleDragLeaveEvent = function(event) {\n\tthis.leaveDrag();\n};\n\nDropZoneWidget.prototype.handleDropEvent = function(event) {\n\tthis.leaveDrag();\n\t// Check for being over a TEXTAREA or INPUT\n\tif([\"TEXTAREA\",\"INPUT\"].indexOf(event.target.tagName) !== -1) {\n\t\treturn false;\n\t}\n\t// Check for this window being the source of the drag\n\tif($tw.dragInProgress) {\n\t\treturn false;\n\t}\n\tvar self = this,\n\t\tdataTransfer = event.dataTransfer;\n\t// Reset the enter count\n\tthis.dragEnterCount = 0;\n\t// Remove highlighting\n\t$tw.utils.removeClass(this.domNodes[0],\"tc-dragover\");\n\t// Import any files in the drop\n\tvar numFiles = this.wiki.readFiles(dataTransfer.files,function(tiddlerFieldsArray) {\n\t\tself.dispatchEvent({type: \"tm-import-tiddlers\", param: 
JSON.stringify(tiddlerFieldsArray)});\n\t});\n\t// Try to import the various data types we understand\n\tif(numFiles === 0) {\n\t\tthis.importData(dataTransfer);\n\t}\n\t// Tell the browser that we handled the drop\n\tevent.preventDefault();\n\t// Stop the drop ripple up to any parent handlers\n\tevent.stopPropagation();\n};\n\nDropZoneWidget.prototype.importData = function(dataTransfer) {\n\t// Try each provided data type in turn\n\tfor(var t=0; t<this.importDataTypes.length; t++) {\n\t\tif(!$tw.browser.isIE || this.importDataTypes[t].IECompatible) {\n\t\t\t// Get the data\n\t\t\tvar dataType = this.importDataTypes[t];\n\t\t\t\tvar data = dataTransfer.getData(dataType.type);\n\t\t\t// Import the tiddlers in the data\n\t\t\tif(data !== \"\" && data !== null) {\n\t\t\t\tif($tw.log.IMPORT) {\n\t\t\t\t\tconsole.log(\"Importing data type '\" + dataType.type + \"', data: '\" + data + \"'\")\n\t\t\t\t}\n\t\t\t\tvar tiddlerFields = dataType.convertToFields(data);\n\t\t\t\tif(!tiddlerFields.title) {\n\t\t\t\t\ttiddlerFields.title = this.wiki.generateNewTitle(\"Untitled\");\n\t\t\t\t}\n\t\t\t\tthis.dispatchEvent({type: \"tm-import-tiddlers\", param: JSON.stringify([tiddlerFields])});\n\t\t\t\treturn;\n\t\t\t}\n\t\t}\n\t}\n};\n\nDropZoneWidget.prototype.importDataTypes = [\n\t{type: \"text/vnd.tiddler\", IECompatible: false, convertToFields: function(data) {\n\t\treturn JSON.parse(data);\n\t}},\n\t{type: \"URL\", IECompatible: true, convertToFields: function(data) {\n\t\t// Check for tiddler data URI\n\t\tvar match = decodeURIComponent(data).match(/^data\\:text\\/vnd\\.tiddler,(.*)/i);\n\t\tif(match) {\n\t\t\treturn JSON.parse(match[1]);\n\t\t} else {\n\t\t\treturn { // As URL string\n\t\t\t\ttext: data\n\t\t\t};\n\t\t}\n\t}},\n\t{type: \"text/x-moz-url\", IECompatible: false, convertToFields: function(data) {\n\t\t// Check for tiddler data URI\n\t\tvar match = decodeURIComponent(data).match(/^data\\:text\\/vnd\\.tiddler,(.*)/i);\n\t\tif(match) {\n\t\t\treturn 
JSON.parse(match[1]);\n\t\t} else {\n\t\t\treturn { // As URL string\n\t\t\t\ttext: data\n\t\t\t};\n\t\t}\n\t}},\n\t{type: \"text/html\", IECompatible: false, convertToFields: function(data) {\n\t\treturn {\n\t\t\ttext: data\n\t\t};\n\t}},\n\t{type: \"text/plain\", IECompatible: false, convertToFields: function(data) {\n\t\treturn {\n\t\t\ttext: data\n\t\t};\n\t}},\n\t{type: \"Text\", IECompatible: true, convertToFields: function(data) {\n\t\treturn {\n\t\t\ttext: data\n\t\t};\n\t}},\n\t{type: \"text/uri-list\", IECompatible: false, convertToFields: function(data) {\n\t\treturn {\n\t\t\ttext: data\n\t\t};\n\t}}\n];\n\nDropZoneWidget.prototype.handlePasteEvent = function(event) {\n\t// Let the browser handle it if we're in a textarea or input box\n\tif([\"TEXTAREA\",\"INPUT\"].indexOf(event.target.tagName) == -1) {\n\t\tvar self = this,\n\t\t\titems = event.clipboardData.items;\n\t\t// Enumerate the clipboard items\n\t\tfor(var t = 0; t<items.length; t++) {\n\t\t\tvar item = items[t];\n\t\t\tif(item.kind === \"file\") {\n\t\t\t\t// Import any files\n\t\t\t\tthis.wiki.readFile(item.getAsFile(),function(tiddlerFieldsArray) {\n\t\t\t\t\tself.dispatchEvent({type: \"tm-import-tiddlers\", param: JSON.stringify(tiddlerFieldsArray)});\n\t\t\t\t});\n\t\t\t} else if(item.kind === \"string\") {\n\t\t\t\t// Create tiddlers from string items\n\t\t\t\tvar type = item.type;\n\t\t\t\titem.getAsString(function(str) {\n\t\t\t\t\tvar tiddlerFields = {\n\t\t\t\t\t\ttitle: self.wiki.generateNewTitle(\"Untitled\"),\n\t\t\t\t\t\ttext: str,\n\t\t\t\t\t\ttype: type\n\t\t\t\t\t};\n\t\t\t\t\tif($tw.log.IMPORT) {\n\t\t\t\t\t\tconsole.log(\"Importing string '\" + str + \"', type: '\" + type + \"'\");\n\t\t\t\t\t}\n\t\t\t\t\tself.dispatchEvent({type: \"tm-import-tiddlers\", param: JSON.stringify([tiddlerFields])});\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\t\t// Tell the browser that we've handled the paste\n\t\tevent.stopPropagation();\n\t\tevent.preventDefault();\n\t}\n};\n\n/*\nCompute the internal 
state of the widget\n*/\nDropZoneWidget.prototype.execute = function() {\n\t// Make child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nDropZoneWidget.prototype.refresh = function(changedTiddlers) {\n\treturn this.refreshChildren(changedTiddlers);\n};\n\nexports.dropzone = DropZoneWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/dropzone.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/edit-binary.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/edit-binary.js\ntype: application/javascript\nmodule-type: widget\n\nEdit-binary widget; placeholder for editing binary tiddlers\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar BINARY_WARNING_MESSAGE = \"$:/core/ui/BinaryWarning\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar EditBinaryWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nEditBinaryWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nEditBinaryWidget.prototype.render = function(parent,nextSibling) {\n\tvar self = this;\n\t// Save the parent dom node\n\tthis.parentDomNode = parent;\n\t// Compute our attributes\n\tthis.computeAttributes();\n\t// Execute our logic\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nEditBinaryWidget.prototype.execute = function() {\n\t// Construct the child widgets\n\tthis.makeChildWidgets([{\n\t\ttype: \"transclude\",\n\t\tattributes: {\n\t\t\ttiddler: {type: \"string\", value: BINARY_WARNING_MESSAGE}\n\t\t}\n\t}]);\n};\n\n/*\nRefresh by refreshing our child widget\n*/\nEditBinaryWidget.prototype.refresh = function(changedTiddlers) {\n\treturn this.refreshChildren(changedTiddlers);\n};\n\nexports[\"edit-binary\"] = EditBinaryWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/edit-binary.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/edit-bitmap.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/edit-bitmap.js\ntype: application/javascript\nmodule-type: widget\n\nEdit-bitmap widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Default image sizes\nvar DEFAULT_IMAGE_WIDTH = 600,\n\tDEFAULT_IMAGE_HEIGHT = 370;\n\n// Configuration tiddlers\nvar LINE_WIDTH_TITLE = \"$:/config/BitmapEditor/LineWidth\",\n\tLINE_COLOUR_TITLE = \"$:/config/BitmapEditor/Colour\",\n\tLINE_OPACITY_TITLE = \"$:/config/BitmapEditor/Opacity\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar EditBitmapWidget = function(parseTreeNode,options) {\n\t// Initialise the editor operations if they've not been done already\n\tif(!this.editorOperations) {\n\t\tEditBitmapWidget.prototype.editorOperations = {};\n\t\t$tw.modules.applyMethods(\"bitmapeditoroperation\",this.editorOperations);\n\t}\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nEditBitmapWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nEditBitmapWidget.prototype.render = function(parent,nextSibling) {\n\tvar self = this;\n\t// Save the parent dom node\n\tthis.parentDomNode = parent;\n\t// Compute our attributes\n\tthis.computeAttributes();\n\t// Execute our logic\n\tthis.execute();\n\t// Create the wrapper for the toolbar and render its content\n\tthis.toolbarNode = this.document.createElement(\"div\");\n\tthis.toolbarNode.className = \"tc-editor-toolbar\";\n\tparent.insertBefore(this.toolbarNode,nextSibling);\n\tthis.domNodes.push(this.toolbarNode);\n\t// Create the on-screen canvas\n\tthis.canvasDomNode = $tw.utils.domMaker(\"canvas\",{\n\t\tdocument: this.document,\n\t\t\"class\":\"tc-edit-bitmapeditor\",\n\t\teventListeners: [{\n\t\t\tname: \"touchstart\", handlerObject: this, handlerMethod: \"handleTouchStartEvent\"\n\t\t},{\n\t\t\tname: \"touchmove\", handlerObject: this, handlerMethod: 
\"handleTouchMoveEvent\"\n\t\t},{\n\t\t\tname: \"touchend\", handlerObject: this, handlerMethod: \"handleTouchEndEvent\"\n\t\t},{\n\t\t\tname: \"mousedown\", handlerObject: this, handlerMethod: \"handleMouseDownEvent\"\n\t\t},{\n\t\t\tname: \"mousemove\", handlerObject: this, handlerMethod: \"handleMouseMoveEvent\"\n\t\t},{\n\t\t\tname: \"mouseup\", handlerObject: this, handlerMethod: \"handleMouseUpEvent\"\n\t\t}]\n\t});\n\t// Set the width and height variables\n\tthis.setVariable(\"tv-bitmap-editor-width\",this.canvasDomNode.width + \"px\");\n\tthis.setVariable(\"tv-bitmap-editor-height\",this.canvasDomNode.height + \"px\");\n\t// Render toolbar child widgets\n\tthis.renderChildren(this.toolbarNode,null);\n\t// // Insert the elements into the DOM\n\tparent.insertBefore(this.canvasDomNode,nextSibling);\n\tthis.domNodes.push(this.canvasDomNode);\n\t// Load the image into the canvas\n\tif($tw.browser) {\n\t\tthis.loadCanvas();\n\t}\n\t// Add widget message listeners\n\tthis.addEventListeners([\n\t\t{type: \"tm-edit-bitmap-operation\", handler: \"handleEditBitmapOperationMessage\"}\n\t]);\n};\n\n/*\nHandle an edit bitmap operation message from the toolbar\n*/\nEditBitmapWidget.prototype.handleEditBitmapOperationMessage = function(event) {\n\t// Invoke the handler\n\tvar handler = this.editorOperations[event.param];\n\tif(handler) {\n\t\thandler.call(this,event);\n\t}\n};\n\n/*\nCompute the internal state of the widget\n*/\nEditBitmapWidget.prototype.execute = function() {\n\t// Get our parameters\n\tthis.editTitle = this.getAttribute(\"tiddler\",this.getVariable(\"currentTiddler\"));\n\t// Make the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nJust refresh the toolbar\n*/\nEditBitmapWidget.prototype.refresh = function(changedTiddlers) {\n\treturn this.refreshChildren(changedTiddlers);\n};\n\n/*\nSet the bitmap size variables and refresh the toolbar\n*/\nEditBitmapWidget.prototype.refreshToolbar = function() {\n\t// Set the width and height 
variables\n\tthis.setVariable(\"tv-bitmap-editor-width\",this.canvasDomNode.width + \"px\");\n\tthis.setVariable(\"tv-bitmap-editor-height\",this.canvasDomNode.height + \"px\");\n\t// Refresh each of our child widgets\n\t$tw.utils.each(this.children,function(childWidget) {\n\t\tchildWidget.refreshSelf();\n\t});\n};\n\nEditBitmapWidget.prototype.loadCanvas = function() {\n\tvar tiddler = this.wiki.getTiddler(this.editTitle),\n\t\tcurrImage = new Image();\n\t// Set up event handlers for loading the image\n\tvar self = this;\n\tcurrImage.onload = function() {\n\t\t// Copy the image to the on-screen canvas\n\t\tself.initCanvas(self.canvasDomNode,currImage.width,currImage.height,currImage);\n\t\t// And also copy the current bitmap to the off-screen canvas\n\t\tself.currCanvas = self.document.createElement(\"canvas\");\n\t\tself.initCanvas(self.currCanvas,currImage.width,currImage.height,currImage);\n\t\t// Set the width and height input boxes\n\t\tself.refreshToolbar();\n\t};\n\tcurrImage.onerror = function() {\n\t\t// Set the on-screen canvas size and clear it\n\t\tself.initCanvas(self.canvasDomNode,DEFAULT_IMAGE_WIDTH,DEFAULT_IMAGE_HEIGHT);\n\t\t// Set the off-screen canvas size and clear it\n\t\tself.currCanvas = self.document.createElement(\"canvas\");\n\t\tself.initCanvas(self.currCanvas,DEFAULT_IMAGE_WIDTH,DEFAULT_IMAGE_HEIGHT);\n\t\t// Set the width and height input boxes\n\t\tself.refreshToolbar();\n\t};\n\t// Get the current bitmap into an image object\n\tcurrImage.src = \"data:\" + tiddler.fields.type + \";base64,\" + tiddler.fields.text;\n};\n\nEditBitmapWidget.prototype.initCanvas = function(canvas,width,height,image) {\n\tcanvas.width = width;\n\tcanvas.height = height;\n\tvar ctx = canvas.getContext(\"2d\");\n\tif(image) {\n\t\tctx.drawImage(image,0,0);\n\t} else {\n\t\tctx.fillStyle = \"#fff\";\n\t\tctx.fillRect(0,0,canvas.width,canvas.height);\n\t}\n};\n\n/*\n** Change the size of the canvas, preserving the current 
image\n*/\nEditBitmapWidget.prototype.changeCanvasSize = function(newWidth,newHeight) {\n\t// Create and size a new canvas\n\tvar newCanvas = this.document.createElement(\"canvas\");\n\tthis.initCanvas(newCanvas,newWidth,newHeight);\n\t// Copy the old image\n\tvar ctx = newCanvas.getContext(\"2d\");\n\tctx.drawImage(this.currCanvas,0,0);\n\t// Set the new canvas as the current one\n\tthis.currCanvas = newCanvas;\n\t// Set the size of the onscreen canvas\n\tthis.canvasDomNode.width = newWidth;\n\tthis.canvasDomNode.height = newHeight;\n\t// Paint the onscreen canvas with the offscreen canvas\n\tctx = this.canvasDomNode.getContext(\"2d\");\n\tctx.drawImage(this.currCanvas,0,0);\n};\n\nEditBitmapWidget.prototype.handleTouchStartEvent = function(event) {\n\tthis.brushDown = true;\n\tthis.strokeStart(event.touches[0].clientX,event.touches[0].clientY);\n\tevent.preventDefault();\n\tevent.stopPropagation();\n\treturn false;\n};\n\nEditBitmapWidget.prototype.handleTouchMoveEvent = function(event) {\n\tif(this.brushDown) {\n\t\tthis.strokeMove(event.touches[0].clientX,event.touches[0].clientY);\n\t}\n\tevent.preventDefault();\n\tevent.stopPropagation();\n\treturn false;\n};\n\nEditBitmapWidget.prototype.handleTouchEndEvent = function(event) {\n\tif(this.brushDown) {\n\t\tthis.brushDown = false;\n\t\tthis.strokeEnd();\n\t}\n\tevent.preventDefault();\n\tevent.stopPropagation();\n\treturn false;\n};\n\nEditBitmapWidget.prototype.handleMouseDownEvent = function(event) {\n\tthis.strokeStart(event.clientX,event.clientY);\n\tthis.brushDown = true;\n\tevent.preventDefault();\n\tevent.stopPropagation();\n\treturn false;\n};\n\nEditBitmapWidget.prototype.handleMouseMoveEvent = function(event) {\n\tif(this.brushDown) {\n\t\tthis.strokeMove(event.clientX,event.clientY);\n\t\tevent.preventDefault();\n\t\tevent.stopPropagation();\n\t\treturn false;\n\t}\n\treturn true;\n};\n\nEditBitmapWidget.prototype.handleMouseUpEvent = function(event) {\n\tif(this.brushDown) {\n\t\tthis.brushDown = 
false;\n\t\tthis.strokeEnd();\n\t\tevent.preventDefault();\n\t\tevent.stopPropagation();\n\t\treturn false;\n\t}\n\treturn true;\n};\n\nEditBitmapWidget.prototype.adjustCoordinates = function(x,y) {\n\tvar canvasRect = this.canvasDomNode.getBoundingClientRect(),\n\t\tscale = this.canvasDomNode.width/canvasRect.width;\n\treturn {x: (x - canvasRect.left) * scale, y: (y - canvasRect.top) * scale};\n};\n\nEditBitmapWidget.prototype.strokeStart = function(x,y) {\n\t// Start off a new stroke\n\tthis.stroke = [this.adjustCoordinates(x,y)];\n};\n\nEditBitmapWidget.prototype.strokeMove = function(x,y) {\n\tvar ctx = this.canvasDomNode.getContext(\"2d\"),\n\t\tt;\n\t// Add the new position to the end of the stroke\n\tthis.stroke.push(this.adjustCoordinates(x,y));\n\t// Redraw the previous image\n\tctx.drawImage(this.currCanvas,0,0);\n\t// Render the stroke\n\tctx.globalAlpha = parseFloat(this.wiki.getTiddlerText(LINE_OPACITY_TITLE,\"1.0\"));\n\tctx.strokeStyle = this.wiki.getTiddlerText(LINE_COLOUR_TITLE,\"#ff0\");\n\tctx.lineWidth = parseFloat(this.wiki.getTiddlerText(LINE_WIDTH_TITLE,\"3\"));\n\tctx.lineCap = \"round\";\n\tctx.lineJoin = \"round\";\n\tctx.beginPath();\n\tctx.moveTo(this.stroke[0].x,this.stroke[0].y);\n\tfor(t=1; t<this.stroke.length-1; t++) {\n\t\tvar s1 = this.stroke[t],\n\t\t\ts2 = this.stroke[t-1],\n\t\t\ttx = (s1.x + s2.x)/2,\n\t\t\tty = (s1.y + s2.y)/2;\n\t\tctx.quadraticCurveTo(s2.x,s2.y,tx,ty);\n\t}\n\tctx.stroke();\n};\n\nEditBitmapWidget.prototype.strokeEnd = function() {\n\t// Copy the bitmap to the off-screen canvas\n\tvar ctx = this.currCanvas.getContext(\"2d\");\n\tctx.drawImage(this.canvasDomNode,0,0);\n\t// Save the image into the tiddler\n\tthis.saveChanges();\n};\n\nEditBitmapWidget.prototype.saveChanges = function() {\n\tvar tiddler = this.wiki.getTiddler(this.editTitle);\n\tif(tiddler) {\n\t\t// data URIs look like \"data:<type>;base64,<text>\"\n\t\tvar dataURL = this.canvasDomNode.toDataURL(tiddler.fields.type),\n\t\t\tposColon = 
dataURL.indexOf(\":\"),\n\t\t\tposSemiColon = dataURL.indexOf(\";\"),\n\t\t\tposComma = dataURL.indexOf(\",\"),\n\t\t\ttype = dataURL.substring(posColon+1,posSemiColon),\n\t\t\ttext = dataURL.substring(posComma+1);\n\t\tvar update = {type: type, text: text};\n\t\tthis.wiki.addTiddler(new $tw.Tiddler(this.wiki.getModificationFields(),tiddler,update,this.wiki.getCreationFields()));\n\t}\n};\n\nexports[\"edit-bitmap\"] = EditBitmapWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/edit-bitmap.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/edit-shortcut.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/edit-shortcut.js\ntype: application/javascript\nmodule-type: widget\n\nWidget to display an editable keyboard shortcut\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar EditShortcutWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nEditShortcutWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nEditShortcutWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.inputNode = this.document.createElement(\"input\");\n\t// Assign classes\n\tif(this.shortcutClass) {\n\t\tthis.inputNode.className = this.shortcutClass;\t\t\n\t}\n\t// Assign other attributes\n\tif(this.shortcutStyle) {\n\t\tthis.inputNode.setAttribute(\"style\",this.shortcutStyle);\n\t}\n\tif(this.shortcutTooltip) {\n\t\tthis.inputNode.setAttribute(\"title\",this.shortcutTooltip);\n\t}\n\tif(this.shortcutPlaceholder) {\n\t\tthis.inputNode.setAttribute(\"placeholder\",this.shortcutPlaceholder);\n\t}\n\tif(this.shortcutAriaLabel) {\n\t\tthis.inputNode.setAttribute(\"aria-label\",this.shortcutAriaLabel);\n\t}\n\t// Assign the current shortcut\n\tthis.updateInputNode();\n\t// Add event handlers\n\t$tw.utils.addEventListeners(this.inputNode,[\n\t\t{name: \"keydown\", handlerObject: this, handlerMethod: \"handleKeydownEvent\"}\n\t]);\n\t// Link into the DOM\n\tparent.insertBefore(this.inputNode,nextSibling);\n\tthis.domNodes.push(this.inputNode);\n};\n\n/*\nCompute the internal state of the widget\n*/\nEditShortcutWidget.prototype.execute = function() {\n\tthis.shortcutTiddler = this.getAttribute(\"tiddler\");\n\tthis.shortcutField = this.getAttribute(\"field\");\n\tthis.shortcutIndex = 
this.getAttribute(\"index\");\n\tthis.shortcutPlaceholder = this.getAttribute(\"placeholder\");\n\tthis.shortcutDefault = this.getAttribute(\"default\",\"\");\n\tthis.shortcutClass = this.getAttribute(\"class\");\n\tthis.shortcutStyle = this.getAttribute(\"style\");\n\tthis.shortcutTooltip = this.getAttribute(\"tooltip\");\n\tthis.shortcutAriaLabel = this.getAttribute(\"aria-label\");\n};\n\n/*\nUpdate the value of the input node\n*/\nEditShortcutWidget.prototype.updateInputNode = function() {\n\tif(this.shortcutField) {\n\t\tvar tiddler = this.wiki.getTiddler(this.shortcutTiddler);\n\t\tif(tiddler && $tw.utils.hop(tiddler.fields,this.shortcutField)) {\n\t\t\tthis.inputNode.value = tiddler.getFieldString(this.shortcutField);\n\t\t} else {\n\t\t\tthis.inputNode.value = this.shortcutDefault;\n\t\t}\n\t} else if(this.shortcutIndex) {\n\t\tthis.inputNode.value = this.wiki.extractTiddlerDataItem(this.shortcutTiddler,this.shortcutIndex,this.shortcutDefault);\n\t} else {\n\t\tthis.inputNode.value = this.wiki.getTiddlerText(this.shortcutTiddler,this.shortcutDefault);\n\t}\n};\n\n/*\nHandle a dom \"keydown\" event\n*/\nEditShortcutWidget.prototype.handleKeydownEvent = function(event) {\n\t// Ignore shift, ctrl, meta, alt\n\tif(event.keyCode && $tw.keyboardManager.getModifierKeys().indexOf(event.keyCode) === -1) {\n\t\t// Get the shortcut text representation\n\t\tvar value = $tw.keyboardManager.getPrintableShortcuts([{\n\t\t\tctrlKey: event.ctrlKey,\n\t\t\tshiftKey: event.shiftKey,\n\t\t\taltKey: event.altKey,\n\t\t\tmetaKey: event.metaKey,\n\t\t\tkeyCode: event.keyCode\n\t\t}]);\n\t\tif(value.length > 0) {\n\t\t\tthis.wiki.setText(this.shortcutTiddler,this.shortcutField,this.shortcutIndex,value[0]);\n\t\t}\n\t\t// Ignore the keydown if it was already handled\n\t\tevent.preventDefault();\n\t\tevent.stopPropagation();\n\t\treturn true;\t\t\n\t} else {\n\t\treturn false;\n\t}\n};\n\n/*\nSelectively refreshes the widget if needed. 
Returns true if the widget needed re-rendering\n*/\nEditShortcutWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.tiddler || changedAttributes.field || changedAttributes.index || changedAttributes.placeholder || changedAttributes[\"default\"] || changedAttributes[\"class\"] || changedAttributes.style || changedAttributes.tooltip || changedAttributes[\"aria-label\"]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else if(changedTiddlers[this.shortcutTiddler]) {\n\t\tthis.updateInputNode();\n\t\treturn true;\n\t} else {\n\t\treturn false;\t\n\t}\n};\n\nexports[\"edit-shortcut\"] = EditShortcutWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/edit-shortcut.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/edit-text.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/edit-text.js\ntype: application/javascript\nmodule-type: widget\n\nEdit-text widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar editTextWidgetFactory = require(\"$:/core/modules/editor/factory.js\").editTextWidgetFactory,\n\tFramedEngine = require(\"$:/core/modules/editor/engines/framed.js\").FramedEngine,\n\tSimpleEngine = require(\"$:/core/modules/editor/engines/simple.js\").SimpleEngine;\n\nexports[\"edit-text\"] = editTextWidgetFactory(FramedEngine,SimpleEngine);\n\n})();\n",
"title": "$:/core/modules/widgets/edit-text.js",
"type": "application/javascript",
"module-type": "widget"
},
/*\
title: $:/core/modules/widgets/edit.js
type: application/javascript
module-type: widget

Edit widget is a meta-widget that chooses the appropriate concrete editing widget

\*/
(function(){

/*jslint node: true, browser: true */
/*global $tw: false */
"use strict";

var Widget = require("$:/core/modules/widgets/widget.js").widget;

var EditWidget = function(parseTreeNode,options) {
	this.initialise(parseTreeNode,options);
};

/*
Inherit from the base widget class
*/
EditWidget.prototype = new Widget();

/*
Render this widget into the DOM
*/
EditWidget.prototype.render = function(parent,nextSibling) {
	this.parentDomNode = parent;
	this.computeAttributes();
	this.execute();
	this.renderChildren(parent,nextSibling);
};

// Tiddlers with this prefix map a content type to the editor type used for it
var EDITOR_MAPPING_PREFIX = "$:/config/EditorTypeMappings/";

/*
Compute the internal state of the widget
*/
EditWidget.prototype.execute = function() {
	// Gather the attributes describing what is being edited and how
	this.editTitle = this.getAttribute("tiddler",this.getVariable("currentTiddler"));
	this.editField = this.getAttribute("field","text");
	this.editIndex = this.getAttribute("index");
	this.editClass = this.getAttribute("class");
	this.editPlaceholder = this.getAttribute("placeholder");
	// Decide which concrete editor widget to delegate to
	this.editorType = this.getEditorType();
	// Build a single child node for the concrete editor, forwarding our parameters
	var editorAttributes = {
		tiddler: {type: "string", value: this.editTitle},
		field: {type: "string", value: this.editField},
		index: {type: "string", value: this.editIndex},
		"class": {type: "string", value: this.editClass},
		"placeholder": {type: "string", value: this.editPlaceholder}
	};
	this.makeChildWidgets([{
		type: "edit-" + this.editorType,
		attributes: editorAttributes,
		children: this.parseTreeNode.children
	}]);
};

/*
Work out the editor type appropriate for the tiddler/field being edited
*/
EditWidget.prototype.getEditorType = function() {
	// Only the text field is edited according to the tiddler's own content type
	var contentType;
	if(this.editField === "text") {
		var targetTiddler = this.wiki.getTiddler(this.editTitle);
		if(targetTiddler) {
			contentType = targetTiddler.fields.type;
		}
	}
	if(!contentType) {
		contentType = "text/vnd.tiddlywiki";
	}
	// Prefer an explicit mapping from the configuration tiddlers
	var mappedEditor = this.wiki.getTiddlerText(EDITOR_MAPPING_PREFIX + contentType);
	if(mappedEditor) {
		return mappedEditor;
	}
	// Fall back on the content type's encoding: base64 content gets the binary editor
	var typeInfo = $tw.config.contentTypeInfo[contentType];
	return (typeInfo && typeInfo.encoding === "base64") ? "binary" : "text";
};

/*
Selectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering
*/
EditWidget.prototype.refresh = function(changedTiddlers) {
	var changedAttributes = this.computeAttributes();
	// Re-render ourselves if an attribute changed, or the target tiddler changed in a
	// way that alters the required editor type (the editor-type probe is deliberately
	// last so it only runs when no attribute change has already forced a refresh)
	if(changedAttributes.tiddler || changedAttributes.field || changedAttributes.index || (changedTiddlers[this.editTitle] && this.getEditorType() !== this.editorType)) {
		this.refreshSelf();
		return true;
	}
	return this.refreshChildren(changedTiddlers);
};

exports.edit = EditWidget;

})();
/*\
title: $:/core/modules/widgets/element.js
type: application/javascript
module-type: widget

Element widget: renders a plain HTML/SVG/MathML element from the parse tree

\*/
(function(){

/*jslint node: true, browser: true */
/*global $tw: false */
"use strict";

var Widget = require("$:/core/modules/widgets/widget.js").widget;

var ElementWidget = function(parseTreeNode,options) {
	this.initialise(parseTreeNode,options);
};

/*
Inherit from the base widget class
*/
ElementWidget.prototype = new Widget();

/*
Render this widget into the DOM.
Creates one DOM element named after the parse tree node's tag, copies the
widget's attributes onto it, and renders the children inside it.
*/
ElementWidget.prototype.render = function(parent,nextSibling) {
	this.parentDomNode = parent;
	this.computeAttributes();
	this.execute();
	// Neuter blacklisted elements: unsafe tags are renamed with a "safe-" prefix
	// so they render inert instead of being interpreted by the browser
	var tag = this.parseTreeNode.tag;
	if($tw.config.htmlUnsafeElements.indexOf(tag) !== -1) {
		tag = "safe-" + tag;
	}
	// Create in the namespace computed by execute() (HTML, SVG or MathML)
	var domNode = this.document.createElementNS(this.namespace,tag);
	this.assignAttributes(domNode,{excludeEventAttributes: true});
	parent.insertBefore(domNode,nextSibling);
	this.renderChildren(domNode,null);
	// refresh() relies on this node being domNodes[0]
	this.domNodes.push(domNode);
};

/*
Compute the internal state of the widget
*/
ElementWidget.prototype.execute = function() {
	// Select the namespace for the tag: svg/math/body act as namespace roots;
	// their namespace is propagated to descendants via the "namespace" variable
	var tagNamespaces = {
			svg: "http://www.w3.org/2000/svg",
			math: "http://www.w3.org/1998/Math/MathML",
			body: "http://www.w3.org/1999/xhtml"
		};
	this.namespace = tagNamespaces[this.parseTreeNode.tag];
	if(this.namespace) {
		this.setVariable("namespace",this.namespace);
	} else {
		// Otherwise inherit the namespace from an ancestor, defaulting to XHTML
		this.namespace = this.getVariable("namespace",{defaultValue: "http://www.w3.org/1999/xhtml"});
	}
	// Make the child widgets
	this.makeChildWidgets();
};

/*
Selectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering.
Note that changed attributes are re-assigned in place rather than re-rendering the element.
*/
ElementWidget.prototype.refresh = function(changedTiddlers) {
	var changedAttributes = this.computeAttributes(),
		hasChangedAttributes = $tw.utils.count(changedAttributes) > 0;
	if(hasChangedAttributes) {
		// Update our attributes directly on the existing DOM node
		this.assignAttributes(this.domNodes[0],{excludeEventAttributes: true});
	}
	// Children are refreshed first so their result is not short-circuited away
	return this.refreshChildren(changedTiddlers) || hasChangedAttributes;
};

exports.element = ElementWidget;

})();
/*\
title: $:/core/modules/widgets/encrypt.js
type: application/javascript
module-type: widget

Encrypt widget

\*/
(function(){

/*jslint node: true, browser: true */
/*global $tw: false */
"use strict";

var Widget = require("$:/core/modules/widgets/widget.js").widget;

var EncryptWidget = function(parseTreeNode,options) {
	this.initialise(parseTreeNode,options);
};

/*
Inherit from the base widget class
*/
EncryptWidget.prototype = new Widget();

/*
Render this widget into the DOM as a single text node holding the encrypted payload
*/
EncryptWidget.prototype.render = function(parent,nextSibling) {
	this.parentDomNode = parent;
	this.computeAttributes();
	this.execute();
	var textNode = this.document.createTextNode(this.encryptedText);
	parent.insertBefore(textNode,nextSibling);
	this.domNodes.push(textNode);
};

/*
Compute the internal state of the widget
*/
EncryptWidget.prototype.execute = function() {
	var self = this;
	// The filter selects which tiddlers are included in the encrypted bundle
	this.filter = this.getAttribute("filter","[!is[system]]");
	// Serialise every matching tiddler into a plain object keyed by title,
	// with each field rendered via its string representation
	var payload = {};
	$tw.utils.each(this.wiki.filterTiddlers(this.filter),function(title) {
		var tiddler = self.wiki.getTiddler(title),
			fieldStrings = {};
		for(var fieldName in tiddler.fields) {
			fieldStrings[fieldName] = tiddler.getFieldString(fieldName);
		}
		payload[title] = fieldStrings;
	});
	// Encrypt the JSON bundle and HTML-encode the result for safe embedding
	this.encryptedText = $tw.utils.htmlEncode($tw.crypto.encrypt(JSON.stringify(payload)));
};

/*
Selectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering
*/
EncryptWidget.prototype.refresh = function(changedTiddlers) {
	// We don't need to worry about refreshing because the encrypt widget isn't for interactive use
	return false;
};

exports.encrypt = EncryptWidget;

})();
/*\
title: $:/core/modules/widgets/entity.js
type: application/javascript
module-type: widget

HTML entity widget: renders an HTML entity (e.g. "&amp;copy;") as its decoded character

\*/
(function(){

/*jslint node: true, browser: true */
/*global $tw: false */
"use strict";

var Widget = require("$:/core/modules/widgets/widget.js").widget;

var EntityWidget = function(parseTreeNode,options) {
	this.initialise(parseTreeNode,options);
};

/*
Inherit from the base widget class
*/
EntityWidget.prototype = new Widget();

/*
Render this widget into the DOM
*/
EntityWidget.prototype.render = function(parent,nextSibling) {
	this.parentDomNode = parent;
	// FIX: compute the attributes before reading them. Previously render() went
	// straight to execute()/getAttribute(), so an "entity" attribute set in
	// wikitext was ignored on first render (only the parse-tree default was
	// seen), and refresh()'s computeAttributes() diff had no baseline. Every
	// other widget in this module family calls computeAttributes() here.
	this.computeAttributes();
	this.execute();
	// Fall back on the entity captured by the parser when no attribute is given
	var entityString = this.getAttribute("entity",this.parseTreeNode.entity || ""),
		textNode = this.document.createTextNode($tw.utils.entityDecode(entityString));
	parent.insertBefore(textNode,nextSibling);
	this.domNodes.push(textNode);
};

/*
Compute the internal state of the widget (nothing to do: render reads the attribute directly)
*/
EntityWidget.prototype.execute = function() {
};

/*
Selectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering
*/
EntityWidget.prototype.refresh = function(changedTiddlers) {
	var changedAttributes = this.computeAttributes();
	if(changedAttributes.entity) {
		this.refreshSelf();
		return true;
	} else {
		return false;
	}
};

exports.entity = EntityWidget;

})();
/*\
title: $:/core/modules/widgets/fieldmangler.js
type: application/javascript
module-type: widget

Field mangler widget: invisible widget that listens for tm-add/remove-field and
tm-add/remove-tag messages and applies them to a target tiddler

\*/
(function(){

/*jslint node: true, browser: true */
/*global $tw: false */
"use strict";

var Widget = require("$:/core/modules/widgets/widget.js").widget;

var FieldManglerWidget = function(parseTreeNode,options) {
	this.initialise(parseTreeNode,options);
	// Register for the field/tag manipulation messages dispatched by descendants
	this.addEventListeners([
		{type: "tm-remove-field", handler: "handleRemoveFieldEvent"},
		{type: "tm-add-field", handler: "handleAddFieldEvent"},
		{type: "tm-remove-tag", handler: "handleRemoveTagEvent"},
		{type: "tm-add-tag", handler: "handleAddTagEvent"}
	]);
};

/*
Inherit from the base widget class
*/
FieldManglerWidget.prototype = new Widget();

/*
Render this widget into the DOM (renders only its children; no DOM node of its own)
*/
FieldManglerWidget.prototype.render = function(parent,nextSibling) {
	this.parentDomNode = parent;
	this.computeAttributes();
	this.execute();
	this.renderChildren(parent,nextSibling);
};

/*
Compute the internal state of the widget
*/
FieldManglerWidget.prototype.execute = function() {
	// Get our parameters: the title of the tiddler to be mangled
	this.mangleTitle = this.getAttribute("tiddler",this.getVariable("currentTiddler"));
	// Construct the child widgets
	this.makeChildWidgets();
};

/*
Selectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering
*/
FieldManglerWidget.prototype.refresh = function(changedTiddlers) {
	var changedAttributes = this.computeAttributes();
	if(changedAttributes.tiddler) {
		this.refreshSelf();
		return true;
	} else {
		return this.refreshChildren(changedTiddlers);
	}
};

/*
Remove the field named in event.param by writing it back as undefined.
Always returns true to stop the event propagating further.
*/
FieldManglerWidget.prototype.handleRemoveFieldEvent = function(event) {
	var tiddler = this.wiki.getTiddler(this.mangleTitle),
		deletion = {};
	// Setting a field to undefined in the Tiddler constructor removes it
	deletion[event.param] = undefined;
	this.wiki.addTiddler(new $tw.Tiddler(tiddler,deletion));
	return true;
};

/*
Add one or more fields: event.param names a single field (empty value);
event.paramObject supplies name/value pairs. Invalid field names trigger a
single alert; existing field values are preserved when no value is supplied.
*/
FieldManglerWidget.prototype.handleAddFieldEvent = function(event) {
	var tiddler = this.wiki.getTiddler(this.mangleTitle),
		addition = this.wiki.getModificationFields(),
		hadInvalidFieldName = false,
		addField = function(name,value) {
			// Field names are normalised to lower case and trimmed
			var trimmedName = name.toLowerCase().trim();
			if(!$tw.utils.isValidFieldName(trimmedName)) {
				// Alert at most once per event, however many names are invalid
				if(!hadInvalidFieldName) {
					alert($tw.language.getString(
						"InvalidFieldName",
						{variables:
							{fieldName: trimmedName}
						}
					));
					hadInvalidFieldName = true;
					return;
				}
			} else {
				// Keep any existing value when none was supplied
				if(!value && tiddler) {
					value = tiddler.fields[trimmedName];
				}
				addition[trimmedName] = value || "";
			}
			return;
		};
	addition.title = this.mangleTitle;
	if(typeof event.param === "string") {
		addField(event.param,"");
	}
	if(typeof event.paramObject === "object") {
		for(var name in event.paramObject) {
			addField(name,event.paramObject[name]);
		}
	}
	this.wiki.addTiddler(new $tw.Tiddler(tiddler,addition));
	return true;
};

/*
Remove the tag named in event.param from the target tiddler's tags list.
The tags field is dropped entirely when the last tag is removed.
*/
FieldManglerWidget.prototype.handleRemoveTagEvent = function(event) {
	var tiddler = this.wiki.getTiddler(this.mangleTitle);
	if(tiddler && tiddler.fields.tags) {
		var p = tiddler.fields.tags.indexOf(event.param);
		if(p !== -1) {
			var modification = this.wiki.getModificationFields();
			// Copy the tags array so the stored tiddler is not mutated in place
			modification.tags = (tiddler.fields.tags || []).slice(0);
			modification.tags.splice(p,1);
			if(modification.tags.length === 0) {
				modification.tags = undefined;
			}
		this.wiki.addTiddler(new $tw.Tiddler(tiddler,modification));
		}
	}
	return true;
};

/*
Add the tag named in event.param. When the target tiddler exists the tag is
pushed to the top of its tag list; when it doesn't, a new tiddler is created
carrying just the title and the tag.
*/
FieldManglerWidget.prototype.handleAddTagEvent = function(event) {
	var tiddler = this.wiki.getTiddler(this.mangleTitle);
	if(tiddler && typeof event.param === "string") {
		var tag = event.param.trim();
		if(tag !== "") {
			var modification = this.wiki.getModificationFields();
			// Copy the tags array so the stored tiddler is not mutated in place
			modification.tags = (tiddler.fields.tags || []).slice(0);
			$tw.utils.pushTop(modification.tags,tag);
			this.wiki.addTiddler(new $tw.Tiddler(tiddler,modification));
		}
	} else if(typeof event.param === "string" && event.param.trim() !== "" && this.mangleTitle.trim() !== "") {
		// Target tiddler doesn't exist yet: create it with the single tag
		var tag = [];
		tag.push(event.param.trim());
		this.wiki.addTiddler({title: this.mangleTitle, tags: tag});
	}
	return true;
};

exports.fieldmangler = FieldManglerWidget;

})();
/*\
title: $:/core/modules/widgets/fields.js
type: application/javascript
module-type: widget

Fields widget: renders the fields of a tiddler through a simple text template

\*/
(function(){

/*jslint node: true, browser: true */
/*global $tw: false */
"use strict";

var Widget = require("$:/core/modules/widgets/widget.js").widget;

var FieldsWidget = function(parseTreeNode,options) {
	this.initialise(parseTreeNode,options);
};

/*
Inherit from the base widget class
*/
FieldsWidget.prototype = new Widget();

/*
Render this widget into the DOM as a single text node
*/
FieldsWidget.prototype.render = function(parent,nextSibling) {
	this.parentDomNode = parent;
	this.computeAttributes();
	this.execute();
	var textNode = this.document.createTextNode(this.text);
	parent.insertBefore(textNode,nextSibling);
	this.domNodes.push(textNode);
};

/*
Compute the internal state of the widget.
For each non-excluded field (sorted by name) the template is instantiated with
$name$, $value$ and $encoded_value$ substituted; results are concatenated.
*/
FieldsWidget.prototype.execute = function() {
	// Get parameters from our attributes
	this.tiddlerTitle = this.getAttribute("tiddler",this.getVariable("currentTiddler"));
	this.template = this.getAttribute("template");
	this.exclude = this.getAttribute("exclude");
	this.stripTitlePrefix = this.getAttribute("stripTitlePrefix","no") === "yes";
	// Get the tiddler whose fields are to be displayed
	var tiddler = this.wiki.getTiddler(this.tiddlerTitle);
	// Get the exclusion list; by default only the text field is excluded
	var exclude;
	if(this.exclude) {
		exclude = this.exclude.split(" ");
	} else {
		exclude = ["text"];
	}
	// Compose the template
	var text = [];
	if(this.template && tiddler) {
		// Collect the non-excluded field names and sort them for stable output
		var fields = [];
		for(var fieldName in tiddler.fields) {
			if(exclude.indexOf(fieldName) === -1) {
				fields.push(fieldName);
			}
		}
		fields.sort();
		for(var f=0; f<fields.length; f++) {
			// Note: no need to re-check the exclusion list here — "fields" was
			// already filtered above (the previous duplicate check was dead code)
			fieldName = fields[f];
			var row = this.template,
				value = tiddler.getFieldString(fieldName);
			// Optionally strip a leading "{...}" prefix from the title field
			if(this.stripTitlePrefix && fieldName === "title") {
				var reStrip = /^\{[^\}]+\}(.+)/mg,
					reMatch = reStrip.exec(value);
				if(reMatch) {
					value = reMatch[1];
				}
			}
			// String.replace with a string pattern substitutes only the FIRST
			// occurrence of each placeholder — longstanding template behavior
			row = row.replace("$name$",fieldName);
			row = row.replace("$value$",value);
			row = row.replace("$encoded_value$",$tw.utils.htmlEncode(value));
			text.push(row);
		}
	}
	this.text = text.join("");
};

/*
Selectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering
*/
FieldsWidget.prototype.refresh = function(changedTiddlers) {
	var changedAttributes = this.computeAttributes();
	if(changedAttributes.tiddler || changedAttributes.template || changedAttributes.exclude || changedAttributes.stripTitlePrefix || changedTiddlers[this.tiddlerTitle]) {
		this.refreshSelf();
		return true;
	} else {
		return false;
	}
};

exports.fields = FieldsWidget;

})();
/*\
title: $:/core/modules/widgets/image.js
type: application/javascript
module-type: widget

The image widget displays an image referenced with an external URI or with a local tiddler title.

```
<$image src="TiddlerTitle" width="320" height="400" class="classnames">
```

The image source can be the title of an existing tiddler or the URL of an external image.

External images always generate an HTML `<img>` tag.

Tiddlers that have a _canonical_uri field generate an HTML `<img>` tag with the src attribute containing the URI.

Tiddlers that contain image data generate an HTML `<img>` tag with the src attribute containing a base64 representation of the image.

Tiddlers that contain wikitext could be rendered to a DIV of the usual size of a tiddler, and then transformed to the size requested.

The width and height attributes are interpreted as a number of pixels, and do not need to include the "px" suffix.

\*/
(function(){

/*jslint node: true, browser: true */
/*global $tw: false */
"use strict";

var Widget = require("$:/core/modules/widgets/widget.js").widget;

var ImageWidget = function(parseTreeNode,options) {
	this.initialise(parseTreeNode,options);
};

/*
Inherit from the base widget class
*/
ImageWidget.prototype = new Widget();

/*
Render this widget into the DOM
*/
ImageWidget.prototype.render = function(parent,nextSibling) {
	this.parentDomNode = parent;
	this.computeAttributes();
	this.execute();
	// Create element
	// Determine what type of image it is
	var tag = "img", src = "",
		tiddler = this.wiki.getTiddler(this.imageSource);
	if(!tiddler) {
		// The source isn't the title of a tiddler, so we'll assume it's a URL
		src = this.getVariable("tv-get-export-image-link",{params: [{name: "src",value: this.imageSource}],defaultValue: this.imageSource});
	} else {
		// Check if it is an image tiddler
		if(this.wiki.isImageTiddler(this.imageSource)) {
			var type = tiddler.fields.type,
				text = tiddler.fields.text,
				_canonical_uri = tiddler.fields._canonical_uri;
			// If the tiddler has body text then it doesn't need to be lazily loaded
			if(text) {
				// Render the appropriate element for the image type
				switch(type) {
					case "application/pdf":
						// PDFs are shown with an <embed> rather than <img>
						tag = "embed";
						src = "data:application/pdf;base64," + text;
						break;
					case "image/svg+xml":
						// SVG source is stored as text, not base64
						src = "data:image/svg+xml," + encodeURIComponent(text);
						break;
					default:
						src = "data:" + type + ";base64," + text;
						break;
				}
			} else if(_canonical_uri) {
				// Lazily loaded image: point straight at the canonical URI.
				// (Simplified from a switch whose three arms all assigned the
				// same src; only the tag differs for PDFs.)
				if(type === "application/pdf") {
					tag = "embed";
				}
				src = _canonical_uri;
			} else {
				// Just trigger loading of the tiddler
				this.wiki.getTiddlerText(this.imageSource);
			}
		}
	}
	// Create the element and assign the attributes
	var domNode = this.document.createElement(tag);
	domNode.setAttribute("src",src);
	if(this.imageClass) {
		domNode.setAttribute("class",this.imageClass);
	}
	if(this.imageWidth) {
		domNode.setAttribute("width",this.imageWidth);
	}
	if(this.imageHeight) {
		domNode.setAttribute("height",this.imageHeight);
	}
	if(this.imageTooltip) {
		domNode.setAttribute("title",this.imageTooltip);
	}
	if(this.imageAlt) {
		domNode.setAttribute("alt",this.imageAlt);
	}
	// Insert element
	parent.insertBefore(domNode,nextSibling);
	this.domNodes.push(domNode);
};

/*
Compute the internal state of the widget
*/
ImageWidget.prototype.execute = function() {
	// Get our parameters
	this.imageSource = this.getAttribute("source");
	this.imageWidth = this.getAttribute("width");
	this.imageHeight = this.getAttribute("height");
	this.imageClass = this.getAttribute("class");
	this.imageTooltip = this.getAttribute("tooltip");
	this.imageAlt = this.getAttribute("alt");
};

/*
Selectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering
*/
ImageWidget.prototype.refresh = function(changedTiddlers) {
	var changedAttributes = this.computeAttributes();
	// FIX: include changedAttributes.alt — execute() reads the "alt" attribute,
	// but previously a change to it never triggered a re-render
	if(changedAttributes.source || changedAttributes.width || changedAttributes.height || changedAttributes["class"] || changedAttributes.tooltip || changedAttributes.alt || changedTiddlers[this.imageSource]) {
		this.refreshSelf();
		return true;
	} else {
		return false;
	}
};

exports.image = ImageWidget;

})();
/*\
title: $:/core/modules/widgets/importvariables.js
type: application/javascript
module-type: widget

Import variable definitions from other tiddlers

\*/
(function(){

/*jslint node: true, browser: true */
/*global $tw: false */
"use strict";

var Widget = require("$:/core/modules/widgets/widget.js").widget;

var ImportVariablesWidget = function(parseTreeNode,options) {
	this.initialise(parseTreeNode,options);
};

/*
Inherit from the base widget class
*/
ImportVariablesWidget.prototype = new Widget();

/*
Render this widget into the DOM
*/
ImportVariablesWidget.prototype.render = function(parent,nextSibling) {
	this.parentDomNode = parent;
	this.computeAttributes();
	this.execute();
	this.renderChildren(parent,nextSibling);
};

/*
Compute the internal state of the widget.
Collects the leading chain of "set" parse-tree nodes from each tiddler selected
by the filter and rebuilds them into one nested chain, so every imported
variable is in scope for this widget's own children.
The optional tiddlerList parameter lets refresh() reuse an already computed
filter result.
*/
ImportVariablesWidget.prototype.execute = function(tiddlerList) {
	var self = this;
	// Get our parameters
	this.filter = this.getAttribute("filter");
	// Compute the filter
	this.tiddlerList = tiddlerList || this.wiki.filterTiddlers(this.filter,this);
	// Accumulate the <$set> widgets from each tiddler.
	// widgetStackStart/End track the head and tail of a hand-built singly
	// linked chain: each appended node becomes the sole child of the tail.
	var widgetStackStart,widgetStackEnd;
	function addWidgetNode(widgetNode) {
		if(widgetNode) {
			if(!widgetStackStart && !widgetStackEnd) {
				widgetStackStart = widgetNode;
				widgetStackEnd = widgetNode;
			} else {
				widgetStackEnd.children = [widgetNode];
				widgetStackEnd = widgetNode;
			}
		}
	}
	$tw.utils.each(this.tiddlerList,function(title) {
		var parser = self.wiki.parseTiddler(title);
		if(parser) {
			// Only the leading run of "set" nodes is imported; walking stops at
			// the first node of any other type
			var parseTreeNode = parser.tree[0];
			while(parseTreeNode && parseTreeNode.type === "set") {
				addWidgetNode({
					type: "set",
					attributes: parseTreeNode.attributes,
					params: parseTreeNode.params
				});
				parseTreeNode = parseTreeNode.children[0];
			}
		} 
	});
	// Add our own children to the end of the pile
	var parseTreeNodes;
	if(widgetStackStart && widgetStackEnd) {
		parseTreeNodes = [widgetStackStart];
		widgetStackEnd.children = this.parseTreeNode.children;
	} else {
		// Nothing was imported: render our children directly
		parseTreeNodes = this.parseTreeNode.children;
	}
	// Construct the child widgets
	this.makeChildWidgets(parseTreeNodes);
};

/*
Selectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering
*/
ImportVariablesWidget.prototype.refresh = function(changedTiddlers) {
	// Recompute our attributes and the filter list
	var changedAttributes = this.computeAttributes(),
		tiddlerList = this.wiki.filterTiddlers(this.getAttribute("filter"),this);
	// Refresh if the filter has changed, or the list of tiddlers has changed, or any of the tiddlers in the list has changed
	function haveListedTiddlersChanged() {
		var changed = false;
		tiddlerList.forEach(function(title) {
			if(changedTiddlers[title]) {
				changed = true;
			}
		});
		return changed;
	}
	if(changedAttributes.filter || !$tw.utils.isArrayEqual(this.tiddlerList,tiddlerList) || haveListedTiddlersChanged()) {
		// Re-execute with the freshly computed list and re-render in place
		this.removeChildDomNodes();
		this.execute(tiddlerList);
		this.renderChildren(this.parentDomNode,this.findNextSiblingDomNode());
		return true;
	} else {
		return this.refreshChildren(changedTiddlers);
	}
};

exports.importvariables = ImportVariablesWidget;

})();
/*\
title: $:/core/modules/widgets/keyboard.js
type: application/javascript
module-type: widget

Keyboard shortcut widget: wraps its children in a div that intercepts matching
keydown events and responds with actions and/or a dispatched message

\*/
(function(){

/*jslint node: true, browser: true */
/*global $tw: false */
"use strict";

var Widget = require("$:/core/modules/widgets/widget.js").widget;

var KeyboardWidget = function(parseTreeNode,options) {
	this.initialise(parseTreeNode,options);
};

/*
Inherit from the base widget class
*/
KeyboardWidget.prototype = new Widget();

/*
Render this widget into the DOM
*/
KeyboardWidget.prototype.render = function(parent,nextSibling) {
	var self = this;
	// Remember parent
	this.parentDomNode = parent;
	// Compute attributes and execute state
	this.computeAttributes();
	this.execute();
	// Create element
	var domNode = this.document.createElement("div");
	// Assign classes
	var classes = (this["class"] || "").split(" ");
	classes.push("tc-keyboard");
	domNode.className = classes.join(" ");
	// Add a keyboard event handler; when the pressed key matches one of the
	// parsed descriptors, run the action widgets/string and dispatch the message
	domNode.addEventListener("keydown",function (event) {
		if($tw.keyboardManager.checkKeyDescriptors(event,self.keyInfoArray)) {
			self.invokeActions(self,event);
			if(self.actions) {
				self.invokeActionString(self.actions,self,event);
			}
			self.dispatchMessage(event);
			event.preventDefault();
			event.stopPropagation();
			return true;
		}
		return false;
	},false);
	// Insert element
	parent.insertBefore(domNode,nextSibling);
	this.renderChildren(domNode,null);
	this.domNodes.push(domNode);
};

/*
Dispatch the configured message with the configured parameter
*/
KeyboardWidget.prototype.dispatchMessage = function(event) {
	this.dispatchEvent({type: this.message, param: this.param, tiddlerTitle: this.getVariable("currentTiddler")});
};

/*
Compute the internal state of the widget
*/
KeyboardWidget.prototype.execute = function() {
	// Get attributes
	this.actions = this.getAttribute("actions");
	this.message = this.getAttribute("message");
	this.param = this.getAttribute("param");
	this.key = this.getAttribute("key");
	this.keyInfoArray = $tw.keyboardManager.parseKeyDescriptors(this.key);
	this["class"] = this.getAttribute("class");
	// Make child widgets
	this.makeChildWidgets();
};

/*
Selectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering
*/
KeyboardWidget.prototype.refresh = function(changedTiddlers) {
	var changedAttributes = this.computeAttributes();
	// FIX: include changedAttributes.actions — execute() reads the "actions"
	// attribute, but previously a change to it never triggered a re-render
	if(changedAttributes.actions || changedAttributes.message || changedAttributes.param || changedAttributes.key || changedAttributes["class"]) {
		this.refreshSelf();
		return true;
	}
	return this.refreshChildren(changedTiddlers);
};

exports.keyboard = KeyboardWidget;

})();
"$:/core/modules/widgets/link.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/link.js\ntype: application/javascript\nmodule-type: widget\n\nLink widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\nvar MISSING_LINK_CONFIG_TITLE = \"$:/config/MissingLinks\";\n\nvar LinkWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nLinkWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nLinkWidget.prototype.render = function(parent,nextSibling) {\n\t// Save the parent dom node\n\tthis.parentDomNode = parent;\n\t// Compute our attributes\n\tthis.computeAttributes();\n\t// Execute our logic\n\tthis.execute();\n\t// Get the value of the tv-wikilinks configuration macro\n\tvar wikiLinksMacro = this.getVariable(\"tv-wikilinks\"),\n\t\tuseWikiLinks = wikiLinksMacro ? (wikiLinksMacro.trim() !== \"no\") : true,\n\t\tmissingLinksEnabled = !(this.hideMissingLinks && this.isMissing && !this.isShadow);\n\t// Render the link if required\n\tif(useWikiLinks && missingLinksEnabled) {\n\t\tthis.renderLink(parent,nextSibling);\n\t} else {\n\t\t// Just insert the link text\n\t\tvar domNode = this.document.createElement(\"span\");\n\t\tparent.insertBefore(domNode,nextSibling);\n\t\tthis.renderChildren(domNode,null);\n\t\tthis.domNodes.push(domNode);\n\t}\n};\n\n/*\nRender this widget into the DOM\n*/\nLinkWidget.prototype.renderLink = function(parent,nextSibling) {\n\tvar self = this;\n\t// Sanitise the specified tag\n\tvar tag = this.linkTag;\n\tif($tw.config.htmlUnsafeElements.indexOf(tag) !== -1) {\n\t\ttag = \"a\";\n\t}\n\t// Create our element\n\tvar domNode = this.document.createElement(tag);\n\t// Assign classes\n\tvar classes = [];\n\tif(this.linkClasses) {\n\t\tclasses.push(this.linkClasses);\n\t}\n\tclasses.push(\"tc-tiddlylink\");\n\tif(this.isShadow) 
{\n\t\tclasses.push(\"tc-tiddlylink-shadow\");\n\t}\n\tif(this.isMissing && !this.isShadow) {\n\t\tclasses.push(\"tc-tiddlylink-missing\");\n\t} else {\n\t\tif(!this.isMissing) {\n\t\t\tclasses.push(\"tc-tiddlylink-resolves\");\n\t\t}\n\t}\n\tdomNode.setAttribute(\"class\",classes.join(\" \"));\n\t// Set an href\n\tvar wikiLinkTemplateMacro = this.getVariable(\"tv-wikilink-template\"),\n\t\twikiLinkTemplate = wikiLinkTemplateMacro ? wikiLinkTemplateMacro.trim() : \"#$uri_encoded$\",\n\t\twikiLinkText = wikiLinkTemplate.replace(\"$uri_encoded$\",encodeURIComponent(this.to));\n\twikiLinkText = wikiLinkText.replace(\"$uri_doubleencoded$\",encodeURIComponent(encodeURIComponent(this.to)));\n\twikiLinkText = this.getVariable(\"tv-get-export-link\",{params: [{name: \"to\",value: this.to}],defaultValue: wikiLinkText});\n\tif(tag === \"a\") {\n\t\tdomNode.setAttribute(\"href\",wikiLinkText);\n\t}\n\tif(this.tabIndex) {\n\t\tdomNode.setAttribute(\"tabindex\",this.tabIndex);\n\t}\n\t// Set the tooltip\n\t// HACK: Performance issues with re-parsing the tooltip prevent us defaulting the tooltip to \"<$transclude field='tooltip'><$transclude field='title'/></$transclude>\"\n\tvar tooltipWikiText = this.tooltip || this.getVariable(\"tv-wikilink-tooltip\");\n\tif(tooltipWikiText) {\n\t\tvar tooltipText = this.wiki.renderText(\"text/plain\",\"text/vnd.tiddlywiki\",tooltipWikiText,{\n\t\t\t\tparseAsInline: true,\n\t\t\t\tvariables: {\n\t\t\t\t\tcurrentTiddler: this.to\n\t\t\t\t},\n\t\t\t\tparentWidget: this\n\t\t\t});\n\t\tdomNode.setAttribute(\"title\",tooltipText);\n\t}\n\tif(this[\"aria-label\"]) {\n\t\tdomNode.setAttribute(\"aria-label\",this[\"aria-label\"]);\n\t}\n\t// Add a click event handler\n\t$tw.utils.addEventListeners(domNode,[\n\t\t{name: \"click\", handlerObject: this, handlerMethod: \"handleClickEvent\"},\n\t]);\n\tif(this.draggable === \"yes\") {\n\t\t$tw.utils.addEventListeners(domNode,[\n\t\t\t{name: \"dragstart\", handlerObject: this, handlerMethod: 
\"handleDragStartEvent\"},\n\t\t\t{name: \"dragend\", handlerObject: this, handlerMethod: \"handleDragEndEvent\"}\n\t\t]);\n\t}\n\t// Insert the link into the DOM and render any children\n\tparent.insertBefore(domNode,nextSibling);\n\tthis.renderChildren(domNode,null);\n\tthis.domNodes.push(domNode);\n};\n\nLinkWidget.prototype.handleClickEvent = function(event) {\n\t// Send the click on its way as a navigate event\n\tvar bounds = this.domNodes[0].getBoundingClientRect();\n\tthis.dispatchEvent({\n\t\ttype: \"tm-navigate\",\n\t\tnavigateTo: this.to,\n\t\tnavigateFromTitle: this.getVariable(\"storyTiddler\"),\n\t\tnavigateFromNode: this,\n\t\tnavigateFromClientRect: { top: bounds.top, left: bounds.left, width: bounds.width, right: bounds.right, bottom: bounds.bottom, height: bounds.height\n\t\t},\n\t\tnavigateSuppressNavigation: event.metaKey || event.ctrlKey || (event.button === 1)\n\t});\n\tif(this.domNodes[0].hasAttribute(\"href\")) {\n\t\tevent.preventDefault();\n\t}\n\tevent.stopPropagation();\n\treturn false;\n};\n\nLinkWidget.prototype.handleDragStartEvent = function(event) {\n\tif(event.target === this.domNodes[0]) {\n\t\tif(this.to) {\n\t\t\t$tw.dragInProgress = true;\n\t\t\t// Set the dragging class on the element being dragged\n\t\t\t$tw.utils.addClass(event.target,\"tc-tiddlylink-dragging\");\n\t\t\t// Create the drag image elements\n\t\t\tthis.dragImage = this.document.createElement(\"div\");\n\t\t\tthis.dragImage.className = \"tc-tiddler-dragger\";\n\t\t\tvar inner = this.document.createElement(\"div\");\n\t\t\tinner.className = \"tc-tiddler-dragger-inner\";\n\t\t\tinner.appendChild(this.document.createTextNode(this.to));\n\t\t\tthis.dragImage.appendChild(inner);\n\t\t\tthis.document.body.appendChild(this.dragImage);\n\t\t\t// Astoundingly, we need to cover the dragger up: http://www.kryogenix.org/code/browser/custom-drag-image.html\n\t\t\tvar cover = this.document.createElement(\"div\");\n\t\t\tcover.className = 
\"tc-tiddler-dragger-cover\";\n\t\t\tcover.style.left = (inner.offsetLeft - 16) + \"px\";\n\t\t\tcover.style.top = (inner.offsetTop - 16) + \"px\";\n\t\t\tcover.style.width = (inner.offsetWidth + 32) + \"px\";\n\t\t\tcover.style.height = (inner.offsetHeight + 32) + \"px\";\n\t\t\tthis.dragImage.appendChild(cover);\n\t\t\t// Set the data transfer properties\n\t\t\tvar dataTransfer = event.dataTransfer;\n\t\t\t// First the image\n\t\t\tdataTransfer.effectAllowed = \"copy\";\n\t\t\tif(dataTransfer.setDragImage) {\n\t\t\t\tdataTransfer.setDragImage(this.dragImage.firstChild,-16,-16);\n\t\t\t}\n\t\t\t// Then the data\n\t\t\tdataTransfer.clearData();\n\t\t\tvar jsonData = this.wiki.getTiddlerAsJson(this.to),\n\t\t\t\ttextData = this.wiki.getTiddlerText(this.to,\"\"),\n\t\t\t\ttitle = (new RegExp(\"^\" + $tw.config.textPrimitives.wikiLink + \"$\",\"mg\")).exec(this.to) ? this.to : \"[[\" + this.to + \"]]\";\n\t\t\t// IE doesn't like these content types\n\t\t\tif(!$tw.browser.isIE) {\n\t\t\t\tdataTransfer.setData(\"text/vnd.tiddler\",jsonData);\n\t\t\t\tdataTransfer.setData(\"text/plain\",title);\n\t\t\t\tdataTransfer.setData(\"text/x-moz-url\",\"data:text/vnd.tiddler,\" + encodeURIComponent(jsonData));\n\t\t\t}\n\t\t\tdataTransfer.setData(\"URL\",\"data:text/vnd.tiddler,\" + encodeURIComponent(jsonData));\n\t\t\tdataTransfer.setData(\"Text\",title);\n\t\t\tevent.stopPropagation();\n\t\t} else {\n\t\t\tevent.preventDefault();\n\t\t}\n\t}\n};\n\nLinkWidget.prototype.handleDragEndEvent = function(event) {\n\tif(event.target === this.domNodes[0]) {\n\t\t$tw.dragInProgress = false;\n\t\t// Remove the dragging class on the element being dragged\n\t\t$tw.utils.removeClass(event.target,\"tc-tiddlylink-dragging\");\n\t\t// Delete the drag image element\n\t\tif(this.dragImage) {\n\t\t\tthis.dragImage.parentNode.removeChild(this.dragImage);\n\t\t}\n\t}\n};\n\n/*\nCompute the internal state of the widget\n*/\nLinkWidget.prototype.execute = function() {\n\t// Pick up our 
attributes\n\tthis.to = this.getAttribute(\"to\",this.getVariable(\"currentTiddler\"));\n\tthis.tooltip = this.getAttribute(\"tooltip\");\n\tthis[\"aria-label\"] = this.getAttribute(\"aria-label\");\n\tthis.linkClasses = this.getAttribute(\"class\");\n\tthis.tabIndex = this.getAttribute(\"tabindex\");\n\tthis.draggable = this.getAttribute(\"draggable\",\"yes\");\n\tthis.linkTag = this.getAttribute(\"tag\",\"a\");\n\t// Determine the link characteristics\n\tthis.isMissing = !this.wiki.tiddlerExists(this.to);\n\tthis.isShadow = this.wiki.isShadowTiddler(this.to);\n\tthis.hideMissingLinks = ($tw.wiki.getTiddlerText(MISSING_LINK_CONFIG_TITLE,\"yes\") === \"no\");\n\t// Make the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nLinkWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.to || changedTiddlers[this.to] || changedAttributes[\"aria-label\"] || changedAttributes.tooltip || changedTiddlers[MISSING_LINK_CONFIG_TITLE]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t}\n\treturn this.refreshChildren(changedTiddlers);\n};\n\nexports.link = LinkWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/link.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/linkcatcher.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/linkcatcher.js\ntype: application/javascript\nmodule-type: widget\n\nLinkcatcher widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar LinkCatcherWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n\tthis.addEventListeners([\n\t\t{type: \"tm-navigate\", handler: \"handleNavigateEvent\"}\n\t]);\n};\n\n/*\nInherit from the base widget class\n*/\nLinkCatcherWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nLinkCatcherWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nLinkCatcherWidget.prototype.execute = function() {\n\t// Get our parameters\n\tthis.catchTo = this.getAttribute(\"to\");\n\tthis.catchMessage = this.getAttribute(\"message\");\n\tthis.catchSet = this.getAttribute(\"set\");\n\tthis.catchSetTo = this.getAttribute(\"setTo\");\n\tthis.catchActions = this.getAttribute(\"actions\");\n\t// Construct the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. 
Returns true if the widget or any of its children needed re-rendering\n*/\nLinkCatcherWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.to || changedAttributes.message || changedAttributes.set || changedAttributes.setTo) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn this.refreshChildren(changedTiddlers);\t\t\n\t}\n};\n\n/*\nHandle a tm-navigate event\n*/\nLinkCatcherWidget.prototype.handleNavigateEvent = function(event) {\n\tif(this.catchTo) {\n\t\tthis.wiki.setTextReference(this.catchTo,event.navigateTo,this.getVariable(\"currentTiddler\"));\n\t}\n\tif(this.catchMessage && this.parentWidget) {\n\t\tthis.parentWidget.dispatchEvent({\n\t\t\ttype: this.catchMessage,\n\t\t\tparam: event.navigateTo,\n\t\t\tnavigateTo: event.navigateTo\n\t\t});\n\t}\n\tif(this.catchSet) {\n\t\tvar tiddler = this.wiki.getTiddler(this.catchSet);\n\t\tthis.wiki.addTiddler(new $tw.Tiddler(tiddler,{title: this.catchSet, text: this.catchSetTo}));\n\t}\n\tif(this.catchActions) {\n\t\tthis.invokeActionString(this.catchActions,this);\n\t}\n\treturn false;\n};\n\nexports.linkcatcher = LinkCatcherWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/linkcatcher.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/list.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/list.js\ntype: application/javascript\nmodule-type: widget\n\nList and list item widgets\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\n/*\nThe list widget creates list element sub-widgets that reach back into the list widget for their configuration\n*/\n\nvar ListWidget = function(parseTreeNode,options) {\n\t// Initialise the storyviews if they've not been done already\n\tif(!this.storyViews) {\n\t\tListWidget.prototype.storyViews = {};\n\t\t$tw.modules.applyMethods(\"storyview\",this.storyViews);\n\t}\n\t// Main initialisation inherited from widget.js\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nListWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nListWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n\t// Construct the storyview\n\tvar StoryView = this.storyViews[this.storyViewName];\n\tif(StoryView && !this.document.isTiddlyWikiFakeDom) {\n\t\tthis.storyview = new StoryView(this);\n\t} else {\n\t\tthis.storyview = null;\n\t}\n};\n\n/*\nCompute the internal state of the widget\n*/\nListWidget.prototype.execute = function() {\n\t// Get our attributes\n\tthis.template = this.getAttribute(\"template\");\n\tthis.editTemplate = this.getAttribute(\"editTemplate\");\n\tthis.variableName = this.getAttribute(\"variable\",\"currentTiddler\");\n\tthis.storyViewName = this.getAttribute(\"storyview\");\n\tthis.historyTitle = this.getAttribute(\"history\");\n\t// Compose the list elements\n\tthis.list = this.getTiddlerList();\n\tvar members = [],\n\t\tself = this;\n\t// Check for an empty list\n\tif(this.list.length === 0) {\n\t\tmembers = this.getEmptyMessage();\n\t} else 
{\n\t\t$tw.utils.each(this.list,function(title,index) {\n\t\t\tmembers.push(self.makeItemTemplate(title));\n\t\t});\n\t}\n\t// Construct the child widgets\n\tthis.makeChildWidgets(members);\n\t// Clear the last history\n\tthis.history = [];\n};\n\nListWidget.prototype.getTiddlerList = function() {\n\tvar defaultFilter = \"[!is[system]sort[title]]\";\n\treturn this.wiki.filterTiddlers(this.getAttribute(\"filter\",defaultFilter),this);\n};\n\nListWidget.prototype.getEmptyMessage = function() {\n\tvar emptyMessage = this.getAttribute(\"emptyMessage\",\"\"),\n\t\tparser = this.wiki.parseText(\"text/vnd.tiddlywiki\",emptyMessage,{parseAsInline: true});\n\tif(parser) {\n\t\treturn parser.tree;\n\t} else {\n\t\treturn [];\n\t}\n};\n\n/*\nCompose the template for a list item\n*/\nListWidget.prototype.makeItemTemplate = function(title) {\n\t// Check if the tiddler is a draft\n\tvar tiddler = this.wiki.getTiddler(title),\n\t\tisDraft = tiddler && tiddler.hasField(\"draft.of\"),\n\t\ttemplate = this.template,\n\t\ttemplateTree;\n\tif(isDraft && this.editTemplate) {\n\t\ttemplate = this.editTemplate;\n\t}\n\t// Compose the transclusion of the template\n\tif(template) {\n\t\ttemplateTree = [{type: \"transclude\", attributes: {tiddler: {type: \"string\", value: template}}}];\n\t} else {\n\t\tif(this.parseTreeNode.children && this.parseTreeNode.children.length > 0) {\n\t\t\ttemplateTree = this.parseTreeNode.children;\n\t\t} else {\n\t\t\t// Default template is a link to the title\n\t\t\ttemplateTree = [{type: \"element\", tag: this.parseTreeNode.isBlock ? \"div\" : \"span\", children: [{type: \"link\", attributes: {to: {type: \"string\", value: title}}, children: [\n\t\t\t\t\t{type: \"text\", text: title}\n\t\t\t]}]}];\n\t\t}\n\t}\n\t// Return the list item\n\treturn {type: \"listitem\", itemTitle: title, variableName: this.variableName, children: templateTree};\n};\n\n/*\nSelectively refreshes the widget if needed. 
Returns true if the widget or any of its children needed re-rendering\n*/\nListWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes(),\n\t\tresult;\n\t// Call the storyview\n\tif(this.storyview && this.storyview.refreshStart) {\n\t\tthis.storyview.refreshStart(changedTiddlers,changedAttributes);\n\t}\n\t// Completely refresh if any of our attributes have changed\n\tif(changedAttributes.filter || changedAttributes.template || changedAttributes.editTemplate || changedAttributes.emptyMessage || changedAttributes.storyview || changedAttributes.history) {\n\t\tthis.refreshSelf();\n\t\tresult = true;\n\t} else {\n\t\t// Handle any changes to the list\n\t\tresult = this.handleListChanges(changedTiddlers);\n\t\t// Handle any changes to the history stack\n\t\tif(this.historyTitle && changedTiddlers[this.historyTitle]) {\n\t\t\tthis.handleHistoryChanges();\n\t\t}\n\t}\n\t// Call the storyview\n\tif(this.storyview && this.storyview.refreshEnd) {\n\t\tthis.storyview.refreshEnd(changedTiddlers,changedAttributes);\n\t}\n\treturn result;\n};\n\n/*\nHandle any changes to the history list\n*/\nListWidget.prototype.handleHistoryChanges = function() {\n\t// Get the history data\n\tvar newHistory = this.wiki.getTiddlerDataCached(this.historyTitle,[]);\n\t// Ignore any entries of the history that match the previous history\n\tvar entry = 0;\n\twhile(entry < newHistory.length && entry < this.history.length && newHistory[entry].title === this.history[entry].title) {\n\t\tentry++;\n\t}\n\t// Navigate forwards to each of the new tiddlers\n\twhile(entry < newHistory.length) {\n\t\tif(this.storyview && this.storyview.navigateTo) {\n\t\t\tthis.storyview.navigateTo(newHistory[entry]);\n\t\t}\n\t\tentry++;\n\t}\n\t// Update the history\n\tthis.history = newHistory;\n};\n\n/*\nProcess any changes to the list\n*/\nListWidget.prototype.handleListChanges = function(changedTiddlers) {\n\t// Get the new list\n\tvar prevList = this.list;\n\tthis.list 
= this.getTiddlerList();\n\t// Check for an empty list\n\tif(this.list.length === 0) {\n\t\t// Check if it was empty before\n\t\tif(prevList.length === 0) {\n\t\t\t// If so, just refresh the empty message\n\t\t\treturn this.refreshChildren(changedTiddlers);\n\t\t} else {\n\t\t\t// Replace the previous content with the empty message\n\t\t\tfor(t=this.children.length-1; t>=0; t--) {\n\t\t\t\tthis.removeListItem(t);\n\t\t\t}\n\t\t\tvar nextSibling = this.findNextSiblingDomNode();\n\t\t\tthis.makeChildWidgets(this.getEmptyMessage());\n\t\t\tthis.renderChildren(this.parentDomNode,nextSibling);\n\t\t\treturn true;\n\t\t}\n\t} else {\n\t\t// If the list was empty then we need to remove the empty message\n\t\tif(prevList.length === 0) {\n\t\t\tthis.removeChildDomNodes();\n\t\t\tthis.children = [];\n\t\t}\n\t\t// Cycle through the list, inserting and removing list items as needed\n\t\tvar hasRefreshed = false;\n\t\tfor(var t=0; t<this.list.length; t++) {\n\t\t\tvar index = this.findListItem(t,this.list[t]);\n\t\t\tif(index === undefined) {\n\t\t\t\t// The list item must be inserted\n\t\t\t\tthis.insertListItem(t,this.list[t]);\n\t\t\t\thasRefreshed = true;\n\t\t\t} else {\n\t\t\t\t// There are intervening list items that must be removed\n\t\t\t\tfor(var n=index-1; n>=t; n--) {\n\t\t\t\t\tthis.removeListItem(n);\n\t\t\t\t\thasRefreshed = true;\n\t\t\t\t}\n\t\t\t\t// Refresh the item we're reusing\n\t\t\t\tvar refreshed = this.children[t].refresh(changedTiddlers);\n\t\t\t\thasRefreshed = hasRefreshed || refreshed;\n\t\t\t}\n\t\t}\n\t\t// Remove any left over items\n\t\tfor(t=this.children.length-1; t>=this.list.length; t--) {\n\t\t\tthis.removeListItem(t);\n\t\t\thasRefreshed = true;\n\t\t}\n\t\treturn hasRefreshed;\n\t}\n};\n\n/*\nFind the list item with a given title, starting from a specified position\n*/\nListWidget.prototype.findListItem = function(startIndex,title) {\n\twhile(startIndex < this.children.length) {\n\t\tif(this.children[startIndex].parseTreeNode.itemTitle 
=== title) {\n\t\t\treturn startIndex;\n\t\t}\n\t\tstartIndex++;\n\t}\n\treturn undefined;\n};\n\n/*\nInsert a new list item at the specified index\n*/\nListWidget.prototype.insertListItem = function(index,title) {\n\t// Create, insert and render the new child widgets\n\tvar widget = this.makeChildWidget(this.makeItemTemplate(title));\n\twidget.parentDomNode = this.parentDomNode; // Hack to enable findNextSiblingDomNode() to work\n\tthis.children.splice(index,0,widget);\n\tvar nextSibling = widget.findNextSiblingDomNode();\n\twidget.render(this.parentDomNode,nextSibling);\n\t// Animate the insertion if required\n\tif(this.storyview && this.storyview.insert) {\n\t\tthis.storyview.insert(widget);\n\t}\n\treturn true;\n};\n\n/*\nRemove the specified list item\n*/\nListWidget.prototype.removeListItem = function(index) {\n\tvar widget = this.children[index];\n\t// Animate the removal if required\n\tif(this.storyview && this.storyview.remove) {\n\t\tthis.storyview.remove(widget);\n\t} else {\n\t\twidget.removeChildDomNodes();\n\t}\n\t// Remove the child widget\n\tthis.children.splice(index,1);\n};\n\nexports.list = ListWidget;\n\nvar ListItemWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nListItemWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nListItemWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nListItemWidget.prototype.execute = function() {\n\t// Set the current list item title\n\tthis.setVariable(this.parseTreeNode.variableName,this.parseTreeNode.itemTitle);\n\t// Construct the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. 
Returns true if the widget or any of its children needed re-rendering\n*/\nListItemWidget.prototype.refresh = function(changedTiddlers) {\n\treturn this.refreshChildren(changedTiddlers);\n};\n\nexports.listitem = ListItemWidget;\n\n})();",
"title": "$:/core/modules/widgets/list.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/macrocall.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/macrocall.js\ntype: application/javascript\nmodule-type: widget\n\nMacrocall widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar MacroCallWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nMacroCallWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nMacroCallWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nMacroCallWidget.prototype.execute = function() {\n\t// Get the parse type if specified\n\tthis.parseType = this.getAttribute(\"$type\",\"text/vnd.tiddlywiki\");\n\tthis.renderOutput = this.getAttribute(\"$output\",\"text/html\");\n\t// Merge together the parameters specified in the parse tree with the specified attributes\n\tvar params = this.parseTreeNode.params ? this.parseTreeNode.params.slice(0) : [];\n\t$tw.utils.each(this.attributes,function(attribute,name) {\n\t\tif(name.charAt(0) !== \"$\") {\n\t\t\tparams.push({name: name, value: attribute});\t\t\t\n\t\t}\n\t});\n\t// Get the macro value\n\tvar text = this.getVariable(this.parseTreeNode.name || this.getAttribute(\"$name\"),{params: params}),\n\t\tparseTreeNodes;\n\t// Are we rendering to HTML?\n\tif(this.renderOutput === \"text/html\") {\n\t\t// If so we'll return the parsed macro\n\t\tvar parser = this.wiki.parseText(this.parseType,text,\n\t\t\t\t\t\t\t{parseAsInline: !this.parseTreeNode.isBlock});\n\t\tparseTreeNodes = parser ? 
parser.tree : [];\n\t} else {\n\t\t// Otherwise, we'll render the text\n\t\tvar plainText = this.wiki.renderText(\"text/plain\",this.parseType,text,{parentWidget: this});\n\t\tparseTreeNodes = [{type: \"text\", text: plainText}];\n\t}\n\t// Construct the child widgets\n\tthis.makeChildWidgets(parseTreeNodes);\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nMacroCallWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif($tw.utils.count(changedAttributes) > 0) {\n\t\t// Rerender ourselves\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn this.refreshChildren(changedTiddlers);\n\t}\n};\n\nexports.macrocall = MacroCallWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/macrocall.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/navigator.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/navigator.js\ntype: application/javascript\nmodule-type: widget\n\nNavigator widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar IMPORT_TITLE = \"$:/Import\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar NavigatorWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n\tthis.addEventListeners([\n\t\t{type: \"tm-navigate\", handler: \"handleNavigateEvent\"},\n\t\t{type: \"tm-edit-tiddler\", handler: \"handleEditTiddlerEvent\"},\n\t\t{type: \"tm-delete-tiddler\", handler: \"handleDeleteTiddlerEvent\"},\n\t\t{type: \"tm-save-tiddler\", handler: \"handleSaveTiddlerEvent\"},\n\t\t{type: \"tm-cancel-tiddler\", handler: \"handleCancelTiddlerEvent\"},\n\t\t{type: \"tm-close-tiddler\", handler: \"handleCloseTiddlerEvent\"},\n\t\t{type: \"tm-close-all-tiddlers\", handler: \"handleCloseAllTiddlersEvent\"},\n\t\t{type: \"tm-close-other-tiddlers\", handler: \"handleCloseOtherTiddlersEvent\"},\n\t\t{type: \"tm-new-tiddler\", handler: \"handleNewTiddlerEvent\"},\n\t\t{type: \"tm-import-tiddlers\", handler: \"handleImportTiddlersEvent\"},\n\t\t{type: \"tm-perform-import\", handler: \"handlePerformImportEvent\"},\n\t\t{type: \"tm-fold-tiddler\", handler: \"handleFoldTiddlerEvent\"},\n\t\t{type: \"tm-fold-other-tiddlers\", handler: \"handleFoldOtherTiddlersEvent\"},\n\t\t{type: \"tm-fold-all-tiddlers\", handler: \"handleFoldAllTiddlersEvent\"},\n\t\t{type: \"tm-unfold-all-tiddlers\", handler: \"handleUnfoldAllTiddlersEvent\"},\n\t\t{type: \"tm-rename-tiddler\", handler: \"handleRenameTiddlerEvent\"}\n\t]);\n};\n\n/*\nInherit from the base widget class\n*/\nNavigatorWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nNavigatorWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = 
parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nNavigatorWidget.prototype.execute = function() {\n\t// Get our parameters\n\tthis.storyTitle = this.getAttribute(\"story\");\n\tthis.historyTitle = this.getAttribute(\"history\");\n\t// Construct the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nNavigatorWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.story || changedAttributes.history) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn this.refreshChildren(changedTiddlers);\t\t\n\t}\n};\n\nNavigatorWidget.prototype.getStoryList = function() {\n\treturn this.storyTitle ? this.wiki.getTiddlerList(this.storyTitle) : null;\n};\n\nNavigatorWidget.prototype.saveStoryList = function(storyList) {\n\tvar storyTiddler = this.wiki.getTiddler(this.storyTitle);\n\tthis.wiki.addTiddler(new $tw.Tiddler(\n\t\t{title: this.storyTitle},\n\t\tstoryTiddler,\n\t\t{list: storyList}\n\t));\n};\n\nNavigatorWidget.prototype.removeTitleFromStory = function(storyList,title) {\n\tvar p = storyList.indexOf(title);\n\twhile(p !== -1) {\n\t\tstoryList.splice(p,1);\n\t\tp = storyList.indexOf(title);\n\t}\n};\n\nNavigatorWidget.prototype.replaceFirstTitleInStory = function(storyList,oldTitle,newTitle) {\n\tvar pos = storyList.indexOf(oldTitle);\n\tif(pos !== -1) {\n\t\tstoryList[pos] = newTitle;\n\t\tdo {\n\t\t\tpos = storyList.indexOf(oldTitle,pos + 1);\n\t\t\tif(pos !== -1) {\n\t\t\t\tstoryList.splice(pos,1);\n\t\t\t}\n\t\t} while(pos !== -1);\n\t} else {\n\t\tstoryList.splice(0,0,newTitle);\n\t}\n};\n\nNavigatorWidget.prototype.addToStory = function(title,fromTitle) {\n\tvar storyList = this.getStoryList();\n\t// Quit if we cannot get hold of the story 
list\n\tif(!storyList) {\n\t\treturn;\n\t}\n\t// See if the tiddler is already there\n\tvar slot = storyList.indexOf(title);\n\t// Quit if it already exists in the story river\n\tif(slot >= 0) {\n\t\treturn;\n\t}\n\t// First we try to find the position of the story element we navigated from\n\tvar fromIndex = storyList.indexOf(fromTitle);\n\tif(fromIndex >= 0) {\n\t\t// The tiddler is added from inside the river\n\t\t// Determine where to insert the tiddler; Fallback is \"below\"\n\t\tswitch(this.getAttribute(\"openLinkFromInsideRiver\",\"below\")) {\n\t\t\tcase \"top\":\n\t\t\t\tslot = 0;\n\t\t\t\tbreak;\n\t\t\tcase \"bottom\":\n\t\t\t\tslot = storyList.length;\n\t\t\t\tbreak;\n\t\t\tcase \"above\":\n\t\t\t\tslot = fromIndex;\n\t\t\t\tbreak;\n\t\t\tcase \"below\": // Intentional fall-through\n\t\t\tdefault:\n\t\t\t\tslot = fromIndex + 1;\n\t\t\t\tbreak;\n\t\t}\n\t} else {\n\t\t// The tiddler is opened from outside the river. Determine where to insert the tiddler; default is \"top\"\n\t\tif(this.getAttribute(\"openLinkFromOutsideRiver\",\"top\") === \"bottom\") {\n\t\t\t// Insert at bottom\n\t\t\tslot = storyList.length;\n\t\t} else {\n\t\t\t// Insert at top\n\t\t\tslot = 0;\n\t\t}\n\t}\n\t// Add the tiddler\n\tstoryList.splice(slot,0,title);\n\t// Save the story\n\tthis.saveStoryList(storyList);\n};\n\n/*\nAdd a new record to the top of the history stack\ntitle: a title string or an array of title strings\nfromPageRect: page coordinates of the origin of the navigation\n*/\nNavigatorWidget.prototype.addToHistory = function(title,fromPageRect) {\n\tthis.wiki.addToHistory(title,fromPageRect,this.historyTitle);\n};\n\n/*\nHandle a tm-navigate event\n*/\nNavigatorWidget.prototype.handleNavigateEvent = function(event) {\n\tif(event.navigateTo) {\n\t\tthis.addToStory(event.navigateTo,event.navigateFromTitle);\n\t\tif(!event.navigateSuppressNavigation) {\n\t\t\tthis.addToHistory(event.navigateTo,event.navigateFromClientRect);\n\t\t}\n\t}\n\treturn false;\n};\n\n// Close a 
specified tiddler\nNavigatorWidget.prototype.handleCloseTiddlerEvent = function(event) {\n\tvar title = event.param || event.tiddlerTitle,\n\t\tstoryList = this.getStoryList();\n\t// Look for tiddlers with this title to close\n\tthis.removeTitleFromStory(storyList,title);\n\tthis.saveStoryList(storyList);\n\treturn false;\n};\n\n// Close all tiddlers\nNavigatorWidget.prototype.handleCloseAllTiddlersEvent = function(event) {\n\tthis.saveStoryList([]);\n\treturn false;\n};\n\n// Close other tiddlers\nNavigatorWidget.prototype.handleCloseOtherTiddlersEvent = function(event) {\n\tvar title = event.param || event.tiddlerTitle;\n\tthis.saveStoryList([title]);\n\treturn false;\n};\n\n// Place a tiddler in edit mode\nNavigatorWidget.prototype.handleEditTiddlerEvent = function(event) {\n\tvar self = this;\n\tfunction isUnmodifiedShadow(title) {\n\t\treturn self.wiki.isShadowTiddler(title) && !self.wiki.tiddlerExists(title);\n\t}\n\tfunction confirmEditShadow(title) {\n\t\treturn confirm($tw.language.getString(\n\t\t\t\"ConfirmEditShadowTiddler\",\n\t\t\t{variables:\n\t\t\t\t{title: title}\n\t\t\t}\n\t\t));\n\t}\n\tvar title = event.param || event.tiddlerTitle;\n\tif(isUnmodifiedShadow(title) && !confirmEditShadow(title)) {\n\t\treturn false;\n\t}\n\t// Replace the specified tiddler with a draft in edit mode\n\tvar draftTiddler = this.makeDraftTiddler(title);\n\t// Update the story and history if required\n\tif(!event.paramObject || event.paramObject.suppressNavigation !== \"yes\") {\n\t\tvar draftTitle = draftTiddler.fields.title,\n\t\t\tstoryList = this.getStoryList();\n\t\tthis.removeTitleFromStory(storyList,draftTitle);\n\t\tthis.replaceFirstTitleInStory(storyList,title,draftTitle);\n\t\tthis.addToHistory(draftTitle,event.navigateFromClientRect);\n\t\tthis.saveStoryList(storyList);\n\t\treturn false;\n\t}\n};\n\n// Delete a tiddler\nNavigatorWidget.prototype.handleDeleteTiddlerEvent = function(event) {\n\t// Get the tiddler we're deleting\n\tvar title = event.param || 
event.tiddlerTitle,\n\t\ttiddler = this.wiki.getTiddler(title),\n\t\tstoryList = this.getStoryList(),\n\t\toriginalTitle = tiddler ? tiddler.fields[\"draft.of\"] : \"\",\n\t\tconfirmationTitle;\n\tif(!tiddler) {\n\t\treturn false;\n\t}\n\t// Check if the tiddler we're deleting is in draft mode\n\tif(originalTitle) {\n\t\t// If so, we'll prompt for confirmation referencing the original tiddler\n\t\tconfirmationTitle = originalTitle;\n\t} else {\n\t\t// If not a draft, then prompt for confirmation referencing the specified tiddler\n\t\tconfirmationTitle = title;\n\t}\n\t// Seek confirmation\n\tif((this.wiki.getTiddler(originalTitle) || (tiddler.fields.text || \"\") !== \"\") && !confirm($tw.language.getString(\n\t\t\t\t\"ConfirmDeleteTiddler\",\n\t\t\t\t{variables:\n\t\t\t\t\t{title: confirmationTitle}\n\t\t\t\t}\n\t\t\t))) {\n\t\treturn false;\n\t}\n\t// Delete the original tiddler\n\tif(originalTitle) {\n\t\tthis.wiki.deleteTiddler(originalTitle);\n\t\tthis.removeTitleFromStory(storyList,originalTitle);\n\t}\n\t// Delete this tiddler\n\tthis.wiki.deleteTiddler(title);\n\t// Remove the closed tiddler from the story\n\tthis.removeTitleFromStory(storyList,title);\n\tthis.saveStoryList(storyList);\n\t// Trigger an autosave\n\t$tw.rootWidget.dispatchEvent({type: \"tm-auto-save-wiki\"});\n\treturn false;\n};\n\n/*\nCreate/reuse the draft tiddler for a given title\n*/\nNavigatorWidget.prototype.makeDraftTiddler = function(targetTitle) {\n\t// See if there is already a draft tiddler for this tiddler\n\tvar draftTitle = this.wiki.findDraft(targetTitle);\n\tif(draftTitle) {\n\t\treturn this.wiki.getTiddler(draftTitle);\n\t}\n\t// Get the current value of the tiddler we're editing\n\tvar tiddler = this.wiki.getTiddler(targetTitle);\n\t// Save the initial value of the draft tiddler\n\tdraftTitle = this.generateDraftTitle(targetTitle);\n\tvar draftTiddler = new $tw.Tiddler(\n\t\t\ttiddler,\n\t\t\t{\n\t\t\t\ttitle: draftTitle,\n\t\t\t\t\"draft.title\": 
targetTitle,\n\t\t\t\t\"draft.of\": targetTitle\n\t\t\t},\n\t\t\tthis.wiki.getModificationFields()\n\t\t);\n\tthis.wiki.addTiddler(draftTiddler);\n\treturn draftTiddler;\n};\n\n/*\nGenerate a title for the draft of a given tiddler\n*/\nNavigatorWidget.prototype.generateDraftTitle = function(title) {\n\tvar c = 0,\n\t\tdraftTitle;\n\tdo {\n\t\tdraftTitle = \"Draft \" + (c ? (c + 1) + \" \" : \"\") + \"of '\" + title + \"'\";\n\t\tc++;\n\t} while(this.wiki.tiddlerExists(draftTitle));\n\treturn draftTitle;\n};\n\n// Take a tiddler out of edit mode, saving the changes\nNavigatorWidget.prototype.handleSaveTiddlerEvent = function(event) {\n\tvar title = event.param || event.tiddlerTitle,\n\t\ttiddler = this.wiki.getTiddler(title),\n\t\tstoryList = this.getStoryList();\n\t// Replace the original tiddler with the draft\n\tif(tiddler) {\n\t\tvar draftTitle = (tiddler.fields[\"draft.title\"] || \"\").trim(),\n\t\t\tdraftOf = (tiddler.fields[\"draft.of\"] || \"\").trim();\n\t\tif(draftTitle) {\n\t\t\tvar isRename = draftOf !== draftTitle,\n\t\t\t\tisConfirmed = true;\n\t\t\tif(isRename && this.wiki.tiddlerExists(draftTitle)) {\n\t\t\t\tisConfirmed = confirm($tw.language.getString(\n\t\t\t\t\t\"ConfirmOverwriteTiddler\",\n\t\t\t\t\t{variables:\n\t\t\t\t\t\t{title: draftTitle}\n\t\t\t\t\t}\n\t\t\t\t));\n\t\t\t}\n\t\t\tif(isConfirmed) {\n\t\t\t\t// Create the new tiddler and pass it through the th-saving-tiddler hook\n\t\t\t\tvar newTiddler = new $tw.Tiddler(this.wiki.getCreationFields(),tiddler,{\n\t\t\t\t\ttitle: draftTitle,\n\t\t\t\t\t\"draft.title\": undefined,\n\t\t\t\t\t\"draft.of\": undefined\n\t\t\t\t},this.wiki.getModificationFields());\n\t\t\t\tnewTiddler = $tw.hooks.invokeHook(\"th-saving-tiddler\",newTiddler);\n\t\t\t\tthis.wiki.addTiddler(newTiddler);\n\t\t\t\t// Remove the draft tiddler\n\t\t\t\tthis.wiki.deleteTiddler(title);\n\t\t\t\t// Remove the original tiddler if we're renaming it\n\t\t\t\tif(isRename) 
{\n\t\t\t\t\tthis.wiki.deleteTiddler(draftOf);\n\t\t\t\t}\n\t\t\t\tif(!event.paramObject || event.paramObject.suppressNavigation !== \"yes\") {\n\t\t\t\t\t// Replace the draft in the story with the original\n\t\t\t\t\tthis.replaceFirstTitleInStory(storyList,title,draftTitle);\n\t\t\t\t\tthis.addToHistory(draftTitle,event.navigateFromClientRect);\n\t\t\t\t\tif(draftTitle !== this.storyTitle) {\n\t\t\t\t\t\tthis.saveStoryList(storyList);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// Trigger an autosave\n\t\t\t\t$tw.rootWidget.dispatchEvent({type: \"tm-auto-save-wiki\"});\n\t\t\t}\n\t\t}\n\t}\n\treturn false;\n};\n\n// Take a tiddler out of edit mode without saving the changes\nNavigatorWidget.prototype.handleCancelTiddlerEvent = function(event) {\n\t// Flip the specified tiddler from draft back to the original\n\tvar draftTitle = event.param || event.tiddlerTitle,\n\t\tdraftTiddler = this.wiki.getTiddler(draftTitle),\n\t\toriginalTitle = draftTiddler && draftTiddler.fields[\"draft.of\"];\n\tif(draftTiddler && originalTitle) {\n\t\t// Ask for confirmation if the tiddler text has changed\n\t\tvar isConfirmed = true,\n\t\t\toriginalTiddler = this.wiki.getTiddler(originalTitle),\n\t\t\tstoryList = this.getStoryList();\n\t\tif(this.wiki.isDraftModified(draftTitle)) {\n\t\t\tisConfirmed = confirm($tw.language.getString(\n\t\t\t\t\"ConfirmCancelTiddler\",\n\t\t\t\t{variables:\n\t\t\t\t\t{title: draftTitle}\n\t\t\t\t}\n\t\t\t));\n\t\t}\n\t\t// Remove the draft tiddler\n\t\tif(isConfirmed) {\n\t\t\tthis.wiki.deleteTiddler(draftTitle);\n\t\t\tif(!event.paramObject || event.paramObject.suppressNavigation !== \"yes\") {\n\t\t\t\tif(originalTiddler) {\n\t\t\t\t\tthis.replaceFirstTitleInStory(storyList,draftTitle,originalTitle);\n\t\t\t\t\tthis.addToHistory(originalTitle,event.navigateFromClientRect);\n\t\t\t\t} else {\n\t\t\t\t\tthis.removeTitleFromStory(storyList,draftTitle);\n\t\t\t\t}\n\t\t\t\tthis.saveStoryList(storyList);\n\t\t\t}\n\t\t}\n\t}\n\treturn false;\n};\n\n// Create a new 
draft tiddler\n// event.param can either be the title of a template tiddler, or a hashmap of fields.\n//\n// The title of the newly created tiddler follows these rules:\n// * If a hashmap was used and a title field was specified, use that title\n// * If a hashmap was used without a title field, use a default title, if necessary making it unique with a numeric suffix\n// * If a template tiddler was used, use the title of the template, if necessary making it unique with a numeric suffix\n//\n// If a draft of the target tiddler already exists then it is reused\nNavigatorWidget.prototype.handleNewTiddlerEvent = function(event) {\n\t// Get the story details\n\tvar storyList = this.getStoryList(),\n\t\ttemplateTiddler, additionalFields, title, draftTitle, existingTiddler;\n\t// Get the template tiddler (if any)\n\tif(typeof event.param === \"string\") {\n\t\t// Get the template tiddler\n\t\ttemplateTiddler = this.wiki.getTiddler(event.param);\n\t\t// Generate a new title\n\t\ttitle = this.wiki.generateNewTitle(event.param || $tw.language.getString(\"DefaultNewTiddlerTitle\"));\n\t}\n\t// Get the specified additional fields\n\tif(typeof event.paramObject === \"object\") {\n\t\tadditionalFields = event.paramObject;\n\t}\n\tif(typeof event.param === \"object\") { // Backwards compatibility with 5.1.3\n\t\tadditionalFields = event.param;\n\t}\n\tif(additionalFields && additionalFields.title) {\n\t\ttitle = additionalFields.title;\n\t}\n\t// Generate a title if we don't have one\n\ttitle = title || this.wiki.generateNewTitle($tw.language.getString(\"DefaultNewTiddlerTitle\"));\n\t// Find any existing draft for this tiddler\n\tdraftTitle = this.wiki.findDraft(title);\n\t// Pull in any existing tiddler\n\tif(draftTitle) {\n\t\texistingTiddler = this.wiki.getTiddler(draftTitle);\n\t} else {\n\t\tdraftTitle = this.generateDraftTitle(title);\n\t\texistingTiddler = this.wiki.getTiddler(title);\n\t}\n\t// Merge the tags\n\tvar mergedTags = [];\n\tif(existingTiddler && 
existingTiddler.fields.tags) {\n\t\t$tw.utils.pushTop(mergedTags,existingTiddler.fields.tags)\n\t}\n\tif(additionalFields && additionalFields.tags) {\n\t\t// Merge tags\n\t\tmergedTags = $tw.utils.pushTop(mergedTags,$tw.utils.parseStringArray(additionalFields.tags));\n\t}\n\tif(templateTiddler && templateTiddler.fields.tags) {\n\t\t// Merge tags\n\t\tmergedTags = $tw.utils.pushTop(mergedTags,templateTiddler.fields.tags);\n\t}\n\t// Save the draft tiddler\n\tvar draftTiddler = new $tw.Tiddler({\n\t\t\ttext: \"\",\n\t\t\t\"draft.title\": title\n\t\t},\n\t\ttemplateTiddler,\n\t\texistingTiddler,\n\t\tadditionalFields,\n\t\tthis.wiki.getCreationFields(),\n\t\t{\n\t\t\ttitle: draftTitle,\n\t\t\t\"draft.of\": title,\n\t\t\ttags: mergedTags\n\t\t},this.wiki.getModificationFields());\n\tthis.wiki.addTiddler(draftTiddler);\n\t// Update the story to insert the new draft at the top and remove any existing tiddler\n\tif(storyList.indexOf(draftTitle) === -1) {\n\t\tvar slot = storyList.indexOf(event.navigateFromTitle);\n\t\tstoryList.splice(slot + 1,0,draftTitle);\n\t}\n\tif(storyList.indexOf(title) !== -1) {\n\t\tstoryList.splice(storyList.indexOf(title),1);\t\t\n\t}\n\tthis.saveStoryList(storyList);\n\t// Add a new record to the top of the history stack\n\tthis.addToHistory(draftTitle);\n\treturn false;\n};\n\n// Import JSON tiddlers into a pending import tiddler\nNavigatorWidget.prototype.handleImportTiddlersEvent = function(event) {\n\tvar self = this;\n\t// Get the tiddlers\n\tvar tiddlers = [];\n\ttry {\n\t\ttiddlers = JSON.parse(event.param);\t\n\t} catch(e) {\n\t}\n\t// Get the current $:/Import tiddler\n\tvar importTiddler = this.wiki.getTiddler(IMPORT_TITLE),\n\t\timportData = this.wiki.getTiddlerData(IMPORT_TITLE,{}),\n\t\tnewFields = new Object({\n\t\t\ttitle: IMPORT_TITLE,\n\t\t\ttype: \"application/json\",\n\t\t\t\"plugin-type\": \"import\",\n\t\t\t\"status\": \"pending\"\n\t\t}),\n\t\tincomingTiddlers = [];\n\t// Process each tiddler\n\timportData.tiddlers = 
importData.tiddlers || {};\n\t$tw.utils.each(tiddlers,function(tiddlerFields) {\n\t\tvar title = tiddlerFields.title;\n\t\tif(title) {\n\t\t\tincomingTiddlers.push(title);\n\t\t\timportData.tiddlers[title] = tiddlerFields;\n\t\t}\n\t});\n\t// Give the active upgrader modules a chance to process the incoming tiddlers\n\tvar messages = this.wiki.invokeUpgraders(incomingTiddlers,importData.tiddlers);\n\t$tw.utils.each(messages,function(message,title) {\n\t\tnewFields[\"message-\" + title] = message;\n\t});\n\t// Deselect any suppressed tiddlers\n\t$tw.utils.each(importData.tiddlers,function(tiddler,title) {\n\t\tif($tw.utils.count(tiddler) === 0) {\n\t\t\tnewFields[\"selection-\" + title] = \"unchecked\";\n\t\t}\n\t});\n\t// Save the $:/Import tiddler\n\tnewFields.text = JSON.stringify(importData,null,$tw.config.preferences.jsonSpaces);\n\tthis.wiki.addTiddler(new $tw.Tiddler(importTiddler,newFields));\n\t// Update the story and history details\n\tif(this.getVariable(\"tv-auto-open-on-import\") !== \"no\") {\n\t\tvar storyList = this.getStoryList(),\n\t\t\thistory = [];\n\t\t// Add it to the story\n\t\tif(storyList.indexOf(IMPORT_TITLE) === -1) {\n\t\t\tstoryList.unshift(IMPORT_TITLE);\n\t\t}\n\t\t// And to history\n\t\thistory.push(IMPORT_TITLE);\n\t\t// Save the updated story and history\n\t\tthis.saveStoryList(storyList);\n\t\tthis.addToHistory(history);\t\t\n\t}\n\treturn false;\n};\n\n// \nNavigatorWidget.prototype.handlePerformImportEvent = function(event) {\n\tvar self = this,\n\t\timportTiddler = this.wiki.getTiddler(event.param),\n\t\timportData = this.wiki.getTiddlerDataCached(event.param,{tiddlers: {}}),\n\t\timportReport = [];\n\t// Add the tiddlers to the store\n\timportReport.push($tw.language.getString(\"Import/Imported/Hint\") + \"\\n\");\n\t$tw.utils.each(importData.tiddlers,function(tiddlerFields) {\n\t\tvar title = tiddlerFields.title;\n\t\tif(title && importTiddler && importTiddler.fields[\"selection-\" + title] !== \"unchecked\") 
{\n\t\t\tself.wiki.addTiddler(new $tw.Tiddler(tiddlerFields));\n\t\t\timportReport.push(\"# [[\" + tiddlerFields.title + \"]]\");\n\t\t}\n\t});\n\t// Replace the $:/Import tiddler with an import report\n\tthis.wiki.addTiddler(new $tw.Tiddler({\n\t\ttitle: event.param,\n\t\ttext: importReport.join(\"\\n\"),\n\t\t\"status\": \"complete\"\n\t}));\n\t// Navigate to the $:/Import tiddler\n\tthis.addToHistory([event.param]);\n\t// Trigger an autosave\n\t$tw.rootWidget.dispatchEvent({type: \"tm-auto-save-wiki\"});\n};\n\nNavigatorWidget.prototype.handleFoldTiddlerEvent = function(event) {\n\tvar self = this,\n\t\tparamObject = event.paramObject || {};\n\tif(paramObject.foldedState) {\n\t\tvar foldedState = this.wiki.getTiddlerText(paramObject.foldedState,\"show\") === \"show\" ? \"hide\" : \"show\";\n\t\tthis.wiki.setText(paramObject.foldedState,\"text\",null,foldedState);\n\t}\n};\n\nNavigatorWidget.prototype.handleFoldOtherTiddlersEvent = function(event) {\n\tvar self = this,\n\t\tparamObject = event.paramObject || {},\n\t\tprefix = paramObject.foldedStatePrefix;\n\t$tw.utils.each(this.getStoryList(),function(title) {\n\t\tself.wiki.setText(prefix + title,\"text\",null,event.param === title ? 
\"show\" : \"hide\");\n\t});\n};\n\nNavigatorWidget.prototype.handleFoldAllTiddlersEvent = function(event) {\n\tvar self = this,\n\t\tparamObject = event.paramObject || {},\n\t\tprefix = paramObject.foldedStatePrefix;\n\t$tw.utils.each(this.getStoryList(),function(title) {\n\t\tself.wiki.setText(prefix + title,\"text\",null,\"hide\");\n\t});\n};\n\nNavigatorWidget.prototype.handleUnfoldAllTiddlersEvent = function(event) {\n\tvar self = this,\n\t\tparamObject = event.paramObject || {},\n\t\tprefix = paramObject.foldedStatePrefix;\n\t$tw.utils.each(this.getStoryList(),function(title) {\n\t\tself.wiki.setText(prefix + title,\"text\",null,\"show\");\n\t});\n};\n\nNavigatorWidget.prototype.handleRenameTiddlerEvent = function(event) {\n\tvar self = this,\n\t\tparamObject = event.paramObject || {},\n\t\tfrom = paramObject.from || event.tiddlerTitle,\n\t\tto = paramObject.to;\n\t$tw.wiki.renameTiddler(from,to);\n};\n\nexports.navigator = NavigatorWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/navigator.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/password.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/password.js\ntype: application/javascript\nmodule-type: widget\n\nPassword widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar PasswordWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nPasswordWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nPasswordWidget.prototype.render = function(parent,nextSibling) {\n\t// Save the parent dom node\n\tthis.parentDomNode = parent;\n\t// Compute our attributes\n\tthis.computeAttributes();\n\t// Execute our logic\n\tthis.execute();\n\t// Get the current password\n\tvar password = $tw.browser ? $tw.utils.getPassword(this.passwordName) || \"\" : \"\";\n\t// Create our element\n\tvar domNode = this.document.createElement(\"input\");\n\tdomNode.setAttribute(\"type\",\"password\");\n\tdomNode.setAttribute(\"value\",password);\n\t// Add a click event handler\n\t$tw.utils.addEventListeners(domNode,[\n\t\t{name: \"change\", handlerObject: this, handlerMethod: \"handleChangeEvent\"}\n\t]);\n\t// Insert the label into the DOM and render any children\n\tparent.insertBefore(domNode,nextSibling);\n\tthis.renderChildren(domNode,null);\n\tthis.domNodes.push(domNode);\n};\n\nPasswordWidget.prototype.handleChangeEvent = function(event) {\n\tvar password = this.domNodes[0].value;\n\treturn $tw.utils.savePassword(this.passwordName,password);\n};\n\n/*\nCompute the internal state of the widget\n*/\nPasswordWidget.prototype.execute = function() {\n\t// Get the parameters from the attributes\n\tthis.passwordName = this.getAttribute(\"name\",\"\");\n\t// Make the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. 
Returns true if the widget or any of its children needed re-rendering\n*/\nPasswordWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.name) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn this.refreshChildren(changedTiddlers);\n\t}\n};\n\nexports.password = PasswordWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/password.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/radio.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/radio.js\ntype: application/javascript\nmodule-type: widget\n\nRadio widget\n\nWill set a field to the selected value:\n\n```\n\t<$radio field=\"myfield\" value=\"check 1\">one</$radio>\n\t<$radio field=\"myfield\" value=\"check 2\">two</$radio>\n\t<$radio field=\"myfield\" value=\"check 3\">three</$radio>\n```\n\n|Parameter |Description |h\n|tiddler |Name of the tiddler in which the field should be set. Defaults to current tiddler |\n|field |The name of the field to be set |\n|value |The value to set |\n|class |Optional class name(s) |\n\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar RadioWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nRadioWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nRadioWidget.prototype.render = function(parent,nextSibling) {\n\t// Save the parent dom node\n\tthis.parentDomNode = parent;\n\t// Compute our attributes\n\tthis.computeAttributes();\n\t// Execute our logic\n\tthis.execute();\n\t// Create our elements\n\tthis.labelDomNode = this.document.createElement(\"label\");\n\tthis.labelDomNode.setAttribute(\"class\",this.radioClass);\n\tthis.inputDomNode = this.document.createElement(\"input\");\n\tthis.inputDomNode.setAttribute(\"type\",\"radio\");\n\tif(this.getValue() == this.radioValue) {\n\t\tthis.inputDomNode.setAttribute(\"checked\",\"true\");\n\t}\n\tthis.labelDomNode.appendChild(this.inputDomNode);\n\tthis.spanDomNode = this.document.createElement(\"span\");\n\tthis.labelDomNode.appendChild(this.spanDomNode);\n\t// Add a click event handler\n\t$tw.utils.addEventListeners(this.inputDomNode,[\n\t\t{name: \"change\", handlerObject: this, handlerMethod: \"handleChangeEvent\"}\n\t]);\n\t// Insert the label into the DOM and render any 
children\n\tparent.insertBefore(this.labelDomNode,nextSibling);\n\tthis.renderChildren(this.spanDomNode,null);\n\tthis.domNodes.push(this.labelDomNode);\n};\n\nRadioWidget.prototype.getValue = function() {\n\tvar tiddler = this.wiki.getTiddler(this.radioTitle);\n\treturn tiddler && tiddler.getFieldString(this.radioField);\n};\n\nRadioWidget.prototype.setValue = function() {\n\tif(this.radioField) {\n\t\tvar tiddler = this.wiki.getTiddler(this.radioTitle),\n\t\t\taddition = {};\n\t\taddition[this.radioField] = this.radioValue;\n\t\tthis.wiki.addTiddler(new $tw.Tiddler(this.wiki.getCreationFields(),{title: this.radioTitle},tiddler,addition,this.wiki.getModificationFields()));\n\t}\n};\n\nRadioWidget.prototype.handleChangeEvent = function(event) {\n\tif(this.inputDomNode.checked) {\n\t\tthis.setValue();\n\t}\n};\n\n/*\nCompute the internal state of the widget\n*/\nRadioWidget.prototype.execute = function() {\n\t// Get the parameters from the attributes\n\tthis.radioTitle = this.getAttribute(\"tiddler\",this.getVariable(\"currentTiddler\"));\n\tthis.radioField = this.getAttribute(\"field\",\"text\");\n\tthis.radioValue = this.getAttribute(\"value\");\n\tthis.radioClass = this.getAttribute(\"class\",\"\");\n\tif(this.radioClass !== \"\") {\n\t\tthis.radioClass += \" \";\n\t}\n\tthis.radioClass += \"tc-radio\";\n\t// Make the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. 
Returns true if the widget or any of its children needed re-rendering\n*/\nRadioWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.tiddler || changedAttributes.field || changedAttributes.value || changedAttributes[\"class\"]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\tvar refreshed = false;\n\t\tif(changedTiddlers[this.radioTitle]) {\n\t\t\tthis.inputDomNode.checked = this.getValue() === this.radioValue;\n\t\t\trefreshed = true;\n\t\t}\n\t\treturn this.refreshChildren(changedTiddlers) || refreshed;\n\t}\n};\n\nexports.radio = RadioWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/radio.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/raw.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/raw.js\ntype: application/javascript\nmodule-type: widget\n\nRaw widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar RawWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nRawWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nRawWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.execute();\n\tvar div = this.document.createElement(\"div\");\n\tdiv.innerHTML=this.parseTreeNode.html;\n\tparent.insertBefore(div,nextSibling);\n\tthis.domNodes.push(div);\t\n};\n\n/*\nCompute the internal state of the widget\n*/\nRawWidget.prototype.execute = function() {\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nRawWidget.prototype.refresh = function(changedTiddlers) {\n\treturn false;\n};\n\nexports.raw = RawWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/raw.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/reveal.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/reveal.js\ntype: application/javascript\nmodule-type: widget\n\nReveal widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar RevealWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nRevealWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nRevealWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tvar tag = this.parseTreeNode.isBlock ? \"div\" : \"span\";\n\tif(this.revealTag && $tw.config.htmlUnsafeElements.indexOf(this.revealTag) === -1) {\n\t\ttag = this.revealTag;\n\t}\n\tvar domNode = this.document.createElement(tag);\n\tvar classes = this[\"class\"].split(\" \") || [];\n\tclasses.push(\"tc-reveal\");\n\tdomNode.className = classes.join(\" \");\n\tif(this.style) {\n\t\tdomNode.setAttribute(\"style\",this.style);\n\t}\n\tparent.insertBefore(domNode,nextSibling);\n\tthis.renderChildren(domNode,null);\n\tif(!domNode.isTiddlyWikiFakeDom && this.type === \"popup\" && this.isOpen) {\n\t\tthis.positionPopup(domNode);\n\t\t$tw.utils.addClass(domNode,\"tc-popup\"); // Make sure that clicks don't dismiss popups within the revealed content\n\t}\n\tif(!this.isOpen) {\n\t\tdomNode.setAttribute(\"hidden\",\"true\");\n\t}\n\tthis.domNodes.push(domNode);\n};\n\nRevealWidget.prototype.positionPopup = function(domNode) {\n\tdomNode.style.position = \"absolute\";\n\tdomNode.style.zIndex = \"1000\";\n\tswitch(this.position) {\n\t\tcase \"left\":\n\t\t\tdomNode.style.left = (this.popup.left - domNode.offsetWidth) + \"px\";\n\t\t\tdomNode.style.top = this.popup.top + \"px\";\n\t\t\tbreak;\n\t\tcase \"above\":\n\t\t\tdomNode.style.left = this.popup.left + \"px\";\n\t\t\tdomNode.style.top = (this.popup.top - 
domNode.offsetHeight) + \"px\";\n\t\t\tbreak;\n\t\tcase \"aboveright\":\n\t\t\tdomNode.style.left = (this.popup.left + this.popup.width) + \"px\";\n\t\t\tdomNode.style.top = (this.popup.top + this.popup.height - domNode.offsetHeight) + \"px\";\n\t\t\tbreak;\n\t\tcase \"right\":\n\t\t\tdomNode.style.left = (this.popup.left + this.popup.width) + \"px\";\n\t\t\tdomNode.style.top = this.popup.top + \"px\";\n\t\t\tbreak;\n\t\tcase \"belowleft\":\n\t\t\tdomNode.style.left = (this.popup.left + this.popup.width - domNode.offsetWidth) + \"px\";\n\t\t\tdomNode.style.top = (this.popup.top + this.popup.height) + \"px\";\n\t\t\tbreak;\n\t\tdefault: // Below\n\t\t\tdomNode.style.left = this.popup.left + \"px\";\n\t\t\tdomNode.style.top = (this.popup.top + this.popup.height) + \"px\";\n\t\t\tbreak;\n\t}\n};\n\n/*\nCompute the internal state of the widget\n*/\nRevealWidget.prototype.execute = function() {\n\t// Get our parameters\n\tthis.state = this.getAttribute(\"state\");\n\tthis.revealTag = this.getAttribute(\"tag\");\n\tthis.type = this.getAttribute(\"type\");\n\tthis.text = this.getAttribute(\"text\");\n\tthis.position = this.getAttribute(\"position\");\n\tthis[\"class\"] = this.getAttribute(\"class\",\"\");\n\tthis.style = this.getAttribute(\"style\",\"\");\n\tthis[\"default\"] = this.getAttribute(\"default\",\"\");\n\tthis.animate = this.getAttribute(\"animate\",\"no\");\n\tthis.retain = this.getAttribute(\"retain\",\"no\");\n\tthis.openAnimation = this.animate === \"no\" ? undefined : \"open\";\n\tthis.closeAnimation = this.animate === \"no\" ? undefined : \"close\";\n\t// Compute the title of the state tiddler and read it\n\tthis.stateTitle = this.state;\n\tthis.readState();\n\t// Construct the child widgets\n\tvar childNodes = this.isOpen ? 
this.parseTreeNode.children : [];\n\tthis.hasChildNodes = this.isOpen;\n\tthis.makeChildWidgets(childNodes);\n};\n\n/*\nRead the state tiddler\n*/\nRevealWidget.prototype.readState = function() {\n\t// Read the information from the state tiddler\n\tvar state = this.stateTitle ? this.wiki.getTextReference(this.stateTitle,this[\"default\"],this.getVariable(\"currentTiddler\")) : this[\"default\"];\n\tswitch(this.type) {\n\t\tcase \"popup\":\n\t\t\tthis.readPopupState(state);\n\t\t\tbreak;\n\t\tcase \"match\":\n\t\t\tthis.readMatchState(state);\n\t\t\tbreak;\n\t\tcase \"nomatch\":\n\t\t\tthis.readMatchState(state);\n\t\t\tthis.isOpen = !this.isOpen;\n\t\t\tbreak;\n\t}\n};\n\nRevealWidget.prototype.readMatchState = function(state) {\n\tthis.isOpen = state === this.text;\n};\n\nRevealWidget.prototype.readPopupState = function(state) {\n\tvar popupLocationRegExp = /^\\((-?[0-9\\.E]+),(-?[0-9\\.E]+),(-?[0-9\\.E]+),(-?[0-9\\.E]+)\\)$/,\n\t\tmatch = popupLocationRegExp.exec(state);\n\t// Check if the state matches the location regexp\n\tif(match) {\n\t\t// If so, we're open\n\t\tthis.isOpen = true;\n\t\t// Get the location\n\t\tthis.popup = {\n\t\t\tleft: parseFloat(match[1]),\n\t\t\ttop: parseFloat(match[2]),\n\t\t\twidth: parseFloat(match[3]),\n\t\t\theight: parseFloat(match[4])\n\t\t};\n\t} else {\n\t\t// If not, we're closed\n\t\tthis.isOpen = false;\n\t}\n};\n\n/*\nSelectively refreshes the widget if needed. 
Returns true if the widget or any of its children needed re-rendering\n*/\nRevealWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.state || changedAttributes.type || changedAttributes.text || changedAttributes.position || changedAttributes[\"default\"] || changedAttributes.animate) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\tvar refreshed = false,\n\t\t\tcurrentlyOpen = this.isOpen;\n\t\tthis.readState();\n\t\tif(this.isOpen !== currentlyOpen) {\n\t\t\tif(this.retain === \"yes\") {\n\t\t\t\tthis.updateState();\n\t\t\t} else {\n\t\t\t\tthis.refreshSelf();\n\t\t\t\trefreshed = true;\n\t\t\t}\n\t\t}\n\t\treturn this.refreshChildren(changedTiddlers) || refreshed;\n\t}\n};\n\n/*\nCalled by refresh() to dynamically show or hide the content\n*/\nRevealWidget.prototype.updateState = function() {\n\t// Read the current state\n\tthis.readState();\n\t// Construct the child nodes if needed\n\tvar domNode = this.domNodes[0];\n\tif(this.isOpen && !this.hasChildNodes) {\n\t\tthis.hasChildNodes = true;\n\t\tthis.makeChildWidgets(this.parseTreeNode.children);\n\t\tthis.renderChildren(domNode,null);\n\t}\n\t// Animate our DOM node\n\tif(!domNode.isTiddlyWikiFakeDom && this.type === \"popup\" && this.isOpen) {\n\t\tthis.positionPopup(domNode);\n\t\t$tw.utils.addClass(domNode,\"tc-popup\"); // Make sure that clicks don't dismiss popups within the revealed content\n\n\t}\n\tif(this.isOpen) {\n\t\tdomNode.removeAttribute(\"hidden\");\n $tw.anim.perform(this.openAnimation,domNode);\n\t} else {\n\t\t$tw.anim.perform(this.closeAnimation,domNode,{callback: function() {\n\t\t\tdomNode.setAttribute(\"hidden\",\"true\");\n }});\n\t}\n};\n\nexports.reveal = RevealWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/reveal.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/scrollable.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/scrollable.js\ntype: application/javascript\nmodule-type: widget\n\nScrollable widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar ScrollableWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n\tthis.scaleFactor = 1;\n\tthis.addEventListeners([\n\t\t{type: \"tm-scroll\", handler: \"handleScrollEvent\"}\n\t]);\n\tif($tw.browser) {\n\t\tthis.requestAnimationFrame = window.requestAnimationFrame ||\n\t\t\twindow.webkitRequestAnimationFrame ||\n\t\t\twindow.mozRequestAnimationFrame ||\n\t\t\tfunction(callback) {\n\t\t\t\treturn window.setTimeout(callback, 1000/60);\n\t\t\t};\n\t\tthis.cancelAnimationFrame = window.cancelAnimationFrame ||\n\t\t\twindow.webkitCancelAnimationFrame ||\n\t\t\twindow.webkitCancelRequestAnimationFrame ||\n\t\t\twindow.mozCancelAnimationFrame ||\n\t\t\twindow.mozCancelRequestAnimationFrame ||\n\t\t\tfunction(id) {\n\t\t\t\twindow.clearTimeout(id);\n\t\t\t};\n\t}\n};\n\n/*\nInherit from the base widget class\n*/\nScrollableWidget.prototype = new Widget();\n\nScrollableWidget.prototype.cancelScroll = function() {\n\tif(this.idRequestFrame) {\n\t\tthis.cancelAnimationFrame.call(window,this.idRequestFrame);\n\t\tthis.idRequestFrame = null;\n\t}\n};\n\n/*\nHandle a scroll event\n*/\nScrollableWidget.prototype.handleScrollEvent = function(event) {\n\t// Pass the scroll event through if our offsetsize is larger than our scrollsize\n\tif(this.outerDomNode.scrollWidth <= this.outerDomNode.offsetWidth && this.outerDomNode.scrollHeight <= this.outerDomNode.offsetHeight && this.fallthrough === \"yes\") {\n\t\treturn true;\n\t}\n\tthis.scrollIntoView(event.target);\n\treturn false; // Handled event\n};\n\n/*\nScroll an element into view\n*/\nScrollableWidget.prototype.scrollIntoView = function(element) {\n\tvar duration = 
$tw.utils.getAnimationDuration();\n\tthis.cancelScroll();\n\tthis.startTime = Date.now();\n\tvar scrollPosition = {\n\t\tx: this.outerDomNode.scrollLeft,\n\t\ty: this.outerDomNode.scrollTop\n\t};\n\t// Get the client bounds of the element and adjust by the scroll position\n\tvar scrollableBounds = this.outerDomNode.getBoundingClientRect(),\n\t\tclientTargetBounds = element.getBoundingClientRect(),\n\t\tbounds = {\n\t\t\tleft: clientTargetBounds.left + scrollPosition.x - scrollableBounds.left,\n\t\t\ttop: clientTargetBounds.top + scrollPosition.y - scrollableBounds.top,\n\t\t\twidth: clientTargetBounds.width,\n\t\t\theight: clientTargetBounds.height\n\t\t};\n\t// We'll consider the horizontal and vertical scroll directions separately via this function\n\tvar getEndPos = function(targetPos,targetSize,currentPos,currentSize) {\n\t\t\t// If the target is already visible then stay where we are\n\t\t\tif(targetPos >= currentPos && (targetPos + targetSize) <= (currentPos + currentSize)) {\n\t\t\t\treturn currentPos;\n\t\t\t// If the target is above/left of the current view, then scroll to its top/left\n\t\t\t} else if(targetPos <= currentPos) {\n\t\t\t\treturn targetPos;\n\t\t\t// If the target is smaller than the window and the scroll position is too far up, then scroll till the target is at the bottom of the window\n\t\t\t} else if(targetSize < currentSize && currentPos < (targetPos + targetSize - currentSize)) {\n\t\t\t\treturn targetPos + targetSize - currentSize;\n\t\t\t// If the target is big, then just scroll to the top\n\t\t\t} else if(currentPos < targetPos) {\n\t\t\t\treturn targetPos;\n\t\t\t// Otherwise, stay where we are\n\t\t\t} else {\n\t\t\t\treturn currentPos;\n\t\t\t}\n\t\t},\n\t\tendX = getEndPos(bounds.left,bounds.width,scrollPosition.x,this.outerDomNode.offsetWidth),\n\t\tendY = getEndPos(bounds.top,bounds.height,scrollPosition.y,this.outerDomNode.offsetHeight);\n\t// Only scroll if necessary\n\tif(endX !== scrollPosition.x || endY !== 
scrollPosition.y) {\n\t\tvar self = this,\n\t\t\tdrawFrame;\n\t\tdrawFrame = function () {\n\t\t\tvar t;\n\t\t\tif(duration <= 0) {\n\t\t\t\tt = 1;\n\t\t\t} else {\n\t\t\t\tt = ((Date.now()) - self.startTime) / duration;\t\n\t\t\t}\n\t\t\tif(t >= 1) {\n\t\t\t\tself.cancelScroll();\n\t\t\t\tt = 1;\n\t\t\t}\n\t\t\tt = $tw.utils.slowInSlowOut(t);\n\t\t\tself.outerDomNode.scrollLeft = scrollPosition.x + (endX - scrollPosition.x) * t;\n\t\t\tself.outerDomNode.scrollTop = scrollPosition.y + (endY - scrollPosition.y) * t;\n\t\t\tif(t < 1) {\n\t\t\t\tself.idRequestFrame = self.requestAnimationFrame.call(window,drawFrame);\n\t\t\t}\n\t\t};\n\t\tdrawFrame();\n\t}\n};\n\n/*\nRender this widget into the DOM\n*/\nScrollableWidget.prototype.render = function(parent,nextSibling) {\n\tvar self = this;\n\t// Remember parent\n\tthis.parentDomNode = parent;\n\t// Compute attributes and execute state\n\tthis.computeAttributes();\n\tthis.execute();\n\t// Create elements\n\tthis.outerDomNode = this.document.createElement(\"div\");\n\t$tw.utils.setStyle(this.outerDomNode,[\n\t\t{overflowY: \"auto\"},\n\t\t{overflowX: \"auto\"},\n\t\t{webkitOverflowScrolling: \"touch\"}\n\t]);\n\tthis.innerDomNode = this.document.createElement(\"div\");\n\tthis.outerDomNode.appendChild(this.innerDomNode);\n\t// Assign classes\n\tthis.outerDomNode.className = this[\"class\"] || \"\";\n\t// Insert element\n\tparent.insertBefore(this.outerDomNode,nextSibling);\n\tthis.renderChildren(this.innerDomNode,null);\n\tthis.domNodes.push(this.outerDomNode);\n};\n\n/*\nCompute the internal state of the widget\n*/\nScrollableWidget.prototype.execute = function() {\n\t// Get attributes\n\tthis.fallthrough = this.getAttribute(\"fallthrough\",\"yes\");\n\tthis[\"class\"] = this.getAttribute(\"class\");\n\t// Make child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. 
Returns true if the widget or any of its children needed re-rendering\n*/\nScrollableWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes[\"class\"]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t}\n\treturn this.refreshChildren(changedTiddlers);\n};\n\nexports.scrollable = ScrollableWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/scrollable.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/select.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/select.js\ntype: application/javascript\nmodule-type: widget\n\nSelect widget:\n\n```\n<$select tiddler=\"MyTiddler\" field=\"text\">\n<$list filter=\"[tag[chapter]]\">\n<option value=<<currentTiddler>>>\n<$view field=\"description\"/>\n</option>\n</$list>\n</$select>\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar SelectWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nSelectWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nSelectWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n\tthis.setSelectValue();\n\t$tw.utils.addEventListeners(this.getSelectDomNode(),[\n\t\t{name: \"change\", handlerObject: this, handlerMethod: \"handleChangeEvent\"}\n\t]);\n};\n\n/*\nHandle a change event\n*/\nSelectWidget.prototype.handleChangeEvent = function(event) {\n\t// Get the new value and assign it to the tiddler\n\tif(this.selectMultiple == false) {\n\t\tvar value = this.getSelectDomNode().value;\n\t} else {\n\t\tvar value = this.getSelectValues()\n\t\t\t\tvalue = $tw.utils.stringifyList(value);\n\t}\n\tthis.wiki.setText(this.selectTitle,this.selectField,this.selectIndex,value);\n\t// Trigger actions\n\tif(this.selectActions) {\n\t\tthis.invokeActionString(this.selectActions,this,event);\n\t}\n};\n\n/*\nIf necessary, set the value of the select element to the current value\n*/\nSelectWidget.prototype.setSelectValue = function() {\n\tvar value = this.selectDefault;\n\t// Get the value\n\tif(this.selectIndex) {\n\t\tvalue = this.wiki.extractTiddlerDataItem(this.selectTitle,this.selectIndex);\n\t} else {\n\t\tvar tiddler = 
this.wiki.getTiddler(this.selectTitle);\n\t\tif(tiddler) {\n\t\t\tif(this.selectField === \"text\") {\n\t\t\t\t// Calling getTiddlerText() triggers lazy loading of skinny tiddlers\n\t\t\t\tvalue = this.wiki.getTiddlerText(this.selectTitle);\n\t\t\t} else {\n\t\t\t\tif($tw.utils.hop(tiddler.fields,this.selectField)) {\n\t\t\t\t\tvalue = tiddler.getFieldString(this.selectField);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tif(this.selectField === \"title\") {\n\t\t\t\tvalue = this.selectTitle;\n\t\t\t}\n\t\t}\n\t}\n\t// Assign it to the select element if it's different than the current value\n\tif (this.selectMultiple) {\n\t\tvalue = value === undefined ? \"\" : value;\n\t\tvar select = this.getSelectDomNode();\n\t\tvar values = Array.isArray(value) ? value : $tw.utils.parseStringArray(value);\n\t\tfor(var i=0; i < select.children.length; i++){\n\t\t\tif(values.indexOf(select.children[i].value) != -1) {\n\t\t\t\tselect.children[i].selected = true;\n\t\t\t}\n\t\t}\n\t\t\n\t} else {\n\t\tvar domNode = this.getSelectDomNode();\n\t\tif(domNode.value !== value) {\n\t\t\tdomNode.value = value;\n\t\t}\n\t}\n};\n\n/*\nGet the DOM node of the select element\n*/\nSelectWidget.prototype.getSelectDomNode = function() {\n\treturn this.children[0].domNodes[0];\n};\n\n// Return an array of the selected opion values\n// select is an HTML select element\nSelectWidget.prototype.getSelectValues = function() {\n\tvar select, result, options, opt;\n\tselect = this.getSelectDomNode();\n\tresult = [];\n\toptions = select && select.options;\n\tfor (var i=0; i<options.length; i++) {\n\t\topt = options[i];\n\t\tif (opt.selected) {\n\t\t\tresult.push(opt.value || opt.text);\n\t\t}\n\t}\n\treturn result;\n}\n\n/*\nCompute the internal state of the widget\n*/\nSelectWidget.prototype.execute = function() {\n\t// Get our parameters\n\tthis.selectActions = this.getAttribute(\"actions\");\n\tthis.selectTitle = this.getAttribute(\"tiddler\",this.getVariable(\"currentTiddler\"));\n\tthis.selectField = 
this.getAttribute(\"field\",\"text\");\n\tthis.selectIndex = this.getAttribute(\"index\");\n\tthis.selectClass = this.getAttribute(\"class\");\n\tthis.selectDefault = this.getAttribute(\"default\");\n\tthis.selectMultiple = this.getAttribute(\"multiple\", false);\n\tthis.selectSize = this.getAttribute(\"size\");\n\t// Make the child widgets\n\tvar selectNode = {\n\t\ttype: \"element\",\n\t\ttag: \"select\",\n\t\tchildren: this.parseTreeNode.children\n\t};\n\tif(this.selectClass) {\n\t\t$tw.utils.addAttributeToParseTreeNode(selectNode,\"class\",this.selectClass);\n\t}\n\tif(this.selectMultiple) {\n\t\t$tw.utils.addAttributeToParseTreeNode(selectNode,\"multiple\",\"multiple\");\n\t}\n\tif(this.selectSize) {\n\t\t$tw.utils.addAttributeToParseTreeNode(selectNode,\"size\",this.selectSize);\n\t}\n\tthis.makeChildWidgets([selectNode]);\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nSelectWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\t// If we're using a different tiddler/field/index then completely refresh ourselves\n\tif(changedAttributes.selectTitle || changedAttributes.selectField || changedAttributes.selectIndex) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t// If the target tiddler value has changed, just update setting and refresh the children\n\t} else {\n\t\tvar childrenRefreshed = this.refreshChildren(changedTiddlers);\n\t\tif(changedTiddlers[this.selectTitle] || childrenRefreshed) {\n\t\t\tthis.setSelectValue();\n\t\t} \n\t\treturn childrenRefreshed;\n\t}\n};\n\nexports.select = SelectWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/select.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/set.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/set.js\ntype: application/javascript\nmodule-type: widget\n\nSet variable widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar SetWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nSetWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nSetWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nSetWidget.prototype.execute = function() {\n\t// Get our parameters\n\tthis.setName = this.getAttribute(\"name\",\"currentTiddler\");\n\tthis.setFilter = this.getAttribute(\"filter\");\n\tthis.setValue = this.getAttribute(\"value\");\n\tthis.setEmptyValue = this.getAttribute(\"emptyValue\");\n\t// Set context variable\n\tthis.setVariable(this.setName,this.getValue(),this.parseTreeNode.params);\n\t// Construct the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nGet the value to be assigned\n*/\nSetWidget.prototype.getValue = function() {\n\tvar value = this.setValue;\n\tif(this.setFilter) {\n\t\tvar results = this.wiki.filterTiddlers(this.setFilter,this);\n\t\tif(!this.setValue) {\n\t\t\tvalue = $tw.utils.stringifyList(results);\n\t\t}\n\t\tif(results.length === 0 && this.setEmptyValue !== undefined) {\n\t\t\tvalue = this.setEmptyValue;\n\t\t}\n\t} else if(!value && this.setEmptyValue) {\n\t\tvalue = this.setEmptyValue;\n\t}\n\treturn value;\n};\n\n/*\nSelectively refreshes the widget if needed. 
Returns true if the widget or any of its children needed re-rendering\n*/\nSetWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.name || changedAttributes.filter || changedAttributes.value || changedAttributes.emptyValue ||\n\t (this.setFilter && this.getValue() != this.variables[this.setName].value)) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn this.refreshChildren(changedTiddlers);\n\t}\n};\n\nexports.setvariable = SetWidget;\nexports.set = SetWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/set.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/text.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/text.js\ntype: application/javascript\nmodule-type: widget\n\nText node widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar TextNodeWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nTextNodeWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nTextNodeWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tvar text = this.getAttribute(\"text\",this.parseTreeNode.text || \"\");\n\ttext = text.replace(/\\r/mg,\"\");\n\tvar textNode = this.document.createTextNode(text);\n\tparent.insertBefore(textNode,nextSibling);\n\tthis.domNodes.push(textNode);\n};\n\n/*\nCompute the internal state of the widget\n*/\nTextNodeWidget.prototype.execute = function() {\n\t// Nothing to do for a text node\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nTextNodeWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.text) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn false;\t\n\t}\n};\n\nexports.text = TextNodeWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/text.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/tiddler.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/tiddler.js\ntype: application/javascript\nmodule-type: widget\n\nTiddler widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar TiddlerWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nTiddlerWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nTiddlerWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nTiddlerWidget.prototype.execute = function() {\n\tthis.tiddlerState = this.computeTiddlerState();\n\tthis.setVariable(\"currentTiddler\",this.tiddlerState.currentTiddler);\n\tthis.setVariable(\"missingTiddlerClass\",this.tiddlerState.missingTiddlerClass);\n\tthis.setVariable(\"shadowTiddlerClass\",this.tiddlerState.shadowTiddlerClass);\n\tthis.setVariable(\"systemTiddlerClass\",this.tiddlerState.systemTiddlerClass);\n\tthis.setVariable(\"tiddlerTagClasses\",this.tiddlerState.tiddlerTagClasses);\n\t// Construct the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nCompute the tiddler state flags\n*/\nTiddlerWidget.prototype.computeTiddlerState = function() {\n\t// Get our parameters\n\tthis.tiddlerTitle = this.getAttribute(\"tiddler\",this.getVariable(\"currentTiddler\"));\n\t// Compute the state\n\tvar state = {\n\t\tcurrentTiddler: this.tiddlerTitle || \"\",\n\t\tmissingTiddlerClass: (this.wiki.tiddlerExists(this.tiddlerTitle) || this.wiki.isShadowTiddler(this.tiddlerTitle)) ? \"tc-tiddler-exists\" : \"tc-tiddler-missing\",\n\t\tshadowTiddlerClass: this.wiki.isShadowTiddler(this.tiddlerTitle) ? \"tc-tiddler-shadow\" : \"\",\n\t\tsystemTiddlerClass: this.wiki.isSystemTiddler(this.tiddlerTitle) ? 
\"tc-tiddler-system\" : \"\",\n\t\ttiddlerTagClasses: this.getTagClasses()\n\t};\n\t// Compute a simple hash to make it easier to detect changes\n\tstate.hash = state.currentTiddler + state.missingTiddlerClass + state.shadowTiddlerClass + state.systemTiddlerClass + state.tiddlerTagClasses;\n\treturn state;\n};\n\n/*\nCreate a string of CSS classes derived from the tags of the current tiddler\n*/\nTiddlerWidget.prototype.getTagClasses = function() {\n\tvar tiddler = this.wiki.getTiddler(this.tiddlerTitle);\n\tif(tiddler) {\n\t\tvar tags = [];\n\t\t$tw.utils.each(tiddler.fields.tags,function(tag) {\n\t\t\ttags.push(\"tc-tagged-\" + encodeURIComponent(tag));\n\t\t});\n\t\treturn tags.join(\" \");\n\t} else {\n\t\treturn \"\";\n\t}\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nTiddlerWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes(),\n\t\tnewTiddlerState = this.computeTiddlerState();\n\tif(changedAttributes.tiddler || newTiddlerState.hash !== this.tiddlerState.hash) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn this.refreshChildren(changedTiddlers);\t\t\n\t}\n};\n\nexports.tiddler = TiddlerWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/tiddler.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/transclude.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/transclude.js\ntype: application/javascript\nmodule-type: widget\n\nTransclude widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar TranscludeWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nTranscludeWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nTranscludeWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nTranscludeWidget.prototype.execute = function() {\n\t// Get our parameters\n\tthis.transcludeTitle = this.getAttribute(\"tiddler\",this.getVariable(\"currentTiddler\"));\n\tthis.transcludeSubTiddler = this.getAttribute(\"subtiddler\");\n\tthis.transcludeField = this.getAttribute(\"field\");\n\tthis.transcludeIndex = this.getAttribute(\"index\");\n\tthis.transcludeMode = this.getAttribute(\"mode\");\n\t// Parse the text reference\n\tvar parseAsInline = !this.parseTreeNode.isBlock;\n\tif(this.transcludeMode === \"inline\") {\n\t\tparseAsInline = true;\n\t} else if(this.transcludeMode === \"block\") {\n\t\tparseAsInline = false;\n\t}\n\tvar parser = this.wiki.parseTextReference(\n\t\t\t\t\t\tthis.transcludeTitle,\n\t\t\t\t\t\tthis.transcludeField,\n\t\t\t\t\t\tthis.transcludeIndex,\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tparseAsInline: parseAsInline,\n\t\t\t\t\t\t\tsubTiddler: this.transcludeSubTiddler\n\t\t\t\t\t\t}),\n\t\tparseTreeNodes = parser ? 
parser.tree : this.parseTreeNode.children;\n\t// Set context variables for recursion detection\n\tvar recursionMarker = this.makeRecursionMarker();\n\tthis.setVariable(\"transclusion\",recursionMarker);\n\t// Check for recursion\n\tif(parser) {\n\t\tif(this.parentWidget && this.parentWidget.hasVariable(\"transclusion\",recursionMarker)) {\n\t\t\tparseTreeNodes = [{type: \"element\", tag: \"span\", attributes: {\n\t\t\t\t\"class\": {type: \"string\", value: \"tc-error\"}\n\t\t\t}, children: [\n\t\t\t\t{type: \"text\", text: $tw.language.getString(\"Error/RecursiveTransclusion\")}\n\t\t\t]}];\n\t\t}\n\t}\n\t// Construct the child widgets\n\tthis.makeChildWidgets(parseTreeNodes);\n};\n\n/*\nCompose a string comprising the title, field and/or index to identify this transclusion for recursion detection\n*/\nTranscludeWidget.prototype.makeRecursionMarker = function() {\n\tvar output = [];\n\toutput.push(\"{\");\n\toutput.push(this.getVariable(\"currentTiddler\",{defaultValue: \"\"}));\n\toutput.push(\"|\");\n\toutput.push(this.transcludeTitle || \"\");\n\toutput.push(\"|\");\n\toutput.push(this.transcludeField || \"\");\n\toutput.push(\"|\");\n\toutput.push(this.transcludeIndex || \"\");\n\toutput.push(\"|\");\n\toutput.push(this.transcludeSubTiddler || \"\");\n\toutput.push(\"}\");\n\treturn output.join(\"\");\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nTranscludeWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.tiddler || changedAttributes.field || changedAttributes.index || changedTiddlers[this.transcludeTitle]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn this.refreshChildren(changedTiddlers);\t\t\n\t}\n};\n\nexports.transclude = TranscludeWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/transclude.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/vars.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/vars.js\ntype: application/javascript\nmodule-type: widget\n\nThis widget allows multiple variables to be set in one go:\n\n```\n\\define helloworld() Hello world!\n<$vars greeting=\"Hi\" me={{!!title}} sentence=<<helloworld>>>\n <<greeting>>! I am <<me>> and I say: <<sentence>>\n</$vars>\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar VarsWidget = function(parseTreeNode,options) {\n\t// Call the constructor\n\tWidget.call(this);\n\t// Initialise\t\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nVarsWidget.prototype = Object.create(Widget.prototype);\n\n/*\nRender this widget into the DOM\n*/\nVarsWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nVarsWidget.prototype.execute = function() {\n\t// Parse variables\n\tvar self = this;\n\t$tw.utils.each(this.attributes,function(val,key) {\n\t\tif(key.charAt(0) !== \"$\") {\n\t\t\tself.setVariable(key,val);\n\t\t}\n\t});\n\t// Construct the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nRefresh the widget by ensuring our attributes are up to date\n*/\nVarsWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(Object.keys(changedAttributes).length) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t}\n\treturn this.refreshChildren(changedTiddlers);\n};\n\nexports[\"vars\"] = VarsWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/vars.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/view.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/view.js\ntype: application/javascript\nmodule-type: widget\n\nView widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar ViewWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nViewWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nViewWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tif(this.text) {\n\t\tvar textNode = this.document.createTextNode(this.text);\n\t\tparent.insertBefore(textNode,nextSibling);\n\t\tthis.domNodes.push(textNode);\n\t} else {\n\t\tthis.makeChildWidgets();\n\t\tthis.renderChildren(parent,nextSibling);\n\t}\n};\n\n/*\nCompute the internal state of the widget\n*/\nViewWidget.prototype.execute = function() {\n\t// Get parameters from our attributes\n\tthis.viewTitle = this.getAttribute(\"tiddler\",this.getVariable(\"currentTiddler\"));\n\tthis.viewSubtiddler = this.getAttribute(\"subtiddler\");\n\tthis.viewField = this.getAttribute(\"field\",\"text\");\n\tthis.viewIndex = this.getAttribute(\"index\");\n\tthis.viewFormat = this.getAttribute(\"format\",\"text\");\n\tthis.viewTemplate = this.getAttribute(\"template\",\"\");\n\tswitch(this.viewFormat) {\n\t\tcase \"htmlwikified\":\n\t\t\tthis.text = this.getValueAsHtmlWikified();\n\t\t\tbreak;\n\t\tcase \"plainwikified\":\n\t\t\tthis.text = this.getValueAsPlainWikified();\n\t\t\tbreak;\n\t\tcase \"htmlencodedplainwikified\":\n\t\t\tthis.text = this.getValueAsHtmlEncodedPlainWikified();\n\t\t\tbreak;\n\t\tcase \"htmlencoded\":\n\t\t\tthis.text = this.getValueAsHtmlEncoded();\n\t\t\tbreak;\n\t\tcase \"urlencoded\":\n\t\t\tthis.text = this.getValueAsUrlEncoded();\n\t\t\tbreak;\n\t\tcase 
\"doubleurlencoded\":\n\t\t\tthis.text = this.getValueAsDoubleUrlEncoded();\n\t\t\tbreak;\n\t\tcase \"date\":\n\t\t\tthis.text = this.getValueAsDate(this.viewTemplate);\n\t\t\tbreak;\n\t\tcase \"relativedate\":\n\t\t\tthis.text = this.getValueAsRelativeDate();\n\t\t\tbreak;\n\t\tcase \"stripcomments\":\n\t\t\tthis.text = this.getValueAsStrippedComments();\n\t\t\tbreak;\n\t\tcase \"jsencoded\":\n\t\t\tthis.text = this.getValueAsJsEncoded();\n\t\t\tbreak;\n\t\tdefault: // \"text\"\n\t\t\tthis.text = this.getValueAsText();\n\t\t\tbreak;\n\t}\n};\n\n/*\nThe various formatter functions are baked into this widget for the moment. Eventually they will be replaced by macro functions\n*/\n\n/*\nRetrieve the value of the widget. Options are:\nasString: Optionally return the value as a string\n*/\nViewWidget.prototype.getValue = function(options) {\n\toptions = options || {};\n\tvar value = options.asString ? \"\" : undefined;\n\tif(this.viewIndex) {\n\t\tvalue = this.wiki.extractTiddlerDataItem(this.viewTitle,this.viewIndex);\n\t} else {\n\t\tvar tiddler;\n\t\tif(this.viewSubtiddler) {\n\t\t\ttiddler = this.wiki.getSubTiddler(this.viewTitle,this.viewSubtiddler);\t\n\t\t} else {\n\t\t\ttiddler = this.wiki.getTiddler(this.viewTitle);\n\t\t}\n\t\tif(tiddler) {\n\t\t\tif(this.viewField === \"text\" && !this.viewSubtiddler) {\n\t\t\t\t// Calling getTiddlerText() triggers lazy loading of skinny tiddlers\n\t\t\t\tvalue = this.wiki.getTiddlerText(this.viewTitle);\n\t\t\t} else {\n\t\t\t\tif($tw.utils.hop(tiddler.fields,this.viewField)) {\n\t\t\t\t\tif(options.asString) {\n\t\t\t\t\t\tvalue = tiddler.getFieldString(this.viewField);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tvalue = tiddler.fields[this.viewField];\t\t\t\t\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tif(this.viewField === \"title\") {\n\t\t\t\tvalue = this.viewTitle;\n\t\t\t}\n\t\t}\n\t}\n\treturn value;\n};\n\nViewWidget.prototype.getValueAsText = function() {\n\treturn this.getValue({asString: 
true});\n};\n\nViewWidget.prototype.getValueAsHtmlWikified = function() {\n\treturn this.wiki.renderText(\"text/html\",\"text/vnd.tiddlywiki\",this.getValueAsText(),{parentWidget: this});\n};\n\nViewWidget.prototype.getValueAsPlainWikified = function() {\n\treturn this.wiki.renderText(\"text/plain\",\"text/vnd.tiddlywiki\",this.getValueAsText(),{parentWidget: this});\n};\n\nViewWidget.prototype.getValueAsHtmlEncodedPlainWikified = function() {\n\treturn $tw.utils.htmlEncode(this.wiki.renderText(\"text/plain\",\"text/vnd.tiddlywiki\",this.getValueAsText(),{parentWidget: this}));\n};\n\nViewWidget.prototype.getValueAsHtmlEncoded = function() {\n\treturn $tw.utils.htmlEncode(this.getValueAsText());\n};\n\nViewWidget.prototype.getValueAsUrlEncoded = function() {\n\treturn encodeURIComponent(this.getValueAsText());\n};\n\nViewWidget.prototype.getValueAsDoubleUrlEncoded = function() {\n\treturn encodeURIComponent(encodeURIComponent(this.getValueAsText()));\n};\n\nViewWidget.prototype.getValueAsDate = function(format) {\n\tformat = format || \"YYYY MM DD 0hh:0mm\";\n\tvar value = $tw.utils.parseDate(this.getValue());\n\tif(value && $tw.utils.isDate(value) && value.toString() !== \"Invalid Date\") {\n\t\treturn $tw.utils.formatDateString(value,format);\n\t} else {\n\t\treturn \"\";\n\t}\n};\n\nViewWidget.prototype.getValueAsRelativeDate = function(format) {\n\tvar value = $tw.utils.parseDate(this.getValue());\n\tif(value && $tw.utils.isDate(value) && value.toString() !== \"Invalid Date\") {\n\t\treturn $tw.utils.getRelativeDate((new Date()) - (new Date(value))).description;\n\t} else {\n\t\treturn \"\";\n\t}\n};\n\nViewWidget.prototype.getValueAsStrippedComments = function() {\n\tvar lines = this.getValueAsText().split(\"\\n\"),\n\t\tout = [];\n\tfor(var line=0; line<lines.length; line++) {\n\t\tvar text = lines[line];\n\t\tif(!/^\\s*\\/\\/#/.test(text)) {\n\t\t\tout.push(text);\n\t\t}\n\t}\n\treturn out.join(\"\\n\");\n};\n\nViewWidget.prototype.getValueAsJsEncoded = 
function() {\n\treturn $tw.utils.stringify(this.getValueAsText());\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nViewWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.tiddler || changedAttributes.field || changedAttributes.index || changedAttributes.template || changedAttributes.format || changedTiddlers[this.viewTitle]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn false;\t\n\t}\n};\n\nexports.view = ViewWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/view.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/widget.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/widget.js\ntype: application/javascript\nmodule-type: widget\n\nWidget base class\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nCreate a widget object for a parse tree node\n\tparseTreeNode: reference to the parse tree node to be rendered\n\toptions: see below\nOptions include:\n\twiki: mandatory reference to wiki associated with this render tree\n\tparentWidget: optional reference to a parent renderer node for the context chain\n\tdocument: optional document object to use instead of global document\n*/\nvar Widget = function(parseTreeNode,options) {\n\tif(arguments.length > 0) {\n\t\tthis.initialise(parseTreeNode,options);\n\t}\n};\n\n/*\nInitialise widget properties. These steps are pulled out of the constructor so that we can reuse them in subclasses\n*/\nWidget.prototype.initialise = function(parseTreeNode,options) {\n\toptions = options || {};\n\t// Save widget info\n\tthis.parseTreeNode = parseTreeNode;\n\tthis.wiki = options.wiki;\n\tthis.parentWidget = options.parentWidget;\n\tthis.variablesConstructor = function() {};\n\tthis.variablesConstructor.prototype = this.parentWidget ? 
this.parentWidget.variables : {};\n\tthis.variables = new this.variablesConstructor();\n\tthis.document = options.document;\n\tthis.attributes = {};\n\tthis.children = [];\n\tthis.domNodes = [];\n\tthis.eventListeners = {};\n\t// Hashmap of the widget classes\n\tif(!this.widgetClasses) {\n\t\tWidget.prototype.widgetClasses = $tw.modules.applyMethods(\"widget\");\n\t}\n};\n\n/*\nRender this widget into the DOM\n*/\nWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nWidget.prototype.execute = function() {\n\tthis.makeChildWidgets();\n};\n\n/*\nSet the value of a context variable\nname: name of the variable\nvalue: value of the variable\nparams: array of {name:, default:} for each parameter\n*/\nWidget.prototype.setVariable = function(name,value,params) {\n\tthis.variables[name] = {value: value, params: params};\n};\n\n/*\nGet the prevailing value of a context variable\nname: name of variable\noptions: see below\nOptions include\nparams: array of {name:, value:} for each parameter\ndefaultValue: default value if the variable is not defined\n*/\nWidget.prototype.getVariable = function(name,options) {\n\toptions = options || {};\n\tvar actualParams = options.params || [],\n\t\tparentWidget = this.parentWidget;\n\t// Check for the variable defined in the parent widget (or an ancestor in the prototype chain)\n\tif(parentWidget && name in parentWidget.variables) {\n\t\tvar variable = parentWidget.variables[name],\n\t\t\tvalue = variable.value;\n\t\t// Substitute any parameters specified in the definition\n\t\tvalue = this.substituteVariableParameters(value,variable.params,actualParams);\n\t\tvalue = this.substituteVariableReferences(value);\n\t\treturn value;\n\t}\n\t// If the variable doesn't exist in the parent widget then look for a macro module\n\treturn 
this.evaluateMacroModule(name,actualParams,options.defaultValue);\n};\n\nWidget.prototype.substituteVariableParameters = function(text,formalParams,actualParams) {\n\tif(formalParams) {\n\t\tvar nextAnonParameter = 0, // Next candidate anonymous parameter in macro call\n\t\t\tparamInfo, paramValue;\n\t\t// Step through each of the parameters in the macro definition\n\t\tfor(var p=0; p<formalParams.length; p++) {\n\t\t\t// Check if we've got a macro call parameter with the same name\n\t\t\tparamInfo = formalParams[p];\n\t\t\tparamValue = undefined;\n\t\t\tfor(var m=0; m<actualParams.length; m++) {\n\t\t\t\tif(actualParams[m].name === paramInfo.name) {\n\t\t\t\t\tparamValue = actualParams[m].value;\n\t\t\t\t}\n\t\t\t}\n\t\t\t// If not, use the next available anonymous macro call parameter\n\t\t\twhile(nextAnonParameter < actualParams.length && actualParams[nextAnonParameter].name) {\n\t\t\t\tnextAnonParameter++;\n\t\t\t}\n\t\t\tif(paramValue === undefined && nextAnonParameter < actualParams.length) {\n\t\t\t\tparamValue = actualParams[nextAnonParameter++].value;\n\t\t\t}\n\t\t\t// If we've still not got a value, use the default, if any\n\t\t\tparamValue = paramValue || paramInfo[\"default\"] || \"\";\n\t\t\t// Replace any instances of this parameter\n\t\t\ttext = text.replace(new RegExp(\"\\\\$\" + $tw.utils.escapeRegExp(paramInfo.name) + \"\\\\$\",\"mg\"),paramValue);\n\t\t}\n\t}\n\treturn text;\n};\n\nWidget.prototype.substituteVariableReferences = function(text) {\n\tvar self = this;\n\treturn (text || \"\").replace(/\\$\\(([^\\)\\$]+)\\)\\$/g,function(match,p1,offset,string) {\n\t\treturn self.getVariable(p1,{defaultValue: \"\"});\n\t});\n};\n\nWidget.prototype.evaluateMacroModule = function(name,actualParams,defaultValue) {\n\tif($tw.utils.hop($tw.macros,name)) {\n\t\tvar macro = $tw.macros[name],\n\t\t\targs = [];\n\t\tif(macro.params.length > 0) {\n\t\t\tvar nextAnonParameter = 0, // Next candidate anonymous parameter in macro call\n\t\t\t\tparamInfo, 
paramValue;\n\t\t\t// Step through each of the parameters in the macro definition\n\t\t\tfor(var p=0; p<macro.params.length; p++) {\n\t\t\t\t// Check if we've got a macro call parameter with the same name\n\t\t\t\tparamInfo = macro.params[p];\n\t\t\t\tparamValue = undefined;\n\t\t\t\tfor(var m=0; m<actualParams.length; m++) {\n\t\t\t\t\tif(actualParams[m].name === paramInfo.name) {\n\t\t\t\t\t\tparamValue = actualParams[m].value;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// If not, use the next available anonymous macro call parameter\n\t\t\t\twhile(nextAnonParameter < actualParams.length && actualParams[nextAnonParameter].name) {\n\t\t\t\t\tnextAnonParameter++;\n\t\t\t\t}\n\t\t\t\tif(paramValue === undefined && nextAnonParameter < actualParams.length) {\n\t\t\t\t\tparamValue = actualParams[nextAnonParameter++].value;\n\t\t\t\t}\n\t\t\t\t// If we've still not got a value, use the default, if any\n\t\t\t\tparamValue = paramValue || paramInfo[\"default\"] || \"\";\n\t\t\t\t// Save the parameter\n\t\t\t\targs.push(paramValue);\n\t\t\t}\n\t\t}\n\t\telse for(var i=0; i<actualParams.length; ++i) {\n\t\t\targs.push(actualParams[i].value);\n\t\t}\n\t\treturn (macro.run.apply(this,args) || \"\").toString();\n\t} else {\n\t\treturn defaultValue;\n\t}\n};\n\n/*\nCheck whether a given context variable value exists in the parent chain\n*/\nWidget.prototype.hasVariable = function(name,value) {\n\tvar node = this;\n\twhile(node) {\n\t\tif($tw.utils.hop(node.variables,name) && node.variables[name].value === value) {\n\t\t\treturn true;\n\t\t}\n\t\tnode = node.parentWidget;\n\t}\n\treturn false;\n};\n\n/*\nConstruct a qualifying string based on a hash of concatenating the values of a given variable in the parent chain\n*/\nWidget.prototype.getStateQualifier = function(name) {\n\tthis.qualifiers = this.qualifiers || Object.create(null);\n\tname = name || \"transclusion\";\n\tif(this.qualifiers[name]) {\n\t\treturn this.qualifiers[name];\n\t} else {\n\t\tvar output = [],\n\t\t\tnode = 
this;\n\t\twhile(node && node.parentWidget) {\n\t\t\tif($tw.utils.hop(node.parentWidget.variables,name)) {\n\t\t\t\toutput.push(node.getVariable(name));\n\t\t\t}\n\t\t\tnode = node.parentWidget;\n\t\t}\n\t\tvar value = $tw.utils.hashString(output.join(\"\"));\n\t\tthis.qualifiers[name] = value;\n\t\treturn value;\n\t}\n};\n\n/*\nCompute the current values of the attributes of the widget. Returns a hashmap of the names of the attributes that have changed\n*/\nWidget.prototype.computeAttributes = function() {\n\tvar changedAttributes = {},\n\t\tself = this,\n\t\tvalue;\n\t$tw.utils.each(this.parseTreeNode.attributes,function(attribute,name) {\n\t\tif(attribute.type === \"indirect\") {\n\t\t\tvalue = self.wiki.getTextReference(attribute.textReference,\"\",self.getVariable(\"currentTiddler\"));\n\t\t} else if(attribute.type === \"macro\") {\n\t\t\tvalue = self.getVariable(attribute.value.name,{params: attribute.value.params});\n\t\t} else { // String attribute\n\t\t\tvalue = attribute.value;\n\t\t}\n\t\t// Check whether the attribute has changed\n\t\tif(self.attributes[name] !== value) {\n\t\t\tself.attributes[name] = value;\n\t\t\tchangedAttributes[name] = true;\n\t\t}\n\t});\n\treturn changedAttributes;\n};\n\n/*\nCheck for the presence of an attribute\n*/\nWidget.prototype.hasAttribute = function(name) {\n\treturn $tw.utils.hop(this.attributes,name);\n};\n\n/*\nGet the value of an attribute\n*/\nWidget.prototype.getAttribute = function(name,defaultText) {\n\tif($tw.utils.hop(this.attributes,name)) {\n\t\treturn this.attributes[name];\n\t} else {\n\t\treturn defaultText;\n\t}\n};\n\n/*\nAssign the computed attributes of the widget to a domNode\noptions include:\nexcludeEventAttributes: ignores attributes whose name begins with \"on\"\n*/\nWidget.prototype.assignAttributes = function(domNode,options) {\n\toptions = options || {};\n\tvar self = this;\n\t$tw.utils.each(this.attributes,function(v,a) {\n\t\t// Check exclusions\n\t\tif(options.excludeEventAttributes && 
a.substr(0,2) === \"on\") {\n\t\t\tv = undefined;\n\t\t}\n\t\tif(v !== undefined) {\n\t\t\tvar b = a.split(\":\");\n\t\t\t// Setting certain attributes can cause a DOM error (eg xmlns on the svg element)\n\t\t\ttry {\n\t\t\t\tif (b.length == 2 && b[0] == \"xlink\"){\n\t\t\t\t\tdomNode.setAttributeNS(\"http://www.w3.org/1999/xlink\",b[1],v);\n\t\t\t\t} else {\n\t\t\t\t\tdomNode.setAttributeNS(null,a,v);\n\t\t\t\t}\n\t\t\t} catch(e) {\n\t\t\t}\n\t\t}\n\t});\n};\n\n/*\nMake child widgets correspondng to specified parseTreeNodes\n*/\nWidget.prototype.makeChildWidgets = function(parseTreeNodes) {\n\tthis.children = [];\n\tvar self = this;\n\t$tw.utils.each(parseTreeNodes || (this.parseTreeNode && this.parseTreeNode.children),function(childNode) {\n\t\tself.children.push(self.makeChildWidget(childNode));\n\t});\n};\n\n/*\nConstruct the widget object for a parse tree node\n*/\nWidget.prototype.makeChildWidget = function(parseTreeNode) {\n\tvar WidgetClass = this.widgetClasses[parseTreeNode.type];\n\tif(!WidgetClass) {\n\t\tWidgetClass = this.widgetClasses.text;\n\t\tparseTreeNode = {type: \"text\", text: \"Undefined widget '\" + parseTreeNode.type + \"'\"};\n\t}\n\treturn new WidgetClass(parseTreeNode,{\n\t\twiki: this.wiki,\n\t\tvariables: {},\n\t\tparentWidget: this,\n\t\tdocument: this.document\n\t});\n};\n\n/*\nGet the next sibling of this widget\n*/\nWidget.prototype.nextSibling = function() {\n\tif(this.parentWidget) {\n\t\tvar index = this.parentWidget.children.indexOf(this);\n\t\tif(index !== -1 && index < this.parentWidget.children.length-1) {\n\t\t\treturn this.parentWidget.children[index+1];\n\t\t}\n\t}\n\treturn null;\n};\n\n/*\nGet the previous sibling of this widget\n*/\nWidget.prototype.previousSibling = function() {\n\tif(this.parentWidget) {\n\t\tvar index = this.parentWidget.children.indexOf(this);\n\t\tif(index !== -1 && index > 0) {\n\t\t\treturn this.parentWidget.children[index-1];\n\t\t}\n\t}\n\treturn null;\n};\n\n/*\nRender the children of this 
widget into the DOM\n*/\nWidget.prototype.renderChildren = function(parent,nextSibling) {\n\t$tw.utils.each(this.children,function(childWidget) {\n\t\tchildWidget.render(parent,nextSibling);\n\t});\n};\n\n/*\nAdd a list of event listeners from an array [{type:,handler:},...]\n*/\nWidget.prototype.addEventListeners = function(listeners) {\n\tvar self = this;\n\t$tw.utils.each(listeners,function(listenerInfo) {\n\t\tself.addEventListener(listenerInfo.type,listenerInfo.handler);\n\t});\n};\n\n/*\nAdd an event listener\n*/\nWidget.prototype.addEventListener = function(type,handler) {\n\tvar self = this;\n\tif(typeof handler === \"string\") { // The handler is a method name on this widget\n\t\tthis.eventListeners[type] = function(event) {\n\t\t\treturn self[handler].call(self,event);\n\t\t};\n\t} else { // The handler is a function\n\t\tthis.eventListeners[type] = function(event) {\n\t\t\treturn handler.call(self,event);\n\t\t};\n\t}\n};\n\n/*\nDispatch an event to a widget. If the widget doesn't handle the event then it is also dispatched to the parent widget\n*/\nWidget.prototype.dispatchEvent = function(event) {\n\t// Dispatch the event if this widget handles it\n\tvar listener = this.eventListeners[event.type];\n\tif(listener) {\n\t\t// Don't propagate the event if the listener returned false\n\t\tif(!listener(event)) {\n\t\t\treturn false;\n\t\t}\n\t}\n\t// Dispatch the event to the parent widget\n\tif(this.parentWidget) {\n\t\treturn this.parentWidget.dispatchEvent(event);\n\t}\n\treturn true;\n};\n\n/*\nSelectively refreshes the widget if needed. 
Returns true if the widget or any of its children needed re-rendering\n*/\nWidget.prototype.refresh = function(changedTiddlers) {\n\treturn this.refreshChildren(changedTiddlers);\n};\n\n/*\nRebuild a previously rendered widget\n*/\nWidget.prototype.refreshSelf = function() {\n\tvar nextSibling = this.findNextSiblingDomNode();\n\tthis.removeChildDomNodes();\n\tthis.render(this.parentDomNode,nextSibling);\n};\n\n/*\nRefresh all the children of a widget\n*/\nWidget.prototype.refreshChildren = function(changedTiddlers) {\n\tvar self = this,\n\t\trefreshed = false;\n\t$tw.utils.each(this.children,function(childWidget) {\n\t\trefreshed = childWidget.refresh(changedTiddlers) || refreshed;\n\t});\n\treturn refreshed;\n};\n\n/*\nFind the next sibling in the DOM to this widget. This is done by scanning the widget tree through all next siblings and their descendents that share the same parent DOM node\n*/\nWidget.prototype.findNextSiblingDomNode = function(startIndex) {\n\t// Refer to this widget by its index within its parents children\n\tvar parent = this.parentWidget,\n\t\tindex = startIndex !== undefined ? 
startIndex : parent.children.indexOf(this);\nif(index === -1) {\n\tthrow \"node not found in parents children\";\n}\n\t// Look for a DOM node in the later siblings\n\twhile(++index < parent.children.length) {\n\t\tvar domNode = parent.children[index].findFirstDomNode();\n\t\tif(domNode) {\n\t\t\treturn domNode;\n\t\t}\n\t}\n\t// Go back and look for later siblings of our parent if it has the same parent dom node\n\tvar grandParent = parent.parentWidget;\n\tif(grandParent && parent.parentDomNode === this.parentDomNode) {\n\t\tindex = grandParent.children.indexOf(parent);\n\t\tif(index !== -1) {\n\t\t\treturn parent.findNextSiblingDomNode(index);\n\t\t}\n\t}\n\treturn null;\n};\n\n/*\nFind the first DOM node generated by a widget or its children\n*/\nWidget.prototype.findFirstDomNode = function() {\n\t// Return the first dom node of this widget, if we've got one\n\tif(this.domNodes.length > 0) {\n\t\treturn this.domNodes[0];\n\t}\n\t// Otherwise, recursively call our children\n\tfor(var t=0; t<this.children.length; t++) {\n\t\tvar domNode = this.children[t].findFirstDomNode();\n\t\tif(domNode) {\n\t\t\treturn domNode;\n\t\t}\n\t}\n\treturn null;\n};\n\n/*\nRemove any DOM nodes created by this widget or its children\n*/\nWidget.prototype.removeChildDomNodes = function() {\n\t// If this widget has directly created DOM nodes, delete them and exit. 
This assumes that any child widgets are contained within the created DOM nodes, which would normally be the case\n\tif(this.domNodes.length > 0) {\n\t\t$tw.utils.each(this.domNodes,function(domNode) {\n\t\t\tdomNode.parentNode.removeChild(domNode);\n\t\t});\n\t\tthis.domNodes = [];\n\t} else {\n\t\t// Otherwise, ask the child widgets to delete their DOM nodes\n\t\t$tw.utils.each(this.children,function(childWidget) {\n\t\t\tchildWidget.removeChildDomNodes();\n\t\t});\n\t}\n};\n\n/*\nInvoke the action widgets that are descendents of the current widget.\n*/\nWidget.prototype.invokeActions = function(triggeringWidget,event) {\n\tvar handled = false;\n\t// For each child widget\n\tfor(var t=0; t<this.children.length; t++) {\n\t\tvar child = this.children[t];\n\t\t// Invoke the child if it is an action widget\n\t\tif(child.invokeAction && child.invokeAction(triggeringWidget,event)) {\n\t\t\thandled = true;\n\t\t}\n\t\t// Propagate through through the child if it permits it\n\t\tif(child.allowActionPropagation() && child.invokeActions(triggeringWidget,event)) {\n\t\t\thandled = true;\n\t\t}\n\t}\n\treturn handled;\n};\n\n/*\nInvoke the action widgets defined in a string\n*/\nWidget.prototype.invokeActionString = function(actions,triggeringWidget,event) {\n\tactions = actions || \"\";\n\tvar parser = this.wiki.parseText(\"text/vnd.tiddlywiki\",actions,{\n\t\t\tparentWidget: this,\n\t\t\tdocument: this.document\n\t\t}),\n\t\twidgetNode = this.wiki.makeWidget(parser,{\n\t\t\tparentWidget: this,\n\t\t\tdocument: this.document\n\t\t});\n\tvar container = this.document.createElement(\"div\");\n\twidgetNode.render(container,null);\n\treturn widgetNode.invokeActions(this,event);\n};\n\nWidget.prototype.allowActionPropagation = function() {\n\treturn true;\n};\n\nexports.widget = Widget;\n\n})();\n",
"title": "$:/core/modules/widgets/widget.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/wikify.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/wikify.js\ntype: application/javascript\nmodule-type: widget\n\nWidget to wikify text into a variable\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar WikifyWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nWikifyWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nWikifyWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nWikifyWidget.prototype.execute = function() {\n\t// Get our parameters\n\tthis.wikifyName = this.getAttribute(\"name\");\n\tthis.wikifyText = this.getAttribute(\"text\");\n\tthis.wikifyType = this.getAttribute(\"type\");\n\tthis.wikifyMode = this.getAttribute(\"mode\",\"block\");\n\tthis.wikifyOutput = this.getAttribute(\"output\",\"text\");\n\t// Create the parse tree\n\tthis.wikifyParser = this.wiki.parseText(this.wikifyType,this.wikifyText,{\n\t\t\tparseAsInline: this.wikifyMode === \"inline\"\n\t\t});\n\t// Create the widget tree \n\tthis.wikifyWidgetNode = this.wiki.makeWidget(this.wikifyParser,{\n\t\t\tdocument: $tw.fakeDocument,\n\t\t\tparentWidget: this\n\t\t});\n\t// Render the widget tree to the container\n\tthis.wikifyContainer = $tw.fakeDocument.createElement(\"div\");\n\tthis.wikifyWidgetNode.render(this.wikifyContainer,null);\n\tthis.wikifyResult = this.getResult();\n\t// Set context variable\n\tthis.setVariable(this.wikifyName,this.wikifyResult);\n\t// Construct the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nReturn the result string\n*/\nWikifyWidget.prototype.getResult = function() {\n\tvar result;\n\tswitch(this.wikifyOutput) {\n\t\tcase 
\"text\":\n\t\t\tresult = this.wikifyContainer.textContent;\n\t\t\tbreak;\n\t\tcase \"html\":\n\t\t\tresult = this.wikifyContainer.innerHTML;\n\t\t\tbreak;\n\t\tcase \"parsetree\":\n\t\t\tresult = JSON.stringify(this.wikifyParser.tree,0,$tw.config.preferences.jsonSpaces);\n\t\t\tbreak;\n\t\tcase \"widgettree\":\n\t\t\tresult = JSON.stringify(this.getWidgetTree(),0,$tw.config.preferences.jsonSpaces);\n\t\t\tbreak;\n\t}\n\treturn result;\n};\n\n/*\nReturn a string of the widget tree\n*/\nWikifyWidget.prototype.getWidgetTree = function() {\n\tvar copyNode = function(widgetNode,resultNode) {\n\t\t\tvar type = widgetNode.parseTreeNode.type;\n\t\t\tresultNode.type = type;\n\t\t\tswitch(type) {\n\t\t\t\tcase \"element\":\n\t\t\t\t\tresultNode.tag = widgetNode.parseTreeNode.tag;\n\t\t\t\t\tbreak;\n\t\t\t\tcase \"text\":\n\t\t\t\t\tresultNode.text = widgetNode.parseTreeNode.text;\n\t\t\t\t\tbreak;\t\n\t\t\t}\n\t\t\tif(Object.keys(widgetNode.attributes || {}).length > 0) {\n\t\t\t\tresultNode.attributes = {};\n\t\t\t\t$tw.utils.each(widgetNode.attributes,function(attr,attrName) {\n\t\t\t\t\tresultNode.attributes[attrName] = widgetNode.getAttribute(attrName);\n\t\t\t\t});\n\t\t\t}\n\t\t\tif(Object.keys(widgetNode.children || {}).length > 0) {\n\t\t\t\tresultNode.children = [];\n\t\t\t\t$tw.utils.each(widgetNode.children,function(widgetChildNode) {\n\t\t\t\t\tvar node = {};\n\t\t\t\t\tresultNode.children.push(node);\n\t\t\t\t\tcopyNode(widgetChildNode,node);\n\t\t\t\t});\n\t\t\t}\n\t\t},\n\t\tresults = {};\n\tcopyNode(this.wikifyWidgetNode,results);\n\treturn results;\n};\n\n/*\nSelectively refreshes the widget if needed. 
Returns true if the widget or any of its children needed re-rendering\n*/\nWikifyWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\t// Refresh ourselves entirely if any of our attributes have changed\n\tif(changedAttributes.name || changedAttributes.text || changedAttributes.type || changedAttributes.mode || changedAttributes.output) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\t// Refresh the widget tree\n\t\tif(this.wikifyWidgetNode.refresh(changedTiddlers)) {\n\t\t\t// Check if there was any change\n\t\t\tvar result = this.getResult();\n\t\t\tif(result !== this.wikifyResult) {\n\t\t\t\t// If so, save the change\n\t\t\t\tthis.wikifyResult = result;\n\t\t\t\tthis.setVariable(this.wikifyName,this.wikifyResult);\n\t\t\t\t// Refresh each of our child widgets\n\t\t\t\t$tw.utils.each(this.children,function(childWidget) {\n\t\t\t\t\tchildWidget.refreshSelf();\n\t\t\t\t});\n\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\t\t// Just refresh the children\n\t\treturn this.refreshChildren(changedTiddlers);\n\t}\n};\n\nexports.wikify = WikifyWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/wikify.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/wiki-bulkops.js": {
"text": "/*\\\ntitle: $:/core/modules/wiki-bulkops.js\ntype: application/javascript\nmodule-type: wikimethod\n\nBulk tiddler operations such as rename.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nRename a tiddler, and relink any tags or lists that reference it.\n*/\nexports.renameTiddler = function(fromTitle,toTitle) {\n\tvar self = this;\n\tfromTitle = (fromTitle || \"\").trim();\n\ttoTitle = (toTitle || \"\").trim();\n\tif(fromTitle && toTitle && fromTitle !== toTitle) {\n\t\t// Rename the tiddler itself\n\t\tvar tiddler = this.getTiddler(fromTitle);\n\t\tthis.addTiddler(new $tw.Tiddler(tiddler,{title: toTitle},this.getModificationFields()));\n\t\tthis.deleteTiddler(fromTitle);\n\t\t// Rename any tags or lists that reference it\n\t\tthis.each(function(tiddler,title) {\n\t\t\tvar tags = (tiddler.fields.tags || []).slice(0),\n\t\t\t\tlist = (tiddler.fields.list || []).slice(0),\n\t\t\t\tisModified = false;\n\t\t\t// Rename tags\n\t\t\t$tw.utils.each(tags,function (title,index) {\n\t\t\t\tif(title === fromTitle) {\n\t\t\t\t\ttags[index] = toTitle;\n\t\t\t\t\tisModified = true;\n\t\t\t\t}\n\t\t\t});\n\t\t\t// Rename lists\n\t\t\t$tw.utils.each(list,function (title,index) {\n\t\t\t\tif(title === fromTitle) {\n\t\t\t\t\tlist[index] = toTitle;\n\t\t\t\t\tisModified = true;\n\t\t\t\t}\n\t\t\t});\n\t\t\tif(isModified) {\n\t\t\t\tself.addTiddler(new $tw.Tiddler(tiddler,{tags: tags, list: list},self.getModificationFields()));\n\t\t\t}\n\t\t});\n\t}\n}\n\n})();\n",
"title": "$:/core/modules/wiki-bulkops.js",
"type": "application/javascript",
"module-type": "wikimethod"
},
"$:/core/modules/wiki.js": {
"text": "/*\\\ntitle: $:/core/modules/wiki.js\ntype: application/javascript\nmodule-type: wikimethod\n\nExtension methods for the $tw.Wiki object\n\nAdds the following properties to the wiki object:\n\n* `eventListeners` is a hashmap by type of arrays of listener functions\n* `changedTiddlers` is a hashmap describing changes to named tiddlers since wiki change events were last dispatched. Each entry is a hashmap containing two fields:\n\tmodified: true/false\n\tdeleted: true/false\n* `changeCount` is a hashmap by tiddler title containing a numerical index that starts at zero and is incremented each time a tiddler is created changed or deleted\n* `caches` is a hashmap by tiddler title containing a further hashmap of named cache objects. Caches are automatically cleared when a tiddler is modified or deleted\n* `globalCache` is a hashmap by cache name of cache objects that are cleared whenever any tiddler change occurs\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar widget = require(\"$:/core/modules/widgets/widget.js\");\n\nvar USER_NAME_TITLE = \"$:/status/UserName\";\n\n/*\nGet the value of a text reference. 
Text references can have any of these forms:\n\t<tiddlertitle>\n\t<tiddlertitle>!!<fieldname>\n\t!!<fieldname> - specifies a field of the current tiddlers\n\t<tiddlertitle>##<index>\n*/\nexports.getTextReference = function(textRef,defaultText,currTiddlerTitle) {\n\tvar tr = $tw.utils.parseTextReference(textRef),\n\t\ttitle = tr.title || currTiddlerTitle;\n\tif(tr.field) {\n\t\tvar tiddler = this.getTiddler(title);\n\t\tif(tr.field === \"title\") { // Special case so we can return the title of a non-existent tiddler\n\t\t\treturn title;\n\t\t} else if(tiddler && $tw.utils.hop(tiddler.fields,tr.field)) {\n\t\t\treturn tiddler.getFieldString(tr.field);\n\t\t} else {\n\t\t\treturn defaultText;\n\t\t}\n\t} else if(tr.index) {\n\t\treturn this.extractTiddlerDataItem(title,tr.index,defaultText);\n\t} else {\n\t\treturn this.getTiddlerText(title,defaultText);\n\t}\n};\n\nexports.setTextReference = function(textRef,value,currTiddlerTitle) {\n\tvar tr = $tw.utils.parseTextReference(textRef),\n\t\ttitle = tr.title || currTiddlerTitle;\n\tthis.setText(title,tr.field,tr.index,value);\n};\n\nexports.setText = function(title,field,index,value,options) {\n\toptions = options || {};\n\tvar creationFields = options.suppressTimestamp ? {} : this.getCreationFields(),\n\t\tmodificationFields = options.suppressTimestamp ? 
{} : this.getModificationFields();\n\t// Check if it is a reference to a tiddler field\n\tif(index) {\n\t\tvar data = this.getTiddlerData(title,Object.create(null));\n\t\tif(value !== undefined) {\n\t\t\tdata[index] = value;\n\t\t} else {\n\t\t\tdelete data[index];\n\t\t}\n\t\tthis.setTiddlerData(title,data,modificationFields);\n\t} else {\n\t\tvar tiddler = this.getTiddler(title),\n\t\t\tfields = {title: title};\n\t\tfields[field || \"text\"] = value;\n\t\tthis.addTiddler(new $tw.Tiddler(creationFields,tiddler,fields,modificationFields));\n\t}\n};\n\nexports.deleteTextReference = function(textRef,currTiddlerTitle) {\n\tvar tr = $tw.utils.parseTextReference(textRef),\n\t\ttitle,tiddler,fields;\n\t// Check if it is a reference to a tiddler\n\tif(tr.title && !tr.field) {\n\t\tthis.deleteTiddler(tr.title);\n\t// Else check for a field reference\n\t} else if(tr.field) {\n\t\ttitle = tr.title || currTiddlerTitle;\n\t\ttiddler = this.getTiddler(title);\n\t\tif(tiddler && $tw.utils.hop(tiddler.fields,tr.field)) {\n\t\t\tfields = Object.create(null);\n\t\t\tfields[tr.field] = undefined;\n\t\t\tthis.addTiddler(new $tw.Tiddler(tiddler,fields,this.getModificationFields()));\n\t\t}\n\t}\n};\n\nexports.addEventListener = function(type,listener) {\n\tthis.eventListeners = this.eventListeners || {};\n\tthis.eventListeners[type] = this.eventListeners[type] || [];\n\tthis.eventListeners[type].push(listener);\t\n};\n\nexports.removeEventListener = function(type,listener) {\n\tvar listeners = this.eventListeners[type];\n\tif(listeners) {\n\t\tvar p = listeners.indexOf(listener);\n\t\tif(p !== -1) {\n\t\t\tlisteners.splice(p,1);\n\t\t}\n\t}\n};\n\nexports.dispatchEvent = function(type /*, args */) {\n\tvar args = Array.prototype.slice.call(arguments,1),\n\t\tlisteners = this.eventListeners[type];\n\tif(listeners) {\n\t\tfor(var p=0; p<listeners.length; p++) {\n\t\t\tvar listener = listeners[p];\n\t\t\tlistener.apply(listener,args);\n\t\t}\n\t}\n};\n\n/*\nCauses a tiddler to be marked 
as changed, incrementing the change count, and triggers event handlers.\nThis method should be called after the changes it describes have been made to the wiki.tiddlers[] array.\n\ttitle: Title of tiddler\n\tisDeleted: defaults to false (meaning the tiddler has been created or modified),\n\t\ttrue if the tiddler has been deleted\n*/\nexports.enqueueTiddlerEvent = function(title,isDeleted) {\n\t// Record the touch in the list of changed tiddlers\n\tthis.changedTiddlers = this.changedTiddlers || Object.create(null);\n\tthis.changedTiddlers[title] = this.changedTiddlers[title] || Object.create(null);\n\tthis.changedTiddlers[title][isDeleted ? \"deleted\" : \"modified\"] = true;\n\t// Increment the change count\n\tthis.changeCount = this.changeCount || Object.create(null);\n\tif($tw.utils.hop(this.changeCount,title)) {\n\t\tthis.changeCount[title]++;\n\t} else {\n\t\tthis.changeCount[title] = 1;\n\t}\n\t// Trigger events\n\tthis.eventListeners = this.eventListeners || {};\n\tif(!this.eventsTriggered) {\n\t\tvar self = this;\n\t\t$tw.utils.nextTick(function() {\n\t\t\tvar changes = self.changedTiddlers;\n\t\t\tself.changedTiddlers = Object.create(null);\n\t\t\tself.eventsTriggered = false;\n\t\t\tif($tw.utils.count(changes) > 0) {\n\t\t\t\tself.dispatchEvent(\"change\",changes);\n\t\t\t}\n\t\t});\n\t\tthis.eventsTriggered = true;\n\t}\n};\n\nexports.getSizeOfTiddlerEventQueue = function() {\n\treturn $tw.utils.count(this.changedTiddlers);\n};\n\nexports.clearTiddlerEventQueue = function() {\n\tthis.changedTiddlers = Object.create(null);\n\tthis.changeCount = Object.create(null);\n};\n\nexports.getChangeCount = function(title) {\n\tthis.changeCount = this.changeCount || Object.create(null);\n\tif($tw.utils.hop(this.changeCount,title)) {\n\t\treturn this.changeCount[title];\n\t} else {\n\t\treturn 0;\n\t}\n};\n\n/*\nGenerate an unused title from the specified base\n*/\nexports.generateNewTitle = function(baseTitle,options) {\n\toptions = options || {};\n\tvar c = 
0,\n\t\ttitle = baseTitle;\n\twhile(this.tiddlerExists(title) || this.isShadowTiddler(title) || this.findDraft(title)) {\n\t\ttitle = baseTitle + \n\t\t\t(options.prefix || \" \") + \n\t\t\t(++c);\n\t}\n\treturn title;\n};\n\nexports.isSystemTiddler = function(title) {\n\treturn title && title.indexOf(\"$:/\") === 0;\n};\n\nexports.isTemporaryTiddler = function(title) {\n\treturn title && title.indexOf(\"$:/temp/\") === 0;\n};\n\nexports.isImageTiddler = function(title) {\n\tvar tiddler = this.getTiddler(title);\n\tif(tiddler) {\t\t\n\t\tvar contentTypeInfo = $tw.config.contentTypeInfo[tiddler.fields.type || \"text/vnd.tiddlywiki\"];\n\t\treturn !!contentTypeInfo && contentTypeInfo.flags.indexOf(\"image\") !== -1;\n\t} else {\n\t\treturn null;\n\t}\n};\n\n/*\nLike addTiddler() except it will silently reject any plugin tiddlers that are older than the currently loaded version. Returns true if the tiddler was imported\n*/\nexports.importTiddler = function(tiddler) {\n\tvar existingTiddler = this.getTiddler(tiddler.fields.title);\n\t// Check if we're dealing with a plugin\n\tif(tiddler && tiddler.hasField(\"plugin-type\") && tiddler.hasField(\"version\") && existingTiddler && existingTiddler.hasField(\"plugin-type\") && existingTiddler.hasField(\"version\")) {\n\t\t// Reject the incoming plugin if it is older\n\t\tif(!$tw.utils.checkVersions(tiddler.fields.version,existingTiddler.fields.version)) {\n\t\t\treturn false;\n\t\t}\n\t}\n\t// Fall through to adding the tiddler\n\tthis.addTiddler(tiddler);\n\treturn true;\n};\n\n/*\nReturn a hashmap of the fields that should be set when a tiddler is created\n*/\nexports.getCreationFields = function() {\n\tvar fields = {\n\t\t\tcreated: new Date()\n\t\t},\n\t\tcreator = this.getTiddlerText(USER_NAME_TITLE);\n\tif(creator) {\n\t\tfields.creator = creator;\n\t}\n\treturn fields;\n};\n\n/*\nReturn a hashmap of the fields that should be set when a tiddler is modified\n*/\nexports.getModificationFields = function() {\n\tvar fields 
= Object.create(null),\n\t\tmodifier = this.getTiddlerText(USER_NAME_TITLE);\n\tfields.modified = new Date();\n\tif(modifier) {\n\t\tfields.modifier = modifier;\n\t}\n\treturn fields;\n};\n\n/*\nReturn a sorted array of tiddler titles. Options include:\nsortField: field to sort by\nexcludeTag: tag to exclude\nincludeSystem: whether to include system tiddlers (defaults to false)\n*/\nexports.getTiddlers = function(options) {\n\toptions = options || Object.create(null);\n\tvar self = this,\n\t\tsortField = options.sortField || \"title\",\n\t\ttiddlers = [], t, titles = [];\n\tthis.each(function(tiddler,title) {\n\t\tif(options.includeSystem || !self.isSystemTiddler(title)) {\n\t\t\tif(!options.excludeTag || !tiddler.hasTag(options.excludeTag)) {\n\t\t\t\ttiddlers.push(tiddler);\n\t\t\t}\n\t\t}\n\t});\n\ttiddlers.sort(function(a,b) {\n\t\tvar aa = a.fields[sortField].toLowerCase() || \"\",\n\t\t\tbb = b.fields[sortField].toLowerCase() || \"\";\n\t\tif(aa < bb) {\n\t\t\treturn -1;\n\t\t} else {\n\t\t\tif(aa > bb) {\n\t\t\t\treturn 1;\n\t\t\t} else {\n\t\t\t\treturn 0;\n\t\t\t}\n\t\t}\n\t});\n\tfor(t=0; t<tiddlers.length; t++) {\n\t\ttitles.push(tiddlers[t].fields.title);\n\t}\n\treturn titles;\n};\n\nexports.countTiddlers = function(excludeTag) {\n\tvar tiddlers = this.getTiddlers({excludeTag: excludeTag});\n\treturn $tw.utils.count(tiddlers);\n};\n\n/*\nReturns a function iterator(callback) that iterates through the specified titles, and invokes the callback with callback(tiddler,title)\n*/\nexports.makeTiddlerIterator = function(titles) {\n\tvar self = this;\n\tif(!$tw.utils.isArray(titles)) {\n\t\ttitles = Object.keys(titles);\n\t} else {\n\t\ttitles = titles.slice(0);\n\t}\n\treturn function(callback) {\n\t\ttitles.forEach(function(title) {\n\t\t\tcallback(self.getTiddler(title),title);\n\t\t});\n\t};\n};\n\n/*\nSort an array of tiddler titles by a specified field\n\ttitles: array of titles (sorted in place)\n\tsortField: name of field to sort by\n\tisDescending: 
true if the sort should be descending\n\tisCaseSensitive: true if the sort should consider upper and lower case letters to be different\n*/\nexports.sortTiddlers = function(titles,sortField,isDescending,isCaseSensitive,isNumeric) {\n\tvar self = this;\n\ttitles.sort(function(a,b) {\n\t\tvar x,y,\n\t\t\tcompareNumbers = function(x,y) {\n\t\t\t\tvar result = \n\t\t\t\t\tisNaN(x) && !isNaN(y) ? (isDescending ? -1 : 1) :\n\t\t\t\t\t!isNaN(x) && isNaN(y) ? (isDescending ? 1 : -1) :\n\t\t\t\t\t (isDescending ? y - x : x - y);\n\t\t\t\treturn result;\n\t\t\t};\n\t\tif(sortField !== \"title\") {\n\t\t\tvar tiddlerA = self.getTiddler(a),\n\t\t\t\ttiddlerB = self.getTiddler(b);\n\t\t\tif(tiddlerA) {\n\t\t\t\ta = tiddlerA.fields[sortField] || \"\";\n\t\t\t} else {\n\t\t\t\ta = \"\";\n\t\t\t}\n\t\t\tif(tiddlerB) {\n\t\t\t\tb = tiddlerB.fields[sortField] || \"\";\n\t\t\t} else {\n\t\t\t\tb = \"\";\n\t\t\t}\n\t\t}\n\t\tx = Number(a);\n\t\ty = Number(b);\n\t\tif(isNumeric && (!isNaN(x) || !isNaN(y))) {\n\t\t\treturn compareNumbers(x,y);\n\t\t} else if($tw.utils.isDate(a) && $tw.utils.isDate(b)) {\n\t\t\treturn isDescending ? b - a : a - b;\n\t\t} else {\n\t\t\ta = String(a);\n\t\t\tb = String(b);\n\t\t\tif(!isCaseSensitive) {\n\t\t\t\ta = a.toLowerCase();\n\t\t\t\tb = b.toLowerCase();\n\t\t\t}\n\t\t\treturn isDescending ? b.localeCompare(a) : a.localeCompare(b);\n\t\t}\n\t});\n};\n\n/*\nFor every tiddler invoke a callback(title,tiddler) with `this` set to the wiki object. Options include:\nsortField: field to sort by\nexcludeTag: tag to exclude\nincludeSystem: whether to include system tiddlers (defaults to false)\n*/\nexports.forEachTiddler = function(/* [options,]callback */) {\n\tvar arg = 0,\n\t\toptions = arguments.length >= 2 ? 
arguments[arg++] : {},\n\t\tcallback = arguments[arg++],\n\t\ttitles = this.getTiddlers(options),\n\t\tt, tiddler;\n\tfor(t=0; t<titles.length; t++) {\n\t\ttiddler = this.getTiddler(titles[t]);\n\t\tif(tiddler) {\n\t\t\tcallback.call(this,tiddler.fields.title,tiddler);\n\t\t}\n\t}\n};\n\n/*\nReturn an array of tiddler titles that are directly linked from the specified tiddler\n*/\nexports.getTiddlerLinks = function(title) {\n\tvar self = this;\n\t// We'll cache the links so they only get computed if the tiddler changes\n\treturn this.getCacheForTiddler(title,\"links\",function() {\n\t\t// Parse the tiddler\n\t\tvar parser = self.parseTiddler(title);\n\t\t// Count up the links\n\t\tvar links = [],\n\t\t\tcheckParseTree = function(parseTree) {\n\t\t\t\tfor(var t=0; t<parseTree.length; t++) {\n\t\t\t\t\tvar parseTreeNode = parseTree[t];\n\t\t\t\t\tif(parseTreeNode.type === \"link\" && parseTreeNode.attributes.to && parseTreeNode.attributes.to.type === \"string\") {\n\t\t\t\t\t\tvar value = parseTreeNode.attributes.to.value;\n\t\t\t\t\t\tif(links.indexOf(value) === -1) {\n\t\t\t\t\t\t\tlinks.push(value);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tif(parseTreeNode.children) {\n\t\t\t\t\t\tcheckParseTree(parseTreeNode.children);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t};\n\t\tif(parser) {\n\t\t\tcheckParseTree(parser.tree);\n\t\t}\n\t\treturn links;\n\t});\n};\n\n/*\nReturn an array of tiddler titles that link to the specified tiddler\n*/\nexports.getTiddlerBacklinks = function(targetTitle) {\n\tvar self = this,\n\t\tbacklinks = [];\n\tthis.forEachTiddler(function(title,tiddler) {\n\t\tvar links = self.getTiddlerLinks(title);\n\t\tif(links.indexOf(targetTitle) !== -1) {\n\t\t\tbacklinks.push(title);\n\t\t}\n\t});\n\treturn backlinks;\n};\n\n/*\nReturn a hashmap of tiddler titles that are referenced but not defined. 
Each value is the number of times the missing tiddler is referenced\n*/\nexports.getMissingTitles = function() {\n\tvar self = this,\n\t\tmissing = [];\n// We should cache the missing tiddler list, even if we recreate it every time any tiddler is modified\n\tthis.forEachTiddler(function(title,tiddler) {\n\t\tvar links = self.getTiddlerLinks(title);\n\t\t$tw.utils.each(links,function(link) {\n\t\t\tif((!self.tiddlerExists(link) && !self.isShadowTiddler(link)) && missing.indexOf(link) === -1) {\n\t\t\t\tmissing.push(link);\n\t\t\t}\n\t\t});\n\t});\n\treturn missing;\n};\n\nexports.getOrphanTitles = function() {\n\tvar self = this,\n\t\torphans = this.getTiddlers();\n\tthis.forEachTiddler(function(title,tiddler) {\n\t\tvar links = self.getTiddlerLinks(title);\n\t\t$tw.utils.each(links,function(link) {\n\t\t\tvar p = orphans.indexOf(link);\n\t\t\tif(p !== -1) {\n\t\t\t\torphans.splice(p,1);\n\t\t\t}\n\t\t});\n\t});\n\treturn orphans; // Todo\n};\n\n/*\nRetrieves a list of the tiddler titles that are tagged with a given tag\n*/\nexports.getTiddlersWithTag = function(tag) {\n\tvar self = this;\n\treturn this.getGlobalCache(\"taglist-\" + tag,function() {\n\t\tvar tagmap = self.getTagMap();\n\t\treturn self.sortByList(tagmap[tag],tag);\n\t});\n};\n\n/*\nGet a hashmap by tag of arrays of tiddler titles\n*/\nexports.getTagMap = function() {\n\tvar self = this;\n\treturn this.getGlobalCache(\"tagmap\",function() {\n\t\tvar tags = Object.create(null),\n\t\t\tstoreTags = function(tagArray,title) {\n\t\t\t\tif(tagArray) {\n\t\t\t\t\tfor(var index=0; index<tagArray.length; index++) {\n\t\t\t\t\t\tvar tag = tagArray[index];\n\t\t\t\t\t\tif($tw.utils.hop(tags,tag)) {\n\t\t\t\t\t\t\ttags[tag].push(title);\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\ttags[tag] = [title];\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t},\n\t\t\ttitle, tiddler;\n\t\t// Collect up all the tags\n\t\tself.eachShadow(function(tiddler,title) {\n\t\t\tif(!self.tiddlerExists(title)) {\n\t\t\t\ttiddler = 
self.getTiddler(title);\n\t\t\t\tstoreTags(tiddler.fields.tags,title);\n\t\t\t}\n\t\t});\n\t\tself.each(function(tiddler,title) {\n\t\t\tstoreTags(tiddler.fields.tags,title);\n\t\t});\n\t\treturn tags;\n\t});\n};\n\n/*\nLookup a given tiddler and return a list of all the tiddlers that include it in the specified list field\n*/\nexports.findListingsOfTiddler = function(targetTitle,fieldName) {\n\tfieldName = fieldName || \"list\";\n\tvar titles = [];\n\tthis.each(function(tiddler,title) {\n\t\tvar list = $tw.utils.parseStringArray(tiddler.fields[fieldName]);\n\t\tif(list && list.indexOf(targetTitle) !== -1) {\n\t\t\ttitles.push(title);\n\t\t}\n\t});\n\treturn titles;\n};\n\n/*\nSorts an array of tiddler titles according to an ordered list\n*/\nexports.sortByList = function(array,listTitle) {\n\tvar list = this.getTiddlerList(listTitle);\n\tif(!array || array.length === 0) {\n\t\treturn [];\n\t} else {\n\t\tvar titles = [], t, title;\n\t\t// First place any entries that are present in the list\n\t\tfor(t=0; t<list.length; t++) {\n\t\t\ttitle = list[t];\n\t\t\tif(array.indexOf(title) !== -1) {\n\t\t\t\ttitles.push(title);\n\t\t\t}\n\t\t}\n\t\t// Then place any remaining entries\n\t\tfor(t=0; t<array.length; t++) {\n\t\t\ttitle = array[t];\n\t\t\tif(list.indexOf(title) === -1) {\n\t\t\t\ttitles.push(title);\n\t\t\t}\n\t\t}\n\t\t// Finally obey the list-before and list-after fields of each tiddler in turn\n\t\tvar sortedTitles = titles.slice(0);\n\t\tfor(t=0; t<sortedTitles.length; t++) {\n\t\t\ttitle = sortedTitles[t];\n\t\t\tvar currPos = titles.indexOf(title),\n\t\t\t\tnewPos = -1,\n\t\t\t\ttiddler = this.getTiddler(title);\n\t\t\tif(tiddler) {\n\t\t\t\tvar beforeTitle = tiddler.fields[\"list-before\"],\n\t\t\t\t\tafterTitle = tiddler.fields[\"list-after\"];\n\t\t\t\tif(beforeTitle === \"\") {\n\t\t\t\t\tnewPos = 0;\n\t\t\t\t} else if(beforeTitle) {\n\t\t\t\t\tnewPos = titles.indexOf(beforeTitle);\n\t\t\t\t} else if(afterTitle) {\n\t\t\t\t\tnewPos = 
titles.indexOf(afterTitle);\n\t\t\t\t\tif(newPos >= 0) {\n\t\t\t\t\t\t++newPos;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif(newPos === -1) {\n\t\t\t\t\tnewPos = currPos;\n\t\t\t\t}\n\t\t\t\tif(newPos !== currPos) {\n\t\t\t\t\ttitles.splice(currPos,1);\n\t\t\t\t\tif(newPos >= currPos) {\n\t\t\t\t\t\tnewPos--;\n\t\t\t\t\t}\n\t\t\t\t\ttitles.splice(newPos,0,title);\n\t\t\t\t}\n\t\t\t}\n\n\t\t}\n\t\treturn titles;\n\t}\n};\n\nexports.getSubTiddler = function(title,subTiddlerTitle) {\n\tvar bundleInfo = this.getPluginInfo(title) || this.getTiddlerDataCached(title);\n\tif(bundleInfo && bundleInfo.tiddlers) {\n\t\tvar subTiddler = bundleInfo.tiddlers[subTiddlerTitle];\n\t\tif(subTiddler) {\n\t\t\treturn new $tw.Tiddler(subTiddler);\n\t\t}\n\t}\n\treturn null;\n};\n\n/*\nRetrieve a tiddler as a JSON string of the fields\n*/\nexports.getTiddlerAsJson = function(title) {\n\tvar tiddler = this.getTiddler(title);\n\tif(tiddler) {\n\t\tvar fields = Object.create(null);\n\t\t$tw.utils.each(tiddler.fields,function(value,name) {\n\t\t\tfields[name] = tiddler.getFieldString(name);\n\t\t});\n\t\treturn JSON.stringify(fields);\n\t} else {\n\t\treturn JSON.stringify({title: title});\n\t}\n};\n\n/*\nGet the content of a tiddler as a JavaScript object. How this is done depends on the type of the tiddler:\n\napplication/json: the tiddler JSON is parsed into an object\napplication/x-tiddler-dictionary: the tiddler is parsed as sequence of name:value pairs\n\nOther types currently just return null.\n\ntitleOrTiddler: string tiddler title or a tiddler object\ndefaultData: default data to be returned if the tiddler is missing or doesn't contain data\n\nNote that the same value is returned for repeated calls for the same tiddler data. 
The value is frozen to prevent modification; otherwise modifications would be visible to all callers\n*/\nexports.getTiddlerDataCached = function(titleOrTiddler,defaultData) {\n\tvar self = this,\n\t\ttiddler = titleOrTiddler;\n\tif(!(tiddler instanceof $tw.Tiddler)) {\n\t\ttiddler = this.getTiddler(tiddler);\t\n\t}\n\tif(tiddler) {\n\t\treturn this.getCacheForTiddler(tiddler.fields.title,\"data\",function() {\n\t\t\t// Return the frozen value\n\t\t\tvar value = self.getTiddlerData(tiddler.fields.title,defaultData);\n\t\t\t$tw.utils.deepFreeze(value);\n\t\t\treturn value;\n\t\t});\n\t} else {\n\t\treturn defaultData;\n\t}\n};\n\n/*\nAlternative, uncached version of getTiddlerDataCached(). The return value can be mutated freely and reused\n*/\nexports.getTiddlerData = function(titleOrTiddler,defaultData) {\n\tvar tiddler = titleOrTiddler,\n\t\tdata;\n\tif(!(tiddler instanceof $tw.Tiddler)) {\n\t\ttiddler = this.getTiddler(tiddler);\t\n\t}\n\tif(tiddler && tiddler.fields.text) {\n\t\tswitch(tiddler.fields.type) {\n\t\t\tcase \"application/json\":\n\t\t\t\t// JSON tiddler\n\t\t\t\ttry {\n\t\t\t\t\tdata = JSON.parse(tiddler.fields.text);\n\t\t\t\t} catch(ex) {\n\t\t\t\t\treturn defaultData;\n\t\t\t\t}\n\t\t\t\treturn data;\n\t\t\tcase \"application/x-tiddler-dictionary\":\n\t\t\t\treturn $tw.utils.parseFields(tiddler.fields.text);\n\t\t}\n\t}\n\treturn defaultData;\n};\n\n/*\nExtract an indexed field from within a data tiddler\n*/\nexports.extractTiddlerDataItem = function(titleOrTiddler,index,defaultText) {\n\tvar data = this.getTiddlerData(titleOrTiddler,Object.create(null)),\n\t\ttext;\n\tif(data && $tw.utils.hop(data,index)) {\n\t\ttext = data[index];\n\t}\n\tif(typeof text === \"string\" || typeof text === \"number\") {\n\t\treturn text.toString();\n\t} else {\n\t\treturn defaultText;\n\t}\n};\n\n/*\nSet a tiddlers content to a JavaScript object. 
Currently this is done by setting the tiddler's type to \"application/json\" and setting the text to the JSON text of the data.\ntitle: title of tiddler\ndata: object that can be serialised to JSON\nfields: optional hashmap of additional tiddler fields to be set\n*/\nexports.setTiddlerData = function(title,data,fields) {\n\tvar existingTiddler = this.getTiddler(title),\n\t\tnewFields = {\n\t\t\ttitle: title\n\t};\n\tif(existingTiddler && existingTiddler.fields.type === \"application/x-tiddler-dictionary\") {\n\t\tnewFields.text = $tw.utils.makeTiddlerDictionary(data);\n\t} else {\n\t\tnewFields.type = \"application/json\";\n\t\tnewFields.text = JSON.stringify(data,null,$tw.config.preferences.jsonSpaces);\n\t}\n\tthis.addTiddler(new $tw.Tiddler(this.getCreationFields(),existingTiddler,fields,newFields,this.getModificationFields()));\n};\n\n/*\nReturn the content of a tiddler as an array containing each line\n*/\nexports.getTiddlerList = function(title,field,index) {\n\tif(index) {\n\t\treturn $tw.utils.parseStringArray(this.extractTiddlerDataItem(title,index,\"\"));\n\t}\n\tfield = field || \"list\";\n\tvar tiddler = this.getTiddler(title);\n\tif(tiddler) {\n\t\treturn ($tw.utils.parseStringArray(tiddler.fields[field]) || []).slice(0);\n\t}\n\treturn [];\n};\n\n// Return a named global cache object. Global cache objects are cleared whenever a tiddler change occurs\nexports.getGlobalCache = function(cacheName,initializer) {\n\tthis.globalCache = this.globalCache || Object.create(null);\n\tif($tw.utils.hop(this.globalCache,cacheName)) {\n\t\treturn this.globalCache[cacheName];\n\t} else {\n\t\tthis.globalCache[cacheName] = initializer();\n\t\treturn this.globalCache[cacheName];\n\t}\n};\n\nexports.clearGlobalCache = function() {\n\tthis.globalCache = Object.create(null);\n};\n\n// Return the named cache object for a tiddler. 
If the cache doesn't exist then the initializer function is invoked to create it\nexports.getCacheForTiddler = function(title,cacheName,initializer) {\n\tthis.caches = this.caches || Object.create(null);\n\tvar caches = this.caches[title];\n\tif(caches && caches[cacheName]) {\n\t\treturn caches[cacheName];\n\t} else {\n\t\tif(!caches) {\n\t\t\tcaches = Object.create(null);\n\t\t\tthis.caches[title] = caches;\n\t\t}\n\t\tcaches[cacheName] = initializer();\n\t\treturn caches[cacheName];\n\t}\n};\n\n// Clear all caches associated with a particular tiddler, or, if the title is null, clear all the caches for all the tiddlers\nexports.clearCache = function(title) {\n\tif(title) {\n\t\tthis.caches = this.caches || Object.create(null);\n\t\tif($tw.utils.hop(this.caches,title)) {\n\t\t\tdelete this.caches[title];\n\t\t}\n\t} else {\n\t\tthis.caches = Object.create(null);\n\t}\n};\n\nexports.initParsers = function(moduleType) {\n\t// Install the parser modules\n\t$tw.Wiki.parsers = {};\n\tvar self = this;\n\t$tw.modules.forEachModuleOfType(\"parser\",function(title,module) {\n\t\tfor(var f in module) {\n\t\t\tif($tw.utils.hop(module,f)) {\n\t\t\t\t$tw.Wiki.parsers[f] = module[f]; // Store the parser class\n\t\t\t}\n\t\t}\n\t});\n};\n\n/*\nParse a block of text of a specified MIME type\n\ttype: content type of text to be parsed\n\ttext: text\n\toptions: see below\nOptions include:\n\tparseAsInline: if true, the text of the tiddler will be parsed as an inline run\n\t_canonical_uri: optional string of the canonical URI of this content\n*/\nexports.parseText = function(type,text,options) {\n\ttext = text || \"\";\n\toptions = options || {};\n\t// Select a parser\n\tvar Parser = $tw.Wiki.parsers[type];\n\tif(!Parser && $tw.utils.getFileExtensionInfo(type)) {\n\t\tParser = $tw.Wiki.parsers[$tw.utils.getFileExtensionInfo(type).type];\n\t}\n\tif(!Parser) {\n\t\tParser = $tw.Wiki.parsers[options.defaultType || \"text/vnd.tiddlywiki\"];\n\t}\n\tif(!Parser) {\n\t\treturn 
null;\n\t}\n\t// Return the parser instance\n\treturn new Parser(type,text,{\n\t\tparseAsInline: options.parseAsInline,\n\t\twiki: this,\n\t\t_canonical_uri: options._canonical_uri\n\t});\n};\n\n/*\nParse a tiddler according to its MIME type\n*/\nexports.parseTiddler = function(title,options) {\n\toptions = $tw.utils.extend({},options);\n\tvar cacheType = options.parseAsInline ? \"inlineParseTree\" : \"blockParseTree\",\n\t\ttiddler = this.getTiddler(title),\n\t\tself = this;\n\treturn tiddler ? this.getCacheForTiddler(title,cacheType,function() {\n\t\t\tif(tiddler.hasField(\"_canonical_uri\")) {\n\t\t\t\toptions._canonical_uri = tiddler.fields._canonical_uri;\n\t\t\t}\n\t\t\treturn self.parseText(tiddler.fields.type,tiddler.fields.text,options);\n\t\t}) : null;\n};\n\nexports.parseTextReference = function(title,field,index,options) {\n\tvar tiddler,text;\n\tif(options.subTiddler) {\n\t\ttiddler = this.getSubTiddler(title,options.subTiddler);\n\t} else {\n\t\ttiddler = this.getTiddler(title);\n\t\tif(field === \"text\" || (!field && !index)) {\n\t\t\tthis.getTiddlerText(title); // Force the tiddler to be lazily loaded\n\t\t\treturn this.parseTiddler(title,options);\n\t\t}\n\t}\n\tif(field === \"text\" || (!field && !index)) {\n\t\tif(tiddler && tiddler.fields) {\n\t\t\treturn this.parseText(tiddler.fields.type || \"text/vnd.tiddlywiki\",tiddler.fields.text,options);\t\t\t\n\t\t} else {\n\t\t\treturn null;\n\t\t}\n\t} else if(field) {\n\t\tif(field === \"title\") {\n\t\t\ttext = title;\n\t\t} else {\n\t\t\tif(!tiddler || !tiddler.hasField(field)) {\n\t\t\t\treturn null;\n\t\t\t}\n\t\t\ttext = tiddler.fields[field];\n\t\t}\n\t\treturn this.parseText(\"text/vnd.tiddlywiki\",text.toString(),options);\n\t} else if(index) {\n\t\tthis.getTiddlerText(title); // Force the tiddler to be lazily loaded\n\t\ttext = this.extractTiddlerDataItem(tiddler,index,undefined);\n\t\tif(text === undefined) {\n\t\t\treturn null;\n\t\t}\n\t\treturn 
this.parseText(\"text/vnd.tiddlywiki\",text,options);\n\t}\n};\n\n/*\nMake a widget tree for a parse tree\nparser: parser object\noptions: see below\nOptions include:\ndocument: optional document to use\nvariables: hashmap of variables to set\nparentWidget: optional parent widget for the root node\n*/\nexports.makeWidget = function(parser,options) {\n\toptions = options || {};\n\tvar widgetNode = {\n\t\t\ttype: \"widget\",\n\t\t\tchildren: []\n\t\t},\n\t\tcurrWidgetNode = widgetNode;\n\t// Create set variable widgets for each variable\n\t$tw.utils.each(options.variables,function(value,name) {\n\t\tvar setVariableWidget = {\n\t\t\ttype: \"set\",\n\t\t\tattributes: {\n\t\t\t\tname: {type: \"string\", value: name},\n\t\t\t\tvalue: {type: \"string\", value: value}\n\t\t\t},\n\t\t\tchildren: []\n\t\t};\n\t\tcurrWidgetNode.children = [setVariableWidget];\n\t\tcurrWidgetNode = setVariableWidget;\n\t});\n\t// Add in the supplied parse tree nodes\n\tcurrWidgetNode.children = parser ? parser.tree : [];\n\t// Create the widget\n\treturn new widget.widget(widgetNode,{\n\t\twiki: this,\n\t\tdocument: options.document || $tw.fakeDocument,\n\t\tparentWidget: options.parentWidget\n\t});\n};\n\n/*\nMake a widget tree for transclusion\ntitle: target tiddler title\noptions: as for wiki.makeWidget() plus:\noptions.field: optional field to transclude (defaults to \"text\")\noptions.mode: transclusion mode \"inline\" or \"block\"\noptions.children: optional array of children for the transclude widget\n*/\nexports.makeTranscludeWidget = function(title,options) {\n\toptions = options || {};\n\tvar parseTree = {tree: [{\n\t\t\ttype: \"element\",\n\t\t\ttag: \"div\",\n\t\t\tchildren: [{\n\t\t\t\ttype: \"transclude\",\n\t\t\t\tattributes: {\n\t\t\t\t\ttiddler: {\n\t\t\t\t\t\tname: \"tiddler\",\n\t\t\t\t\t\ttype: \"string\",\n\t\t\t\t\t\tvalue: title}},\n\t\t\t\tisBlock: !options.parseAsInline}]}\n\t]};\n\tif(options.field) {\n\t\tparseTree.tree[0].children[0].attributes.field = {type: 
\"string\", value: options.field};\n\t}\n\tif(options.mode) {\n\t\tparseTree.tree[0].children[0].attributes.mode = {type: \"string\", value: options.mode};\n\t}\n\tif(options.children) {\n\t\tparseTree.tree[0].children[0].children = options.children;\n\t}\n\treturn $tw.wiki.makeWidget(parseTree,options);\n};\n\n/*\nParse text in a specified format and render it into another format\n\toutputType: content type for the output\n\ttextType: content type of the input text\n\ttext: input text\n\toptions: see below\nOptions include:\nvariables: hashmap of variables to set\nparentWidget: optional parent widget for the root node\n*/\nexports.renderText = function(outputType,textType,text,options) {\n\toptions = options || {};\n\tvar parser = this.parseText(textType,text,options),\n\t\twidgetNode = this.makeWidget(parser,options);\n\tvar container = $tw.fakeDocument.createElement(\"div\");\n\twidgetNode.render(container,null);\n\treturn outputType === \"text/html\" ? container.innerHTML : container.textContent;\n};\n\n/*\nParse text from a tiddler and render it into another format\n\toutputType: content type for the output\n\ttitle: title of the tiddler to be rendered\n\toptions: see below\nOptions include:\nvariables: hashmap of variables to set\nparentWidget: optional parent widget for the root node\n*/\nexports.renderTiddler = function(outputType,title,options) {\n\toptions = options || {};\n\tvar parser = this.parseTiddler(title,options),\n\t\twidgetNode = this.makeWidget(parser,options);\n\tvar container = $tw.fakeDocument.createElement(\"div\");\n\twidgetNode.render(container,null);\n\treturn outputType === \"text/html\" ? container.innerHTML : (outputType === \"text/plain-formatted\" ? 
container.formattedTextContent : container.textContent);\n};\n\n/*\nReturn an array of tiddler titles that match a search string\n\ttext: The text string to search for\n\toptions: see below\nOptions available:\n\tsource: an iterator function for the source tiddlers, called source(iterator), where iterator is called as iterator(tiddler,title)\n\texclude: An array of tiddler titles to exclude from the search\n\tinvert: If true returns tiddlers that do not contain the specified string\n\tcaseSensitive: If true forces a case sensitive search\n\tliteral: If true, searches for literal string, rather than separate search terms\n\tfield: If specified, restricts the search to the specified field\n*/\nexports.search = function(text,options) {\n\toptions = options || {};\n\tvar self = this,\n\t\tt,\n\t\tinvert = !!options.invert;\n\t// Convert the search string into a regexp for each term\n\tvar terms, searchTermsRegExps,\n\t\tflags = options.caseSensitive ? \"\" : \"i\";\n\tif(options.literal) {\n\t\tif(text.length === 0) {\n\t\t\tsearchTermsRegExps = null;\n\t\t} else {\n\t\t\tsearchTermsRegExps = [new RegExp(\"(\" + $tw.utils.escapeRegExp(text) + \")\",flags)];\n\t\t}\n\t} else {\n\t\tterms = text.split(/ +/);\n\t\tif(terms.length === 1 && terms[0] === \"\") {\n\t\t\tsearchTermsRegExps = null;\n\t\t} else {\n\t\t\tsearchTermsRegExps = [];\n\t\t\tfor(t=0; t<terms.length; t++) {\n\t\t\t\tsearchTermsRegExps.push(new RegExp(\"(\" + $tw.utils.escapeRegExp(terms[t]) + \")\",flags));\n\t\t\t}\n\t\t}\n\t}\n\t// Function to check a given tiddler for the search term\n\tvar searchTiddler = function(title) {\n\t\tif(!searchTermsRegExps) {\n\t\t\treturn true;\n\t\t}\n\t\tvar tiddler = self.getTiddler(title);\n\t\tif(!tiddler) {\n\t\t\ttiddler = new $tw.Tiddler({title: title, text: \"\", type: \"text/vnd.tiddlywiki\"});\n\t\t}\n\t\tvar contentTypeInfo = $tw.config.contentTypeInfo[tiddler.fields.type] || $tw.config.contentTypeInfo[\"text/vnd.tiddlywiki\"],\n\t\t\tmatch;\n\t\tfor(var t=0; 
t<searchTermsRegExps.length; t++) {\n\t\t\tmatch = false;\n\t\t\tif(options.field) {\n\t\t\t\tmatch = searchTermsRegExps[t].test(tiddler.getFieldString(options.field));\n\t\t\t} else {\n\t\t\t\t// Search title, tags and body\n\t\t\t\tif(contentTypeInfo.encoding === \"utf8\") {\n\t\t\t\t\tmatch = match || searchTermsRegExps[t].test(tiddler.fields.text);\n\t\t\t\t}\n\t\t\t\tvar tags = tiddler.fields.tags ? tiddler.fields.tags.join(\"\\0\") : \"\";\n\t\t\t\tmatch = match || searchTermsRegExps[t].test(tags) || searchTermsRegExps[t].test(tiddler.fields.title);\n\t\t\t}\n\t\t\tif(!match) {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t}\n\t\treturn true;\n\t};\n\t// Loop through all the tiddlers doing the search\n\tvar results = [],\n\t\tsource = options.source || this.each;\n\tsource(function(tiddler,title) {\n\t\tif(searchTiddler(title) !== options.invert) {\n\t\t\tresults.push(title);\n\t\t}\n\t});\n\t// Remove any of the results we have to exclude\n\tif(options.exclude) {\n\t\tfor(t=0; t<options.exclude.length; t++) {\n\t\t\tvar p = results.indexOf(options.exclude[t]);\n\t\t\tif(p !== -1) {\n\t\t\t\tresults.splice(p,1);\n\t\t\t}\n\t\t}\n\t}\n\treturn results;\n};\n\n/*\nTrigger a load for a tiddler if it is skinny. 
Returns the text, or undefined if the tiddler is missing, null if the tiddler is being lazily loaded.\n*/\nexports.getTiddlerText = function(title,defaultText) {\n\tvar tiddler = this.getTiddler(title);\n\t// Return undefined if the tiddler isn't found\n\tif(!tiddler) {\n\t\treturn defaultText;\n\t}\n\tif(tiddler.fields.text !== undefined) {\n\t\t// Just return the text if we've got it\n\t\treturn tiddler.fields.text;\n\t} else {\n\t\t// Tell any listeners about the need to lazily load this tiddler\n\t\tthis.dispatchEvent(\"lazyLoad\",title);\n\t\t// Indicate that the text is being loaded\n\t\treturn null;\n\t}\n};\n\n/*\nRead an array of browser File objects, invoking callback(tiddlerFieldsArray) once they're all read\n*/\nexports.readFiles = function(files,callback) {\n\tvar result = [],\n\t\toutstanding = files.length;\n\tfor(var f=0; f<files.length; f++) {\n\t\tthis.readFile(files[f],function(tiddlerFieldsArray) {\n\t\t\tresult.push.apply(result,tiddlerFieldsArray);\n\t\t\tif(--outstanding === 0) {\n\t\t\t\tcallback(result);\n\t\t\t}\n\t\t});\n\t}\n\treturn files.length;\n};\n\n/*\nRead a browser File object, invoking callback(tiddlerFieldsArray) with an array of tiddler fields objects\n*/\nexports.readFile = function(file,callback) {\n\t// Get the type, falling back to the filename extension\n\tvar self = this,\n\t\ttype = file.type;\n\tif(type === \"\" || !type) {\n\t\tvar dotPos = file.name.lastIndexOf(\".\");\n\t\tif(dotPos !== -1) {\n\t\t\tvar fileExtensionInfo = $tw.utils.getFileExtensionInfo(file.name.substr(dotPos));\n\t\t\tif(fileExtensionInfo) {\n\t\t\t\ttype = fileExtensionInfo.type;\n\t\t\t}\n\t\t}\n\t}\n\t// Figure out if we're reading a binary file\n\tvar contentTypeInfo = $tw.config.contentTypeInfo[type],\n\t\tisBinary = contentTypeInfo ? 
contentTypeInfo.encoding === \"base64\" : false;\n\t// Log some debugging information\n\tif($tw.log.IMPORT) {\n\t\tconsole.log(\"Importing file '\" + file.name + \"', type: '\" + type + \"', isBinary: \" + isBinary);\n\t}\n\t// Create the FileReader\n\tvar reader = new FileReader();\n\t// Onload\n\treader.onload = function(event) {\n\t\t// Deserialise the file contents\n\t\tvar text = event.target.result,\n\t\t\ttiddlerFields = {title: file.name || \"Untitled\", type: type};\n\t\t// Are we binary?\n\t\tif(isBinary) {\n\t\t\t// The base64 section starts after the first comma in the data URI\n\t\t\tvar commaPos = text.indexOf(\",\");\n\t\t\tif(commaPos !== -1) {\n\t\t\t\ttiddlerFields.text = text.substr(commaPos+1);\n\t\t\t\tcallback([tiddlerFields]);\n\t\t\t}\n\t\t} else {\n\t\t\t// Check whether this is an encrypted TiddlyWiki file\n\t\t\tvar encryptedJson = $tw.utils.extractEncryptedStoreArea(text);\n\t\t\tif(encryptedJson) {\n\t\t\t\t// If so, attempt to decrypt it with the current password\n\t\t\t\t$tw.utils.decryptStoreAreaInteractive(encryptedJson,function(tiddlers) {\n\t\t\t\t\tcallback(tiddlers);\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\t// Otherwise, just try to deserialise any tiddlers in the file\n\t\t\t\tcallback(self.deserializeTiddlers(type,text,tiddlerFields));\n\t\t\t}\n\t\t}\n\t};\n\t// Kick off the read\n\tif(isBinary) {\n\t\treader.readAsDataURL(file);\n\t} else {\n\t\treader.readAsText(file);\n\t}\n};\n\n/*\nFind any existing draft of a specified tiddler\n*/\nexports.findDraft = function(targetTitle) {\n\tvar draftTitle = undefined;\n\tthis.forEachTiddler({includeSystem: true},function(title,tiddler) {\n\t\tif(tiddler.fields[\"draft.title\"] && tiddler.fields[\"draft.of\"] === targetTitle) {\n\t\t\tdraftTitle = title;\n\t\t}\n\t});\n\treturn draftTitle;\n}\n\n/*\nCheck whether the specified draft tiddler has been modified.\nIf the original tiddler doesn't exist, create a vanilla tiddler variable,\nto check if additional fields have been 
added.\n*/\nexports.isDraftModified = function(title) {\n\tvar tiddler = this.getTiddler(title);\n\tif(!tiddler.isDraft()) {\n\t\treturn false;\n\t}\n\tvar ignoredFields = [\"created\", \"modified\", \"title\", \"draft.title\", \"draft.of\"],\n\t\torigTiddler = this.getTiddler(tiddler.fields[\"draft.of\"]) || new $tw.Tiddler({text:\"\", tags:[]}),\n\t\ttitleModified = tiddler.fields[\"draft.title\"] !== tiddler.fields[\"draft.of\"];\n\treturn titleModified || !tiddler.isEqual(origTiddler,ignoredFields);\n};\n\n/*\nAdd a new record to the top of the history stack\ntitle: a title string or an array of title strings\nfromPageRect: page coordinates of the origin of the navigation\nhistoryTitle: title of history tiddler (defaults to $:/HistoryList)\n*/\nexports.addToHistory = function(title,fromPageRect,historyTitle) {\n\tvar story = new $tw.Story({wiki: this, historyTitle: historyTitle});\n\tstory.addToHistory(title,fromPageRect);\n};\n\n/*\nInvoke the available upgrader modules\ntitles: array of tiddler titles to be processed\ntiddlers: hashmap by title of tiddler fields of pending import tiddlers. These can be modified by the upgraders. An entry with no fields indicates a tiddler that was pending import has been suppressed. 
When entries are added to the pending import the tiddlers hashmap may have entries that are not present in the titles array\nReturns a hashmap of messages keyed by tiddler title.\n*/\nexports.invokeUpgraders = function(titles,tiddlers) {\n\t// Collect up the available upgrader modules\n\tvar self = this;\n\tif(!this.upgraderModules) {\n\t\tthis.upgraderModules = [];\n\t\t$tw.modules.forEachModuleOfType(\"upgrader\",function(title,module) {\n\t\t\tif(module.upgrade) {\n\t\t\t\tself.upgraderModules.push(module);\n\t\t\t}\n\t\t});\n\t}\n\t// Invoke each upgrader in turn\n\tvar messages = {};\n\tfor(var t=0; t<this.upgraderModules.length; t++) {\n\t\tvar upgrader = this.upgraderModules[t],\n\t\t\tupgraderMessages = upgrader.upgrade(this,titles,tiddlers);\n\t\t$tw.utils.extend(messages,upgraderMessages);\n\t}\n\treturn messages;\n};\n\n})();\n",
"title": "$:/core/modules/wiki.js",
"type": "application/javascript",
"module-type": "wikimethod"
},
"$:/palettes/Blanca": {
"title": "$:/palettes/Blanca",
"name": "Blanca",
"description": "A clean white palette to let you focus",
"tags": "$:/tags/Palette",
"type": "application/x-tiddler-dictionary",
"text": "alert-background: #ffe476\nalert-border: #b99e2f\nalert-highlight: #881122\nalert-muted-foreground: #b99e2f\nbackground: #ffffff\nblockquote-bar: <<colour muted-foreground>>\nbutton-background:\nbutton-foreground:\nbutton-border:\ncode-background: #f7f7f9\ncode-border: #e1e1e8\ncode-foreground: #dd1144\ndirty-indicator: #ff0000\ndownload-background: #66cccc\ndownload-foreground: <<colour background>>\ndragger-background: <<colour foreground>>\ndragger-foreground: <<colour background>>\ndropdown-background: <<colour background>>\ndropdown-border: <<colour muted-foreground>>\ndropdown-tab-background-selected: #fff\ndropdown-tab-background: #ececec\ndropzone-background: rgba(0,200,0,0.7)\nexternal-link-background-hover: inherit\nexternal-link-background-visited: inherit\nexternal-link-background: inherit\nexternal-link-foreground-hover: inherit\nexternal-link-foreground-visited: #0000aa\nexternal-link-foreground: #0000ee\nforeground: #333333\nmessage-background: #ecf2ff\nmessage-border: #cfd6e6\nmessage-foreground: #547599\nmodal-backdrop: <<colour foreground>>\nmodal-background: <<colour background>>\nmodal-border: #999999\nmodal-footer-background: #f5f5f5\nmodal-footer-border: #dddddd\nmodal-header-border: #eeeeee\nmuted-foreground: #999999\nnotification-background: #ffffdd\nnotification-border: #999999\npage-background: #ffffff\npre-background: #f5f5f5\npre-border: #cccccc\nprimary: #7897f3\nsidebar-button-foreground: <<colour foreground>>\nsidebar-controls-foreground-hover: #000000\nsidebar-controls-foreground: #ccc\nsidebar-foreground-shadow: rgba(255,255,255, 0.8)\nsidebar-foreground: #acacac\nsidebar-muted-foreground-hover: #444444\nsidebar-muted-foreground: #c0c0c0\nsidebar-tab-background-selected: #ffffff\nsidebar-tab-background: <<colour tab-background>>\nsidebar-tab-border-selected: <<colour tab-border-selected>>\nsidebar-tab-border: <<colour tab-border>>\nsidebar-tab-divider: <<colour tab-divider>>\nsidebar-tab-foreground-selected: 
\nsidebar-tab-foreground: <<colour tab-foreground>>\nsidebar-tiddler-link-foreground-hover: #444444\nsidebar-tiddler-link-foreground: #7897f3\nsite-title-foreground: <<colour tiddler-title-foreground>>\nstatic-alert-foreground: #aaaaaa\ntab-background-selected: #ffffff\ntab-background: #eeeeee\ntab-border-selected: #cccccc\ntab-border: #cccccc\ntab-divider: #d8d8d8\ntab-foreground-selected: <<colour tab-foreground>>\ntab-foreground: #666666\ntable-border: #dddddd\ntable-footer-background: #a8a8a8\ntable-header-background: #f0f0f0\ntag-background: #ffeedd\ntag-foreground: #000\ntiddler-background: <<colour background>>\ntiddler-border: #eee\ntiddler-controls-foreground-hover: #888888\ntiddler-controls-foreground-selected: #444444\ntiddler-controls-foreground: #cccccc\ntiddler-editor-background: #f8f8f8\ntiddler-editor-border-image: #ffffff\ntiddler-editor-border: #cccccc\ntiddler-editor-fields-even: #e0e8e0\ntiddler-editor-fields-odd: #f0f4f0\ntiddler-info-background: #f8f8f8\ntiddler-info-border: #dddddd\ntiddler-info-tab-background: #f8f8f8\ntiddler-link-background: <<colour background>>\ntiddler-link-foreground: <<colour primary>>\ntiddler-subtitle-foreground: #c0c0c0\ntiddler-title-foreground: #ff9900\ntoolbar-new-button:\ntoolbar-options-button:\ntoolbar-save-button:\ntoolbar-info-button:\ntoolbar-edit-button:\ntoolbar-close-button:\ntoolbar-delete-button:\ntoolbar-cancel-button:\ntoolbar-done-button:\nuntagged-background: #999999\nvery-muted-foreground: #888888\n"
},
"$:/palettes/Blue": {
"title": "$:/palettes/Blue",
"name": "Blue",
"description": "A blue theme",
"tags": "$:/tags/Palette",
"type": "application/x-tiddler-dictionary",
"text": "alert-background: #ffe476\nalert-border: #b99e2f\nalert-highlight: #881122\nalert-muted-foreground: #b99e2f\nbackground: #fff\nblockquote-bar: <<colour muted-foreground>>\nbutton-background:\nbutton-foreground:\nbutton-border:\ncode-background: #f7f7f9\ncode-border: #e1e1e8\ncode-foreground: #dd1144\ndirty-indicator: #ff0000\ndownload-background: #34c734\ndownload-foreground: <<colour foreground>>\ndragger-background: <<colour foreground>>\ndragger-foreground: <<colour background>>\ndropdown-background: <<colour background>>\ndropdown-border: <<colour muted-foreground>>\ndropdown-tab-background-selected: #fff\ndropdown-tab-background: #ececec\ndropzone-background: rgba(0,200,0,0.7)\nexternal-link-background-hover: inherit\nexternal-link-background-visited: inherit\nexternal-link-background: inherit\nexternal-link-foreground-hover: inherit\nexternal-link-foreground-visited: #0000aa\nexternal-link-foreground: #0000ee\nforeground: #333353\nmessage-background: #ecf2ff\nmessage-border: #cfd6e6\nmessage-foreground: #547599\nmodal-backdrop: <<colour foreground>>\nmodal-background: <<colour background>>\nmodal-border: #999999\nmodal-footer-background: #f5f5f5\nmodal-footer-border: #dddddd\nmodal-header-border: #eeeeee\nmuted-foreground: #999999\nnotification-background: #ffffdd\nnotification-border: #999999\npage-background: #ddddff\npre-background: #f5f5f5\npre-border: #cccccc\nprimary: #5778d8\nsidebar-button-foreground: <<colour foreground>>\nsidebar-controls-foreground-hover: #000000\nsidebar-controls-foreground: #ffffff\nsidebar-foreground-shadow: rgba(255,255,255, 0.8)\nsidebar-foreground: #acacac\nsidebar-muted-foreground-hover: #444444\nsidebar-muted-foreground: #c0c0c0\nsidebar-tab-background-selected: <<colour page-background>>\nsidebar-tab-background: <<colour tab-background>>\nsidebar-tab-border-selected: <<colour tab-border-selected>>\nsidebar-tab-border: <<colour tab-border>>\nsidebar-tab-divider: <<colour 
tab-divider>>\nsidebar-tab-foreground-selected: \nsidebar-tab-foreground: <<colour tab-foreground>>\nsidebar-tiddler-link-foreground-hover: #444444\nsidebar-tiddler-link-foreground: #5959c0\nsite-title-foreground: <<colour tiddler-title-foreground>>\nstatic-alert-foreground: #aaaaaa\ntab-background-selected: <<colour background>>\ntab-background: #ccccdd\ntab-border-selected: #ccccdd\ntab-border: #cccccc\ntab-divider: #d8d8d8\ntab-foreground-selected: <<colour tab-foreground>>\ntab-foreground: #666666\ntable-border: #dddddd\ntable-footer-background: #a8a8a8\ntable-header-background: #f0f0f0\ntag-background: #eeeeff\ntag-foreground: #000\ntiddler-background: <<colour background>>\ntiddler-border: <<colour background>>\ntiddler-controls-foreground-hover: #666666\ntiddler-controls-foreground-selected: #444444\ntiddler-controls-foreground: #cccccc\ntiddler-editor-background: #f8f8f8\ntiddler-editor-border-image: #ffffff\ntiddler-editor-border: #cccccc\ntiddler-editor-fields-even: #e0e8e0\ntiddler-editor-fields-odd: #f0f4f0\ntiddler-info-background: #ffffff\ntiddler-info-border: #dddddd\ntiddler-info-tab-background: #ffffff\ntiddler-link-background: <<colour background>>\ntiddler-link-foreground: <<colour primary>>\ntiddler-subtitle-foreground: #c0c0c0\ntiddler-title-foreground: #5959c0\ntoolbar-new-button: #5eb95e\ntoolbar-options-button: rgb(128, 88, 165)\ntoolbar-save-button: #0e90d2\ntoolbar-info-button: #0e90d2\ntoolbar-edit-button: rgb(243, 123, 29)\ntoolbar-close-button: #dd514c\ntoolbar-delete-button: #dd514c\ntoolbar-cancel-button: rgb(243, 123, 29)\ntoolbar-done-button: #5eb95e\nuntagged-background: #999999\nvery-muted-foreground: #888888\n"
},
"$:/palettes/Muted": {
"title": "$:/palettes/Muted",
"name": "Muted",
"description": "Bright tiddlers on a muted background",
"tags": "$:/tags/Palette",
"type": "application/x-tiddler-dictionary",
"text": "alert-background: #ffe476\nalert-border: #b99e2f\nalert-highlight: #881122\nalert-muted-foreground: #b99e2f\nbackground: #ffffff\nblockquote-bar: <<colour muted-foreground>>\nbutton-background:\nbutton-foreground:\nbutton-border:\ncode-background: #f7f7f9\ncode-border: #e1e1e8\ncode-foreground: #dd1144\ndirty-indicator: #ff0000\ndownload-background: #34c734\ndownload-foreground: <<colour background>>\ndragger-background: <<colour foreground>>\ndragger-foreground: <<colour background>>\ndropdown-background: <<colour background>>\ndropdown-border: <<colour muted-foreground>>\ndropdown-tab-background-selected: #fff\ndropdown-tab-background: #ececec\ndropzone-background: rgba(0,200,0,0.7)\nexternal-link-background-hover: inherit\nexternal-link-background-visited: inherit\nexternal-link-background: inherit\nexternal-link-foreground-hover: inherit\nexternal-link-foreground-visited: #0000aa\nexternal-link-foreground: #0000ee\nforeground: #333333\nmessage-background: #ecf2ff\nmessage-border: #cfd6e6\nmessage-foreground: #547599\nmodal-backdrop: <<colour foreground>>\nmodal-background: <<colour background>>\nmodal-border: #999999\nmodal-footer-background: #f5f5f5\nmodal-footer-border: #dddddd\nmodal-header-border: #eeeeee\nmuted-foreground: #bbb\nnotification-background: #ffffdd\nnotification-border: #999999\npage-background: #6f6f70\npre-background: #f5f5f5\npre-border: #cccccc\nprimary: #29a6ee\nsidebar-button-foreground: <<colour foreground>>\nsidebar-controls-foreground-hover: #000000\nsidebar-controls-foreground: #c2c1c2\nsidebar-foreground-shadow: rgba(255,255,255,0)\nsidebar-foreground: #d3d2d4\nsidebar-muted-foreground-hover: #444444\nsidebar-muted-foreground: #c0c0c0\nsidebar-tab-background-selected: #6f6f70\nsidebar-tab-background: #666667\nsidebar-tab-border-selected: #999\nsidebar-tab-border: #515151\nsidebar-tab-divider: #999\nsidebar-tab-foreground-selected: \nsidebar-tab-foreground: #999\nsidebar-tiddler-link-foreground-hover: 
#444444\nsidebar-tiddler-link-foreground: #d1d0d2\nsite-title-foreground: <<colour tiddler-title-foreground>>\nstatic-alert-foreground: #aaaaaa\ntab-background-selected: #ffffff\ntab-background: #d8d8d8\ntab-border-selected: #d8d8d8\ntab-border: #cccccc\ntab-divider: #d8d8d8\ntab-foreground-selected: <<colour tab-foreground>>\ntab-foreground: #666666\ntable-border: #dddddd\ntable-footer-background: #a8a8a8\ntable-header-background: #f0f0f0\ntag-background: #d5ad34\ntag-foreground: #ffffff\ntiddler-background: <<colour background>>\ntiddler-border: <<colour background>>\ntiddler-controls-foreground-hover: #888888\ntiddler-controls-foreground-selected: #444444\ntiddler-controls-foreground: #cccccc\ntiddler-editor-background: #f8f8f8\ntiddler-editor-border-image: #ffffff\ntiddler-editor-border: #cccccc\ntiddler-editor-fields-even: #e0e8e0\ntiddler-editor-fields-odd: #f0f4f0\ntiddler-info-background: #f8f8f8\ntiddler-info-border: #dddddd\ntiddler-info-tab-background: #f8f8f8\ntiddler-link-background: <<colour background>>\ntiddler-link-foreground: <<colour primary>>\ntiddler-subtitle-foreground: #c0c0c0\ntiddler-title-foreground: #182955\ntoolbar-new-button: \ntoolbar-options-button: \ntoolbar-save-button: \ntoolbar-info-button: \ntoolbar-edit-button: \ntoolbar-close-button: \ntoolbar-delete-button: \ntoolbar-cancel-button: \ntoolbar-done-button: \nuntagged-background: #999999\nvery-muted-foreground: #888888\n"
},
"$:/palettes/ContrastLight": {
"title": "$:/palettes/ContrastLight",
"name": "Contrast (Light)",
"description": "High contrast and unambiguous (light version)",
"tags": "$:/tags/Palette",
"type": "application/x-tiddler-dictionary",
"text": "alert-background: #f00\nalert-border: <<colour background>>\nalert-highlight: <<colour foreground>>\nalert-muted-foreground: #800\nbackground: #fff\nblockquote-bar: <<colour muted-foreground>>\nbutton-background: <<colour background>>\nbutton-foreground: <<colour foreground>>\nbutton-border: <<colour foreground>>\ncode-background: <<colour background>>\ncode-border: <<colour foreground>>\ncode-foreground: <<colour foreground>>\ndirty-indicator: #f00\ndownload-background: #080\ndownload-foreground: <<colour background>>\ndragger-background: <<colour foreground>>\ndragger-foreground: <<colour background>>\ndropdown-background: <<colour background>>\ndropdown-border: <<colour muted-foreground>>\ndropdown-tab-background-selected: <<colour foreground>>\ndropdown-tab-background: <<colour foreground>>\ndropzone-background: rgba(0,200,0,0.7)\nexternal-link-background-hover: inherit\nexternal-link-background-visited: inherit\nexternal-link-background: inherit\nexternal-link-foreground-hover: inherit\nexternal-link-foreground-visited: #00a\nexternal-link-foreground: #00e\nforeground: #000\nmessage-background: <<colour foreground>>\nmessage-border: <<colour background>>\nmessage-foreground: <<colour background>>\nmodal-backdrop: <<colour foreground>>\nmodal-background: <<colour background>>\nmodal-border: <<colour foreground>>\nmodal-footer-background: <<colour background>>\nmodal-footer-border: <<colour foreground>>\nmodal-header-border: <<colour foreground>>\nmuted-foreground: <<colour foreground>>\nnotification-background: <<colour background>>\nnotification-border: <<colour foreground>>\npage-background: <<colour background>>\npre-background: <<colour background>>\npre-border: <<colour foreground>>\nprimary: #00f\nsidebar-button-foreground: <<colour foreground>>\nsidebar-controls-foreground-hover: <<colour background>>\nsidebar-controls-foreground: <<colour foreground>>\nsidebar-foreground-shadow: rgba(0,0,0, 0)\nsidebar-foreground: <<colour 
foreground>>\nsidebar-muted-foreground-hover: #444444\nsidebar-muted-foreground: <<colour foreground>>\nsidebar-tab-background-selected: <<colour background>>\nsidebar-tab-background: <<colour tab-background>>\nsidebar-tab-border-selected: <<colour tab-border-selected>>\nsidebar-tab-border: <<colour tab-border>>\nsidebar-tab-divider: <<colour tab-divider>>\nsidebar-tab-foreground-selected: <<colour foreground>>\nsidebar-tab-foreground: <<colour tab-foreground>>\nsidebar-tiddler-link-foreground-hover: <<colour foreground>>\nsidebar-tiddler-link-foreground: <<colour primary>>\nsite-title-foreground: <<colour tiddler-title-foreground>>\nstatic-alert-foreground: #aaaaaa\ntab-background-selected: <<colour background>>\ntab-background: <<colour foreground>>\ntab-border-selected: <<colour foreground>>\ntab-border: <<colour foreground>>\ntab-divider: <<colour foreground>>\ntab-foreground-selected: <<colour foreground>>\ntab-foreground: <<colour background>>\ntable-border: #dddddd\ntable-footer-background: #a8a8a8\ntable-header-background: #f0f0f0\ntag-background: #000\ntag-foreground: #fff\ntiddler-background: <<colour background>>\ntiddler-border: <<colour foreground>>\ntiddler-controls-foreground-hover: #ddd\ntiddler-controls-foreground-selected: #fdd\ntiddler-controls-foreground: <<colour foreground>>\ntiddler-editor-background: <<colour background>>\ntiddler-editor-border-image: <<colour foreground>>\ntiddler-editor-border: #cccccc\ntiddler-editor-fields-even: <<colour background>>\ntiddler-editor-fields-odd: <<colour background>>\ntiddler-info-background: <<colour background>>\ntiddler-info-border: <<colour foreground>>\ntiddler-info-tab-background: <<colour background>>\ntiddler-link-background: <<colour background>>\ntiddler-link-foreground: <<colour primary>>\ntiddler-subtitle-foreground: <<colour foreground>>\ntiddler-title-foreground: <<colour foreground>>\ntoolbar-new-button: \ntoolbar-options-button: \ntoolbar-save-button: \ntoolbar-info-button: 
\ntoolbar-edit-button: \ntoolbar-close-button: \ntoolbar-delete-button: \ntoolbar-cancel-button: \ntoolbar-done-button: \nuntagged-background: <<colour foreground>>\nvery-muted-foreground: #888888\n"
},
"$:/palettes/ContrastDark": {
"title": "$:/palettes/ContrastDark",
"name": "Contrast (Dark)",
"description": "High contrast and unambiguous (dark version)",
"tags": "$:/tags/Palette",
"type": "application/x-tiddler-dictionary",
"text": "alert-background: #f00\nalert-border: <<colour background>>\nalert-highlight: <<colour foreground>>\nalert-muted-foreground: #800\nbackground: #000\nblockquote-bar: <<colour muted-foreground>>\nbutton-background: <<colour background>>\nbutton-foreground: <<colour foreground>>\nbutton-border: <<colour foreground>>\ncode-background: <<colour background>>\ncode-border: <<colour foreground>>\ncode-foreground: <<colour foreground>>\ndirty-indicator: #f00\ndownload-background: #080\ndownload-foreground: <<colour background>>\ndragger-background: <<colour foreground>>\ndragger-foreground: <<colour background>>\ndropdown-background: <<colour background>>\ndropdown-border: <<colour muted-foreground>>\ndropdown-tab-background-selected: <<colour foreground>>\ndropdown-tab-background: <<colour foreground>>\ndropzone-background: rgba(0,200,0,0.7)\nexternal-link-background-hover: inherit\nexternal-link-background-visited: inherit\nexternal-link-background: inherit\nexternal-link-foreground-hover: inherit\nexternal-link-foreground-visited: #00a\nexternal-link-foreground: #00e\nforeground: #fff\nmessage-background: <<colour foreground>>\nmessage-border: <<colour background>>\nmessage-foreground: <<colour background>>\nmodal-backdrop: <<colour foreground>>\nmodal-background: <<colour background>>\nmodal-border: <<colour foreground>>\nmodal-footer-background: <<colour background>>\nmodal-footer-border: <<colour foreground>>\nmodal-header-border: <<colour foreground>>\nmuted-foreground: <<colour foreground>>\nnotification-background: <<colour background>>\nnotification-border: <<colour foreground>>\npage-background: <<colour background>>\npre-background: <<colour background>>\npre-border: <<colour foreground>>\nprimary: #00f\nsidebar-button-foreground: <<colour foreground>>\nsidebar-controls-foreground-hover: <<colour background>>\nsidebar-controls-foreground: <<colour foreground>>\nsidebar-foreground-shadow: rgba(0,0,0, 0)\nsidebar-foreground: <<colour 
foreground>>\nsidebar-muted-foreground-hover: #444444\nsidebar-muted-foreground: <<colour foreground>>\nsidebar-tab-background-selected: <<colour background>>\nsidebar-tab-background: <<colour tab-background>>\nsidebar-tab-border-selected: <<colour tab-border-selected>>\nsidebar-tab-border: <<colour tab-border>>\nsidebar-tab-divider: <<colour tab-divider>>\nsidebar-tab-foreground-selected: <<colour foreground>>\nsidebar-tab-foreground: <<colour tab-foreground>>\nsidebar-tiddler-link-foreground-hover: <<colour foreground>>\nsidebar-tiddler-link-foreground: <<colour primary>>\nsite-title-foreground: <<colour tiddler-title-foreground>>\nstatic-alert-foreground: #aaaaaa\ntab-background-selected: <<colour background>>\ntab-background: <<colour foreground>>\ntab-border-selected: <<colour foreground>>\ntab-border: <<colour foreground>>\ntab-divider: <<colour foreground>>\ntab-foreground-selected: <<colour foreground>>\ntab-foreground: <<colour background>>\ntable-border: #dddddd\ntable-footer-background: #a8a8a8\ntable-header-background: #f0f0f0\ntag-background: #fff\ntag-foreground: #000\ntiddler-background: <<colour background>>\ntiddler-border: <<colour foreground>>\ntiddler-controls-foreground-hover: #ddd\ntiddler-controls-foreground-selected: #fdd\ntiddler-controls-foreground: <<colour foreground>>\ntiddler-editor-background: <<colour background>>\ntiddler-editor-border-image: <<colour foreground>>\ntiddler-editor-border: #cccccc\ntiddler-editor-fields-even: <<colour background>>\ntiddler-editor-fields-odd: <<colour background>>\ntiddler-info-background: <<colour background>>\ntiddler-info-border: <<colour foreground>>\ntiddler-info-tab-background: <<colour background>>\ntiddler-link-background: <<colour background>>\ntiddler-link-foreground: <<colour primary>>\ntiddler-subtitle-foreground: <<colour foreground>>\ntiddler-title-foreground: <<colour foreground>>\ntoolbar-new-button: \ntoolbar-options-button: \ntoolbar-save-button: \ntoolbar-info-button: 
\ntoolbar-edit-button: \ntoolbar-close-button: \ntoolbar-delete-button: \ntoolbar-cancel-button: \ntoolbar-done-button: \nuntagged-background: <<colour foreground>>\nvery-muted-foreground: #888888\n"
},
"$:/palettes/DarkPhotos": {
"created": "20150402111612188",
"description": "Good with dark photo backgrounds",
"modified": "20150402112344080",
"name": "DarkPhotos",
"tags": "$:/tags/Palette",
"title": "$:/palettes/DarkPhotos",
"type": "application/x-tiddler-dictionary",
"text": "alert-background: #ffe476\nalert-border: #b99e2f\nalert-highlight: #881122\nalert-muted-foreground: #b99e2f\nbackground: #ffffff\nblockquote-bar: <<colour muted-foreground>>\nbutton-background: \nbutton-foreground: \nbutton-border: \ncode-background: #f7f7f9\ncode-border: #e1e1e8\ncode-foreground: #dd1144\ndirty-indicator: #ff0000\ndownload-background: #34c734\ndownload-foreground: <<colour background>>\ndragger-background: <<colour foreground>>\ndragger-foreground: <<colour background>>\ndropdown-background: <<colour background>>\ndropdown-border: <<colour muted-foreground>>\ndropdown-tab-background-selected: #fff\ndropdown-tab-background: #ececec\ndropzone-background: rgba(0,200,0,0.7)\nexternal-link-background-hover: inherit\nexternal-link-background-visited: inherit\nexternal-link-background: inherit\nexternal-link-foreground-hover: inherit\nexternal-link-foreground-visited: #0000aa\nexternal-link-foreground: #0000ee\nforeground: #333333\nmessage-background: #ecf2ff\nmessage-border: #cfd6e6\nmessage-foreground: #547599\nmodal-backdrop: <<colour foreground>>\nmodal-background: <<colour background>>\nmodal-border: #999999\nmodal-footer-background: #f5f5f5\nmodal-footer-border: #dddddd\nmodal-header-border: #eeeeee\nmuted-foreground: #ddd\nnotification-background: #ffffdd\nnotification-border: #999999\npage-background: #336438\npre-background: #f5f5f5\npre-border: #cccccc\nprimary: #5778d8\nsidebar-button-foreground: <<colour foreground>>\nsidebar-controls-foreground-hover: #ccf\nsidebar-controls-foreground: #fff\nsidebar-foreground-shadow: rgba(0,0,0, 0.5)\nsidebar-foreground: #fff\nsidebar-muted-foreground-hover: #444444\nsidebar-muted-foreground: #eee\nsidebar-tab-background-selected: rgba(255,255,255, 0.8)\nsidebar-tab-background: rgba(255,255,255, 0.4)\nsidebar-tab-border-selected: <<colour tab-border-selected>>\nsidebar-tab-border: <<colour tab-border>>\nsidebar-tab-divider: rgba(255,255,255, 0.2)\nsidebar-tab-foreground-selected: 
\nsidebar-tab-foreground: <<colour tab-foreground>>\nsidebar-tiddler-link-foreground-hover: #aaf\nsidebar-tiddler-link-foreground: #ddf\nsite-title-foreground: #fff\nstatic-alert-foreground: #aaaaaa\ntab-background-selected: #ffffff\ntab-background: #d8d8d8\ntab-border-selected: #d8d8d8\ntab-border: #cccccc\ntab-divider: #d8d8d8\ntab-foreground-selected: <<colour tab-foreground>>\ntab-foreground: #666666\ntable-border: #dddddd\ntable-footer-background: #a8a8a8\ntable-header-background: #f0f0f0\ntag-background: #ec6\ntag-foreground: #ffffff\ntiddler-background: <<colour background>>\ntiddler-border: <<colour background>>\ntiddler-controls-foreground-hover: #888888\ntiddler-controls-foreground-selected: #444444\ntiddler-controls-foreground: #cccccc\ntiddler-editor-background: #f8f8f8\ntiddler-editor-border-image: #ffffff\ntiddler-editor-border: #cccccc\ntiddler-editor-fields-even: #e0e8e0\ntiddler-editor-fields-odd: #f0f4f0\ntiddler-info-background: #f8f8f8\ntiddler-info-border: #dddddd\ntiddler-info-tab-background: #f8f8f8\ntiddler-link-background: <<colour background>>\ntiddler-link-foreground: <<colour primary>>\ntiddler-subtitle-foreground: #c0c0c0\ntiddler-title-foreground: #182955\ntoolbar-new-button: \ntoolbar-options-button: \ntoolbar-save-button: \ntoolbar-info-button: \ntoolbar-edit-button: \ntoolbar-close-button: \ntoolbar-delete-button: \ntoolbar-cancel-button: \ntoolbar-done-button: \nuntagged-background: #999999\nvery-muted-foreground: #888888\n"
},
"$:/palettes/Rocker": {
"title": "$:/palettes/Rocker",
"name": "Rocker",
"description": "A dark theme",
"tags": "$:/tags/Palette",
"type": "application/x-tiddler-dictionary",
"text": "alert-background: #ffe476\nalert-border: #b99e2f\nalert-highlight: #881122\nalert-muted-foreground: #b99e2f\nbackground: #ffffff\nblockquote-bar: <<colour muted-foreground>>\nbutton-background:\nbutton-foreground:\nbutton-border:\ncode-background: #f7f7f9\ncode-border: #e1e1e8\ncode-foreground: #dd1144\ndirty-indicator: #ff0000\ndownload-background: #34c734\ndownload-foreground: <<colour background>>\ndragger-background: <<colour foreground>>\ndragger-foreground: <<colour background>>\ndropdown-background: <<colour background>>\ndropdown-border: <<colour muted-foreground>>\ndropdown-tab-background-selected: #fff\ndropdown-tab-background: #ececec\ndropzone-background: rgba(0,200,0,0.7)\nexternal-link-background-hover: inherit\nexternal-link-background-visited: inherit\nexternal-link-background: inherit\nexternal-link-foreground-hover: inherit\nexternal-link-foreground-visited: #0000aa\nexternal-link-foreground: #0000ee\nforeground: #333333\nmessage-background: #ecf2ff\nmessage-border: #cfd6e6\nmessage-foreground: #547599\nmodal-backdrop: <<colour foreground>>\nmodal-background: <<colour background>>\nmodal-border: #999999\nmodal-footer-background: #f5f5f5\nmodal-footer-border: #dddddd\nmodal-header-border: #eeeeee\nmuted-foreground: #999999\nnotification-background: #ffffdd\nnotification-border: #999999\npage-background: #000\npre-background: #f5f5f5\npre-border: #cccccc\nprimary: #cc0000\nsidebar-button-foreground: <<colour foreground>>\nsidebar-controls-foreground-hover: #000000\nsidebar-controls-foreground: #ffffff\nsidebar-foreground-shadow: rgba(255,255,255, 0.0)\nsidebar-foreground: #acacac\nsidebar-muted-foreground-hover: #444444\nsidebar-muted-foreground: #c0c0c0\nsidebar-tab-background-selected: #000\nsidebar-tab-background: <<colour tab-background>>\nsidebar-tab-border-selected: <<colour tab-border-selected>>\nsidebar-tab-border: <<colour tab-border>>\nsidebar-tab-divider: <<colour tab-divider>>\nsidebar-tab-foreground-selected: 
\nsidebar-tab-foreground: <<colour tab-foreground>>\nsidebar-tiddler-link-foreground-hover: #ffbb99\nsidebar-tiddler-link-foreground: #cc0000\nsite-title-foreground: <<colour tiddler-title-foreground>>\nstatic-alert-foreground: #aaaaaa\ntab-background-selected: #ffffff\ntab-background: #d8d8d8\ntab-border-selected: #d8d8d8\ntab-border: #cccccc\ntab-divider: #d8d8d8\ntab-foreground-selected: <<colour tab-foreground>>\ntab-foreground: #666666\ntable-border: #dddddd\ntable-footer-background: #a8a8a8\ntable-header-background: #f0f0f0\ntag-background: #ffbb99\ntag-foreground: #000\ntiddler-background: <<colour background>>\ntiddler-border: <<colour background>>\ntiddler-controls-foreground-hover: #888888\ntiddler-controls-foreground-selected: #444444\ntiddler-controls-foreground: #cccccc\ntiddler-editor-background: #f8f8f8\ntiddler-editor-border-image: #ffffff\ntiddler-editor-border: #cccccc\ntiddler-editor-fields-even: #e0e8e0\ntiddler-editor-fields-odd: #f0f4f0\ntiddler-info-background: #f8f8f8\ntiddler-info-border: #dddddd\ntiddler-info-tab-background: #f8f8f8\ntiddler-link-background: <<colour background>>\ntiddler-link-foreground: <<colour primary>>\ntiddler-subtitle-foreground: #c0c0c0\ntiddler-title-foreground: #cc0000\ntoolbar-new-button:\ntoolbar-options-button:\ntoolbar-save-button:\ntoolbar-info-button:\ntoolbar-edit-button:\ntoolbar-close-button:\ntoolbar-delete-button:\ntoolbar-cancel-button:\ntoolbar-done-button:\nuntagged-background: #999999\nvery-muted-foreground: #888888\n"
},
"$:/palettes/SolarFlare": {
"title": "$:/palettes/SolarFlare",
"name": "Solar Flare",
"description": "Warm, relaxing earth colours",
"tags": "$:/tags/Palette",
"type": "application/x-tiddler-dictionary",
"text": ": Background Tones\n\nbase03: #002b36\nbase02: #073642\n\n: Content Tones\n\nbase01: #586e75\nbase00: #657b83\nbase0: #839496\nbase1: #93a1a1\n\n: Background Tones\n\nbase2: #eee8d5\nbase3: #fdf6e3\n\n: Accent Colors\n\nyellow: #b58900\norange: #cb4b16\nred: #dc322f\nmagenta: #d33682\nviolet: #6c71c4\nblue: #268bd2\ncyan: #2aa198\ngreen: #859900\n\n: Additional Tones (RA)\n\nbase10: #c0c4bb\nviolet-muted: #7c81b0\nblue-muted: #4e7baa\n\nyellow-hot: #ffcc44\norange-hot: #eb6d20\nred-hot: #ff2222\nblue-hot: #2298ee\ngreen-hot: #98ee22\n\n: Palette\n\n: Do not use colour macro for background and foreground\nbackground: #fdf6e3\n download-foreground: <<colour background>>\n dragger-foreground: <<colour background>>\n dropdown-background: <<colour background>>\n modal-background: <<colour background>>\n sidebar-foreground-shadow: <<colour background>>\n tiddler-background: <<colour background>>\n tiddler-border: <<colour background>>\n tiddler-link-background: <<colour background>>\n tab-background-selected: <<colour background>>\n dropdown-tab-background-selected: <<colour tab-background-selected>>\nforeground: #657b83\n dragger-background: <<colour foreground>>\n tab-foreground: <<colour foreground>>\n tab-foreground-selected: <<colour tab-foreground>>\n sidebar-tab-foreground-selected: <<colour tab-foreground-selected>>\n sidebar-tab-foreground: <<colour tab-foreground>>\n sidebar-button-foreground: <<colour foreground>>\n sidebar-controls-foreground: <<colour foreground>>\n sidebar-foreground: <<colour foreground>>\n: base03\n: base02\n: base01\n alert-muted-foreground: <<colour base01>>\n: base00\n code-foreground: <<colour base00>>\n message-foreground: <<colour base00>>\n tag-foreground: <<colour base00>>\n: base0\n sidebar-tiddler-link-foreground: <<colour base0>>\n: base1\n muted-foreground: <<colour base1>>\n blockquote-bar: <<colour muted-foreground>>\n dropdown-border: <<colour muted-foreground>>\n sidebar-muted-foreground: <<colour 
muted-foreground>>\n tiddler-title-foreground: <<colour muted-foreground>>\n site-title-foreground: <<colour tiddler-title-foreground>>\n: base2\n modal-footer-background: <<colour base2>>\n page-background: <<colour base2>>\n modal-backdrop: <<colour page-background>>\n notification-background: <<colour page-background>>\n code-background: <<colour page-background>>\n code-border: <<colour code-background>>\n pre-background: <<colour page-background>>\n pre-border: <<colour pre-background>>\n sidebar-tab-background-selected: <<colour page-background>>\n table-header-background: <<colour base2>>\n tag-background: <<colour base2>>\n tiddler-editor-background: <<colour base2>>\n tiddler-info-background: <<colour base2>>\n tiddler-info-tab-background: <<colour base2>>\n tab-background: <<colour base2>>\n dropdown-tab-background: <<colour tab-background>>\n: base3\n alert-background: <<colour base3>>\n message-background: <<colour base3>>\n: yellow\n: orange\n: red\n: magenta\n alert-highlight: <<colour magenta>>\n: violet\n external-link-foreground: <<colour violet>>\n: blue\n: cyan\n: green\n: base10\n tiddler-controls-foreground: <<colour base10>>\n: violet-muted\n external-link-foreground-visited: <<colour violet-muted>>\n: blue-muted\n primary: <<colour blue-muted>>\n download-background: <<colour primary>>\n tiddler-link-foreground: <<colour primary>>\n\nalert-border: #b99e2f\ndirty-indicator: #ff0000\ndropzone-background: rgba(0,200,0,0.7)\nexternal-link-background-hover: inherit\nexternal-link-background-visited: inherit\nexternal-link-background: inherit\nexternal-link-foreground-hover: inherit\nmessage-border: #cfd6e6\nmodal-border: #999999\nsidebar-controls-foreground-hover:\nsidebar-muted-foreground-hover:\nsidebar-tab-background: #ded8c5\nsidebar-tiddler-link-foreground-hover:\nstatic-alert-foreground: #aaaaaa\ntab-border: #cccccc\n modal-footer-border: <<colour tab-border>>\n modal-header-border: <<colour tab-border>>\n notification-border: <<colour 
tab-border>>\n sidebar-tab-border: <<colour tab-border>>\n tab-border-selected: <<colour tab-border>>\n sidebar-tab-border-selected: <<colour tab-border-selected>>\ntab-divider: #d8d8d8\n sidebar-tab-divider: <<colour tab-divider>>\ntable-border: #dddddd\ntable-footer-background: #a8a8a8\ntiddler-controls-foreground-hover: #888888\ntiddler-controls-foreground-selected: #444444\ntiddler-editor-border-image: #ffffff\ntiddler-editor-border: #cccccc\ntiddler-editor-fields-even: #e0e8e0\ntiddler-editor-fields-odd: #f0f4f0\ntiddler-info-border: #dddddd\ntiddler-subtitle-foreground: #c0c0c0\ntoolbar-new-button:\ntoolbar-options-button:\ntoolbar-save-button:\ntoolbar-info-button:\ntoolbar-edit-button:\ntoolbar-close-button:\ntoolbar-delete-button:\ntoolbar-cancel-button:\ntoolbar-done-button:\nuntagged-background: #999999\nvery-muted-foreground: #888888\n"
},
"$:/palettes/Vanilla": {
"title": "$:/palettes/Vanilla",
"name": "Vanilla",
"description": "Pale and unobtrusive",
"tags": "$:/tags/Palette",
"type": "application/x-tiddler-dictionary",
"text": "alert-background: #ffe476\nalert-border: #b99e2f\nalert-highlight: #881122\nalert-muted-foreground: #b99e2f\nbackground: #ffffff\nblockquote-bar: <<colour muted-foreground>>\nbutton-background:\nbutton-foreground:\nbutton-border:\ncode-background: #f7f7f9\ncode-border: #e1e1e8\ncode-foreground: #dd1144\ndirty-indicator: #ff0000\ndownload-background: #34c734\ndownload-foreground: <<colour background>>\ndragger-background: <<colour foreground>>\ndragger-foreground: <<colour background>>\ndropdown-background: <<colour background>>\ndropdown-border: <<colour muted-foreground>>\ndropdown-tab-background-selected: #fff\ndropdown-tab-background: #ececec\ndropzone-background: rgba(0,200,0,0.7)\nexternal-link-background-hover: inherit\nexternal-link-background-visited: inherit\nexternal-link-background: inherit\nexternal-link-foreground-hover: inherit\nexternal-link-foreground-visited: #0000aa\nexternal-link-foreground: #0000ee\nforeground: #333333\nmessage-background: #ecf2ff\nmessage-border: #cfd6e6\nmessage-foreground: #547599\nmodal-backdrop: <<colour foreground>>\nmodal-background: <<colour background>>\nmodal-border: #999999\nmodal-footer-background: #f5f5f5\nmodal-footer-border: #dddddd\nmodal-header-border: #eeeeee\nmuted-foreground: #bbb\nnotification-background: #ffffdd\nnotification-border: #999999\npage-background: #f4f4f4\npre-background: #f5f5f5\npre-border: #cccccc\nprimary: #5778d8\nsidebar-button-foreground: <<colour foreground>>\nsidebar-controls-foreground-hover: #000000\nsidebar-controls-foreground: #aaaaaa\nsidebar-foreground-shadow: rgba(255,255,255, 0.8)\nsidebar-foreground: #acacac\nsidebar-muted-foreground-hover: #444444\nsidebar-muted-foreground: #c0c0c0\nsidebar-tab-background-selected: #f4f4f4\nsidebar-tab-background: #e0e0e0\nsidebar-tab-border-selected: <<colour tab-border-selected>>\nsidebar-tab-border: <<colour tab-border>>\nsidebar-tab-divider: #e4e4e4\nsidebar-tab-foreground-selected:\nsidebar-tab-foreground: <<colour 
tab-foreground>>\nsidebar-tiddler-link-foreground-hover: #444444\nsidebar-tiddler-link-foreground: #999999\nsite-title-foreground: <<colour tiddler-title-foreground>>\nstatic-alert-foreground: #aaaaaa\ntab-background-selected: #ffffff\ntab-background: #d8d8d8\ntab-border-selected: #d8d8d8\ntab-border: #cccccc\ntab-divider: #d8d8d8\ntab-foreground-selected: <<colour tab-foreground>>\ntab-foreground: #666666\ntable-border: #dddddd\ntable-footer-background: #a8a8a8\ntable-header-background: #f0f0f0\ntag-background: #ec6\ntag-foreground: #ffffff\ntiddler-background: <<colour background>>\ntiddler-border: <<colour background>>\ntiddler-controls-foreground-hover: #888888\ntiddler-controls-foreground-selected: #444444\ntiddler-controls-foreground: #cccccc\ntiddler-editor-background: #f8f8f8\ntiddler-editor-border-image: #ffffff\ntiddler-editor-border: #cccccc\ntiddler-editor-fields-even: #e0e8e0\ntiddler-editor-fields-odd: #f0f4f0\ntiddler-info-background: #f8f8f8\ntiddler-info-border: #dddddd\ntiddler-info-tab-background: #f8f8f8\ntiddler-link-background: <<colour background>>\ntiddler-link-foreground: <<colour primary>>\ntiddler-subtitle-foreground: #c0c0c0\ntiddler-title-foreground: #182955\ntoolbar-new-button:\ntoolbar-options-button:\ntoolbar-save-button:\ntoolbar-info-button:\ntoolbar-edit-button:\ntoolbar-close-button:\ntoolbar-delete-button:\ntoolbar-cancel-button:\ntoolbar-done-button:\nuntagged-background: #999999\nvery-muted-foreground: #888888\n"
},
"$:/core/readme": {
"title": "$:/core/readme",
"text": "This plugin contains TiddlyWiki's core components, comprising:\n\n* JavaScript code modules\n* Icons\n* Templates needed to create TiddlyWiki's user interface\n* British English (''en-GB'') translations of the localisable strings used by the core\n"
},
"$:/core/templates/alltiddlers.template.html": {
"title": "$:/core/templates/alltiddlers.template.html",
"type": "text/vnd.tiddlywiki-html",
"text": "<!-- This template is provided for backwards compatibility with older versions of TiddlyWiki -->\n\n<$set name=\"exportFilter\" value=\"[!is[system]sort[title]]\">\n\n{{$:/core/templates/exporters/StaticRiver}}\n\n</$set>\n"
},
"$:/core/templates/canonical-uri-external-image": {
"title": "$:/core/templates/canonical-uri-external-image",
"text": "<!--\n\nThis template is used to assign the ''_canonical_uri'' field to external images.\n\nChange the `./images/` part to a different base URI. The URI can be relative or absolute.\n\n-->\n./images/<$view field=\"title\" format=\"doubleurlencoded\"/>"
},
"$:/core/templates/canonical-uri-external-text": {
"title": "$:/core/templates/canonical-uri-external-text",
"text": "<!--\n\nThis template is used to assign the ''_canonical_uri'' field to external text files.\n\nChange the `./text/` part to a different base URI. The URI can be relative or absolute.\n\n-->\n./text/<$view field=\"title\" format=\"doubleurlencoded\"/>.tid"
},
"$:/core/templates/css-tiddler": {
"title": "$:/core/templates/css-tiddler",
"text": "<!--\n\nThis template is used for saving CSS tiddlers as a style tag with data attributes representing the tiddler fields.\n\n-->`<style`<$fields template=' data-tiddler-$name$=\"$encoded_value$\"'></$fields>` type=\"text/css\">`<$view field=\"text\" format=\"text\" />`</style>`"
},
"$:/core/templates/exporters/CsvFile": {
"title": "$:/core/templates/exporters/CsvFile",
"tags": "$:/tags/Exporter",
"description": "{{$:/language/Exporters/CsvFile}}",
"extension": ".csv",
"text": "\\define renderContent()\n<$text text=<<csvtiddlers filter:\"\"\"$(exportFilter)$\"\"\" format:\"quoted-comma-sep\">>/>\n\\end\n<<renderContent>>\n"
},
"$:/core/templates/exporters/JsonFile": {
"title": "$:/core/templates/exporters/JsonFile",
"tags": "$:/tags/Exporter",
"description": "{{$:/language/Exporters/JsonFile}}",
"extension": ".json",
"text": "\\define renderContent()\n<$text text=<<jsontiddlers filter:\"\"\"$(exportFilter)$\"\"\">>/>\n\\end\n<<renderContent>>\n"
},
"$:/core/templates/exporters/StaticRiver": {
"title": "$:/core/templates/exporters/StaticRiver",
"tags": "$:/tags/Exporter",
"description": "{{$:/language/Exporters/StaticRiver}}",
"extension": ".html",
"text": "\\define tv-wikilink-template() #$uri_encoded$\n\\define tv-config-toolbar-icons() no\n\\define tv-config-toolbar-text() no\n\\define tv-config-toolbar-class() tc-btn-invisible\n\\rules only filteredtranscludeinline transcludeinline\n<!doctype html>\n<html>\n<head>\n<meta http-equiv=\"Content-Type\" content=\"text/html;charset=utf-8\" />\n<meta name=\"generator\" content=\"TiddlyWiki\" />\n<meta name=\"tiddlywiki-version\" content=\"{{$:/core/templates/version}}\" />\n<meta name=\"format-detection\" content=\"telephone=no\">\n<link id=\"faviconLink\" rel=\"shortcut icon\" href=\"favicon.ico\">\n<title>{{$:/core/wiki/title}}</title>\n<div id=\"styleArea\">\n{{$:/boot/boot.css||$:/core/templates/css-tiddler}}\n</div>\n<style type=\"text/css\">\n{{$:/core/ui/PageStylesheet||$:/core/templates/wikified-tiddler}}\n</style>\n</head>\n<body class=\"tc-body\">\n{{$:/StaticBanner||$:/core/templates/html-tiddler}}\n<section class=\"tc-story-river\">\n{{$:/core/templates/exporters/StaticRiver/Content||$:/core/templates/html-tiddler}}\n</section>\n</body>\n</html>\n"
},
"$:/core/templates/exporters/StaticRiver/Content": {
"title": "$:/core/templates/exporters/StaticRiver/Content",
"text": "\\define renderContent()\n{{{ $(exportFilter)$ ||$:/core/templates/static-tiddler}}}\n\\end\n<$importvariables filter=\"[[$:/core/ui/PageMacros]] [all[shadows+tiddlers]tag[$:/tags/Macro]!has[draft.of]]\">\n<<renderContent>>\n</$importvariables>\n"
},
"$:/core/templates/exporters/TidFile": {
"title": "$:/core/templates/exporters/TidFile",
"tags": "$:/tags/Exporter",
"description": "{{$:/language/Exporters/TidFile}}",
"extension": ".tid",
"text": "\\define renderContent()\n{{{ $(exportFilter)$ +[limit[1]] ||$:/core/templates/tid-tiddler}}}\n\\end\n<$importvariables filter=\"[[$:/core/ui/PageMacros]] [all[shadows+tiddlers]tag[$:/tags/Macro]!has[draft.of]]\"><<renderContent>></$importvariables>"
},
"$:/core/templates/html-div-tiddler": {
"title": "$:/core/templates/html-div-tiddler",
"text": "<!--\n\nThis template is used for saving tiddlers as an HTML DIV tag with attributes representing the tiddler fields.\n\n-->`<div`<$fields template=' $name$=\"$encoded_value$\"'></$fields>`>\n<pre>`<$view field=\"text\" format=\"htmlencoded\" />`</pre>\n</div>`\n"
},
"$:/core/templates/html-tiddler": {
"title": "$:/core/templates/html-tiddler",
"text": "<!--\n\nThis template is used for saving tiddlers as raw HTML\n\n--><$view field=\"text\" format=\"htmlwikified\" />"
},
"$:/core/templates/javascript-tiddler": {
"title": "$:/core/templates/javascript-tiddler",
"text": "<!--\n\nThis template is used for saving JavaScript tiddlers as a script tag with data attributes representing the tiddler fields.\n\n-->`<script`<$fields template=' data-tiddler-$name$=\"$encoded_value$\"'></$fields>` type=\"text/javascript\">`<$view field=\"text\" format=\"text\" />`</script>`"
},
"$:/core/templates/module-tiddler": {
"title": "$:/core/templates/module-tiddler",
"text": "<!--\n\nThis template is used for saving JavaScript tiddlers as a script tag with data attributes representing the tiddler fields. The body of the tiddler is wrapped in a call to the `$tw.modules.define` function in order to define the body of the tiddler as a module\n\n-->`<script`<$fields template=' data-tiddler-$name$=\"$encoded_value$\"'></$fields>` type=\"text/javascript\" data-module=\"yes\">$tw.modules.define(\"`<$view field=\"title\" format=\"jsencoded\" />`\",\"`<$view field=\"module-type\" format=\"jsencoded\" />`\",function(module,exports,require) {`<$view field=\"text\" format=\"text\" />`});\n</script>`"
},
"$:/core/templates/MOTW.html": {
"title": "$:/core/templates/MOTW.html",
"text": "\\rules only filteredtranscludeinline transcludeinline entity\n<!-- The following comment is called a MOTW comment and is necessary for the TiddlyIE Internet Explorer extension -->\n<!-- saved from url=(0021)http://tiddlywiki.com --> "
},
"$:/core/templates/plain-text-tiddler": {
"title": "$:/core/templates/plain-text-tiddler",
"text": "<$view field=\"text\" format=\"text\" />"
},
"$:/core/templates/raw-static-tiddler": {
"title": "$:/core/templates/raw-static-tiddler",
"text": "<!--\n\nThis template is used for saving tiddlers as static HTML\n\n--><$view field=\"text\" format=\"plainwikified\" />"
},
"$:/core/save/all": {
"title": "$:/core/save/all",
"text": "\\define saveTiddlerFilter()\n[is[tiddler]] -[prefix[$:/state/popup/]] -[[$:/HistoryList]] -[[$:/boot/boot.css]] -[type[application/javascript]library[yes]] -[[$:/boot/boot.js]] -[[$:/boot/bootprefix.js]] +[sort[title]] $(publishFilter)$\n\\end\n{{$:/core/templates/tiddlywiki5.html}}\n"
},
"$:/core/save/empty": {
"title": "$:/core/save/empty",
"text": "\\define saveTiddlerFilter()\n[is[system]] -[prefix[$:/state/popup/]] -[[$:/boot/boot.css]] -[type[application/javascript]library[yes]] -[[$:/boot/boot.js]] -[[$:/boot/bootprefix.js]] +[sort[title]]\n\\end\n{{$:/core/templates/tiddlywiki5.html}}\n"
},
"$:/core/save/lazy-all": {
"title": "$:/core/save/lazy-all",
"text": "\\define saveTiddlerFilter()\n[is[system]] -[prefix[$:/state/popup/]] -[[$:/HistoryList]] -[[$:/boot/boot.css]] -[type[application/javascript]library[yes]] -[[$:/boot/boot.js]] -[[$:/boot/bootprefix.js]] +[sort[title]] \n\\end\n{{$:/core/templates/tiddlywiki5.html}}\n"
},
"$:/core/save/lazy-images": {
"title": "$:/core/save/lazy-images",
"text": "\\define saveTiddlerFilter()\n[is[tiddler]] -[prefix[$:/state/popup/]] -[[$:/HistoryList]] -[[$:/boot/boot.css]] -[type[application/javascript]library[yes]] -[[$:/boot/boot.js]] -[[$:/boot/bootprefix.js]] -[!is[system]is[image]] +[sort[title]] \n\\end\n{{$:/core/templates/tiddlywiki5.html}}\n"
},
"$:/core/templates/single.tiddler.window": {
"title": "$:/core/templates/single.tiddler.window",
"text": "<$set name=\"themeTitle\" value={{$:/view}}>\n\n<$set name=\"tempCurrentTiddler\" value=<<currentTiddler>>>\n\n<$set name=\"currentTiddler\" value={{$:/language}}>\n\n<$set name=\"languageTitle\" value={{!!name}}>\n\n<$set name=\"currentTiddler\" value=<<tempCurrentTiddler>>>\n\n<$importvariables filter=\"[[$:/core/ui/PageMacros]] [all[shadows+tiddlers]tag[$:/tags/Macro]!has[draft.of]]\">\n\n<$navigator story=\"$:/StoryList\" history=\"$:/HistoryList\">\n\n<$transclude mode=\"block\"/>\n\n</$navigator>\n\n</$importvariables>\n\n</$set>\n\n</$set>\n\n</$set>\n\n</$set>\n\n</$set>\n\n"
},
"$:/core/templates/split-recipe": {
"title": "$:/core/templates/split-recipe",
"text": "<$list filter=\"[!is[system]]\">\ntiddler: <$view field=\"title\" format=\"urlencoded\"/>.tid\n</$list>\n"
},
"$:/core/templates/static-tiddler": {
"title": "$:/core/templates/static-tiddler",
"text": "<a name=<<currentTiddler>>>\n<$transclude tiddler=\"$:/core/ui/ViewTemplate\"/>\n</a>"
},
"$:/core/templates/static.area": {
"title": "$:/core/templates/static.area",
"text": "<$reveal type=\"nomatch\" state=\"$:/isEncrypted\" text=\"yes\">\n{{{ [all[shadows+tiddlers]tag[$:/tags/RawStaticContent]!has[draft.of]] ||$:/core/templates/raw-static-tiddler}}}\n{{$:/core/templates/static.content||$:/core/templates/html-tiddler}}\n</$reveal>\n<$reveal type=\"match\" state=\"$:/isEncrypted\" text=\"yes\">\nThis file contains an encrypted ~TiddlyWiki. Enable ~JavaScript and enter the decryption password when prompted.\n</$reveal>\n"
},
"$:/core/templates/static.content": {
"title": "$:/core/templates/static.content",
"type": "text/vnd.tiddlywiki",
"text": "<!-- For Google, and people without JavaScript-->\nThis [[TiddlyWiki|http://tiddlywiki.com]] contains the following tiddlers:\n\n<ul>\n<$list filter=<<saveTiddlerFilter>>>\n<li><$view field=\"title\" format=\"text\"></$view></li>\n</$list>\n</ul>\n"
},
"$:/core/templates/static.template.css": {
"title": "$:/core/templates/static.template.css",
"text": "{{$:/boot/boot.css||$:/core/templates/plain-text-tiddler}}\n\n{{$:/core/ui/PageStylesheet||$:/core/templates/wikified-tiddler}}\n"
},
"$:/core/templates/static.template.html": {
"title": "$:/core/templates/static.template.html",
"type": "text/vnd.tiddlywiki-html",
"text": "\\define tv-wikilink-template() static/$uri_doubleencoded$.html\n\\define tv-config-toolbar-icons() no\n\\define tv-config-toolbar-text() no\n\\define tv-config-toolbar-class() tc-btn-invisible\n\\rules only filteredtranscludeinline transcludeinline\n<!doctype html>\n<html>\n<head>\n<meta http-equiv=\"Content-Type\" content=\"text/html;charset=utf-8\" />\n<meta name=\"generator\" content=\"TiddlyWiki\" />\n<meta name=\"tiddlywiki-version\" content=\"{{$:/core/templates/version}}\" />\n<meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\" />\n<meta name=\"apple-mobile-web-app-capable\" content=\"yes\" />\n<meta name=\"apple-mobile-web-app-status-bar-style\" content=\"black-translucent\" />\n<meta name=\"mobile-web-app-capable\" content=\"yes\"/>\n<meta name=\"format-detection\" content=\"telephone=no\">\n<link id=\"faviconLink\" rel=\"shortcut icon\" href=\"favicon.ico\">\n<title>{{$:/core/wiki/title}}</title>\n<div id=\"styleArea\">\n{{$:/boot/boot.css||$:/core/templates/css-tiddler}}\n</div>\n<style type=\"text/css\">\n{{$:/core/ui/PageStylesheet||$:/core/templates/wikified-tiddler}}\n</style>\n</head>\n<body class=\"tc-body\">\n{{$:/StaticBanner||$:/core/templates/html-tiddler}}\n{{$:/core/ui/PageTemplate||$:/core/templates/html-tiddler}}\n</body>\n</html>\n"
},
"$:/core/templates/static.tiddler.html": {
"title": "$:/core/templates/static.tiddler.html",
"text": "\\define tv-wikilink-template() $uri_doubleencoded$.html\n\\define tv-config-toolbar-icons() no\n\\define tv-config-toolbar-text() no\n\\define tv-config-toolbar-class() tc-btn-invisible\n`<!doctype html>\n<html>\n<head>\n<meta http-equiv=\"Content-Type\" content=\"text/html;charset=utf-8\" />\n<meta name=\"generator\" content=\"TiddlyWiki\" />\n<meta name=\"tiddlywiki-version\" content=\"`{{$:/core/templates/version}}`\" />\n<meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\" />\n<meta name=\"apple-mobile-web-app-capable\" content=\"yes\" />\n<meta name=\"apple-mobile-web-app-status-bar-style\" content=\"black-translucent\" />\n<meta name=\"mobile-web-app-capable\" content=\"yes\"/>\n<meta name=\"format-detection\" content=\"telephone=no\">\n<link id=\"faviconLink\" rel=\"shortcut icon\" href=\"favicon.ico\">\n<link rel=\"stylesheet\" href=\"static.css\">\n<title>`<$view field=\"caption\"><$view field=\"title\"/></$view>: {{$:/core/wiki/title}}`</title>\n</head>\n<body class=\"tc-body\">\n`{{$:/StaticBanner||$:/core/templates/html-tiddler}}`\n<section class=\"tc-story-river\">\n`<$importvariables filter=\"[[$:/core/ui/PageMacros]] [all[shadows+tiddlers]tag[$:/tags/Macro]!has[draft.of]]\">\n<$view tiddler=\"$:/core/ui/ViewTemplate\" format=\"htmlwikified\"/>\n</$importvariables>`\n</section>\n</body>\n</html>\n`"
},
"$:/core/templates/store.area.template.html": {
"title": "$:/core/templates/store.area.template.html",
"text": "<$reveal type=\"nomatch\" state=\"$:/isEncrypted\" text=\"yes\">\n`<div id=\"storeArea\" style=\"display:none;\">`\n<$list filter=<<saveTiddlerFilter>> template=\"$:/core/templates/html-div-tiddler\"/>\n`</div>`\n</$reveal>\n<$reveal type=\"match\" state=\"$:/isEncrypted\" text=\"yes\">\n`<!--~~ Encrypted tiddlers ~~-->`\n`<pre id=\"encryptedStoreArea\" type=\"text/plain\" style=\"display:none;\">`\n<$encrypt filter=<<saveTiddlerFilter>>/>\n`</pre>`\n</$reveal>"
},
"$:/core/templates/tid-tiddler": {
"title": "$:/core/templates/tid-tiddler",
"text": "<!--\n\nThis template is used for saving tiddlers in TiddlyWeb *.tid format\n\n--><$fields exclude='text bag' template='$name$: $value$\n'></$fields>`\n`<$view field=\"text\" format=\"text\" />"
},
"$:/core/templates/tiddler-metadata": {
"title": "$:/core/templates/tiddler-metadata",
"text": "<!--\n\nThis template is used for saving tiddler metadata *.meta files\n\n--><$fields exclude='text bag' template='$name$: $value$\n'></$fields>"
},
"$:/core/templates/tiddlywiki5.html": {
"title": "$:/core/templates/tiddlywiki5.html",
"text": "\\rules only filteredtranscludeinline transcludeinline\n<!doctype html>\n{{$:/core/templates/MOTW.html}}<html>\n<head>\n<meta http-equiv=\"X-UA-Compatible\" content=\"IE=edge\" />\t\t<!-- Force IE standards mode for Intranet and HTA - should be the first meta -->\n<meta http-equiv=\"Content-Type\" content=\"text/html;charset=utf-8\" />\n<meta name=\"application-name\" content=\"TiddlyWiki\" />\n<meta name=\"generator\" content=\"TiddlyWiki\" />\n<meta name=\"tiddlywiki-version\" content=\"{{$:/core/templates/version}}\" />\n<meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\" />\n<meta name=\"apple-mobile-web-app-capable\" content=\"yes\" />\n<meta name=\"apple-mobile-web-app-status-bar-style\" content=\"black-translucent\" />\n<meta name=\"mobile-web-app-capable\" content=\"yes\"/>\n<meta name=\"format-detection\" content=\"telephone=no\" />\n<meta name=\"copyright\" content=\"{{$:/core/copyright.txt}}\" />\n<link id=\"faviconLink\" rel=\"shortcut icon\" href=\"favicon.ico\">\n<title>{{$:/core/wiki/title}}</title>\n<!--~~ This is a Tiddlywiki file. 
The points of interest in the file are marked with this pattern ~~-->\n\n<!--~~ Raw markup ~~-->\n{{{ [all[shadows+tiddlers]tag[$:/core/wiki/rawmarkup]] [all[shadows+tiddlers]tag[$:/tags/RawMarkup]] ||$:/core/templates/plain-text-tiddler}}}\n</head>\n<body class=\"tc-body\">\n<!--~~ Static styles ~~-->\n<div id=\"styleArea\">\n{{$:/boot/boot.css||$:/core/templates/css-tiddler}}\n</div>\n<!--~~ Static content for Google and browsers without JavaScript ~~-->\n<noscript>\n<div id=\"splashArea\">\n{{$:/core/templates/static.area}}\n</div>\n</noscript>\n<!--~~ Ordinary tiddlers ~~-->\n{{$:/core/templates/store.area.template.html}}\n<!--~~ Library modules ~~-->\n<div id=\"libraryModules\" style=\"display:none;\">\n{{{ [is[system]type[application/javascript]library[yes]] ||$:/core/templates/javascript-tiddler}}}\n</div>\n<!--~~ Boot kernel prologue ~~-->\n<div id=\"bootKernelPrefix\" style=\"display:none;\">\n{{ $:/boot/bootprefix.js ||$:/core/templates/javascript-tiddler}}\n</div>\n<!--~~ Boot kernel ~~-->\n<div id=\"bootKernel\" style=\"display:none;\">\n{{ $:/boot/boot.js ||$:/core/templates/javascript-tiddler}}\n</div>\n</body>\n</html>\n"
},
"$:/core/templates/version": {
"title": "$:/core/templates/version",
"text": "<<version>>"
},
"$:/core/templates/wikified-tiddler": {
"title": "$:/core/templates/wikified-tiddler",
"text": "<$transclude />"
},
"$:/core/ui/AboveStory/tw2-plugin-check": {
"title": "$:/core/ui/AboveStory/tw2-plugin-check",
"tags": "$:/tags/AboveStory",
"text": "\\define lingo-base() $:/language/AboveStory/ClassicPlugin/\n<$list filter=\"[all[system+tiddlers]tag[systemConfig]limit[1]]\">\n\n<div class=\"tc-message-box\">\n\n<<lingo Warning>>\n\n<ul>\n\n<$list filter=\"[all[system+tiddlers]tag[systemConfig]limit[1]]\">\n\n<li>\n\n<$link><$view field=\"title\"/></$link>\n\n</li>\n\n</$list>\n\n</ul>\n\n</div>\n\n</$list>\n"
},
"$:/core/ui/AdvancedSearch/Filter": {
"title": "$:/core/ui/AdvancedSearch/Filter",
"tags": "$:/tags/AdvancedSearch",
"caption": "{{$:/language/Search/Filter/Caption}}",
"text": "\\define lingo-base() $:/language/Search/\n<<lingo Filter/Hint>>\n\n<div class=\"tc-search tc-advanced-search\">\n<$edit-text tiddler=\"$:/temp/advancedsearch\" type=\"search\" tag=\"input\"/>\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/AdvancedSearch/FilterButton]!has[draft.of]]\"><$transclude/></$list>\n</div>\n\n<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n<$set name=\"resultCount\" value=\"\"\"<$count filter={{$:/temp/advancedsearch}}/>\"\"\">\n<div class=\"tc-search-results\">\n<<lingo Filter/Matches>>\n<$list filter={{$:/temp/advancedsearch}} template=\"$:/core/ui/ListItemTemplate\"/>\n</div>\n</$set>\n</$reveal>\n"
},
"$:/core/ui/AdvancedSearch/Filter/FilterButtons/clear": {
"title": "$:/core/ui/AdvancedSearch/Filter/FilterButtons/clear",
"tags": "$:/tags/AdvancedSearch/FilterButton",
"text": "<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n<$button class=\"tc-btn-invisible\">\n<$action-setfield $tiddler=\"$:/temp/advancedsearch\" $field=\"text\" $value=\"\"/>\n{{$:/core/images/close-button}}\n</$button>\n</$reveal>\n"
},
"$:/core/ui/AdvancedSearch/Filter/FilterButtons/delete": {
"title": "$:/core/ui/AdvancedSearch/Filter/FilterButtons/delete",
"tags": "$:/tags/AdvancedSearch/FilterButton",
"text": "<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n<$button popup=<<qualify \"$:/state/filterDeleteDropdown\">> class=\"tc-btn-invisible\">\n{{$:/core/images/delete-button}}\n</$button>\n</$reveal>\n\n<$reveal state=<<qualify \"$:/state/filterDeleteDropdown\">> type=\"popup\" position=\"belowleft\" animate=\"yes\">\n<div class=\"tc-block-dropdown-wrapper\">\n<div class=\"tc-block-dropdown tc-edit-type-dropdown\">\n<div class=\"tc-dropdown-item-plain\">\n<$set name=\"resultCount\" value=\"\"\"<$count filter={{$:/temp/advancedsearch}}/>\"\"\">\nAre you sure you wish to delete <<resultCount>> tiddler(s)?\n</$set>\n</div>\n<div class=\"tc-dropdown-item-plain\">\n<$button class=\"tc-btn\">\n<$action-deletetiddler $filter={{$:/temp/advancedsearch}}/>\nDelete these tiddlers\n</$button>\n</div>\n</div>\n</div>\n</$reveal>\n"
},
"$:/core/ui/AdvancedSearch/Filter/FilterButtons/dropdown": {
"title": "$:/core/ui/AdvancedSearch/Filter/FilterButtons/dropdown",
"tags": "$:/tags/AdvancedSearch/FilterButton",
"text": "<span class=\"tc-popup-keep\">\n<$button popup=<<qualify \"$:/state/filterDropdown\">> class=\"tc-btn-invisible\">\n{{$:/core/images/down-arrow}}\n</$button>\n</span>\n\n<$reveal state=<<qualify \"$:/state/filterDropdown\">> type=\"popup\" position=\"belowleft\" animate=\"yes\">\n<$linkcatcher to=\"$:/temp/advancedsearch\">\n<div class=\"tc-block-dropdown-wrapper\">\n<div class=\"tc-block-dropdown tc-edit-type-dropdown\">\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/Filter]]\"><$link to={{!!filter}}><$transclude field=\"description\"/></$link>\n</$list>\n</div>\n</div>\n</$linkcatcher>\n</$reveal>\n"
},
"$:/core/ui/AdvancedSearch/Filter/FilterButtons/export": {
"title": "$:/core/ui/AdvancedSearch/Filter/FilterButtons/export",
"tags": "$:/tags/AdvancedSearch/FilterButton",
"text": "<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n<$macrocall $name=\"exportButton\" exportFilter={{$:/temp/advancedsearch}} lingoBase=\"$:/language/Buttons/ExportTiddlers/\"/>\n</$reveal>\n"
},
"$:/core/ui/AdvancedSearch/Shadows": {
"title": "$:/core/ui/AdvancedSearch/Shadows",
"tags": "$:/tags/AdvancedSearch",
"caption": "{{$:/language/Search/Shadows/Caption}}",
"text": "\\define lingo-base() $:/language/Search/\n<$linkcatcher to=\"$:/temp/advancedsearch\">\n\n<<lingo Shadows/Hint>>\n\n<div class=\"tc-search\">\n<$edit-text tiddler=\"$:/temp/advancedsearch\" type=\"search\" tag=\"input\"/>\n<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n<$button class=\"tc-btn-invisible\">\n<$action-setfield $tiddler=\"$:/temp/advancedsearch\" $field=\"text\" $value=\"\"/>\n{{$:/core/images/close-button}}\n</$button>\n</$reveal>\n</div>\n\n</$linkcatcher>\n\n<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n\n<$set name=\"resultCount\" value=\"\"\"<$count filter=\"[all[shadows]search{$:/temp/advancedsearch}] -[[$:/temp/advancedsearch]]\"/>\"\"\">\n\n<div class=\"tc-search-results\">\n\n<<lingo Shadows/Matches>>\n\n<$list filter=\"[all[shadows]search{$:/temp/advancedsearch}sort[title]limit[250]] -[[$:/temp/advancedsearch]]\" template=\"$:/core/ui/ListItemTemplate\"/>\n\n</div>\n\n</$set>\n\n</$reveal>\n\n<$reveal state=\"$:/temp/advancedsearch\" type=\"match\" text=\"\">\n\n</$reveal>\n"
},
"$:/core/ui/AdvancedSearch/Standard": {
"title": "$:/core/ui/AdvancedSearch/Standard",
"tags": "$:/tags/AdvancedSearch",
"caption": "{{$:/language/Search/Standard/Caption}}",
"text": "\\define lingo-base() $:/language/Search/\n<$linkcatcher to=\"$:/temp/advancedsearch\">\n\n<<lingo Standard/Hint>>\n\n<div class=\"tc-search\">\n<$edit-text tiddler=\"$:/temp/advancedsearch\" type=\"search\" tag=\"input\"/>\n<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n<$button class=\"tc-btn-invisible\">\n<$action-setfield $tiddler=\"$:/temp/advancedsearch\" $field=\"text\" $value=\"\"/>\n{{$:/core/images/close-button}}\n</$button>\n</$reveal>\n</div>\n\n</$linkcatcher>\n\n<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n<$set name=\"searchTiddler\" value=\"$:/temp/advancedsearch\">\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/SearchResults]!has[draft.of]butfirst[]limit[1]]\" emptyMessage=\"\"\"\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/SearchResults]!has[draft.of]]\">\n<$transclude/>\n</$list>\n\"\"\">\n<$macrocall $name=\"tabs\" tabsList=\"[all[shadows+tiddlers]tag[$:/tags/SearchResults]!has[draft.of]]\" default={{$:/config/SearchResults/Default}}/>\n</$list>\n</$set>\n</$reveal>\n"
},
"$:/core/ui/AdvancedSearch/System": {
"title": "$:/core/ui/AdvancedSearch/System",
"tags": "$:/tags/AdvancedSearch",
"caption": "{{$:/language/Search/System/Caption}}",
"text": "\\define lingo-base() $:/language/Search/\n<$linkcatcher to=\"$:/temp/advancedsearch\">\n\n<<lingo System/Hint>>\n\n<div class=\"tc-search\">\n<$edit-text tiddler=\"$:/temp/advancedsearch\" type=\"search\" tag=\"input\"/>\n<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n<$button class=\"tc-btn-invisible\">\n<$action-setfield $tiddler=\"$:/temp/advancedsearch\" $field=\"text\" $value=\"\"/>\n{{$:/core/images/close-button}}\n</$button>\n</$reveal>\n</div>\n\n</$linkcatcher>\n\n<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n\n<$set name=\"resultCount\" value=\"\"\"<$count filter=\"[is[system]search{$:/temp/advancedsearch}] -[[$:/temp/advancedsearch]]\"/>\"\"\">\n\n<div class=\"tc-search-results\">\n\n<<lingo System/Matches>>\n\n<$list filter=\"[is[system]search{$:/temp/advancedsearch}sort[title]limit[250]] -[[$:/temp/advancedsearch]]\" template=\"$:/core/ui/ListItemTemplate\"/>\n\n</div>\n\n</$set>\n\n</$reveal>\n\n<$reveal state=\"$:/temp/advancedsearch\" type=\"match\" text=\"\">\n\n</$reveal>\n"
},
"$:/AdvancedSearch": {
"title": "$:/AdvancedSearch",
"icon": "$:/core/images/advanced-search-button",
"color": "#bbb",
"text": "<div class=\"tc-advanced-search\">\n<<tabs \"[all[shadows+tiddlers]tag[$:/tags/AdvancedSearch]!has[draft.of]]\" \"$:/core/ui/AdvancedSearch/System\">>\n</div>\n"
},
"$:/core/ui/AlertTemplate": {
"title": "$:/core/ui/AlertTemplate",
"text": "<div class=\"tc-alert\">\n<div class=\"tc-alert-toolbar\">\n<$button class=\"tc-btn-invisible\"><$action-deletetiddler $tiddler=<<currentTiddler>>/>{{$:/core/images/delete-button}}</$button>\n</div>\n<div class=\"tc-alert-subtitle\">\n<$view field=\"component\"/> - <$view field=\"modified\" format=\"date\" template=\"0hh:0mm:0ss DD MM YYYY\"/> <$reveal type=\"nomatch\" state=\"!!count\" text=\"\"><span class=\"tc-alert-highlight\">({{$:/language/Count}}: <$view field=\"count\"/>)</span></$reveal>\n</div>\n<div class=\"tc-alert-body\">\n\n<$transclude/>\n\n</div>\n</div>\n"
},
"$:/core/ui/BinaryWarning": {
"title": "$:/core/ui/BinaryWarning",
"text": "\\define lingo-base() $:/language/BinaryWarning/\n<div class=\"tc-binary-warning\">\n\n<<lingo Prompt>>\n\n</div>\n"
},
"$:/core/ui/Components/tag-link": {
"title": "$:/core/ui/Components/tag-link",
"text": "<$link>\n<$set name=\"backgroundColor\" value={{!!color}}>\n<span style=<<tag-styles>> class=\"tc-tag-label\">\n<$view field=\"title\" format=\"text\"/>\n</span>\n</$set>\n</$link>"
},
"$:/core/ui/ControlPanel/Advanced": {
"title": "$:/core/ui/ControlPanel/Advanced",
"tags": "$:/tags/ControlPanel/Info",
"caption": "{{$:/language/ControlPanel/Advanced/Caption}}",
"text": "{{$:/language/ControlPanel/Advanced/Hint}}\n\n<div class=\"tc-control-panel\">\n<<tabs \"[all[shadows+tiddlers]tag[$:/tags/ControlPanel/Advanced]!has[draft.of]]\" \"$:/core/ui/ControlPanel/TiddlerFields\">>\n</div>\n"
},
"$:/core/ui/ControlPanel/Appearance": {
"title": "$:/core/ui/ControlPanel/Appearance",
"tags": "$:/tags/ControlPanel",
"caption": "{{$:/language/ControlPanel/Appearance/Caption}}",
"text": "{{$:/language/ControlPanel/Appearance/Hint}}\n\n<div class=\"tc-control-panel\">\n<<tabs \"[all[shadows+tiddlers]tag[$:/tags/ControlPanel/Appearance]!has[draft.of]]\" \"$:/core/ui/ControlPanel/Theme\">>\n</div>\n"
},
"$:/core/ui/ControlPanel/Basics": {
"title": "$:/core/ui/ControlPanel/Basics",
"tags": "$:/tags/ControlPanel/Info",
"caption": "{{$:/language/ControlPanel/Basics/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Basics/\n\n\\define show-filter-count(filter)\n<$button class=\"tc-btn-invisible\">\n<$action-setfield $tiddler=\"$:/temp/advancedsearch\" $value=\"\"\"$filter$\"\"\"/>\n<$action-setfield $tiddler=\"$:/state/tab--1498284803\" $value=\"$:/core/ui/AdvancedSearch/Filter\"/>\n<$action-navigate $to=\"$:/AdvancedSearch\"/>\n''<$count filter=\"\"\"$filter$\"\"\"/>''\n{{$:/core/images/advanced-search-button}}\n</$button>\n\\end\n\n|<<lingo Version/Prompt>> |''<<version>>'' |\n|<$link to=\"$:/SiteTitle\"><<lingo Title/Prompt>></$link> |<$edit-text tiddler=\"$:/SiteTitle\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/SiteSubtitle\"><<lingo Subtitle/Prompt>></$link> |<$edit-text tiddler=\"$:/SiteSubtitle\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/status/UserName\"><<lingo Username/Prompt>></$link> |<$edit-text tiddler=\"$:/status/UserName\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/config/AnimationDuration\"><<lingo AnimDuration/Prompt>></$link> |<$edit-text tiddler=\"$:/config/AnimationDuration\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/DefaultTiddlers\"><<lingo DefaultTiddlers/Prompt>></$link> |<<lingo DefaultTiddlers/TopHint>><br> <$edit tag=\"textarea\" tiddler=\"$:/DefaultTiddlers\" class=\"tc-edit-texteditor\"/><br>//<<lingo DefaultTiddlers/BottomHint>>// |\n|<$link to=\"$:/config/NewJournal/Title\"><<lingo NewJournal/Title/Prompt>></$link> |<$edit-text tiddler=\"$:/config/NewJournal/Title\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/config/NewJournal/Tags\"><<lingo NewJournal/Tags/Prompt>></$link> |<$edit-text tiddler=\"$:/config/NewJournal/Tags\" default=\"\" tag=\"input\"/> |\n|<<lingo Language/Prompt>> |{{$:/snippets/minilanguageswitcher}} |\n|<<lingo Tiddlers/Prompt>> |<<show-filter-count \"[!is[system]sort[title]]\">> |\n|<<lingo Tags/Prompt>> |<<show-filter-count \"[tags[]sort[title]]\">> |\n|<<lingo SystemTiddlers/Prompt>> |<<show-filter-count \"[is[system]sort[title]]\">> 
|\n|<<lingo ShadowTiddlers/Prompt>> |<<show-filter-count \"[all[shadows]sort[title]]\">> |\n|<<lingo OverriddenShadowTiddlers/Prompt>> |<<show-filter-count \"[is[tiddler]is[shadow]sort[title]]\">> |\n"
},
"$:/core/ui/ControlPanel/EditorTypes": {
"title": "$:/core/ui/ControlPanel/EditorTypes",
"tags": "$:/tags/ControlPanel/Advanced",
"caption": "{{$:/language/ControlPanel/EditorTypes/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/EditorTypes/\n\n<<lingo Hint>>\n\n<table>\n<tbody>\n<tr>\n<th><<lingo Type/Caption>></th>\n<th><<lingo Editor/Caption>></th>\n</tr>\n<$list filter=\"[all[shadows+tiddlers]prefix[$:/config/EditorTypeMappings/]sort[title]]\">\n<tr>\n<td>\n<$link>\n<$list filter=\"[all[current]removeprefix[$:/config/EditorTypeMappings/]]\">\n<$text text={{!!title}}/>\n</$list>\n</$link>\n</td>\n<td>\n<$view field=\"text\"/>\n</td>\n</tr>\n</$list>\n</tbody>\n</table>\n"
},
"$:/core/ui/ControlPanel/Info": {
"title": "$:/core/ui/ControlPanel/Info",
"tags": "$:/tags/ControlPanel",
"caption": "{{$:/language/ControlPanel/Info/Caption}}",
"text": "{{$:/language/ControlPanel/Info/Hint}}\n\n<div class=\"tc-control-panel\">\n<<tabs \"[all[shadows+tiddlers]tag[$:/tags/ControlPanel/Info]!has[draft.of]]\" \"$:/core/ui/ControlPanel/Basics\">>\n</div>\n"
},
"$:/core/ui/ControlPanel/KeyboardShortcuts": {
"title": "$:/core/ui/ControlPanel/KeyboardShortcuts",
"tags": "$:/tags/ControlPanel",
"caption": "{{$:/language/ControlPanel/KeyboardShortcuts/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/KeyboardShortcuts/\n\n\\define new-shortcut(title)\n<div class=\"tc-dropdown-item-plain\">\n<$edit-shortcut tiddler=\"$title$\" placeholder={{$:/language/ControlPanel/KeyboardShortcuts/Add/Prompt}} style=\"width:auto;\"/> <$button>\n<<lingo Add/Caption>>\n<$action-listops\n\t$tiddler=\"$(shortcutTitle)$\"\n\t$field=\"text\"\n\t$subfilter=\"[{$title$}]\"\n/>\n<$action-deletetiddler\n\t$tiddler=\"$title$\"\n/>\n</$button>\n</div>\n\\end\n\n\\define shortcut-list-item(caption)\n<td>\n</td>\n<td style=\"text-align:right;font-size:0.7em;\">\n<<lingo Platform/$caption$>>\n</td>\n<td>\n<div style=\"position:relative;\">\n<$button popup=<<qualify \"$:/state/dropdown/$(shortcutTitle)$\">> class=\"tc-btn-invisible\">\n{{$:/core/images/edit-button}}\n</$button>\n<$macrocall $name=\"displayshortcuts\" $output=\"text/html\" shortcuts={{$(shortcutTitle)$}} prefix=\"<kbd>\" separator=\"</kbd> <kbd>\" suffix=\"</kbd>\"/>\n\n<$reveal state=<<qualify \"$:/state/dropdown/$(shortcutTitle)$\">> type=\"popup\" position=\"below\" animate=\"yes\">\n<div class=\"tc-block-dropdown-wrapper\">\n<div class=\"tc-block-dropdown tc-edit-type-dropdown tc-popup-keep\">\n<$list filter=\"[list[$(shortcutTitle)$!!text]sort[title]]\" variable=\"shortcut\" emptyMessage=\"\"\"\n<div class=\"tc-dropdown-item-plain\">\n//<<lingo NoShortcuts/Caption>>//\n</div>\n\"\"\">\n<div class=\"tc-dropdown-item-plain\">\n<$button class=\"tc-btn-invisible\" tooltip=<<lingo Remove/Hint>>>\n<$action-listops\n\t$tiddler=\"$(shortcutTitle)$\"\n\t$field=\"text\"\n\t$subfilter=\"+[remove<shortcut>]\"\n/>\n×\n</$button>\n<kbd>\n<$macrocall $name=\"displayshortcuts\" $output=\"text/html\" shortcuts=<<shortcut>>/>\n</kbd>\n</div>\n</$list>\n<hr/>\n<$macrocall $name=\"new-shortcut\" title=<<qualify \"$:/state/new-shortcut/$(shortcutTitle)$\">>/>\n</div>\n</div>\n</$reveal>\n</div>\n</td>\n\\end\n\n\\define shortcut-list(caption,prefix)\n<tr>\n<$list 
filter=\"[all[tiddlers+shadows][$prefix$$(shortcutName)$]]\" variable=\"shortcutTitle\">\n<<shortcut-list-item \"$caption$\">>\n</$list>\n</tr>\n\\end\n\n\\define shortcut-editor()\n<<shortcut-list \"All\" \"$:/config/shortcuts/\">>\n<<shortcut-list \"Mac\" \"$:/config/shortcuts-mac/\">>\n<<shortcut-list \"NonMac\" \"$:/config/shortcuts-not-mac/\">>\n<<shortcut-list \"Linux\" \"$:/config/shortcuts-linux/\">>\n<<shortcut-list \"NonLinux\" \"$:/config/shortcuts-not-linux/\">>\n<<shortcut-list \"Windows\" \"$:/config/shortcuts-windows/\">>\n<<shortcut-list \"NonWindows\" \"$:/config/shortcuts-not-windows/\">>\n\\end\n\n\\define shortcut-preview()\n<$macrocall $name=\"displayshortcuts\" $output=\"text/html\" shortcuts={{$(shortcutPrefix)$$(shortcutName)$}} prefix=\"<kbd>\" separator=\"</kbd> <kbd>\" suffix=\"</kbd>\"/>\n\\end\n\n\\define shortcut-item-inner()\n<tr>\n<td>\n<$reveal type=\"nomatch\" state=<<dropdownStateTitle>> text=\"open\">\n<$button class=\"tc-btn-invisible\">\n<$action-setfield\n\t$tiddler=<<dropdownStateTitle>>\n\t$value=\"open\"\n/>\n{{$:/core/images/right-arrow}}\n</$button>\n</$reveal>\n<$reveal type=\"match\" state=<<dropdownStateTitle>> text=\"open\">\n<$button class=\"tc-btn-invisible\">\n<$action-setfield\n\t$tiddler=<<dropdownStateTitle>>\n\t$value=\"close\"\n/>\n{{$:/core/images/down-arrow}}\n</$button>\n</$reveal>\n''<$text text=<<shortcutName>>/>''\n</td>\n<td>\n<$transclude tiddler=\"$:/config/ShortcutInfo/$(shortcutName)$\"/>\n</td>\n<td>\n<$list filter=\"$:/config/shortcuts/ $:/config/shortcuts-mac/ $:/config/shortcuts-not-mac/ $:/config/shortcuts-linux/ $:/config/shortcuts-not-linux/ $:/config/shortcuts-windows/ $:/config/shortcuts-not-windows/\" variable=\"shortcutPrefix\">\n<<shortcut-preview>>\n</$list>\n</td>\n</tr>\n<$set name=\"dropdownState\" value={{$(dropdownStateTitle)$}}>\n<$list filter=\"[<dropdownState>prefix[open]]\" variable=\"listItem\">\n<<shortcut-editor>>\n</$list>\n</$set>\n\\end\n\n\\define shortcut-item()\n<$set 
name=\"dropdownStateTitle\" value=<<qualify \"$:/state/dropdown/keyboardshortcut/$(shortcutName)$\">>>\n<<shortcut-item-inner>>\n</$set>\n\\end\n\n<table>\n<tbody>\n<$list filter=\"[all[shadows+tiddlers]removeprefix[$:/config/ShortcutInfo/]]\" variable=\"shortcutName\">\n<<shortcut-item>>\n</$list>\n</tbody>\n</table>\n"
},
"$:/core/ui/ControlPanel/LoadedModules": {
"title": "$:/core/ui/ControlPanel/LoadedModules",
"tags": "$:/tags/ControlPanel/Advanced",
"caption": "{{$:/language/ControlPanel/LoadedModules/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/\n<<lingo LoadedModules/Hint>>\n\n{{$:/snippets/modules}}\n"
},
"$:/core/ui/ControlPanel/Modals/AddPlugins": {
"title": "$:/core/ui/ControlPanel/Modals/AddPlugins",
"subtitle": "{{$:/core/images/download-button}} {{$:/language/ControlPanel/Plugins/Add/Caption}}",
"text": "\\define install-plugin-button()\n<$button>\n<$action-sendmessage $message=\"tm-load-plugin-from-library\" url={{!!url}} title={{$(assetInfo)$!!original-title}}/>\n<$list filter=\"[<assetInfo>get[original-title]get[version]]\" variable=\"installedVersion\" emptyMessage=\"\"\"{{$:/language/ControlPanel/Plugins/Install/Caption}}\"\"\">\n{{$:/language/ControlPanel/Plugins/Reinstall/Caption}}\n</$list>\n</$button>\n\\end\n\n\\define popup-state-macro()\n$:/state/add-plugin-info/$(connectionTiddler)$/$(assetInfo)$\n\\end\n\n\\define display-plugin-info(type)\n<$set name=\"popup-state\" value=<<popup-state-macro>>>\n<div class=\"tc-plugin-info\">\n<div class=\"tc-plugin-info-chunk tc-small-icon\">\n<$reveal type=\"nomatch\" state=<<popup-state>> text=\"yes\">\n<$button class=\"tc-btn-invisible tc-btn-dropdown\" set=<<popup-state>> setTo=\"yes\">\n{{$:/core/images/right-arrow}}\n</$button>\n</$reveal>\n<$reveal type=\"match\" state=<<popup-state>> text=\"yes\">\n<$button class=\"tc-btn-invisible tc-btn-dropdown\" set=<<popup-state>> setTo=\"no\">\n{{$:/core/images/down-arrow}}\n</$button>\n</$reveal>\n</div>\n<div class=\"tc-plugin-info-chunk\">\n<$list filter=\"[<assetInfo>has[icon]]\" emptyMessage=\"\"\"<$transclude tiddler=\"$:/core/images/plugin-generic-$type$\"/>\"\"\">\n<img src={{$(assetInfo)$!!icon}}/>\n</$list>\n</div>\n<div class=\"tc-plugin-info-chunk\">\n<h1><$view tiddler=<<assetInfo>> field=\"description\"/></h1>\n<h2><$view tiddler=<<assetInfo>> field=\"original-title\"/></h2>\n<div><em><$view tiddler=<<assetInfo>> field=\"version\"/></em></div>\n</div>\n<div class=\"tc-plugin-info-chunk\">\n<<install-plugin-button>>\n</div>\n</div>\n<$reveal type=\"match\" text=\"yes\" state=<<popup-state>>>\n<div class=\"tc-plugin-info-dropdown\">\n<div class=\"tc-plugin-info-dropdown-message\">\n<$list filter=\"[<assetInfo>get[original-title]get[version]]\" variable=\"installedVersion\" 
emptyMessage=\"\"\"{{$:/language/ControlPanel/Plugins/NotInstalled/Hint}}\"\"\">\n<em>\n{{$:/language/ControlPanel/Plugins/AlreadyInstalled/Hint}}\n</em>\n</$list>\n</div>\n<div class=\"tc-plugin-info-dropdown-body\">\n<$transclude tiddler=<<assetInfo>> field=\"readme\" mode=\"block\"/>\n</div>\n</div>\n</$reveal>\n</$set>\n\\end\n\n\\define load-plugin-library-button()\n<$button class=\"tc-btn-big-green\">\n<$action-sendmessage $message=\"tm-load-plugin-library\" url={{!!url}} infoTitlePrefix=\"$:/temp/RemoteAssetInfo/\"/>\n{{$:/core/images/chevron-right}} {{$:/language/ControlPanel/Plugins/OpenPluginLibrary}}\n</$button>\n\\end\n\n\\define display-server-assets(type)\n{{$:/language/Search/Search}}: <$edit-text tiddler=\"\"\"$:/temp/RemoteAssetSearch/$(currentTiddler)$\"\"\" default=\"\" type=\"search\" tag=\"input\"/>\n<$reveal state=\"\"\"$:/temp/RemoteAssetSearch/$(currentTiddler)$\"\"\" type=\"nomatch\" text=\"\">\n<$button class=\"tc-btn-invisible\">\n<$action-setfield $tiddler=\"\"\"$:/temp/RemoteAssetSearch/$(currentTiddler)$\"\"\" $field=\"text\" $value=\"\"/>\n{{$:/core/images/close-button}}\n</$button>\n</$reveal>\n<div class=\"tc-plugin-library-listing\">\n<$list filter=\"[all[tiddlers+shadows]tag[$:/tags/RemoteAssetInfo]server-url{!!url}original-plugin-type[$type$]search{$:/temp/RemoteAssetSearch/$(currentTiddler)$}sort[description]]\" variable=\"assetInfo\">\n<<display-plugin-info \"$type$\">>\n</$list>\n</div>\n\\end\n\n\\define display-server-connection()\n<$list filter=\"[all[tiddlers+shadows]tag[$:/tags/ServerConnection]suffix{!!url}]\" variable=\"connectionTiddler\" emptyMessage=<<load-plugin-library-button>>>\n\n<<tabs \"[[$:/core/ui/ControlPanel/Plugins/Add/Plugins]] [[$:/core/ui/ControlPanel/Plugins/Add/Themes]] [[$:/core/ui/ControlPanel/Plugins/Add/Languages]]\" \"$:/core/ui/ControlPanel/Plugins/Add/Plugins\">>\n\n</$list>\n\\end\n\n\\define plugin-library-listing()\n<$list filter=\"[all[tiddlers+shadows]tag[$:/tags/PluginLibrary]]\">\n<div 
class=\"tc-plugin-library\">\n\n!! <$link><$transclude field=\"caption\"><$view field=\"title\"/></$transclude></$link>\n\n//<$view field=\"url\"/>//\n\n<$transclude/>\n\n<<display-server-connection>>\n</div>\n</$list>\n\\end\n\n<$importvariables filter=\"[[$:/core/ui/PageMacros]] [all[shadows+tiddlers]tag[$:/tags/Macro]!has[draft.of]]\">\n\n<div>\n<<plugin-library-listing>>\n</div>\n\n</$importvariables>\n"
},
"$:/core/ui/ControlPanel/Palette": {
"title": "$:/core/ui/ControlPanel/Palette",
"tags": "$:/tags/ControlPanel/Appearance",
"caption": "{{$:/language/ControlPanel/Palette/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Palette/\n\n{{$:/snippets/paletteswitcher}}\n\n<$reveal type=\"nomatch\" state=\"$:/state/ShowPaletteEditor\" text=\"yes\">\n\n<$button set=\"$:/state/ShowPaletteEditor\" setTo=\"yes\"><<lingo ShowEditor/Caption>></$button>\n\n</$reveal>\n\n<$reveal type=\"match\" state=\"$:/state/ShowPaletteEditor\" text=\"yes\">\n\n<$button set=\"$:/state/ShowPaletteEditor\" setTo=\"no\"><<lingo HideEditor/Caption>></$button>\n{{$:/snippets/paletteeditor}}\n\n</$reveal>\n\n"
},
"$:/core/ui/ControlPanel/Parsing": {
"title": "$:/core/ui/ControlPanel/Parsing",
"tags": "$:/tags/ControlPanel/Advanced",
"caption": "{{$:/language/ControlPanel/Parsing/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Parsing/\n\n\\define parsing-inner(typeCap)\n<li>\n<$checkbox tiddler=\"\"\"$:/config/WikiParserRules/$typeCap$/$(currentTiddler)$\"\"\" field=\"text\" checked=\"enable\" unchecked=\"disable\" default=\"enable\"> ''<$text text=<<currentTiddler>>/>'': </$checkbox>\n</li>\n\\end\n\n\\define parsing-outer(typeLower,typeCap)\n<ul>\n<$list filter=\"[wikiparserrules[$typeLower$]]\">\n<<parsing-inner typeCap:\"$typeCap$\">>\n</$list>\n</ul>\n\\end\n\n<<lingo Hint>>\n\n! <<lingo Pragma/Caption>>\n\n<<parsing-outer typeLower:\"pragma\" typeCap:\"Pragma\">>\n\n! <<lingo Inline/Caption>>\n\n<<parsing-outer typeLower:\"inline\" typeCap:\"Inline\">>\n\n! <<lingo Block/Caption>>\n\n<<parsing-outer typeLower:\"block\" typeCap:\"Block\">>\n"
},
"$:/core/ui/ControlPanel/Plugins/Add/Languages": {
"title": "$:/core/ui/ControlPanel/Plugins/Add/Languages",
"caption": "{{$:/language/ControlPanel/Plugins/Languages/Caption}} (<$count filter=\"[all[tiddlers+shadows]tag[$:/tags/RemoteAssetInfo]server-url{!!url}original-plugin-type[language]]\"/>)",
"text": "<<display-server-assets language>>\n"
},
"$:/core/ui/ControlPanel/Plugins/Add/Plugins": {
"title": "$:/core/ui/ControlPanel/Plugins/Add/Plugins",
"caption": "{{$:/language/ControlPanel/Plugins/Plugins/Caption}} (<$count filter=\"[all[tiddlers+shadows]tag[$:/tags/RemoteAssetInfo]server-url{!!url}original-plugin-type[plugin]]\"/>)",
"text": "<<display-server-assets plugin>>\n"
},
"$:/core/ui/ControlPanel/Plugins/Add/Themes": {
"title": "$:/core/ui/ControlPanel/Plugins/Add/Themes",
"caption": "{{$:/language/ControlPanel/Plugins/Themes/Caption}} (<$count filter=\"[all[tiddlers+shadows]tag[$:/tags/RemoteAssetInfo]server-url{!!url}original-plugin-type[theme]]\"/>)",
"text": "<<display-server-assets theme>>\n"
},
"$:/core/ui/ControlPanel/Plugins/AddPlugins": {
"title": "$:/core/ui/ControlPanel/Plugins/AddPlugins",
"text": "\\define lingo-base() $:/language/ControlPanel/Plugins/\n\n<$button message=\"tm-modal\" param=\"$:/core/ui/ControlPanel/Modals/AddPlugins\" tooltip={{$:/language/ControlPanel/Plugins/Add/Hint}} class=\"tc-btn-big-green\" style=\"background:blue;\">\n{{$:/core/images/download-button}} <<lingo Add/Caption>>\n</$button>\n"
},
"$:/core/ui/ControlPanel/Plugins/Installed/Languages": {
"title": "$:/core/ui/ControlPanel/Plugins/Installed/Languages",
"caption": "{{$:/language/ControlPanel/Plugins/Languages/Caption}} (<$count filter=\"[!has[draft.of]plugin-type[language]]\"/>)",
"text": "<<plugin-table language>>\n"
},
"$:/core/ui/ControlPanel/Plugins/Installed/Plugins": {
"title": "$:/core/ui/ControlPanel/Plugins/Installed/Plugins",
"caption": "{{$:/language/ControlPanel/Plugins/Plugins/Caption}} (<$count filter=\"[!has[draft.of]plugin-type[plugin]]\"/>)",
"text": "<<plugin-table plugin>>\n"
},
"$:/core/ui/ControlPanel/Plugins/Installed/Themes": {
"title": "$:/core/ui/ControlPanel/Plugins/Installed/Themes",
"caption": "{{$:/language/ControlPanel/Plugins/Themes/Caption}} (<$count filter=\"[!has[draft.of]plugin-type[theme]]\"/>)",
"text": "<<plugin-table theme>>\n"
},
"$:/core/ui/ControlPanel/Plugins": {
"title": "$:/core/ui/ControlPanel/Plugins",
"tags": "$:/tags/ControlPanel",
"caption": "{{$:/language/ControlPanel/Plugins/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Plugins/\n\n\\define popup-state-macro()\n$(qualified-state)$-$(currentTiddler)$\n\\end\n\n\\define tabs-state-macro()\n$(popup-state)$-$(pluginInfoType)$\n\\end\n\n\\define plugin-icon-title()\n$(currentTiddler)$/icon\n\\end\n\n\\define plugin-disable-title()\n$:/config/Plugins/Disabled/$(currentTiddler)$\n\\end\n\n\\define plugin-table-body(type,disabledMessage)\n<div class=\"tc-plugin-info-chunk tc-small-icon\">\n<$reveal type=\"nomatch\" state=<<popup-state>> text=\"yes\">\n<$button class=\"tc-btn-invisible tc-btn-dropdown\" set=<<popup-state>> setTo=\"yes\">\n{{$:/core/images/right-arrow}}\n</$button>\n</$reveal>\n<$reveal type=\"match\" state=<<popup-state>> text=\"yes\">\n<$button class=\"tc-btn-invisible tc-btn-dropdown\" set=<<popup-state>> setTo=\"no\">\n{{$:/core/images/down-arrow}}\n</$button>\n</$reveal>\n</div>\n<div class=\"tc-plugin-info-chunk\">\n<$transclude tiddler=<<currentTiddler>> subtiddler=<<plugin-icon-title>>>\n<$transclude tiddler=\"$:/core/images/plugin-generic-$type$\"/>\n</$transclude>\n</div>\n<div class=\"tc-plugin-info-chunk\">\n<h1>\n''<$view field=\"description\"><$view field=\"title\"/></$view>'' $disabledMessage$\n</h1>\n<h2>\n<$view field=\"title\"/>\n</h2>\n<h2>\n<div><em><$view field=\"version\"/></em></div>\n</h2>\n</div>\n\\end\n\n\\define plugin-table(type)\n<$set name=\"qualified-state\" value=<<qualify \"$:/state/plugin-info\">>>\n<$list filter=\"[!has[draft.of]plugin-type[$type$]sort[description]]\" emptyMessage=<<lingo \"Empty/Hint\">>>\n<$set name=\"popup-state\" value=<<popup-state-macro>>>\n<$reveal type=\"nomatch\" state=<<plugin-disable-title>> text=\"yes\">\n<$link to={{!!title}} class=\"tc-plugin-info\">\n<<plugin-table-body type:\"$type$\">>\n</$link>\n</$reveal>\n<$reveal type=\"match\" state=<<plugin-disable-title>> text=\"yes\">\n<$link to={{!!title}} class=\"tc-plugin-info tc-plugin-info-disabled\">\n<<plugin-table-body type:\"$type$\" 
disabledMessage:\"<$macrocall $name='lingo' title='Disabled/Status'/>\">>\n</$link>\n</$reveal>\n<$reveal type=\"match\" text=\"yes\" state=<<popup-state>>>\n<div class=\"tc-plugin-info-dropdown\">\n<div class=\"tc-plugin-info-dropdown-body\">\n<$list filter=\"[all[current]] -[[$:/core]]\">\n<div style=\"float:right;\">\n<$reveal type=\"nomatch\" state=<<plugin-disable-title>> text=\"yes\">\n<$button set=<<plugin-disable-title>> setTo=\"yes\" tooltip={{$:/language/ControlPanel/Plugins/Disable/Hint}} aria-label={{$:/language/ControlPanel/Plugins/Disable/Caption}}>\n<<lingo Disable/Caption>>\n</$button>\n</$reveal>\n<$reveal type=\"match\" state=<<plugin-disable-title>> text=\"yes\">\n<$button set=<<plugin-disable-title>> setTo=\"no\" tooltip={{$:/language/ControlPanel/Plugins/Enable/Hint}} aria-label={{$:/language/ControlPanel/Plugins/Enable/Caption}}>\n<<lingo Enable/Caption>>\n</$button>\n</$reveal>\n</div>\n</$list>\n<$reveal type=\"nomatch\" text=\"\" state=\"!!list\">\n<$macrocall $name=\"tabs\" state=<<tabs-state-macro>> tabsList={{!!list}} default=\"readme\" template=\"$:/core/ui/PluginInfo\"/>\n</$reveal>\n<$reveal type=\"match\" text=\"\" state=\"!!list\">\n<<lingo NoInformation/Hint>>\n</$reveal>\n</div>\n</div>\n</$reveal>\n</$set>\n</$list>\n</$set>\n\\end\n\n{{$:/core/ui/ControlPanel/Plugins/AddPlugins}}\n\n<<lingo Installed/Hint>>\n\n<<tabs \"[[$:/core/ui/ControlPanel/Plugins/Installed/Plugins]] [[$:/core/ui/ControlPanel/Plugins/Installed/Themes]] [[$:/core/ui/ControlPanel/Plugins/Installed/Languages]]\" \"$:/core/ui/ControlPanel/Plugins/Installed/Plugins\">>\n"
},
"$:/core/ui/ControlPanel/Saving": {
"title": "$:/core/ui/ControlPanel/Saving",
"tags": "$:/tags/ControlPanel",
"caption": "{{$:/language/ControlPanel/Saving/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Saving/\n\\define backupURL()\nhttp://$(userName)$.tiddlyspot.com/backup/\n\\end\n\\define backupLink()\n<$reveal type=\"nomatch\" state=\"$:/UploadName\" text=\"\">\n<$set name=\"userName\" value={{$:/UploadName}}>\n<$reveal type=\"match\" state=\"$:/UploadURL\" text=\"\">\n<<backupURL>>\n</$reveal>\n<$reveal type=\"nomatch\" state=\"$:/UploadURL\" text=\"\">\n<$macrocall $name=resolvePath source={{$:/UploadBackupDir}} root={{$:/UploadURL}}/>\n</$reveal>\n</$set>\n</$reveal>\n\\end\n! <<lingo TiddlySpot/Heading>>\n\n<<lingo TiddlySpot/Description>>\n\n|<<lingo TiddlySpot/UserName>> |<$edit-text tiddler=\"$:/UploadName\" default=\"\" tag=\"input\"/> |\n|<<lingo TiddlySpot/Password>> |<$password name=\"upload\"/> |\n|<<lingo TiddlySpot/Backups>> |<<backupLink>> |\n\n''<<lingo TiddlySpot/Advanced/Heading>>''\n\n|<<lingo TiddlySpot/ServerURL>> |<$edit-text tiddler=\"$:/UploadURL\" default=\"\" tag=\"input\"/> |\n|<<lingo TiddlySpot/Filename>> |<$edit-text tiddler=\"$:/UploadFilename\" default=\"index.html\" tag=\"input\"/> |\n|<<lingo TiddlySpot/UploadDir>> |<$edit-text tiddler=\"$:/UploadDir\" default=\".\" tag=\"input\"/> |\n|<<lingo TiddlySpot/BackupDir>> |<$edit-text tiddler=\"$:/UploadBackupDir\" default=\".\" tag=\"input\"/> |\n\n<<lingo TiddlySpot/Hint>>"
},
"$:/core/ui/ControlPanel/Settings/AutoSave": {
"title": "$:/core/ui/ControlPanel/Settings/AutoSave",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/AutoSave/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/AutoSave/\n\n<$link to=\"$:/config/AutoSave\"><<lingo Hint>></$link>\n\n<$radio tiddler=\"$:/config/AutoSave\" value=\"yes\"> <<lingo Enabled/Description>> </$radio>\n\n<$radio tiddler=\"$:/config/AutoSave\" value=\"no\"> <<lingo Disabled/Description>> </$radio>\n"
},
"$:/core/buttonstyles/Borderless": {
"title": "$:/core/buttonstyles/Borderless",
"tags": "$:/tags/ToolbarButtonStyle",
"caption": "{{$:/language/ControlPanel/Settings/ToolbarButtonStyle/Styles/Borderless}}",
"text": "tc-btn-invisible"
},
"$:/core/buttonstyles/Boxed": {
"title": "$:/core/buttonstyles/Boxed",
"tags": "$:/tags/ToolbarButtonStyle",
"caption": "{{$:/language/ControlPanel/Settings/ToolbarButtonStyle/Styles/Boxed}}",
"text": "tc-btn-boxed"
},
"$:/core/buttonstyles/Rounded": {
"title": "$:/core/buttonstyles/Rounded",
"tags": "$:/tags/ToolbarButtonStyle",
"caption": "{{$:/language/ControlPanel/Settings/ToolbarButtonStyle/Styles/Rounded}}",
"text": "tc-btn-rounded"
},
"$:/core/ui/ControlPanel/Settings/CamelCase": {
"title": "$:/core/ui/ControlPanel/Settings/CamelCase",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/CamelCase/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/CamelCase/\n<<lingo Hint>>\n\n<$checkbox tiddler=\"$:/config/WikiParserRules/Inline/wikilink\" field=\"text\" checked=\"enable\" unchecked=\"disable\" default=\"enable\"> <$link to=\"$:/config/WikiParserRules/Inline/wikilink\"><<lingo Description>></$link> </$checkbox>\n"
},
"$:/core/ui/ControlPanel/Settings/DefaultSidebarTab": {
"caption": "{{$:/language/ControlPanel/Settings/DefaultSidebarTab/Caption}}",
"tags": "$:/tags/ControlPanel/Settings",
"title": "$:/core/ui/ControlPanel/Settings/DefaultSidebarTab",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/DefaultSidebarTab/\n\n<$link to=\"$:/config/DefaultSidebarTab\"><<lingo Hint>></$link>\n\n<$select tiddler=\"$:/config/DefaultSidebarTab\">\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/SideBar]!has[draft.of]]\">\n<option value=<<currentTiddler>>><$transclude field=\"caption\"><$text text=<<currentTiddler>>/></$transclude></option>\n</$list>\n</$select>\n"
},
"$:/core/ui/ControlPanel/Settings/EditorToolbar": {
"title": "$:/core/ui/ControlPanel/Settings/EditorToolbar",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/EditorToolbar/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/EditorToolbar/\n<<lingo Hint>>\n\n<$checkbox tiddler=\"$:/config/TextEditor/EnableToolbar\" field=\"text\" checked=\"yes\" unchecked=\"no\" default=\"yes\"> <$link to=\"$:/config/TextEditor/EnableToolbar\"><<lingo Description>></$link> </$checkbox>\n\n"
},
"$:/core/ui/ControlPanel/Settings/LinkToBehaviour": {
"title": "$:/core/ui/ControlPanel/Settings/LinkToBehaviour",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/LinkToBehaviour/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/LinkToBehaviour/\n\n<$link to=\"$:/config/Navigation/openLinkFromInsideRiver\"><<lingo \"InsideRiver/Hint\">></$link>\n\n<$select tiddler=\"$:/config/Navigation/openLinkFromInsideRiver\">\n <option value=\"above\"><<lingo \"OpenAbove\">></option>\n <option value=\"below\"><<lingo \"OpenBelow\">></option>\n <option value=\"top\"><<lingo \"OpenAtTop\">></option>\n <option value=\"bottom\"><<lingo \"OpenAtBottom\">></option>\n</$select>\n\n<$link to=\"$:/config/Navigation/openLinkFromOutsideRiver\"><<lingo \"OutsideRiver/Hint\">></$link>\n\n<$select tiddler=\"$:/config/Navigation/openLinkFromOutsideRiver\">\n <option value=\"top\"><<lingo \"OpenAtTop\">></option>\n <option value=\"bottom\"><<lingo \"OpenAtBottom\">></option>\n</$select>\n"
},
"$:/core/ui/ControlPanel/Settings/MissingLinks": {
"title": "$:/core/ui/ControlPanel/Settings/MissingLinks",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/MissingLinks/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/MissingLinks/\n<<lingo Hint>>\n\n<$checkbox tiddler=\"$:/config/MissingLinks\" field=\"text\" checked=\"yes\" unchecked=\"no\" default=\"yes\"> <$link to=\"$:/config/MissingLinks\"><<lingo Description>></$link> </$checkbox>\n\n"
},
"$:/core/ui/ControlPanel/Settings/NavigationAddressBar": {
"title": "$:/core/ui/ControlPanel/Settings/NavigationAddressBar",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/NavigationAddressBar/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/NavigationAddressBar/\n\n<$link to=\"$:/config/Navigation/UpdateAddressBar\"><<lingo Hint>></$link>\n\n<$radio tiddler=\"$:/config/Navigation/UpdateAddressBar\" value=\"permaview\"> <<lingo Permaview/Description>> </$radio>\n\n<$radio tiddler=\"$:/config/Navigation/UpdateAddressBar\" value=\"permalink\"> <<lingo Permalink/Description>> </$radio>\n\n<$radio tiddler=\"$:/config/Navigation/UpdateAddressBar\" value=\"no\"> <<lingo No/Description>> </$radio>\n"
},
"$:/core/ui/ControlPanel/Settings/NavigationHistory": {
"title": "$:/core/ui/ControlPanel/Settings/NavigationHistory",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/NavigationHistory/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/NavigationHistory/\n<$link to=\"$:/config/Navigation/UpdateHistory\"><<lingo Hint>></$link>\n\n<$radio tiddler=\"$:/config/Navigation/UpdateHistory\" value=\"yes\"> <<lingo Yes/Description>> </$radio>\n\n<$radio tiddler=\"$:/config/Navigation/UpdateHistory\" value=\"no\"> <<lingo No/Description>> </$radio>\n"
},
"$:/core/ui/ControlPanel/Settings/PerformanceInstrumentation": {
"title": "$:/core/ui/ControlPanel/Settings/PerformanceInstrumentation",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/PerformanceInstrumentation/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/PerformanceInstrumentation/\n<<lingo Hint>>\n\n<$checkbox tiddler=\"$:/config/Performance/Instrumentation\" field=\"text\" checked=\"yes\" unchecked=\"no\" default=\"no\"> <$link to=\"$:/config/Performance/Instrumentation\"><<lingo Description>></$link> </$checkbox>\n"
},
"$:/core/ui/ControlPanel/Settings/TitleLinks": {
"title": "$:/core/ui/ControlPanel/Settings/TitleLinks",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/TitleLinks/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/TitleLinks/\n<$link to=\"$:/config/Tiddlers/TitleLinks\"><<lingo Hint>></$link>\n\n<$radio tiddler=\"$:/config/Tiddlers/TitleLinks\" value=\"yes\"> <<lingo Yes/Description>> </$radio>\n\n<$radio tiddler=\"$:/config/Tiddlers/TitleLinks\" value=\"no\"> <<lingo No/Description>> </$radio>\n"
},
"$:/core/ui/ControlPanel/Settings/ToolbarButtons": {
"title": "$:/core/ui/ControlPanel/Settings/ToolbarButtons",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/ToolbarButtons/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/ToolbarButtons/\n<<lingo Hint>>\n\n<$checkbox tiddler=\"$:/config/Toolbar/Icons\" field=\"text\" checked=\"yes\" unchecked=\"no\" default=\"yes\"> <$link to=\"$:/config/Toolbar/Icons\"><<lingo Icons/Description>></$link> </$checkbox>\n\n<$checkbox tiddler=\"$:/config/Toolbar/Text\" field=\"text\" checked=\"yes\" unchecked=\"no\" default=\"no\"> <$link to=\"$:/config/Toolbar/Text\"><<lingo Text/Description>></$link> </$checkbox>\n"
},
"$:/core/ui/ControlPanel/Settings/ToolbarButtonStyle": {
"title": "$:/core/ui/ControlPanel/Settings/ToolbarButtonStyle",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/ToolbarButtonStyle/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/ToolbarButtonStyle/\n<$link to=\"$:/config/Toolbar/ButtonClass\"><<lingo \"Hint\">></$link>\n\n<$select tiddler=\"$:/config/Toolbar/ButtonClass\">\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/ToolbarButtonStyle]]\">\n<option value={{!!text}}>{{!!caption}}</option>\n</$list>\n</$select>\n"
},
"$:/core/ui/ControlPanel/Settings": {
"title": "$:/core/ui/ControlPanel/Settings",
"tags": "$:/tags/ControlPanel",
"caption": "{{$:/language/ControlPanel/Settings/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/\n\n<<lingo Hint>>\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/ControlPanel/Settings]]\">\n\n<div style=\"border-top:1px solid #eee;\">\n\n!! <$link><$transclude field=\"caption\"/></$link>\n\n<$transclude/>\n\n</div>\n\n</$list>\n"
},
"$:/core/ui/ControlPanel/StoryView": {
"title": "$:/core/ui/ControlPanel/StoryView",
"tags": "$:/tags/ControlPanel/Appearance",
"caption": "{{$:/language/ControlPanel/StoryView/Caption}}",
"text": "{{$:/snippets/viewswitcher}}\n"
},
"$:/core/ui/ControlPanel/Theme": {
"title": "$:/core/ui/ControlPanel/Theme",
"tags": "$:/tags/ControlPanel/Appearance",
"caption": "{{$:/language/ControlPanel/Theme/Caption}}",
"text": "{{$:/snippets/themeswitcher}}\n"
},
"$:/core/ui/ControlPanel/TiddlerFields": {
"title": "$:/core/ui/ControlPanel/TiddlerFields",
"tags": "$:/tags/ControlPanel/Advanced",
"caption": "{{$:/language/ControlPanel/TiddlerFields/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/\n\n<<lingo TiddlerFields/Hint>>\n\n{{$:/snippets/allfields}}"
},
"$:/core/ui/ControlPanel/Toolbars/EditorToolbar": {
"title": "$:/core/ui/ControlPanel/Toolbars/EditorToolbar",
"tags": "$:/tags/ControlPanel/Toolbars",
"caption": "{{$:/language/ControlPanel/Toolbars/EditorToolbar/Caption}}",
"text": "\\define lingo-base() $:/language/TiddlerInfo/\n\n\\define config-title()\n$:/config/EditorToolbarButtons/Visibility/$(listItem)$\n\\end\n\n\\define toolbar-button()\n<$checkbox tiddler=<<config-title>> field=\"text\" checked=\"show\" unchecked=\"hide\" default=\"show\"> <$transclude tiddler={{$(listItem)$!!icon}}/> <$transclude tiddler=<<listItem>> field=\"caption\"/> -- <i class=\"tc-muted\"><$transclude tiddler=<<listItem>> field=\"description\"/></i></$checkbox>\n\\end\n\n{{$:/language/ControlPanel/Toolbars/EditorToolbar/Hint}}\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/EditorToolbar]!has[draft.of]]\" variable=\"listItem\">\n\n<<toolbar-button>>\n\n</$list>\n"
},
"$:/core/ui/ControlPanel/Toolbars/EditToolbar": {
"title": "$:/core/ui/ControlPanel/Toolbars/EditToolbar",
"tags": "$:/tags/ControlPanel/Toolbars",
"caption": "{{$:/language/ControlPanel/Toolbars/EditToolbar/Caption}}",
"text": "\\define lingo-base() $:/language/TiddlerInfo/\n\\define config-title()\n$:/config/EditToolbarButtons/Visibility/$(listItem)$\n\\end\n\n{{$:/language/ControlPanel/Toolbars/EditToolbar/Hint}}\n\n<$set name=\"tv-config-toolbar-icons\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-text\" value=\"yes\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/EditToolbar]!has[draft.of]]\" variable=\"listItem\">\n\n<$checkbox tiddler=<<config-title>> field=\"text\" checked=\"show\" unchecked=\"hide\" default=\"show\"/> <$transclude tiddler=<<listItem>> field=\"caption\"/> <i class=\"tc-muted\">-- <$transclude tiddler=<<listItem>> field=\"description\"/></i>\n\n</$list>\n\n</$set>\n\n</$set>\n"
},
"$:/core/ui/ControlPanel/Toolbars/PageControls": {
"title": "$:/core/ui/ControlPanel/Toolbars/PageControls",
"tags": "$:/tags/ControlPanel/Toolbars",
"caption": "{{$:/language/ControlPanel/Toolbars/PageControls/Caption}}",
"text": "\\define lingo-base() $:/language/TiddlerInfo/\n\\define config-title()\n$:/config/PageControlButtons/Visibility/$(listItem)$\n\\end\n\n{{$:/language/ControlPanel/Toolbars/PageControls/Hint}}\n\n<$set name=\"tv-config-toolbar-icons\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-text\" value=\"yes\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/PageControls]!has[draft.of]]\" variable=\"listItem\">\n\n<$checkbox tiddler=<<config-title>> field=\"text\" checked=\"show\" unchecked=\"hide\" default=\"show\"/> <$transclude tiddler=<<listItem>> field=\"caption\"/> <i class=\"tc-muted\">-- <$transclude tiddler=<<listItem>> field=\"description\"/></i>\n\n</$list>\n\n</$set>\n\n</$set>\n"
},
"$:/core/ui/ControlPanel/Toolbars/ViewToolbar": {
"title": "$:/core/ui/ControlPanel/Toolbars/ViewToolbar",
"tags": "$:/tags/ControlPanel/Toolbars",
"caption": "{{$:/language/ControlPanel/Toolbars/ViewToolbar/Caption}}",
"text": "\\define lingo-base() $:/language/TiddlerInfo/\n\\define config-title()\n$:/config/ViewToolbarButtons/Visibility/$(listItem)$\n\\end\n\n{{$:/language/ControlPanel/Toolbars/ViewToolbar/Hint}}\n\n<$set name=\"tv-config-toolbar-icons\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-text\" value=\"yes\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/ViewToolbar]!has[draft.of]]\" variable=\"listItem\">\n\n<$checkbox tiddler=<<config-title>> field=\"text\" checked=\"show\" unchecked=\"hide\" default=\"show\"/> <$transclude tiddler=<<listItem>> field=\"caption\"/> <i class=\"tc-muted\">-- <$transclude tiddler=<<listItem>> field=\"description\"/></i>\n\n</$list>\n\n</$set>\n\n</$set>\n"
},
"$:/core/ui/ControlPanel/Toolbars": {
"title": "$:/core/ui/ControlPanel/Toolbars",
"tags": "$:/tags/ControlPanel/Appearance",
"caption": "{{$:/language/ControlPanel/Toolbars/Caption}}",
"text": "{{$:/language/ControlPanel/Toolbars/Hint}}\n\n<div class=\"tc-control-panel\">\n<<tabs \"[all[shadows+tiddlers]tag[$:/tags/ControlPanel/Toolbars]!has[draft.of]]\" \"$:/core/ui/ControlPanel/Toolbars/ViewToolbar\" \"$:/state/tabs/controlpanel/toolbars\" \"tc-vertical\">>\n</div>\n"
},
"$:/ControlPanel": {
"title": "$:/ControlPanel",
"icon": "$:/core/images/options-button",
"color": "#bbb",
"text": "<div class=\"tc-control-panel\">\n<<tabs \"[all[shadows+tiddlers]tag[$:/tags/ControlPanel]!has[draft.of]]\" \"$:/core/ui/ControlPanel/Info\">>\n</div>\n"
},
"$:/core/ui/DefaultSearchResultList": {
"title": "$:/core/ui/DefaultSearchResultList",
"tags": "$:/tags/SearchResults",
"caption": "{{$:/language/Search/DefaultResults/Caption}}",
"text": "\\define searchResultList()\n//<small>{{$:/language/Search/Matches/Title}}</small>//\n\n<$list filter=\"[!is[system]search:title{$(searchTiddler)$}sort[title]limit[250]]\" template=\"$:/core/ui/ListItemTemplate\"/>\n\n//<small>{{$:/language/Search/Matches/All}}</small>//\n\n<$list filter=\"[!is[system]search{$(searchTiddler)$}sort[title]limit[250]]\" template=\"$:/core/ui/ListItemTemplate\"/>\n\n\\end\n<<searchResultList>>\n"
},
"$:/core/ui/EditorToolbar/bold": {
"title": "$:/core/ui/EditorToolbar/bold",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/bold",
"caption": "{{$:/language/Buttons/Bold/Caption}}",
"description": "{{$:/language/Buttons/Bold/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((bold))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"wrap-selection\"\n\tprefix=\"''\"\n\tsuffix=\"''\"\n/>\n"
},
"$:/core/ui/EditorToolbar/clear-dropdown": {
"title": "$:/core/ui/EditorToolbar/clear-dropdown",
"text": "''{{$:/language/Buttons/Clear/Hint}}''\n\n<div class=\"tc-colour-chooser\">\n\n<$macrocall $name=\"colour-picker\" actions=\"\"\"\n\n<$action-sendmessage\n\t$message=\"tm-edit-bitmap-operation\"\n\t$param=\"clear\"\n\tcolour=<<colour-picker-value>>\n/>\n\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n\n\"\"\"/>\n\n</div>\n"
},
"$:/core/ui/EditorToolbar/clear": {
"title": "$:/core/ui/EditorToolbar/clear",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/erase",
"caption": "{{$:/language/Buttons/Clear/Caption}}",
"description": "{{$:/language/Buttons/Clear/Hint}}",
"condition": "[<targetTiddler>is[image]]",
"dropdown": "$:/core/ui/EditorToolbar/clear-dropdown",
"text": ""
},
"$:/core/ui/EditorToolbar/editor-height-dropdown": {
"title": "$:/core/ui/EditorToolbar/editor-height-dropdown",
"text": "\\define lingo-base() $:/language/Buttons/EditorHeight/\n''<<lingo Hint>>''\n\n<$radio tiddler=\"$:/config/TextEditor/EditorHeight/Mode\" value=\"auto\"> {{$:/core/images/auto-height}} <<lingo Caption/Auto>></$radio>\n\n<$radio tiddler=\"$:/config/TextEditor/EditorHeight/Mode\" value=\"fixed\"> {{$:/core/images/fixed-height}} <<lingo Caption/Fixed>> <$edit-text tag=\"input\" tiddler=\"$:/config/TextEditor/EditorHeight/Height\" default=\"100px\"/></$radio>\n"
},
"$:/core/ui/EditorToolbar/editor-height": {
"title": "$:/core/ui/EditorToolbar/editor-height",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/fixed-height",
"custom-icon": "yes",
"caption": "{{$:/language/Buttons/EditorHeight/Caption}}",
"description": "{{$:/language/Buttons/EditorHeight/Hint}}",
"condition": "[<targetTiddler>!is[image]]",
"dropdown": "$:/core/ui/EditorToolbar/editor-height-dropdown",
"text": "<$reveal tag=\"span\" state=\"$:/config/TextEditor/EditorHeight/Mode\" type=\"match\" text=\"fixed\">\n{{$:/core/images/fixed-height}}\n</$reveal>\n<$reveal tag=\"span\" state=\"$:/config/TextEditor/EditorHeight/Mode\" type=\"match\" text=\"auto\">\n{{$:/core/images/auto-height}}\n</$reveal>\n"
},
"$:/core/ui/EditorToolbar/excise-dropdown": {
"title": "$:/core/ui/EditorToolbar/excise-dropdown",
"text": "\\define lingo-base() $:/language/Buttons/Excise/\n\n\\define body(config-title)\n''<<lingo Hint>>''\n\n<<lingo Caption/NewTitle>> <$edit-text tag=\"input\" tiddler=\"$config-title$/new-title\" default=\"\" focus=\"true\"/>\n\n<$set name=\"new-title\" value={{$config-title$/new-title}}>\n<$list filter=\"\"\"[<new-title>is[tiddler]]\"\"\">\n<div class=\"tc-error\">\n<<lingo Caption/TiddlerExists>>\n</div>\n</$list>\n</$set>\n\n<$checkbox tiddler=\"\"\"$config-title$/tagnew\"\"\" field=\"text\" checked=\"yes\" unchecked=\"no\" default=\"false\"> <<lingo Caption/Tag>></$checkbox>\n\n<<lingo Caption/Replace>> <$select tiddler=\"\"\"$config-title$/type\"\"\" default=\"transclude\">\n<option value=\"link\"><<lingo Caption/Replace/Link>></option>\n<option value=\"transclude\"><<lingo Caption/Replace/Transclusion>></option>\n<option value=\"macro\"><<lingo Caption/Replace/Macro>></option>\n</$select>\n\n<$reveal state=\"\"\"$config-title$/type\"\"\" type=\"match\" text=\"macro\">\n<<lingo Caption/MacroName>> <$edit-text tag=\"input\" tiddler=\"\"\"$config-title$/macro-title\"\"\" default=\"translink\"/>\n</$reveal>\n\n<$button>\n<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"excise\"\n\ttitle={{$config-title$/new-title}}\n\ttype={{$config-title$/type}}\n\tmacro={{$config-title$/macro-title}}\n\ttagnew={{$config-title$/tagnew}}\n/>\n<$action-deletetiddler\n\t$tiddler=<<qualify \"$:/state/Excise/NewTitle\">>\n/>\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n<<lingo Caption/Excise>>\n</$button>\n\\end\n\n<$macrocall $name=\"body\" config-title=<<qualify \"$:/state/Excise/\">>/>\n"
},
"$:/core/ui/EditorToolbar/excise": {
"title": "$:/core/ui/EditorToolbar/excise",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/excise",
"caption": "{{$:/language/Buttons/Excise/Caption}}",
"description": "{{$:/language/Buttons/Excise/Hint}}",
"condition": "[<targetTiddler>!is[image]]",
"shortcuts": "((excise))",
"dropdown": "$:/core/ui/EditorToolbar/excise-dropdown",
"text": ""
},
"$:/core/ui/EditorToolbar/heading-1": {
"title": "$:/core/ui/EditorToolbar/heading-1",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/heading-1",
"caption": "{{$:/language/Buttons/Heading1/Caption}}",
"description": "{{$:/language/Buttons/Heading1/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"button-classes": "tc-text-editor-toolbar-item-start-group",
"shortcuts": "((heading-1))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"prefix-lines\"\n\tcharacter=\"!\"\n\tcount=\"1\"\n/>\n"
},
"$:/core/ui/EditorToolbar/heading-2": {
"title": "$:/core/ui/EditorToolbar/heading-2",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/heading-2",
"caption": "{{$:/language/Buttons/Heading2/Caption}}",
"description": "{{$:/language/Buttons/Heading2/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((heading-2))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"prefix-lines\"\n\tcharacter=\"!\"\n\tcount=\"2\"\n/>\n"
},
"$:/core/ui/EditorToolbar/heading-3": {
"title": "$:/core/ui/EditorToolbar/heading-3",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/heading-3",
"caption": "{{$:/language/Buttons/Heading3/Caption}}",
"description": "{{$:/language/Buttons/Heading3/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((heading-3))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"prefix-lines\"\n\tcharacter=\"!\"\n\tcount=\"3\"\n/>\n"
},
"$:/core/ui/EditorToolbar/heading-4": {
"title": "$:/core/ui/EditorToolbar/heading-4",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/heading-4",
"caption": "{{$:/language/Buttons/Heading4/Caption}}",
"description": "{{$:/language/Buttons/Heading4/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((heading-4))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"prefix-lines\"\n\tcharacter=\"!\"\n\tcount=\"4\"\n/>\n"
},
"$:/core/ui/EditorToolbar/heading-5": {
"title": "$:/core/ui/EditorToolbar/heading-5",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/heading-5",
"caption": "{{$:/language/Buttons/Heading5/Caption}}",
"description": "{{$:/language/Buttons/Heading5/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((heading-5))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"prefix-lines\"\n\tcharacter=\"!\"\n\tcount=\"5\"\n/>\n"
},
"$:/core/ui/EditorToolbar/heading-6": {
"title": "$:/core/ui/EditorToolbar/heading-6",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/heading-6",
"caption": "{{$:/language/Buttons/Heading6/Caption}}",
"description": "{{$:/language/Buttons/Heading6/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((heading-6))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"prefix-lines\"\n\tcharacter=\"!\"\n\tcount=\"6\"\n/>\n"
},
"$:/core/ui/EditorToolbar/italic": {
"title": "$:/core/ui/EditorToolbar/italic",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/italic",
"caption": "{{$:/language/Buttons/Italic/Caption}}",
"description": "{{$:/language/Buttons/Italic/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((italic))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"wrap-selection\"\n\tprefix=\"//\"\n\tsuffix=\"//\"\n/>\n"
},
"$:/core/ui/EditorToolbar/line-width-dropdown": {
"title": "$:/core/ui/EditorToolbar/line-width-dropdown",
"text": "\\define lingo-base() $:/language/Buttons/LineWidth/\n\n\\define toolbar-line-width-inner()\n<$button tag=\"a\" tooltip=\"\"\"$(line-width)$\"\"\">\n\n<$action-setfield\n\t$tiddler=\"$:/config/BitmapEditor/LineWidth\"\n\t$value=\"$(line-width)$\"\n/>\n\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n\n<div style=\"display: inline-block; margin: 4px calc(80px - $(line-width)$); background-color: #000; width: calc(100px + $(line-width)$ * 2); height: $(line-width)$; border-radius: 120px; vertical-align: middle;\"/>\n\n<span style=\"margin-left: 8px;\">\n\n<$text text=\"\"\"$(line-width)$\"\"\"/>\n\n<$reveal state=\"$:/config/BitmapEditor/LineWidth\" type=\"match\" text=\"\"\"$(line-width)$\"\"\" tag=\"span\">\n\n<$entity entity=\" \"/>\n\n<$entity entity=\"✓\"/>\n\n</$reveal>\n\n</span>\n\n</$button>\n\\end\n\n''<<lingo Hint>>''\n\n<$list filter={{$:/config/BitmapEditor/LineWidths}} variable=\"line-width\">\n\n<<toolbar-line-width-inner>>\n\n</$list>\n"
},
"$:/core/ui/EditorToolbar/line-width": {
"title": "$:/core/ui/EditorToolbar/line-width",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/line-width",
"caption": "{{$:/language/Buttons/LineWidth/Caption}}",
"description": "{{$:/language/Buttons/LineWidth/Hint}}",
"condition": "[<targetTiddler>is[image]]",
"dropdown": "$:/core/ui/EditorToolbar/line-width-dropdown",
"text": "<$text text={{$:/config/BitmapEditor/LineWidth}}/>"
},
"$:/core/ui/EditorToolbar/link-dropdown": {
"title": "$:/core/ui/EditorToolbar/link-dropdown",
"text": "\\define lingo-base() $:/language/Buttons/Link/\n\n\\define link-actions()\n<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"make-link\"\n\ttext={{$(linkTiddler)$}}\n/>\n\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n\n<$action-deletetiddler\n\t$tiddler=<<searchTiddler>>\n/>\n\n<$action-deletetiddler\n\t$tiddler=<<linkTiddler>>\n/>\n\\end\n\n\\define body(config-title)\n''<<lingo Hint>>''\n\n<$vars searchTiddler=\"\"\"$config-title$/search\"\"\" linkTiddler=\"\"\"$config-title$/link\"\"\">\n\n<$edit-text tiddler=<<searchTiddler>> type=\"search\" tag=\"input\" focus=\"true\" placeholder={{$:/language/Search/Search}} default=\"\"/>\n<$reveal tag=\"span\" state=<<searchTiddler>> type=\"nomatch\" text=\"\">\n<$button class=\"tc-btn-invisible\" style=\"width: auto; display: inline-block; background-color: inherit;\">\n<$action-setfield $tiddler=<<searchTiddler>> text=\"\" />\n{{$:/core/images/close-button}}\n</$button>\n</$reveal>\n\n<$reveal tag=\"div\" state=<<searchTiddler>> type=\"nomatch\" text=\"\">\n\n<$linkcatcher actions=<<link-actions>> to=<<linkTiddler>>>\n\n{{$:/core/ui/SearchResults}}\n\n</$linkcatcher>\n\n</$reveal>\n\n</$vars>\n\n\\end\n\n<$macrocall $name=\"body\" config-title=<<qualify \"$:/state/Link/\">>/>\n"
},
"$:/core/ui/EditorToolbar/link": {
"title": "$:/core/ui/EditorToolbar/link",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/link",
"caption": "{{$:/language/Buttons/Link/Caption}}",
"description": "{{$:/language/Buttons/Link/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"button-classes": "tc-text-editor-toolbar-item-start-group",
"shortcuts": "((link))",
"dropdown": "$:/core/ui/EditorToolbar/link-dropdown",
"text": ""
},
"$:/core/ui/EditorToolbar/list-bullet": {
"title": "$:/core/ui/EditorToolbar/list-bullet",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/list-bullet",
"caption": "{{$:/language/Buttons/ListBullet/Caption}}",
"description": "{{$:/language/Buttons/ListBullet/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((list-bullet))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"prefix-lines\"\n\tcharacter=\"*\"\n\tcount=\"1\"\n/>\n"
},
"$:/core/ui/EditorToolbar/list-number": {
"title": "$:/core/ui/EditorToolbar/list-number",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/list-number",
"caption": "{{$:/language/Buttons/ListNumber/Caption}}",
"description": "{{$:/language/Buttons/ListNumber/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((list-number))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"prefix-lines\"\n\tcharacter=\"#\"\n\tcount=\"1\"\n/>\n"
},
"$:/core/ui/EditorToolbar/mono-block": {
"title": "$:/core/ui/EditorToolbar/mono-block",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/mono-block",
"caption": "{{$:/language/Buttons/MonoBlock/Caption}}",
"description": "{{$:/language/Buttons/MonoBlock/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"button-classes": "tc-text-editor-toolbar-item-start-group",
"shortcuts": "((mono-block))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"wrap-lines\"\n\tprefix=\"\n```\"\n\tsuffix=\"```\"\n/>\n"
},
"$:/core/ui/EditorToolbar/mono-line": {
"title": "$:/core/ui/EditorToolbar/mono-line",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/mono-line",
"caption": "{{$:/language/Buttons/MonoLine/Caption}}",
"description": "{{$:/language/Buttons/MonoLine/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((mono-line))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"wrap-selection\"\n\tprefix=\"`\"\n\tsuffix=\"`\"\n/>\n"
},
"$:/core/ui/EditorToolbar/more-dropdown": {
"title": "$:/core/ui/EditorToolbar/more-dropdown",
"text": "\\define config-title()\n$:/config/EditorToolbarButtons/Visibility/$(toolbarItem)$\n\\end\n\n\\define conditional-button()\n<$list filter={{$(toolbarItem)$!!condition}} variable=\"condition\">\n<$transclude tiddler=\"$:/core/ui/EditTemplate/body/toolbar/button\" mode=\"inline\"/> <$transclude tiddler=<<toolbarItem>> field=\"description\"/>\n</$list>\n\\end\n\n<div class=\"tc-text-editor-toolbar-more\">\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/EditorToolbar]!has[draft.of]] -[[$:/core/ui/EditorToolbar/more]]\">\n<$reveal type=\"match\" state=<<config-visibility-title>> text=\"hide\" tag=\"div\">\n<<conditional-button>>\n</$reveal>\n</$list>\n</div>\n"
},
"$:/core/ui/EditorToolbar/more": {
"title": "$:/core/ui/EditorToolbar/more",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/down-arrow",
"caption": "{{$:/language/Buttons/More/Caption}}",
"description": "{{$:/language/Buttons/More/Hint}}",
"condition": "[<targetTiddler>]",
"dropdown": "$:/core/ui/EditorToolbar/more-dropdown",
"text": ""
},
"$:/core/ui/EditorToolbar/opacity-dropdown": {
"title": "$:/core/ui/EditorToolbar/opacity-dropdown",
"text": "\\define lingo-base() $:/language/Buttons/Opacity/\n\n\\define toolbar-opacity-inner()\n<$button tag=\"a\" tooltip=\"\"\"$(opacity)$\"\"\">\n\n<$action-setfield\n\t$tiddler=\"$:/config/BitmapEditor/Opacity\"\n\t$value=\"$(opacity)$\"\n/>\n\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n\n<div style=\"display: inline-block; vertical-align: middle; background-color: $(current-paint-colour)$; opacity: $(opacity)$; width: 1em; height: 1em; border-radius: 50%;\"/>\n\n<span style=\"margin-left: 8px;\">\n\n<$text text=\"\"\"$(opacity)$\"\"\"/>\n\n<$reveal state=\"$:/config/BitmapEditor/Opacity\" type=\"match\" text=\"\"\"$(opacity)$\"\"\" tag=\"span\">\n\n<$entity entity=\" \"/>\n\n<$entity entity=\"✓\"/>\n\n</$reveal>\n\n</span>\n\n</$button>\n\\end\n\n\\define toolbar-opacity()\n''<<lingo Hint>>''\n\n<$list filter={{$:/config/BitmapEditor/Opacities}} variable=\"opacity\">\n\n<<toolbar-opacity-inner>>\n\n</$list>\n\\end\n\n<$set name=\"current-paint-colour\" value={{$:/config/BitmapEditor/Colour}}>\n\n<$set name=\"current-opacity\" value={{$:/config/BitmapEditor/Opacity}}>\n\n<<toolbar-opacity>>\n\n</$set>\n\n</$set>\n"
},
"$:/core/ui/EditorToolbar/opacity": {
"title": "$:/core/ui/EditorToolbar/opacity",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/opacity",
"caption": "{{$:/language/Buttons/Opacity/Caption}}",
"description": "{{$:/language/Buttons/Opacity/Hint}}",
"condition": "[<targetTiddler>is[image]]",
"dropdown": "$:/core/ui/EditorToolbar/opacity-dropdown",
"text": "<$text text={{$:/config/BitmapEditor/Opacity}}/>\n"
},
"$:/core/ui/EditorToolbar/paint-dropdown": {
"title": "$:/core/ui/EditorToolbar/paint-dropdown",
"text": "''{{$:/language/Buttons/Paint/Hint}}''\n\n<$macrocall $name=\"colour-picker\" actions=\"\"\"\n\n<$action-setfield\n\t$tiddler=\"$:/config/BitmapEditor/Colour\"\n\t$value=<<colour-picker-value>>\n/>\n\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n\n\"\"\"/>\n"
},
"$:/core/ui/EditorToolbar/paint": {
"title": "$:/core/ui/EditorToolbar/paint",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/paint",
"caption": "{{$:/language/Buttons/Paint/Caption}}",
"description": "{{$:/language/Buttons/Paint/Hint}}",
"condition": "[<targetTiddler>is[image]]",
"dropdown": "$:/core/ui/EditorToolbar/paint-dropdown",
"text": "\\define toolbar-paint()\n<div style=\"display: inline-block; vertical-align: middle; background-color: $(colour-picker-value)$; width: 1em; height: 1em; border-radius: 50%;\"/>\n\\end\n<$set name=\"colour-picker-value\" value={{$:/config/BitmapEditor/Colour}}>\n<<toolbar-paint>>\n</$set>\n"
},
"$:/core/ui/EditorToolbar/picture-dropdown": {
"title": "$:/core/ui/EditorToolbar/picture-dropdown",
"text": "\\define replacement-text()\n[img[$(imageTitle)$]]\n\\end\n\n''{{$:/language/Buttons/Picture/Hint}}''\n\n<$macrocall $name=\"image-picker\" actions=\"\"\"\n\n<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"replace-selection\"\n\ttext=<<replacement-text>>\n/>\n\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n\n\"\"\"/>\n"
},
"$:/core/ui/EditorToolbar/picture": {
"title": "$:/core/ui/EditorToolbar/picture",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/picture",
"caption": "{{$:/language/Buttons/Picture/Caption}}",
"description": "{{$:/language/Buttons/Picture/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((picture))",
"dropdown": "$:/core/ui/EditorToolbar/picture-dropdown",
"text": ""
},
"$:/core/ui/EditorToolbar/preview-type-dropdown": {
"title": "$:/core/ui/EditorToolbar/preview-type-dropdown",
"text": "\\define preview-type-button()\n<$button tag=\"a\">\n\n<$action-setfield $tiddler=\"$:/state/editpreviewtype\" $value=\"$(previewType)$\"/>\n\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n\n<$transclude tiddler=<<previewType>> field=\"caption\" mode=\"inline\">\n\n<$view tiddler=<<previewType>> field=\"title\" mode=\"inline\"/>\n\n</$transclude> \n\n<$reveal tag=\"span\" state=\"$:/state/editpreviewtype\" type=\"match\" text=<<previewType>> default=\"$:/core/ui/EditTemplate/body/preview/output\">\n\n<$entity entity=\" \"/>\n\n<$entity entity=\"✓\"/>\n\n</$reveal>\n\n</$button>\n\\end\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/EditPreview]!has[draft.of]]\" variable=\"previewType\">\n\n<<preview-type-button>>\n\n</$list>\n"
},
"$:/core/ui/EditorToolbar/preview-type": {
"title": "$:/core/ui/EditorToolbar/preview-type",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/chevron-down",
"caption": "{{$:/language/Buttons/PreviewType/Caption}}",
"description": "{{$:/language/Buttons/PreviewType/Hint}}",
"condition": "[all[shadows+tiddlers]tag[$:/tags/EditPreview]!has[draft.of]butfirst[]limit[1]]",
"button-classes": "tc-text-editor-toolbar-item-adjunct",
"dropdown": "$:/core/ui/EditorToolbar/preview-type-dropdown"
},
"$:/core/ui/EditorToolbar/preview": {
"title": "$:/core/ui/EditorToolbar/preview",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/preview-open",
"custom-icon": "yes",
"caption": "{{$:/language/Buttons/Preview/Caption}}",
"description": "{{$:/language/Buttons/Preview/Hint}}",
"condition": "[<targetTiddler>]",
"button-classes": "tc-text-editor-toolbar-item-start-group",
"shortcuts": "((preview))",
"text": "<$reveal state=\"$:/state/showeditpreview\" type=\"match\" text=\"yes\" tag=\"span\">\n{{$:/core/images/preview-open}}\n<$action-setfield $tiddler=\"$:/state/showeditpreview\" $value=\"no\"/>\n</$reveal>\n<$reveal state=\"$:/state/showeditpreview\" type=\"nomatch\" text=\"yes\" tag=\"span\">\n{{$:/core/images/preview-closed}}\n<$action-setfield $tiddler=\"$:/state/showeditpreview\" $value=\"yes\"/>\n</$reveal>\n"
},
"$:/core/ui/EditorToolbar/quote": {
"title": "$:/core/ui/EditorToolbar/quote",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/quote",
"caption": "{{$:/language/Buttons/Quote/Caption}}",
"description": "{{$:/language/Buttons/Quote/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((quote))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"wrap-lines\"\n\tprefix=\"\n<<<\"\n\tsuffix=\"<<<\"\n/>\n"
},
"$:/core/ui/EditorToolbar/size-dropdown": {
"title": "$:/core/ui/EditorToolbar/size-dropdown",
"text": "\\define lingo-base() $:/language/Buttons/Size/\n\n\\define toolbar-button-size-preset(config-title)\n<$set name=\"width\" filter=\"$(sizePair)$ +[first[]]\">\n\n<$set name=\"height\" filter=\"$(sizePair)$ +[last[]]\">\n\n<$button tag=\"a\">\n\n<$action-setfield\n\t$tiddler=\"\"\"$config-title$/new-width\"\"\"\n\t$value=<<width>>\n/>\n\n<$action-setfield\n\t$tiddler=\"\"\"$config-title$/new-height\"\"\"\n\t$value=<<height>>\n/>\n\n<$action-deletetiddler\n\t$tiddler=\"\"\"$config-title$/presets-popup\"\"\"\n/>\n\n<$text text=<<width>>/> × <$text text=<<height>>/>\n\n</$button>\n\n</$set>\n\n</$set>\n\\end\n\n\\define toolbar-button-size(config-title)\n''{{$:/language/Buttons/Size/Hint}}''\n\n<<lingo Caption/Width>> <$edit-text tag=\"input\" tiddler=\"\"\"$config-title$/new-width\"\"\" default=<<tv-bitmap-editor-width>> focus=\"true\" size=\"8\"/> <<lingo Caption/Height>> <$edit-text tag=\"input\" tiddler=\"\"\"$config-title$/new-height\"\"\" default=<<tv-bitmap-editor-height>> size=\"8\"/> <$button popup=\"\"\"$config-title$/presets-popup\"\"\" class=\"tc-btn-invisible tc-popup-keep\" style=\"width: auto; display: inline-block; background-color: inherit;\" selectedClass=\"tc-selected\">\n{{$:/core/images/down-arrow}}\n</$button>\n\n<$reveal tag=\"span\" state=\"\"\"$config-title$/presets-popup\"\"\" type=\"popup\" position=\"belowleft\" animate=\"yes\">\n\n<div class=\"tc-drop-down tc-popup-keep\">\n\n<$list filter={{$:/config/BitmapEditor/ImageSizes}} variable=\"sizePair\">\n\n<$macrocall $name=\"toolbar-button-size-preset\" 
config-title=\"$config-title$\"/>\n\n</$list>\n\n</div>\n\n</$reveal>\n\n<$button>\n<$action-sendmessage\n\t$message=\"tm-edit-bitmap-operation\"\n\t$param=\"resize\"\n\twidth={{$config-title$/new-width}}\n\theight={{$config-title$/new-height}}\n/>\n<$action-deletetiddler\n\t$tiddler=\"\"\"$config-title$/new-width\"\"\"\n/>\n<$action-deletetiddler\n\t$tiddler=\"\"\"$config-title$/new-height\"\"\"\n/>\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n<<lingo Caption/Resize>>\n</$button>\n\\end\n\n<$macrocall $name=\"toolbar-button-size\" config-title=<<qualify \"$:/state/Size/\">>/>\n"
},
"$:/core/ui/EditorToolbar/size": {
"title": "$:/core/ui/EditorToolbar/size",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/size",
"caption": "{{$:/language/Buttons/Size/Caption}}",
"description": "{{$:/language/Buttons/Size/Hint}}",
"condition": "[<targetTiddler>is[image]]",
"dropdown": "$:/core/ui/EditorToolbar/size-dropdown",
"text": ""
},
"$:/core/ui/EditorToolbar/stamp-dropdown": {
"title": "$:/core/ui/EditorToolbar/stamp-dropdown",
"text": "\\define toolbar-button-stamp-inner()\n<$button tag=\"a\">\n\n<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"replace-selection\"\n\ttext={{$(snippetTitle)$}}\n/>\n\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n\n<$view tiddler=<<snippetTitle>> field=\"caption\" mode=\"inline\">\n\n<$view tiddler=<<snippetTitle>> field=\"title\" mode=\"inline\"/>\n\n</$view>\n\n</$button>\n\\end\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/TextEditor/Snippet]!has[draft.of]sort[caption]]\" variable=\"snippetTitle\">\n\n<<toolbar-button-stamp-inner>>\n\n</$list>\n\n----\n\n<$button tag=\"a\">\n\n<$action-sendmessage\n\t$message=\"tm-new-tiddler\"\n\ttags=\"$:/tags/TextEditor/Snippet\"\n\tcaption={{$:/language/Buttons/Stamp/New/Title}}\n\ttext={{$:/language/Buttons/Stamp/New/Text}}\n/>\n\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n\n<em>\n\n<$text text={{$:/language/Buttons/Stamp/Caption/New}}/>\n\n</em>\n\n</$button>\n"
},
"$:/core/ui/EditorToolbar/stamp": {
"title": "$:/core/ui/EditorToolbar/stamp",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/stamp",
"caption": "{{$:/language/Buttons/Stamp/Caption}}",
"description": "{{$:/language/Buttons/Stamp/Hint}}",
"condition": "[<targetTiddler>!is[image]]",
"shortcuts": "((stamp))",
"dropdown": "$:/core/ui/EditorToolbar/stamp-dropdown",
"text": ""
},
"$:/core/ui/EditorToolbar/strikethrough": {
"title": "$:/core/ui/EditorToolbar/strikethrough",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/strikethrough",
"caption": "{{$:/language/Buttons/Strikethrough/Caption}}",
"description": "{{$:/language/Buttons/Strikethrough/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((strikethrough))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"wrap-selection\"\n\tprefix=\"~~\"\n\tsuffix=\"~~\"\n/>\n"
},
"$:/core/ui/EditorToolbar/subscript": {
"title": "$:/core/ui/EditorToolbar/subscript",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/subscript",
"caption": "{{$:/language/Buttons/Subscript/Caption}}",
"description": "{{$:/language/Buttons/Subscript/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((subscript))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"wrap-selection\"\n\tprefix=\",,\"\n\tsuffix=\",,\"\n/>\n"
},
"$:/core/ui/EditorToolbar/superscript": {
"title": "$:/core/ui/EditorToolbar/superscript",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/superscript",
"caption": "{{$:/language/Buttons/Superscript/Caption}}",
"description": "{{$:/language/Buttons/Superscript/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((superscript))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"wrap-selection\"\n\tprefix=\"^^\"\n\tsuffix=\"^^\"\n/>\n"
},
"$:/core/ui/EditorToolbar/underline": {
"title": "$:/core/ui/EditorToolbar/underline",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/underline",
"caption": "{{$:/language/Buttons/Underline/Caption}}",
"description": "{{$:/language/Buttons/Underline/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((underline))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"wrap-selection\"\n\tprefix=\"__\"\n\tsuffix=\"__\"\n/>\n"
},
"$:/core/ui/EditTemplate/body/editor": {
"title": "$:/core/ui/EditTemplate/body/editor",
"text": "<$edit\n\n field=\"text\"\n class=\"tc-edit-texteditor\"\n placeholder={{$:/language/EditTemplate/Body/Placeholder}}\n\n><$set\n\n name=\"targetTiddler\"\n value=<<currentTiddler>>\n\n><$list\n\n filter=\"[all[shadows+tiddlers]tag[$:/tags/EditorToolbar]!has[draft.of]]\"\n\n><$reveal\n\n type=\"nomatch\"\n state=<<config-visibility-title>>\n text=\"hide\"\n class=\"tc-text-editor-toolbar-item-wrapper\"\n\n><$transclude\n\n tiddler=\"$:/core/ui/EditTemplate/body/toolbar/button\"\n mode=\"inline\"\n\n/></$reveal></$list></$set></$edit>\n"
},
"$:/core/ui/EditTemplate/body/toolbar/button": {
"title": "$:/core/ui/EditTemplate/body/toolbar/button",
"text": "\\define toolbar-button-icon()\n<$list\n\n filter=\"[all[current]!has[custom-icon]]\"\n variable=\"no-custom-icon\"\n\n><$transclude\n\n tiddler={{!!icon}}\n\n/></$list>\n\\end\n\n\\define toolbar-button-tooltip()\n{{!!description}}<$macrocall $name=\"displayshortcuts\" $output=\"text/plain\" shortcuts={{!!shortcuts}} prefix=\"` - [\" separator=\"] [\" suffix=\"]`\"/>\n\\end\n\n\\define toolbar-button()\n<$list\n\n filter={{!!condition}}\n variable=\"list-condition\"\n\n><$wikify\n\n name=\"tooltip-text\"\n text=<<toolbar-button-tooltip>>\n mode=\"inline\"\n output=\"text\"\n\n><$list\n\n filter=\"[all[current]!has[dropdown]]\"\n variable=\"no-dropdown\"\n\n><$button\n\n class=\"tc-btn-invisible $(buttonClasses)$\"\n tooltip=<<tooltip-text>>\n\n><span\n\n data-tw-keyboard-shortcut={{!!shortcuts}}\n\n/><<toolbar-button-icon>><$transclude\n\n tiddler=<<currentTiddler>>\n field=\"text\"\n\n/></$button></$list><$list\n\n filter=\"[all[current]has[dropdown]]\"\n variable=\"dropdown\"\n\n><$set\n\n name=\"dropdown-state\"\n value=<<qualify \"$:/state/EditorToolbarDropdown\">>\n\n><$button\n\n popup=<<dropdown-state>>\n class=\"tc-popup-keep tc-btn-invisible $(buttonClasses)$\"\n selectedClass=\"tc-selected\"\n tooltip=<<tooltip-text>>\n\n><span\n\n data-tw-keyboard-shortcut={{!!shortcuts}}\n\n/><<toolbar-button-icon>><$transclude\n\n tiddler=<<currentTiddler>>\n field=\"text\"\n\n/></$button><$reveal\n\n state=<<dropdown-state>>\n type=\"popup\"\n position=\"below\"\n animate=\"yes\"\n tag=\"span\"\n\n><div\n\n class=\"tc-drop-down tc-popup-keep\"\n\n><$transclude\n\n tiddler={{!!dropdown}}\n mode=\"block\"\n\n/></div></$reveal></$set></$list></$wikify></$list>\n\\end\n\n\\define toolbar-button-outer()\n<$set\n\n name=\"buttonClasses\"\n value={{!!button-classes}}\n\n><<toolbar-button>></$set>\n\\end\n\n<<toolbar-button-outer>>"
},
"$:/core/ui/EditTemplate/body": {
"title": "$:/core/ui/EditTemplate/body",
"tags": "$:/tags/EditTemplate",
"text": "\\define lingo-base() $:/language/EditTemplate/Body/\n\\define config-visibility-title()\n$:/config/EditorToolbarButtons/Visibility/$(currentTiddler)$\n\\end\n<$list filter=\"[is[current]has[_canonical_uri]]\">\n\n<div class=\"tc-message-box\">\n\n<<lingo External/Hint>>\n\n<a href={{!!_canonical_uri}}><$text text={{!!_canonical_uri}}/></a>\n\n<$edit-text field=\"_canonical_uri\" class=\"tc-edit-fields\"></$edit-text>\n\n</div>\n\n</$list>\n\n<$list filter=\"[is[current]!has[_canonical_uri]]\">\n\n<$reveal state=\"$:/state/showeditpreview\" type=\"match\" text=\"yes\">\n\n<div class=\"tc-tiddler-preview\">\n\n<$transclude tiddler=\"$:/core/ui/EditTemplate/body/editor\" mode=\"inline\"/>\n\n<div class=\"tc-tiddler-preview-preview\">\n\n<$transclude tiddler={{$:/state/editpreviewtype}} mode=\"inline\">\n\n<$transclude tiddler=\"$:/core/ui/EditTemplate/body/preview/output\" mode=\"inline\"/>\n\n</$transclude>\n\n</div>\n\n</div>\n\n</$reveal>\n\n<$reveal state=\"$:/state/showeditpreview\" type=\"nomatch\" text=\"yes\">\n\n<$transclude tiddler=\"$:/core/ui/EditTemplate/body/editor\" mode=\"inline\"/>\n\n</$reveal>\n\n</$list>\n"
},
"$:/core/ui/EditTemplate/controls": {
"title": "$:/core/ui/EditTemplate/controls",
"tags": "$:/tags/EditTemplate",
"text": "\\define config-title()\n$:/config/EditToolbarButtons/Visibility/$(listItem)$\n\\end\n<div class=\"tc-tiddler-title tc-tiddler-edit-title\">\n<$view field=\"title\"/>\n<span class=\"tc-tiddler-controls tc-titlebar\"><$list filter=\"[all[shadows+tiddlers]tag[$:/tags/EditToolbar]!has[draft.of]]\" variable=\"listItem\"><$reveal type=\"nomatch\" state=<<config-title>> text=\"hide\"><$transclude tiddler=<<listItem>>/></$reveal></$list></span>\n<div style=\"clear: both;\"></div>\n</div>\n"
},
"$:/core/ui/EditTemplate/fields": {
"title": "$:/core/ui/EditTemplate/fields",
"tags": "$:/tags/EditTemplate",
"text": "\\define lingo-base() $:/language/EditTemplate/\n\\define config-title()\n$:/config/EditTemplateFields/Visibility/$(currentField)$\n\\end\n\n\\define config-filter()\n[[hide]] -[title{$(config-title)$}]\n\\end\n\n\\define new-field-inner()\n<$reveal type=\"nomatch\" text=\"\" default=<<name>>>\n<$button>\n<$action-sendmessage $message=\"tm-add-field\" $name=<<name>> $value=<<value>>/>\n<$action-deletetiddler $tiddler=\"$:/temp/newfieldname\"/>\n<$action-deletetiddler $tiddler=\"$:/temp/newfieldvalue\"/>\n<<lingo Fields/Add/Button>>\n</$button>\n</$reveal>\n<$reveal type=\"match\" text=\"\" default=<<name>>>\n<$button>\n<<lingo Fields/Add/Button>>\n</$button>\n</$reveal>\n\\end\n\n\\define new-field()\n<$set name=\"name\" value={{$:/temp/newfieldname}}>\n<$set name=\"value\" value={{$:/temp/newfieldvalue}}>\n<<new-field-inner>>\n</$set>\n</$set>\n\\end\n\n<div class=\"tc-edit-fields\">\n<table class=\"tc-edit-fields\">\n<tbody>\n<$list filter=\"[all[current]fields[]] +[sort[title]]\" variable=\"currentField\">\n<$list filter=<<config-filter>> variable=\"temp\">\n<tr class=\"tc-edit-field\">\n<td class=\"tc-edit-field-name\">\n<$text text=<<currentField>>/>:</td>\n<td class=\"tc-edit-field-value\">\n<$edit-text tiddler=<<currentTiddler>> field=<<currentField>> placeholder={{$:/language/EditTemplate/Fields/Add/Value/Placeholder}}/>\n</td>\n<td class=\"tc-edit-field-remove\">\n<$button class=\"tc-btn-invisible\" tooltip={{$:/language/EditTemplate/Field/Remove/Hint}} aria-label={{$:/language/EditTemplate/Field/Remove/Caption}}>\n<$action-deletefield $field=<<currentField>>/>\n{{$:/core/images/delete-button}}\n</$button>\n</td>\n</tr>\n</$list>\n</$list>\n</tbody>\n</table>\n</div>\n\n<$fieldmangler>\n<div class=\"tc-edit-field-add\">\n<em class=\"tc-edit\">\n<<lingo Fields/Add/Prompt>>\n</em>\n<span class=\"tc-edit-field-add-name\">\n<$edit-text tiddler=\"$:/temp/newfieldname\" tag=\"input\" default=\"\" 
placeholder={{$:/language/EditTemplate/Fields/Add/Name/Placeholder}} focusPopup=<<qualify \"$:/state/popup/field-dropdown\">> class=\"tc-edit-texteditor tc-popup-handle\"/>\n</span>\n<$button popup=<<qualify \"$:/state/popup/field-dropdown\">> class=\"tc-btn-invisible tc-btn-dropdown\" tooltip={{$:/language/EditTemplate/Field/Dropdown/Hint}} aria-label={{$:/language/EditTemplate/Field/Dropdown/Caption}}>{{$:/core/images/down-arrow}}</$button>\n<$reveal state=<<qualify \"$:/state/popup/field-dropdown\">> type=\"nomatch\" text=\"\" default=\"\">\n<div class=\"tc-block-dropdown tc-edit-type-dropdown\">\n<$linkcatcher to=\"$:/temp/newfieldname\">\n<div class=\"tc-dropdown-item\">\n<<lingo Fields/Add/Dropdown/User>>\n</div>\n<$list filter=\"[!is[shadow]!is[system]fields[]sort[]] -created -creator -draft.of -draft.title -modified -modifier -tags -text -title -type\" variable=\"currentField\">\n<$link to=<<currentField>>>\n<<currentField>>\n</$link>\n</$list>\n<div class=\"tc-dropdown-item\">\n<<lingo Fields/Add/Dropdown/System>>\n</div>\n<$list filter=\"[fields[]sort[]] -[!is[shadow]!is[system]fields[]]\" variable=\"currentField\">\n<$link to=<<currentField>>>\n<<currentField>>\n</$link>\n</$list>\n</$linkcatcher>\n</div>\n</$reveal>\n<span class=\"tc-edit-field-add-value\">\n<$edit-text tiddler=\"$:/temp/newfieldvalue\" tag=\"input\" default=\"\" placeholder={{$:/language/EditTemplate/Fields/Add/Value/Placeholder}} class=\"tc-edit-texteditor\"/>\n</span>\n<span class=\"tc-edit-field-add-button\">\n<$macrocall $name=\"new-field\"/>\n</span>\n</div>\n</$fieldmangler>\n\n"
},
"$:/core/ui/EditTemplate/body/preview/output": {
"title": "$:/core/ui/EditTemplate/body/preview/output",
"tags": "$:/tags/EditPreview",
"caption": "{{$:/language/EditTemplate/Body/Preview/Type/Output}}",
"text": "<$set name=\"tv-tiddler-preview\" value=\"yes\">\n\n<$transclude />\n\n</$set>\n"
},
"$:/core/ui/EditTemplate/shadow": {
"title": "$:/core/ui/EditTemplate/shadow",
"tags": "$:/tags/EditTemplate",
"text": "\\define lingo-base() $:/language/EditTemplate/Shadow/\n\\define pluginLinkBody()\n<$link to=\"\"\"$(pluginTitle)$\"\"\">\n<$text text=\"\"\"$(pluginTitle)$\"\"\"/>\n</$link>\n\\end\n<$list filter=\"[all[current]get[draft.of]is[shadow]!is[tiddler]]\">\n\n<$list filter=\"[all[current]shadowsource[]]\" variable=\"pluginTitle\">\n\n<$set name=\"pluginLink\" value=<<pluginLinkBody>>>\n<div class=\"tc-message-box\">\n\n<<lingo Warning>>\n\n</div>\n</$set>\n</$list>\n\n</$list>\n\n<$list filter=\"[all[current]get[draft.of]is[shadow]is[tiddler]]\">\n\n<$list filter=\"[all[current]shadowsource[]]\" variable=\"pluginTitle\">\n\n<$set name=\"pluginLink\" value=<<pluginLinkBody>>>\n<div class=\"tc-message-box\">\n\n<<lingo OverriddenWarning>>\n\n</div>\n</$set>\n</$list>\n\n</$list>"
},
"$:/core/ui/EditTemplate/tags": {
"title": "$:/core/ui/EditTemplate/tags",
"tags": "$:/tags/EditTemplate",
"text": "\\define lingo-base() $:/language/EditTemplate/\n\\define tag-styles()\nbackground-color:$(backgroundColor)$;\nfill:$(foregroundColor)$;\ncolor:$(foregroundColor)$;\n\\end\n\\define tag-body-inner(colour,fallbackTarget,colourA,colourB)\n<$vars foregroundColor=<<contrastcolour target:\"\"\"$colour$\"\"\" fallbackTarget:\"\"\"$fallbackTarget$\"\"\" colourA:\"\"\"$colourA$\"\"\" colourB:\"\"\"$colourB$\"\"\">> backgroundColor=\"\"\"$colour$\"\"\">\n<span style=<<tag-styles>> class=\"tc-tag-label\">\n<$view field=\"title\" format=\"text\" />\n<$button message=\"tm-remove-tag\" param={{!!title}} class=\"tc-btn-invisible tc-remove-tag-button\">×</$button>\n</span>\n</$vars>\n\\end\n\\define tag-body(colour,palette)\n<$macrocall $name=\"tag-body-inner\" colour=\"\"\"$colour$\"\"\" fallbackTarget={{$palette$##tag-background}} colourA={{$palette$##foreground}} colourB={{$palette$##background}}/>\n\\end\n<div class=\"tc-edit-tags\">\n<$fieldmangler>\n<$list filter=\"[all[current]tags[]sort[title]]\" storyview=\"pop\">\n<$macrocall $name=\"tag-body\" colour={{!!color}} palette={{$:/palette}}/>\n</$list>\n\n<div class=\"tc-edit-add-tag\">\n<span class=\"tc-add-tag-name\">\n<$edit-text tiddler=\"$:/temp/NewTagName\" tag=\"input\" default=\"\" placeholder={{$:/language/EditTemplate/Tags/Add/Placeholder}} focusPopup=<<qualify \"$:/state/popup/tags-auto-complete\">> class=\"tc-edit-texteditor tc-popup-handle\"/>\n</span> <$button popup=<<qualify \"$:/state/popup/tags-auto-complete\">> class=\"tc-btn-invisible tc-btn-dropdown\" tooltip={{$:/language/EditTemplate/Tags/Dropdown/Hint}} aria-label={{$:/language/EditTemplate/Tags/Dropdown/Caption}}>{{$:/core/images/down-arrow}}</$button> <span class=\"tc-add-tag-button\">\n<$button message=\"tm-add-tag\" param={{$:/temp/NewTagName}} set=\"$:/temp/NewTagName\" setTo=\"\" class=\"\">\n<<lingo Tags/Add/Button>>\n</$button>\n</span>\n</div>\n\n<div class=\"tc-block-dropdown-wrapper\">\n<$reveal state=<<qualify 
\"$:/state/popup/tags-auto-complete\">> type=\"nomatch\" text=\"\" default=\"\">\n<div class=\"tc-block-dropdown\">\n<$linkcatcher set=\"$:/temp/NewTagName\" setTo=\"\" message=\"tm-add-tag\">\n<$list filter=\"[tags[]!is[system]search:title{$:/temp/NewTagName}sort[]]\">\n{{||$:/core/ui/Components/tag-link}}\n</$list>\n<hr>\n<$list filter=\"[tags[]is[system]search:title{$:/temp/NewTagName}sort[]]\">\n{{||$:/core/ui/Components/tag-link}}\n</$list>\n</$linkcatcher>\n</div>\n</$reveal>\n</div>\n</$fieldmangler>\n</div>"
},
"$:/core/ui/EditTemplate/title": {
"title": "$:/core/ui/EditTemplate/title",
"tags": "$:/tags/EditTemplate",
"text": "<$vars pattern=\"\"\"[\\|\\[\\]{}]\"\"\" bad-chars=\"\"\"`| [ ] { }`\"\"\">\n\n<$list filter=\"[is[current]regexp:draft.title<pattern>]\" variable=\"listItem\">\n\n<div class=\"tc-message-box\">\n\n{{$:/language/EditTemplate/Title/BadCharacterWarning}}\n\n</div>\n\n</$list>\n\n</$vars>\n\n<$edit-text field=\"draft.title\" class=\"tc-titlebar tc-edit-texteditor\" focus=\"true\"/>\n"
},
"$:/core/ui/EditTemplate/type": {
"title": "$:/core/ui/EditTemplate/type",
"tags": "$:/tags/EditTemplate",
"text": "\\define lingo-base() $:/language/EditTemplate/\n<div class=\"tc-type-selector\"><$fieldmangler>\n<em class=\"tc-edit\"><<lingo Type/Prompt>></em> <$edit-text field=\"type\" tag=\"input\" default=\"\" placeholder={{$:/language/EditTemplate/Type/Placeholder}} focusPopup=<<qualify \"$:/state/popup/type-dropdown\">> class=\"tc-edit-typeeditor tc-popup-handle\"/> <$button popup=<<qualify \"$:/state/popup/type-dropdown\">> class=\"tc-btn-invisible tc-btn-dropdown\" tooltip={{$:/language/EditTemplate/Type/Dropdown/Hint}} aria-label={{$:/language/EditTemplate/Type/Dropdown/Caption}}>{{$:/core/images/down-arrow}}</$button> <$button message=\"tm-remove-field\" param=\"type\" class=\"tc-btn-invisible tc-btn-icon\" tooltip={{$:/language/EditTemplate/Type/Delete/Hint}} aria-label={{$:/language/EditTemplate/Type/Delete/Caption}}>{{$:/core/images/delete-button}}</$button>\n</$fieldmangler></div>\n\n<div class=\"tc-block-dropdown-wrapper\">\n<$reveal state=<<qualify \"$:/state/popup/type-dropdown\">> type=\"nomatch\" text=\"\" default=\"\">\n<div class=\"tc-block-dropdown tc-edit-type-dropdown\">\n<$linkcatcher to=\"!!type\">\n<$list filter='[all[shadows+tiddlers]prefix[$:/language/Docs/Types/]each[group]sort[group]]'>\n<div class=\"tc-dropdown-item\">\n<$text text={{!!group}}/>\n</div>\n<$list filter=\"[all[shadows+tiddlers]prefix[$:/language/Docs/Types/]group{!!group}] +[sort[description]]\"><$link to={{!!name}}><$view field=\"description\"/> (<$view field=\"name\"/>)</$link>\n</$list>\n</$list>\n</$linkcatcher>\n</div>\n</$reveal>\n</div>"
},
"$:/core/ui/EditTemplate": {
"title": "$:/core/ui/EditTemplate",
"text": "\\define frame-classes()\ntc-tiddler-frame tc-tiddler-edit-frame $(missingTiddlerClass)$ $(shadowTiddlerClass)$ $(systemTiddlerClass)$\n\\end\n<div class=<<frame-classes>>>\n<$set name=\"storyTiddler\" value=<<currentTiddler>>>\n<$keyboard key=\"((cancel-edit-tiddler))\" message=\"tm-cancel-tiddler\">\n<$keyboard key=\"((save-tiddler))\" message=\"tm-save-tiddler\">\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/EditTemplate]!has[draft.of]]\" variable=\"listItem\">\n<$transclude tiddler=<<listItem>>/>\n</$list>\n</$keyboard>\n</$keyboard>\n</$set>\n</div>\n"
},
"$:/core/ui/Buttons/cancel": {
"title": "$:/core/ui/Buttons/cancel",
"tags": "$:/tags/EditToolbar",
"caption": "{{$:/core/images/cancel-button}} {{$:/language/Buttons/Cancel/Caption}}",
"description": "{{$:/language/Buttons/Cancel/Hint}}",
"text": "<$button message=\"tm-cancel-tiddler\" tooltip={{$:/language/Buttons/Cancel/Hint}} aria-label={{$:/language/Buttons/Cancel/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/cancel-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Cancel/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/delete": {
"title": "$:/core/ui/Buttons/delete",
"tags": "$:/tags/EditToolbar $:/tags/ViewToolbar",
"caption": "{{$:/core/images/delete-button}} {{$:/language/Buttons/Delete/Caption}}",
"description": "{{$:/language/Buttons/Delete/Hint}}",
"text": "<$button message=\"tm-delete-tiddler\" tooltip={{$:/language/Buttons/Delete/Hint}} aria-label={{$:/language/Buttons/Delete/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/delete-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Delete/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/save": {
"title": "$:/core/ui/Buttons/save",
"tags": "$:/tags/EditToolbar",
"caption": "{{$:/core/images/done-button}} {{$:/language/Buttons/Save/Caption}}",
"description": "{{$:/language/Buttons/Save/Hint}}",
"text": "<$fieldmangler><$button tooltip={{$:/language/Buttons/Save/Hint}} aria-label={{$:/language/Buttons/Save/Caption}} class=<<tv-config-toolbar-class>>>\n<$action-sendmessage $message=\"tm-add-tag\" $param={{$:/temp/NewTagName}}/>\n<$action-deletetiddler $tiddler=\"$:/temp/NewTagName\"/>\n<$action-sendmessage $message=\"tm-add-field\" $name={{$:/temp/newfieldname}} $value={{$:/temp/newfieldvalue}}/>\n<$action-deletetiddler $tiddler=\"$:/temp/newfieldname\"/>\n<$action-deletetiddler $tiddler=\"$:/temp/newfieldvalue\"/>\n<$action-sendmessage $message=\"tm-save-tiddler\"/>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/done-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Save/Caption}}/></span>\n</$list>\n</$button>\n</$fieldmangler>\n"
},
"$:/core/Filters/AllTags": {
"title": "$:/core/Filters/AllTags",
"tags": "$:/tags/Filter",
"filter": "[tags[]!is[system]sort[title]]",
"description": "{{$:/language/Filters/AllTags}}",
"text": ""
},
"$:/core/Filters/AllTiddlers": {
"title": "$:/core/Filters/AllTiddlers",
"tags": "$:/tags/Filter",
"filter": "[!is[system]sort[title]]",
"description": "{{$:/language/Filters/AllTiddlers}}",
"text": ""
},
"$:/core/Filters/Drafts": {
"title": "$:/core/Filters/Drafts",
"tags": "$:/tags/Filter",
"filter": "[has[draft.of]sort[title]]",
"description": "{{$:/language/Filters/Drafts}}",
"text": ""
},
"$:/core/Filters/Missing": {
"title": "$:/core/Filters/Missing",
"tags": "$:/tags/Filter",
"filter": "[all[missing]sort[title]]",
"description": "{{$:/language/Filters/Missing}}",
"text": ""
},
"$:/core/Filters/Orphans": {
"title": "$:/core/Filters/Orphans",
"tags": "$:/tags/Filter",
"filter": "[all[orphans]sort[title]]",
"description": "{{$:/language/Filters/Orphans}}",
"text": ""
},
"$:/core/Filters/OverriddenShadowTiddlers": {
"title": "$:/core/Filters/OverriddenShadowTiddlers",
"tags": "$:/tags/Filter",
"filter": "[is[shadow]is[tiddler]]",
"description": "{{$:/language/Filters/OverriddenShadowTiddlers}}",
"text": ""
},
"$:/core/Filters/RecentSystemTiddlers": {
"title": "$:/core/Filters/RecentSystemTiddlers",
"tags": "$:/tags/Filter",
"filter": "[has[modified]!sort[modified]limit[50]]",
"description": "{{$:/language/Filters/RecentSystemTiddlers}}",
"text": ""
},
"$:/core/Filters/RecentTiddlers": {
"title": "$:/core/Filters/RecentTiddlers",
"tags": "$:/tags/Filter",
"filter": "[!is[system]has[modified]!sort[modified]limit[50]]",
"description": "{{$:/language/Filters/RecentTiddlers}}",
"text": ""
},
"$:/core/Filters/ShadowTiddlers": {
"title": "$:/core/Filters/ShadowTiddlers",
"tags": "$:/tags/Filter",
"filter": "[all[shadows]sort[title]]",
"description": "{{$:/language/Filters/ShadowTiddlers}}",
"text": ""
},
"$:/core/Filters/SystemTags": {
"title": "$:/core/Filters/SystemTags",
"tags": "$:/tags/Filter",
"filter": "[all[shadows+tiddlers]tags[]is[system]sort[title]]",
"description": "{{$:/language/Filters/SystemTags}}",
"text": ""
},
"$:/core/Filters/SystemTiddlers": {
"title": "$:/core/Filters/SystemTiddlers",
"tags": "$:/tags/Filter",
"filter": "[is[system]sort[title]]",
"description": "{{$:/language/Filters/SystemTiddlers}}",
"text": ""
},
"$:/core/Filters/TypedTiddlers": {
"title": "$:/core/Filters/TypedTiddlers",
"tags": "$:/tags/Filter",
"filter": "[!is[system]has[type]each[type]sort[type]] -[type[text/vnd.tiddlywiki]]",
"description": "{{$:/language/Filters/TypedTiddlers}}",
"text": ""
},
"$:/core/ui/ImportListing": {
"title": "$:/core/ui/ImportListing",
"text": "\\define lingo-base() $:/language/Import/\n\\define messageField()\nmessage-$(payloadTiddler)$\n\\end\n\\define selectionField()\nselection-$(payloadTiddler)$\n\\end\n\\define previewPopupState()\n$(currentTiddler)$!!popup-$(payloadTiddler)$\n\\end\n<table>\n<tbody>\n<tr>\n<th>\n<<lingo Listing/Select/Caption>>\n</th>\n<th>\n<<lingo Listing/Title/Caption>>\n</th>\n<th>\n<<lingo Listing/Status/Caption>>\n</th>\n</tr>\n<$list filter=\"[all[current]plugintiddlers[]sort[title]]\" variable=\"payloadTiddler\">\n<tr>\n<td>\n<$checkbox field=<<selectionField>> checked=\"checked\" unchecked=\"unchecked\" default=\"checked\"/>\n</td>\n<td>\n<$reveal type=\"nomatch\" state=<<previewPopupState>> text=\"yes\">\n<$button class=\"tc-btn-invisible tc-btn-dropdown\" set=<<previewPopupState>> setTo=\"yes\">\n{{$:/core/images/right-arrow}} <$text text=<<payloadTiddler>>/>\n</$button>\n</$reveal>\n<$reveal type=\"match\" state=<<previewPopupState>> text=\"yes\">\n<$button class=\"tc-btn-invisible tc-btn-dropdown\" set=<<previewPopupState>> setTo=\"no\">\n{{$:/core/images/down-arrow}} <$text text=<<payloadTiddler>>/>\n</$button>\n</$reveal>\n</td>\n<td>\n<$view field=<<messageField>>/>\n</td>\n</tr>\n<tr>\n<td colspan=\"3\">\n<$reveal type=\"match\" text=\"yes\" state=<<previewPopupState>>>\n<$transclude subtiddler=<<payloadTiddler>> mode=\"block\"/>\n</$reveal>\n</td>\n</tr>\n</$list>\n</tbody>\n</table>\n"
},
"$:/core/ui/ListItemTemplate": {
"title": "$:/core/ui/ListItemTemplate",
"text": "<div class=\"tc-menu-list-item\">\n<$link to={{!!title}}>\n<$view field=\"title\"/>\n</$link>\n</div>"
},
"$:/core/ui/MissingTemplate": {
"title": "$:/core/ui/MissingTemplate",
"text": "<div class=\"tc-tiddler-missing\">\n<$button popup=<<qualify \"$:/state/popup/missing\">> class=\"tc-btn-invisible tc-missing-tiddler-label\">\n<$view field=\"title\" format=\"text\" />\n</$button>\n<$reveal state=<<qualify \"$:/state/popup/missing\">> type=\"popup\" position=\"below\" animate=\"yes\">\n<div class=\"tc-drop-down\">\n<$transclude tiddler=\"$:/core/ui/ListItemTemplate\"/>\n<hr>\n<$list filter=\"[all[current]backlinks[]sort[title]]\" template=\"$:/core/ui/ListItemTemplate\"/>\n</div>\n</$reveal>\n</div>\n"
},
"$:/core/ui/MoreSideBar/All": {
"title": "$:/core/ui/MoreSideBar/All",
"tags": "$:/tags/MoreSideBar",
"caption": "{{$:/language/SideBar/All/Caption}}",
"text": "<$list filter={{$:/core/Filters/AllTiddlers!!filter}} template=\"$:/core/ui/ListItemTemplate\"/>\n"
},
"$:/core/ui/MoreSideBar/Drafts": {
"title": "$:/core/ui/MoreSideBar/Drafts",
"tags": "$:/tags/MoreSideBar",
"caption": "{{$:/language/SideBar/Drafts/Caption}}",
"text": "<$list filter={{$:/core/Filters/Drafts!!filter}} template=\"$:/core/ui/ListItemTemplate\"/>\n"
},
"$:/core/ui/MoreSideBar/Missing": {
"title": "$:/core/ui/MoreSideBar/Missing",
"tags": "$:/tags/MoreSideBar",
"caption": "{{$:/language/SideBar/Missing/Caption}}",
"text": "<$list filter={{$:/core/Filters/Missing!!filter}} template=\"$:/core/ui/MissingTemplate\"/>\n"
},
"$:/core/ui/MoreSideBar/Orphans": {
"title": "$:/core/ui/MoreSideBar/Orphans",
"tags": "$:/tags/MoreSideBar",
"caption": "{{$:/language/SideBar/Orphans/Caption}}",
"text": "<$list filter={{$:/core/Filters/Orphans!!filter}} template=\"$:/core/ui/ListItemTemplate\"/>\n"
},
"$:/core/ui/MoreSideBar/Recent": {
"title": "$:/core/ui/MoreSideBar/Recent",
"tags": "$:/tags/MoreSideBar",
"caption": "{{$:/language/SideBar/Recent/Caption}}",
"text": "<$macrocall $name=\"timeline\" format={{$:/language/RecentChanges/DateFormat}}/>\n"
},
"$:/core/ui/MoreSideBar/Shadows": {
"title": "$:/core/ui/MoreSideBar/Shadows",
"tags": "$:/tags/MoreSideBar",
"caption": "{{$:/language/SideBar/Shadows/Caption}}",
"text": "<$list filter={{$:/core/Filters/ShadowTiddlers!!filter}} template=\"$:/core/ui/ListItemTemplate\"/>\n"
},
"$:/core/ui/MoreSideBar/System": {
"title": "$:/core/ui/MoreSideBar/System",
"tags": "$:/tags/MoreSideBar",
"caption": "{{$:/language/SideBar/System/Caption}}",
"text": "<$list filter={{$:/core/Filters/SystemTiddlers!!filter}} template=\"$:/core/ui/ListItemTemplate\"/>\n"
},
"$:/core/ui/MoreSideBar/Tags": {
"title": "$:/core/ui/MoreSideBar/Tags",
"tags": "$:/tags/MoreSideBar",
"caption": "{{$:/language/SideBar/Tags/Caption}}",
"text": "<$set name=\"tv-config-toolbar-icons\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-text\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-class\" value=\"\">\n\n{{$:/core/ui/Buttons/tag-manager}}\n\n</$set>\n\n</$set>\n\n</$set>\n\n<$list filter={{$:/core/Filters/AllTags!!filter}}>\n\n<$transclude tiddler=\"$:/core/ui/TagTemplate\"/>\n\n</$list>\n\n<hr class=\"tc-untagged-separator\">\n\n{{$:/core/ui/UntaggedTemplate}}\n"
},
"$:/core/ui/MoreSideBar/Types": {
"title": "$:/core/ui/MoreSideBar/Types",
"tags": "$:/tags/MoreSideBar",
"caption": "{{$:/language/SideBar/Types/Caption}}",
"text": "<$list filter={{$:/core/Filters/TypedTiddlers!!filter}}>\n<div class=\"tc-menu-list-item\">\n<$view field=\"type\"/>\n<$list filter=\"[type{!!type}!is[system]sort[title]]\">\n<div class=\"tc-menu-list-subitem\">\n<$link to={{!!title}}><$view field=\"title\"/></$link>\n</div>\n</$list>\n</div>\n</$list>\n"
},
"$:/core/ui/Buttons/advanced-search": {
"title": "$:/core/ui/Buttons/advanced-search",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/advanced-search-button}} {{$:/language/Buttons/AdvancedSearch/Caption}}",
"description": "{{$:/language/Buttons/AdvancedSearch/Hint}}",
"text": "\\define control-panel-button(class)\n<$button to=\"$:/AdvancedSearch\" tooltip={{$:/language/Buttons/AdvancedSearch/Hint}} aria-label={{$:/language/Buttons/AdvancedSearch/Caption}} class=\"\"\"$(tv-config-toolbar-class)$ $class$\"\"\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/advanced-search-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/AdvancedSearch/Caption}}/></span>\n</$list>\n</$button>\n\\end\n\n<$list filter=\"[list[$:/StoryList]] +[field:title[$:/AdvancedSearch]]\" emptyMessage=<<control-panel-button>>>\n<<control-panel-button \"tc-selected\">>\n</$list>\n"
},
"$:/core/ui/Buttons/close-all": {
"title": "$:/core/ui/Buttons/close-all",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/close-all-button}} {{$:/language/Buttons/CloseAll/Caption}}",
"description": "{{$:/language/Buttons/CloseAll/Hint}}",
"text": "<$button message=\"tm-close-all-tiddlers\" tooltip={{$:/language/Buttons/CloseAll/Hint}} aria-label={{$:/language/Buttons/CloseAll/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/close-all-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/CloseAll/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/control-panel": {
"title": "$:/core/ui/Buttons/control-panel",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/options-button}} {{$:/language/Buttons/ControlPanel/Caption}}",
"description": "{{$:/language/Buttons/ControlPanel/Hint}}",
"text": "\\define control-panel-button(class)\n<$button to=\"$:/ControlPanel\" tooltip={{$:/language/Buttons/ControlPanel/Hint}} aria-label={{$:/language/Buttons/ControlPanel/Caption}} class=\"\"\"$(tv-config-toolbar-class)$ $class$\"\"\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/options-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/ControlPanel/Caption}}/></span>\n</$list>\n</$button>\n\\end\n\n<$list filter=\"[list[$:/StoryList]] +[field:title[$:/ControlPanel]]\" emptyMessage=<<control-panel-button>>>\n<<control-panel-button \"tc-selected\">>\n</$list>\n"
},
"$:/core/ui/Buttons/encryption": {
"title": "$:/core/ui/Buttons/encryption",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/locked-padlock}} {{$:/language/Buttons/Encryption/Caption}}",
"description": "{{$:/language/Buttons/Encryption/Hint}}",
"text": "<$reveal type=\"match\" state=\"$:/isEncrypted\" text=\"yes\">\n<$button message=\"tm-clear-password\" tooltip={{$:/language/Buttons/Encryption/ClearPassword/Hint}} aria-label={{$:/language/Buttons/Encryption/ClearPassword/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/locked-padlock}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Encryption/ClearPassword/Caption}}/></span>\n</$list>\n</$button>\n</$reveal>\n<$reveal type=\"nomatch\" state=\"$:/isEncrypted\" text=\"yes\">\n<$button message=\"tm-set-password\" tooltip={{$:/language/Buttons/Encryption/SetPassword/Hint}} aria-label={{$:/language/Buttons/Encryption/SetPassword/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/unlocked-padlock}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Encryption/SetPassword/Caption}}/></span>\n</$list>\n</$button>\n</$reveal>"
},
"$:/core/ui/Buttons/export-page": {
"title": "$:/core/ui/Buttons/export-page",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/export-button}} {{$:/language/Buttons/ExportPage/Caption}}",
"description": "{{$:/language/Buttons/ExportPage/Hint}}",
"text": "<$macrocall $name=\"exportButton\" exportFilter=\"[!is[system]sort[title]]\" lingoBase=\"$:/language/Buttons/ExportPage/\"/>"
},
"$:/core/ui/Buttons/fold-all": {
"title": "$:/core/ui/Buttons/fold-all",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/fold-all-button}} {{$:/language/Buttons/FoldAll/Caption}}",
"description": "{{$:/language/Buttons/FoldAll/Hint}}",
"text": "<$button tooltip={{$:/language/Buttons/FoldAll/Hint}} aria-label={{$:/language/Buttons/FoldAll/Caption}} class=<<tv-config-toolbar-class>>>\n<$action-sendmessage $message=\"tm-fold-all-tiddlers\" $param=<<currentTiddler>> foldedStatePrefix=\"$:/state/folded/\"/>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\" variable=\"listItem\">\n{{$:/core/images/fold-all-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/FoldAll/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/full-screen": {
"title": "$:/core/ui/Buttons/full-screen",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/full-screen-button}} {{$:/language/Buttons/FullScreen/Caption}}",
"description": "{{$:/language/Buttons/FullScreen/Hint}}",
"text": "<$button message=\"tm-full-screen\" tooltip={{$:/language/Buttons/FullScreen/Hint}} aria-label={{$:/language/Buttons/FullScreen/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/full-screen-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/FullScreen/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/home": {
"title": "$:/core/ui/Buttons/home",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/home-button}} {{$:/language/Buttons/Home/Caption}}",
"description": "{{$:/language/Buttons/Home/Hint}}",
"text": "<$button message=\"tm-home\" tooltip={{$:/language/Buttons/Home/Hint}} aria-label={{$:/language/Buttons/Home/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/home-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Home/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/import": {
"title": "$:/core/ui/Buttons/import",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/import-button}} {{$:/language/Buttons/Import/Caption}}",
"description": "{{$:/language/Buttons/Import/Hint}}",
"text": "<div class=\"tc-file-input-wrapper\">\n<$button tooltip={{$:/language/Buttons/Import/Hint}} aria-label={{$:/language/Buttons/Import/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/import-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Import/Caption}}/></span>\n</$list>\n</$button>\n<$browse tooltip={{$:/language/Buttons/Import/Hint}}/>\n</div>"
},
"$:/core/ui/Buttons/language": {
"title": "$:/core/ui/Buttons/language",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/globe}} {{$:/language/Buttons/Language/Caption}}",
"description": "{{$:/language/Buttons/Language/Hint}}",
"text": "\\define flag-title()\n$(languagePluginTitle)$/icon\n\\end\n<span class=\"tc-popup-keep\">\n<$button popup=<<qualify \"$:/state/popup/language\">> tooltip={{$:/language/Buttons/Language/Hint}} aria-label={{$:/language/Buttons/Language/Caption}} class=<<tv-config-toolbar-class>> selectedClass=\"tc-selected\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n<span class=\"tc-image-button\">\n<$set name=\"languagePluginTitle\" value={{$:/language}}>\n<$image source=<<flag-title>>/>\n</$set>\n</span>\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Language/Caption}}/></span>\n</$list>\n</$button>\n</span>\n<$reveal state=<<qualify \"$:/state/popup/language\">> type=\"popup\" position=\"below\" animate=\"yes\">\n<div class=\"tc-drop-down tc-drop-down-language-chooser\">\n<$linkcatcher to=\"$:/language\">\n<$list filter=\"[[$:/languages/en-GB]] [plugin-type[language]sort[description]]\">\n<$link>\n<span class=\"tc-drop-down-bullet\">\n<$reveal type=\"match\" state=\"$:/language\" text=<<currentTiddler>>>\n•\n</$reveal>\n<$reveal type=\"nomatch\" state=\"$:/language\" text=<<currentTiddler>>>\n \n</$reveal>\n</span>\n<span class=\"tc-image-button\">\n<$set name=\"languagePluginTitle\" value=<<currentTiddler>>>\n<$transclude subtiddler=<<flag-title>>>\n<$list filter=\"[all[current]field:title[$:/languages/en-GB]]\">\n<$transclude tiddler=\"$:/languages/en-GB/icon\"/>\n</$list>\n</$transclude>\n</$set>\n</span>\n<$view field=\"description\">\n<$view field=\"name\">\n<$view field=\"title\"/>\n</$view>\n</$view>\n</$link>\n</$list>\n</$linkcatcher>\n</div>\n</$reveal>"
},
"$:/core/ui/Buttons/more-page-actions": {
"title": "$:/core/ui/Buttons/more-page-actions",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/down-arrow}} {{$:/language/Buttons/More/Caption}}",
"description": "{{$:/language/Buttons/More/Hint}}",
"text": "\\define config-title()\n$:/config/PageControlButtons/Visibility/$(listItem)$\n\\end\n<$button popup=<<qualify \"$:/state/popup/more\">> tooltip={{$:/language/Buttons/More/Hint}} aria-label={{$:/language/Buttons/More/Caption}} class=<<tv-config-toolbar-class>> selectedClass=\"tc-selected\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/down-arrow}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/More/Caption}}/></span>\n</$list>\n</$button><$reveal state=<<qualify \"$:/state/popup/more\">> type=\"popup\" position=\"below\" animate=\"yes\">\n\n<div class=\"tc-drop-down\">\n\n<$set name=\"tv-config-toolbar-icons\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-text\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-class\" value=\"tc-btn-invisible\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/PageControls]!has[draft.of]] -[[$:/core/ui/Buttons/more-page-actions]]\" variable=\"listItem\">\n\n<$reveal type=\"match\" state=<<config-title>> text=\"hide\">\n\n<$transclude tiddler=<<listItem>> mode=\"inline\"/>\n\n</$reveal>\n\n</$list>\n\n</$set>\n\n</$set>\n\n</$set>\n\n</div>\n\n</$reveal>"
},
"$:/core/ui/Buttons/new-image": {
"title": "$:/core/ui/Buttons/new-image",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/new-image-button}} {{$:/language/Buttons/NewImage/Caption}}",
"description": "{{$:/language/Buttons/NewImage/Hint}}",
"text": "<$button tooltip={{$:/language/Buttons/NewImage/Hint}} aria-label={{$:/language/Buttons/NewImage/Caption}} class=<<tv-config-toolbar-class>>>\n<$action-sendmessage $message=\"tm-new-tiddler\" type=\"image/jpeg\"/>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/new-image-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/NewImage/Caption}}/></span>\n</$list>\n</$button>\n"
},
"$:/core/ui/Buttons/new-journal": {
"title": "$:/core/ui/Buttons/new-journal",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/new-journal-button}} {{$:/language/Buttons/NewJournal/Caption}}",
"description": "{{$:/language/Buttons/NewJournal/Hint}}",
"text": "\\define journalButton()\n<$button tooltip={{$:/language/Buttons/NewJournal/Hint}} aria-label={{$:/language/Buttons/NewJournal/Caption}} class=<<tv-config-toolbar-class>>>\n<$action-sendmessage $message=\"tm-new-tiddler\" title=<<now \"$(journalTitleTemplate)$\">> tags=\"$(journalTags)$\"/>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/new-journal-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/NewJournal/Caption}}/></span>\n</$list>\n</$button>\n\\end\n<$set name=\"journalTitleTemplate\" value={{$:/config/NewJournal/Title}}>\n<$set name=\"journalTags\" value={{$:/config/NewJournal/Tags}}>\n<<journalButton>>\n</$set></$set>"
},
"$:/core/ui/Buttons/new-tiddler": {
"title": "$:/core/ui/Buttons/new-tiddler",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/new-button}} {{$:/language/Buttons/NewTiddler/Caption}}",
"description": "{{$:/language/Buttons/NewTiddler/Hint}}",
"text": "<$button message=\"tm-new-tiddler\" tooltip={{$:/language/Buttons/NewTiddler/Hint}} aria-label={{$:/language/Buttons/NewTiddler/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/new-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/NewTiddler/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/palette": {
"title": "$:/core/ui/Buttons/palette",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/palette}} {{$:/language/Buttons/Palette/Caption}}",
"description": "{{$:/language/Buttons/Palette/Hint}}",
"text": "<span class=\"tc-popup-keep\">\n<$button popup=<<qualify \"$:/state/popup/palette\">> tooltip={{$:/language/Buttons/Palette/Hint}} aria-label={{$:/language/Buttons/Palette/Caption}} class=<<tv-config-toolbar-class>> selectedClass=\"tc-selected\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/palette}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Palette/Caption}}/></span>\n</$list>\n</$button>\n</span>\n<$reveal state=<<qualify \"$:/state/popup/palette\">> type=\"popup\" position=\"below\" animate=\"yes\">\n<div class=\"tc-drop-down\" style=\"font-size:0.7em;\">\n{{$:/snippets/paletteswitcher}}\n</div>\n</$reveal>"
},
"$:/core/ui/Buttons/refresh": {
"title": "$:/core/ui/Buttons/refresh",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/refresh-button}} {{$:/language/Buttons/Refresh/Caption}}",
"description": "{{$:/language/Buttons/Refresh/Hint}}",
"text": "<$button message=\"tm-browser-refresh\" tooltip={{$:/language/Buttons/Refresh/Hint}} aria-label={{$:/language/Buttons/Refresh/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/refresh-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Refresh/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/save-wiki": {
"title": "$:/core/ui/Buttons/save-wiki",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/save-button}} {{$:/language/Buttons/SaveWiki/Caption}}",
"description": "{{$:/language/Buttons/SaveWiki/Hint}}",
"text": "<$button message=\"tm-save-wiki\" param={{$:/config/SaveWikiButton/Template}} tooltip={{$:/language/Buttons/SaveWiki/Hint}} aria-label={{$:/language/Buttons/SaveWiki/Caption}} class=<<tv-config-toolbar-class>>>\n<span class=\"tc-dirty-indicator\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/save-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/SaveWiki/Caption}}/></span>\n</$list>\n</span>\n</$button>"
},
"$:/core/ui/Buttons/storyview": {
"title": "$:/core/ui/Buttons/storyview",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/storyview-classic}} {{$:/language/Buttons/StoryView/Caption}}",
"description": "{{$:/language/Buttons/StoryView/Hint}}",
"text": "\\define icon()\n$:/core/images/storyview-$(storyview)$\n\\end\n<span class=\"tc-popup-keep\">\n<$button popup=<<qualify \"$:/state/popup/storyview\">> tooltip={{$:/language/Buttons/StoryView/Hint}} aria-label={{$:/language/Buttons/StoryView/Caption}} class=<<tv-config-toolbar-class>> selectedClass=\"tc-selected\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n<$set name=\"storyview\" value={{$:/view}}>\n<$transclude tiddler=<<icon>>/>\n</$set>\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/StoryView/Caption}}/></span>\n</$list>\n</$button>\n</span>\n<$reveal state=<<qualify \"$:/state/popup/storyview\">> type=\"popup\" position=\"below\" animate=\"yes\">\n<div class=\"tc-drop-down\">\n<$linkcatcher to=\"$:/view\">\n<$list filter=\"[storyviews[]]\" variable=\"storyview\">\n<$link to=<<storyview>>>\n<span class=\"tc-drop-down-bullet\">\n<$reveal type=\"match\" state=\"$:/view\" text=<<storyview>>>\n•\n</$reveal>\n<$reveal type=\"nomatch\" state=\"$:/view\" text=<<storyview>>>\n \n</$reveal>\n</span>\n<$transclude tiddler=<<icon>>/>\n<$text text=<<storyview>>/></$link>\n</$list>\n</$linkcatcher>\n</div>\n</$reveal>"
},
"$:/core/ui/Buttons/tag-manager": {
"title": "$:/core/ui/Buttons/tag-manager",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/tag-button}} {{$:/language/Buttons/TagManager/Caption}}",
"description": "{{$:/language/Buttons/TagManager/Hint}}",
"text": "\\define control-panel-button(class)\n<$button to=\"$:/TagManager\" tooltip={{$:/language/Buttons/TagManager/Hint}} aria-label={{$:/language/Buttons/TagManager/Caption}} class=\"\"\"$(tv-config-toolbar-class)$ $class$\"\"\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/tag-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/TagManager/Caption}}/></span>\n</$list>\n</$button>\n\\end\n\n<$list filter=\"[list[$:/StoryList]] +[field:title[$:/TagManager]]\" emptyMessage=<<control-panel-button>>>\n<<control-panel-button \"tc-selected\">>\n</$list>\n"
},
"$:/core/ui/Buttons/theme": {
"title": "$:/core/ui/Buttons/theme",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/theme-button}} {{$:/language/Buttons/Theme/Caption}}",
"description": "{{$:/language/Buttons/Theme/Hint}}",
"text": "<span class=\"tc-popup-keep\">\n<$button popup=<<qualify \"$:/state/popup/theme\">> tooltip={{$:/language/Buttons/Theme/Hint}} aria-label={{$:/language/Buttons/Theme/Caption}} class=<<tv-config-toolbar-class>> selectedClass=\"tc-selected\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/theme-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Theme/Caption}}/></span>\n</$list>\n</$button>\n</span>\n<$reveal state=<<qualify \"$:/state/popup/theme\">> type=\"popup\" position=\"below\" animate=\"yes\">\n<div class=\"tc-drop-down\">\n<$linkcatcher to=\"$:/theme\">\n<$list filter=\"[plugin-type[theme]sort[title]]\" variable=\"themeTitle\">\n<$link to=<<themeTitle>>>\n<span class=\"tc-drop-down-bullet\">\n<$reveal type=\"match\" state=\"$:/theme\" text=<<themeTitle>>>\n•\n</$reveal>\n<$reveal type=\"nomatch\" state=\"$:/theme\" text=<<themeTitle>>>\n \n</$reveal>\n</span>\n<$view tiddler=<<themeTitle>> field=\"name\"/>\n</$link>\n</$list>\n</$linkcatcher>\n</div>\n</$reveal>"
},
"$:/core/ui/Buttons/unfold-all": {
"title": "$:/core/ui/Buttons/unfold-all",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/unfold-all-button}} {{$:/language/Buttons/UnfoldAll/Caption}}",
"description": "{{$:/language/Buttons/UnfoldAll/Hint}}",
"text": "<$button tooltip={{$:/language/Buttons/UnfoldAll/Hint}} aria-label={{$:/language/Buttons/UnfoldAll/Caption}} class=<<tv-config-toolbar-class>>>\n<$action-sendmessage $message=\"tm-unfold-all-tiddlers\" $param=<<currentTiddler>> foldedStatePrefix=\"$:/state/folded/\"/>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\" variable=\"listItem\">\n{{$:/core/images/unfold-all-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/UnfoldAll/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/PageTemplate/pagecontrols": {
"title": "$:/core/ui/PageTemplate/pagecontrols",
"text": "\\define config-title()\n$:/config/PageControlButtons/Visibility/$(listItem)$\n\\end\n<div class=\"tc-page-controls\">\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/PageControls]!has[draft.of]]\" variable=\"listItem\">\n<$reveal type=\"nomatch\" state=<<config-title>> text=\"hide\">\n<$transclude tiddler=<<listItem>> mode=\"inline\"/>\n</$reveal>\n</$list>\n</div>\n\n"
},
"$:/core/ui/PageStylesheet": {
"title": "$:/core/ui/PageStylesheet",
"text": "<$importvariables filter=\"[[$:/core/ui/PageMacros]] [all[shadows+tiddlers]tag[$:/tags/Macro]!has[draft.of]]\">\n\n<$set name=\"currentTiddler\" value={{$:/language}}>\n\n<$set name=\"languageTitle\" value={{!!name}}>\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/Stylesheet]!has[draft.of]]\">\n<$transclude mode=\"block\"/>\n</$list>\n\n</$set>\n\n</$set>\n\n</$importvariables>\n"
},
"$:/core/ui/PageTemplate/alerts": {
"title": "$:/core/ui/PageTemplate/alerts",
"tags": "$:/tags/PageTemplate",
"text": "<div class=\"tc-alerts\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/Alert]!has[draft.of]]\" template=\"$:/core/ui/AlertTemplate\" storyview=\"pop\"/>\n\n</div>\n"
},
"$:/core/ui/PageTemplate/pluginreloadwarning": {
"title": "$:/core/ui/PageTemplate/pluginreloadwarning",
"tags": "$:/tags/PageTemplate",
"text": "\\define lingo-base() $:/language/\n\n<$list filter=\"[has[plugin-type]haschanged[]!plugin-type[import]limit[1]]\">\n\n<$reveal type=\"nomatch\" state=\"$:/temp/HidePluginWarning\" text=\"yes\">\n\n<div class=\"tc-plugin-reload-warning\">\n\n<$set name=\"tv-config-toolbar-class\" value=\"\">\n\n<<lingo PluginReloadWarning>> <$button set=\"$:/temp/HidePluginWarning\" setTo=\"yes\" class=\"tc-btn-invisible\">{{$:/core/images/close-button}}</$button>\n\n</$set>\n\n</div>\n\n</$reveal>\n\n</$list>\n"
},
"$:/core/ui/PageTemplate/sidebar": {
"title": "$:/core/ui/PageTemplate/sidebar",
"tags": "$:/tags/PageTemplate",
"text": "<$scrollable fallthrough=\"no\" class=\"tc-sidebar-scrollable\">\n\n<div class=\"tc-sidebar-header\">\n\n<$reveal state=\"$:/state/sidebar\" type=\"match\" text=\"yes\" default=\"yes\" retain=\"yes\" animate=\"yes\">\n\n<h1 class=\"tc-site-title\">\n\n<$transclude tiddler=\"$:/SiteTitle\" mode=\"inline\"/>\n\n</h1>\n\n<div class=\"tc-site-subtitle\">\n\n<$transclude tiddler=\"$:/SiteSubtitle\" mode=\"inline\"/>\n\n</div>\n\n{{||$:/core/ui/PageTemplate/pagecontrols}}\n\n<$transclude tiddler=\"$:/core/ui/SideBarLists\" mode=\"inline\"/>\n\n</$reveal>\n\n</div>\n\n</$scrollable>"
},
"$:/core/ui/PageTemplate/story": {
"title": "$:/core/ui/PageTemplate/story",
"tags": "$:/tags/PageTemplate",
"text": "<section class=\"tc-story-river\">\n\n<section class=\"story-backdrop\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/AboveStory]!has[draft.of]]\">\n\n<$transclude/>\n\n</$list>\n\n</section>\n\n<$list filter=\"[list[$:/StoryList]]\" history=\"$:/HistoryList\" template=\"$:/core/ui/ViewTemplate\" editTemplate=\"$:/core/ui/EditTemplate\" storyview={{$:/view}} emptyMessage={{$:/config/EmptyStoryMessage}}/>\n\n<section class=\"story-frontdrop\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/BelowStory]!has[draft.of]]\">\n\n<$transclude/>\n\n</$list>\n\n</section>\n\n</section>\n"
},
"$:/core/ui/PageTemplate/topleftbar": {
"title": "$:/core/ui/PageTemplate/topleftbar",
"tags": "$:/tags/PageTemplate",
"text": "<span class=\"tc-topbar tc-topbar-left\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/TopLeftBar]!has[draft.of]]\" variable=\"listItem\">\n\n<$transclude tiddler=<<listItem>> mode=\"inline\"/>\n\n</$list>\n\n</span>\n"
},
"$:/core/ui/PageTemplate/toprightbar": {
"title": "$:/core/ui/PageTemplate/toprightbar",
"tags": "$:/tags/PageTemplate",
"text": "<span class=\"tc-topbar tc-topbar-right\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/TopRightBar]!has[draft.of]]\" variable=\"listItem\">\n\n<$transclude tiddler=<<listItem>> mode=\"inline\"/>\n\n</$list>\n\n</span>\n"
},
"$:/core/ui/PageTemplate": {
"title": "$:/core/ui/PageTemplate",
"text": "\\define containerClasses()\ntc-page-container tc-page-view-$(themeTitle)$ tc-language-$(languageTitle)$\n\\end\n\n<$importvariables filter=\"[[$:/core/ui/PageMacros]] [all[shadows+tiddlers]tag[$:/tags/Macro]!has[draft.of]]\">\n\n<$set name=\"tv-config-toolbar-icons\" value={{$:/config/Toolbar/Icons}}>\n\n<$set name=\"tv-config-toolbar-text\" value={{$:/config/Toolbar/Text}}>\n\n<$set name=\"tv-config-toolbar-class\" value={{$:/config/Toolbar/ButtonClass}}>\n\n<$set name=\"themeTitle\" value={{$:/view}}>\n\n<$set name=\"currentTiddler\" value={{$:/language}}>\n\n<$set name=\"languageTitle\" value={{!!name}}>\n\n<$set name=\"currentTiddler\" value=\"\">\n\n<div class=<<containerClasses>>>\n\n<$navigator story=\"$:/StoryList\" history=\"$:/HistoryList\" openLinkFromInsideRiver={{$:/config/Navigation/openLinkFromInsideRiver}} openLinkFromOutsideRiver={{$:/config/Navigation/openLinkFromOutsideRiver}}>\n\n<$dropzone>\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/PageTemplate]!has[draft.of]]\" variable=\"listItem\">\n\n<$transclude tiddler=<<listItem>>/>\n\n</$list>\n\n</$dropzone>\n\n</$navigator>\n\n</div>\n\n</$set>\n\n</$set>\n\n</$set>\n\n</$set>\n\n</$set>\n\n</$set>\n\n</$set>\n\n</$importvariables>\n"
},
"$:/core/ui/PluginInfo": {
"title": "$:/core/ui/PluginInfo",
"text": "\\define localised-info-tiddler-title()\n$(currentTiddler)$/$(languageTitle)$/$(currentTab)$\n\\end\n\\define info-tiddler-title()\n$(currentTiddler)$/$(currentTab)$\n\\end\n<$transclude tiddler=<<localised-info-tiddler-title>> mode=\"block\">\n<$transclude tiddler=<<currentTiddler>> subtiddler=<<localised-info-tiddler-title>> mode=\"block\">\n<$transclude tiddler=<<currentTiddler>> subtiddler=<<info-tiddler-title>> mode=\"block\">\n{{$:/language/ControlPanel/Plugin/NoInfoFound/Hint}}\n</$transclude>\n</$transclude>\n</$transclude>\n"
},
"$:/core/ui/SearchResults": {
"title": "$:/core/ui/SearchResults",
"text": "<div class=\"tc-search-results\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/SearchResults]!has[draft.of]butfirst[]limit[1]]\" emptyMessage=\"\"\"\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/SearchResults]!has[draft.of]]\">\n<$transclude mode=\"block\"/>\n</$list>\n\"\"\">\n\n<$macrocall $name=\"tabs\" tabsList=\"[all[shadows+tiddlers]tag[$:/tags/SearchResults]!has[draft.of]]\" default={{$:/config/SearchResults/Default}}/>\n\n</$list>\n\n</div>\n"
},
"$:/core/ui/SideBar/More": {
"title": "$:/core/ui/SideBar/More",
"tags": "$:/tags/SideBar",
"caption": "{{$:/language/SideBar/More/Caption}}",
"text": "<div class=\"tc-more-sidebar\">\n<<tabs \"[all[shadows+tiddlers]tag[$:/tags/MoreSideBar]!has[draft.of]]\" \"$:/core/ui/MoreSideBar/Tags\" \"$:/state/tab/moresidebar\" \"tc-vertical\">>\n</div>\n"
},
"$:/core/ui/SideBar/Open": {
"title": "$:/core/ui/SideBar/Open",
"tags": "$:/tags/SideBar",
"caption": "{{$:/language/SideBar/Open/Caption}}",
"text": "\\define lingo-base() $:/language/CloseAll/\n<$list filter=\"[list[$:/StoryList]]\" history=\"$:/HistoryList\" storyview=\"pop\">\n\n<$button message=\"tm-close-tiddler\" tooltip={{$:/language/Buttons/Close/Hint}} aria-label={{$:/language/Buttons/Close/Caption}} class=\"tc-btn-invisible tc-btn-mini\">×</$button> <$link to={{!!title}}><$view field=\"title\"/></$link>\n\n</$list>\n\n<$button message=\"tm-close-all-tiddlers\" class=\"tc-btn-invisible tc-btn-mini\"><<lingo Button>></$button>\n"
},
"$:/core/ui/SideBar/Recent": {
"title": "$:/core/ui/SideBar/Recent",
"tags": "$:/tags/SideBar",
"caption": "{{$:/language/SideBar/Recent/Caption}}",
"text": "<$macrocall $name=\"timeline\" format={{$:/language/RecentChanges/DateFormat}}/>\n"
},
"$:/core/ui/SideBar/Tools": {
"title": "$:/core/ui/SideBar/Tools",
"tags": "$:/tags/SideBar",
"caption": "{{$:/language/SideBar/Tools/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/\n\\define config-title()\n$:/config/PageControlButtons/Visibility/$(listItem)$\n\\end\n\n<<lingo Basics/Version/Prompt>> <<version>>\n\n<$set name=\"tv-config-toolbar-icons\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-text\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-class\" value=\"\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/PageControls]!has[draft.of]]\" variable=\"listItem\">\n\n<div style=\"position:relative;\">\n\n<$checkbox tiddler=<<config-title>> field=\"text\" checked=\"show\" unchecked=\"hide\" default=\"show\"/> <$transclude tiddler=<<listItem>>/> <i class=\"tc-muted\"><$transclude tiddler=<<listItem>> field=\"description\"/></i>\n\n</div>\n\n</$list>\n\n</$set>\n\n</$set>\n\n</$set>\n"
},
"$:/core/ui/SideBarLists": {
"title": "$:/core/ui/SideBarLists",
"text": "<div class=\"tc-sidebar-lists\">\n\n<$set name=\"searchTiddler\" value=\"$:/temp/search\">\n<div class=\"tc-search\">\n<$edit-text tiddler=\"$:/temp/search\" type=\"search\" tag=\"input\" focus={{$:/config/Search/AutoFocus}} focusPopup=<<qualify \"$:/state/popup/search-dropdown\">> class=\"tc-popup-handle\"/>\n<$reveal state=\"$:/temp/search\" type=\"nomatch\" text=\"\">\n<$button tooltip={{$:/language/Buttons/AdvancedSearch/Hint}} aria-label={{$:/language/Buttons/AdvancedSearch/Caption}} class=\"tc-btn-invisible\">\n<$action-setfield $tiddler=\"$:/temp/advancedsearch\" text={{$:/temp/search}}/>\n<$action-setfield $tiddler=\"$:/temp/search\" text=\"\"/>\n<$action-navigate $to=\"$:/AdvancedSearch\"/>\n{{$:/core/images/advanced-search-button}}\n</$button>\n<$button class=\"tc-btn-invisible\">\n<$action-setfield $tiddler=\"$:/temp/search\" text=\"\" />\n{{$:/core/images/close-button}}\n</$button>\n<$button popup=<<qualify \"$:/state/popup/search-dropdown\">> class=\"tc-btn-invisible\">\n<$set name=\"resultCount\" value=\"\"\"<$count filter=\"[!is[system]search{$(searchTiddler)$}]\"/>\"\"\">\n{{$:/core/images/down-arrow}} {{$:/language/Search/Matches}}\n</$set>\n</$button>\n</$reveal>\n<$reveal state=\"$:/temp/search\" type=\"match\" text=\"\">\n<$button to=\"$:/AdvancedSearch\" tooltip={{$:/language/Buttons/AdvancedSearch/Hint}} aria-label={{$:/language/Buttons/AdvancedSearch/Caption}} class=\"tc-btn-invisible\">\n{{$:/core/images/advanced-search-button}}\n</$button>\n</$reveal>\n</div>\n\n<$reveal tag=\"div\" class=\"tc-block-dropdown-wrapper\" state=\"$:/temp/search\" type=\"nomatch\" text=\"\">\n\n<$reveal tag=\"div\" class=\"tc-block-dropdown tc-search-drop-down tc-popup-handle\" state=<<qualify \"$:/state/popup/search-dropdown\">> type=\"nomatch\" text=\"\" default=\"\">\n\n{{$:/core/ui/SearchResults}}\n\n</$reveal>\n\n</$reveal>\n\n</$set>\n\n<$macrocall $name=\"tabs\" tabsList=\"[all[shadows+tiddlers]tag[$:/tags/SideBar]!has[draft.of]]\" 
default={{$:/config/DefaultSidebarTab}} state=\"$:/state/tab/sidebar\" />\n\n</div>\n"
},
"$:/TagManager": {
"title": "$:/TagManager",
"icon": "$:/core/images/tag-button",
"color": "#bbb",
"text": "\\define lingo-base() $:/language/TagManager/\n\\define iconEditorTab(type)\n<$list filter=\"[all[shadows+tiddlers]is[image]] [all[shadows+tiddlers]tag[$:/tags/Image]] -[type[application/pdf]] +[sort[title]] +[$type$is[system]]\">\n<$link to={{!!title}}>\n<$transclude/> <$view field=\"title\"/>\n</$link>\n</$list>\n\\end\n\\define iconEditor(title)\n<div class=\"tc-drop-down-wrapper\">\n<$button popup=<<qualify \"$:/state/popup/icon/$title$\">> class=\"tc-btn-invisible tc-btn-dropdown\">{{$:/core/images/down-arrow}}</$button>\n<$reveal state=<<qualify \"$:/state/popup/icon/$title$\">> type=\"popup\" position=\"belowleft\" text=\"\" default=\"\">\n<div class=\"tc-drop-down\">\n<$linkcatcher to=\"$title$!!icon\">\n<<iconEditorTab type:\"!\">>\n<hr/>\n<<iconEditorTab type:\"\">>\n</$linkcatcher>\n</div>\n</$reveal>\n</div>\n\\end\n\\define qualifyTitle(title)\n$title$$(currentTiddler)$\n\\end\n\\define toggleButton(state)\n<$reveal state=\"$state$\" type=\"match\" text=\"closed\" default=\"closed\">\n<$button set=\"$state$\" setTo=\"open\" class=\"tc-btn-invisible tc-btn-dropdown\" selectedClass=\"tc-selected\">\n{{$:/core/images/info-button}}\n</$button>\n</$reveal>\n<$reveal state=\"$state$\" type=\"match\" text=\"open\" default=\"closed\">\n<$button set=\"$state$\" setTo=\"closed\" class=\"tc-btn-invisible tc-btn-dropdown\" selectedClass=\"tc-selected\">\n{{$:/core/images/info-button}}\n</$button>\n</$reveal>\n\\end\n<table class=\"tc-tag-manager-table\">\n<tbody>\n<tr>\n<th><<lingo Colour/Heading>></th>\n<th class=\"tc-tag-manager-tag\"><<lingo Tag/Heading>></th>\n<th><<lingo Count/Heading>></th>\n<th><<lingo Icon/Heading>></th>\n<th><<lingo Info/Heading>></th>\n</tr>\n<$list filter=\"[tags[]!is[system]sort[title]]\">\n<tr>\n<td><$edit-text field=\"color\" tag=\"input\" type=\"color\"/></td>\n<td><$transclude tiddler=\"$:/core/ui/TagTemplate\"/></td>\n<td><$count filter=\"[all[current]tagging[]]\"/></td>\n<td>\n<$macrocall $name=\"iconEditor\" 
title={{!!title}}/>\n</td>\n<td>\n<$macrocall $name=\"toggleButton\" state=<<qualifyTitle \"$:/state/tag-manager/\">> /> \n</td>\n</tr>\n<tr>\n<td></td>\n<td colspan=\"4\">\n<$reveal state=<<qualifyTitle \"$:/state/tag-manager/\">> type=\"match\" text=\"open\" default=\"\">\n<table>\n<tbody>\n<tr><td><<lingo Colour/Heading>></td><td><$edit-text field=\"color\" tag=\"input\" type=\"text\" size=\"9\"/></td></tr>\n<tr><td><<lingo Icon/Heading>></td><td><$edit-text field=\"icon\" tag=\"input\" size=\"45\"/></td></tr>\n</tbody>\n</table>\n</$reveal>\n</td>\n</tr>\n</$list>\n<tr>\n<td></td>\n<td>\n{{$:/core/ui/UntaggedTemplate}}\n</td>\n<td>\n<small class=\"tc-menu-list-count\"><$count filter=\"[untagged[]!is[system]] -[tags[]]\"/></small>\n</td>\n<td></td>\n<td></td>\n</tr>\n</tbody>\n</table>\n"
},
"$:/core/ui/TagTemplate": {
"title": "$:/core/ui/TagTemplate",
"text": "\\define tag-styles()\nbackground-color:$(backgroundColor)$;\nfill:$(foregroundColor)$;\ncolor:$(foregroundColor)$;\n\\end\n\n\\define tag-body-inner(colour,fallbackTarget,colourA,colourB)\n<$vars foregroundColor=<<contrastcolour target:\"\"\"$colour$\"\"\" fallbackTarget:\"\"\"$fallbackTarget$\"\"\" colourA:\"\"\"$colourA$\"\"\" colourB:\"\"\"$colourB$\"\"\">> backgroundColor=\"\"\"$colour$\"\"\">\n<$button popup=<<qualify \"$:/state/popup/tag\">> class=\"tc-btn-invisible tc-tag-label\" style=<<tag-styles>>>\n<$transclude tiddler={{!!icon}}/> <$view field=\"title\" format=\"text\" />\n</$button>\n<$reveal state=<<qualify \"$:/state/popup/tag\">> type=\"popup\" position=\"below\" animate=\"yes\" class=\"tc-drop-down\"><$transclude tiddler=\"$:/core/ui/ListItemTemplate\"/>\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/TagDropdown]!has[draft.of]]\" variable=\"listItem\"> \n<$transclude tiddler=<<listItem>>/> \n</$list> \n<hr>\n<$list filter=\"[all[current]tagging[]]\" template=\"$:/core/ui/ListItemTemplate\"/>\n</$reveal>\n</$vars>\n\\end\n\n\\define tag-body(colour,palette)\n<span class=\"tc-tag-list-item\">\n<$macrocall $name=\"tag-body-inner\" colour=\"\"\"$colour$\"\"\" fallbackTarget={{$palette$##tag-background}} colourA={{$palette$##foreground}} colourB={{$palette$##background}}/>\n</span>\n\\end\n\n<$macrocall $name=\"tag-body\" colour={{!!color}} palette={{$:/palette}}/>\n"
},
"$:/core/ui/TiddlerFields": {
"title": "$:/core/ui/TiddlerFields",
"text": "<table class=\"tc-view-field-table\">\n<tbody>\n<$list filter=\"[all[current]fields[]sort[title]] -text\" template=\"$:/core/ui/TiddlerFieldTemplate\" variable=\"listItem\"/>\n</tbody>\n</table>\n"
},
"$:/core/ui/TiddlerFieldTemplate": {
"title": "$:/core/ui/TiddlerFieldTemplate",
"text": "<tr class=\"tc-view-field\">\n<td class=\"tc-view-field-name\">\n<$text text=<<listItem>>/>\n</td>\n<td class=\"tc-view-field-value\">\n<$view field=<<listItem>>/>\n</td>\n</tr>"
},
"$:/core/ui/TiddlerInfo/Advanced/PluginInfo": {
"title": "$:/core/ui/TiddlerInfo/Advanced/PluginInfo",
"tags": "$:/tags/TiddlerInfo/Advanced",
"text": "\\define lingo-base() $:/language/TiddlerInfo/Advanced/PluginInfo/\n<$list filter=\"[all[current]has[plugin-type]]\">\n\n! <<lingo Heading>>\n\n<<lingo Hint>>\n<ul>\n<$list filter=\"[all[current]plugintiddlers[]sort[title]]\" emptyMessage=<<lingo Empty/Hint>>>\n<li>\n<$link to={{!!title}}>\n<$view field=\"title\"/>\n</$link>\n</li>\n</$list>\n</ul>\n\n</$list>\n"
},
"$:/core/ui/TiddlerInfo/Advanced/ShadowInfo": {
"title": "$:/core/ui/TiddlerInfo/Advanced/ShadowInfo",
"tags": "$:/tags/TiddlerInfo/Advanced",
"text": "\\define lingo-base() $:/language/TiddlerInfo/Advanced/ShadowInfo/\n<$set name=\"infoTiddler\" value=<<currentTiddler>>>\n\n''<<lingo Heading>>''\n\n<$list filter=\"[all[current]!is[shadow]]\">\n\n<<lingo NotShadow/Hint>>\n\n</$list>\n\n<$list filter=\"[all[current]is[shadow]]\">\n\n<<lingo Shadow/Hint>>\n\n<$list filter=\"[all[current]shadowsource[]]\">\n\n<$set name=\"pluginTiddler\" value=<<currentTiddler>>>\n<<lingo Shadow/Source>>\n</$set>\n\n</$list>\n\n<$list filter=\"[all[current]is[shadow]is[tiddler]]\">\n\n<<lingo OverriddenShadow/Hint>>\n\n</$list>\n\n\n</$list>\n</$set>\n"
},
"$:/core/ui/TiddlerInfo/Advanced": {
"title": "$:/core/ui/TiddlerInfo/Advanced",
"tags": "$:/tags/TiddlerInfo",
"caption": "{{$:/language/TiddlerInfo/Advanced/Caption}}",
"text": "<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/TiddlerInfo/Advanced]!has[draft.of]]\" variable=\"listItem\">\n<$transclude tiddler=<<listItem>>/>\n\n</$list>\n"
},
"$:/core/ui/TiddlerInfo/Fields": {
"title": "$:/core/ui/TiddlerInfo/Fields",
"tags": "$:/tags/TiddlerInfo",
"caption": "{{$:/language/TiddlerInfo/Fields/Caption}}",
"text": "<$transclude tiddler=\"$:/core/ui/TiddlerFields\"/>\n"
},
"$:/core/ui/TiddlerInfo/List": {
"title": "$:/core/ui/TiddlerInfo/List",
"tags": "$:/tags/TiddlerInfo",
"caption": "{{$:/language/TiddlerInfo/List/Caption}}",
"text": "\\define lingo-base() $:/language/TiddlerInfo/\n<$list filter=\"[list{!!title}]\" emptyMessage=<<lingo List/Empty>> template=\"$:/core/ui/ListItemTemplate\"/>\n"
},
"$:/core/ui/TiddlerInfo/Listed": {
"title": "$:/core/ui/TiddlerInfo/Listed",
"tags": "$:/tags/TiddlerInfo",
"caption": "{{$:/language/TiddlerInfo/Listed/Caption}}",
"text": "\\define lingo-base() $:/language/TiddlerInfo/\n<$list filter=\"[all[current]listed[]!is[system]]\" emptyMessage=<<lingo Listed/Empty>> template=\"$:/core/ui/ListItemTemplate\"/>\n"
},
"$:/core/ui/TiddlerInfo/References": {
"title": "$:/core/ui/TiddlerInfo/References",
"tags": "$:/tags/TiddlerInfo",
"caption": "{{$:/language/TiddlerInfo/References/Caption}}",
"text": "\\define lingo-base() $:/language/TiddlerInfo/\n<$list filter=\"[all[current]backlinks[]sort[title]]\" emptyMessage=<<lingo References/Empty>> template=\"$:/core/ui/ListItemTemplate\">\n</$list>\n"
},
"$:/core/ui/TiddlerInfo/Tagging": {
"title": "$:/core/ui/TiddlerInfo/Tagging",
"tags": "$:/tags/TiddlerInfo",
"caption": "{{$:/language/TiddlerInfo/Tagging/Caption}}",
"text": "\\define lingo-base() $:/language/TiddlerInfo/\n<$list filter=\"[all[current]tagging[]]\" emptyMessage=<<lingo Tagging/Empty>> template=\"$:/core/ui/ListItemTemplate\"/>\n"
},
"$:/core/ui/TiddlerInfo/Tools": {
"title": "$:/core/ui/TiddlerInfo/Tools",
"tags": "$:/tags/TiddlerInfo",
"caption": "{{$:/language/TiddlerInfo/Tools/Caption}}",
"text": "\\define lingo-base() $:/language/TiddlerInfo/\n\\define config-title()\n$:/config/ViewToolbarButtons/Visibility/$(listItem)$\n\\end\n<$set name=\"tv-config-toolbar-icons\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-text\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-class\" value=\"\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/ViewToolbar]!has[draft.of]]\" variable=\"listItem\">\n\n<$checkbox tiddler=<<config-title>> field=\"text\" checked=\"show\" unchecked=\"hide\" default=\"show\"/> <$transclude tiddler=<<listItem>>/> <i class=\"tc-muted\"><$transclude tiddler=<<listItem>> field=\"description\"/></i>\n\n</$list>\n\n</$set>\n\n</$set>\n\n</$set>\n"
},
"$:/core/ui/TiddlerInfo": {
"title": "$:/core/ui/TiddlerInfo",
"text": "<$macrocall $name=\"tabs\" tabsList=\"[all[shadows+tiddlers]tag[$:/tags/TiddlerInfo]!has[draft.of]]\" default={{$:/config/TiddlerInfo/Default}}/>"
},
"$:/core/ui/TopBar/menu": {
"title": "$:/core/ui/TopBar/menu",
"tags": "$:/tags/TopRightBar",
"text": "<$reveal state=\"$:/state/sidebar\" type=\"nomatch\" text=\"no\">\n<$button set=\"$:/state/sidebar\" setTo=\"no\" tooltip={{$:/language/Buttons/HideSideBar/Hint}} aria-label={{$:/language/Buttons/HideSideBar/Caption}} class=\"tc-btn-invisible\">{{$:/core/images/chevron-right}}</$button>\n</$reveal>\n<$reveal state=\"$:/state/sidebar\" type=\"match\" text=\"no\">\n<$button set=\"$:/state/sidebar\" setTo=\"yes\" tooltip={{$:/language/Buttons/ShowSideBar/Hint}} aria-label={{$:/language/Buttons/ShowSideBar/Caption}} class=\"tc-btn-invisible\">{{$:/core/images/chevron-left}}</$button>\n</$reveal>\n"
},
"$:/core/ui/UntaggedTemplate": {
"title": "$:/core/ui/UntaggedTemplate",
"text": "\\define lingo-base() $:/language/SideBar/\n<$button popup=<<qualify \"$:/state/popup/tag\">> class=\"tc-btn-invisible tc-untagged-label tc-tag-label\">\n<<lingo Tags/Untagged/Caption>>\n</$button>\n<$reveal state=<<qualify \"$:/state/popup/tag\">> type=\"popup\" position=\"below\">\n<div class=\"tc-drop-down\">\n<$list filter=\"[untagged[]!is[system]] -[tags[]] +[sort[title]]\" template=\"$:/core/ui/ListItemTemplate\"/>\n</div>\n</$reveal>\n"
},
"$:/core/ui/ViewTemplate/body": {
"title": "$:/core/ui/ViewTemplate/body",
"tags": "$:/tags/ViewTemplate",
"text": "<$reveal tag=\"div\" class=\"tc-tiddler-body\" type=\"nomatch\" state=<<folded-state>> text=\"hide\" retain=\"yes\" animate=\"yes\">\n\n<$list filter=\"[all[current]!has[plugin-type]!field:hide-body[yes]]\">\n\n<$transclude>\n\n<$transclude tiddler=\"$:/language/MissingTiddler/Hint\"/>\n\n</$transclude>\n\n</$list>\n\n</$reveal>\n"
},
"$:/core/ui/ViewTemplate/classic": {
"title": "$:/core/ui/ViewTemplate/classic",
"tags": "$:/tags/ViewTemplate $:/tags/EditTemplate",
"text": "\\define lingo-base() $:/language/ClassicWarning/\n<$list filter=\"[all[current]type[text/x-tiddlywiki]]\">\n<div class=\"tc-message-box\">\n\n<<lingo Hint>>\n\n<$button set=\"!!type\" setTo=\"text/vnd.tiddlywiki\"><<lingo Upgrade/Caption>></$button>\n\n</div>\n</$list>\n"
},
"$:/core/ui/ViewTemplate/import": {
"title": "$:/core/ui/ViewTemplate/import",
"tags": "$:/tags/ViewTemplate",
"text": "\\define lingo-base() $:/language/Import/\n\n<$list filter=\"[all[current]field:plugin-type[import]]\">\n\n<div class=\"tc-import\">\n\n<<lingo Listing/Hint>>\n\n<$button message=\"tm-delete-tiddler\" param=<<currentTiddler>>><<lingo Listing/Cancel/Caption>></$button>\n<$button message=\"tm-perform-import\" param=<<currentTiddler>>><<lingo Listing/Import/Caption>></$button>\n\n{{||$:/core/ui/ImportListing}}\n\n<$button message=\"tm-delete-tiddler\" param=<<currentTiddler>>><<lingo Listing/Cancel/Caption>></$button>\n<$button message=\"tm-perform-import\" param=<<currentTiddler>>><<lingo Listing/Import/Caption>></$button>\n\n</div>\n\n</$list>\n"
},
"$:/core/ui/ViewTemplate/plugin": {
"title": "$:/core/ui/ViewTemplate/plugin",
"tags": "$:/tags/ViewTemplate",
"text": "<$list filter=\"[all[current]has[plugin-type]] -[all[current]field:plugin-type[import]]\">\n\n{{||$:/core/ui/TiddlerInfo/Advanced/PluginInfo}}\n\n</$list>\n"
},
"$:/core/ui/ViewTemplate/subtitle": {
"title": "$:/core/ui/ViewTemplate/subtitle",
"tags": "$:/tags/ViewTemplate",
"text": "<$reveal type=\"nomatch\" state=<<folded-state>> text=\"hide\" tag=\"div\" retain=\"yes\" animate=\"yes\">\n<div class=\"tc-subtitle\">\n<$link to={{!!modifier}}>\n<$view field=\"modifier\"/>\n</$link> <$view field=\"modified\" format=\"date\" template={{$:/language/Tiddler/DateFormat}}/>\n</div>\n</$reveal>\n"
},
"$:/core/ui/ViewTemplate/tags": {
"title": "$:/core/ui/ViewTemplate/tags",
"tags": "$:/tags/ViewTemplate",
"text": "<$reveal type=\"nomatch\" state=<<folded-state>> text=\"hide\" tag=\"div\" retain=\"yes\" animate=\"yes\">\n<div class=\"tc-tags-wrapper\"><$list filter=\"[all[current]tags[]sort[title]]\" template=\"$:/core/ui/TagTemplate\" storyview=\"pop\"/></div>\n</$reveal>"
},
"$:/core/ui/ViewTemplate/title": {
"title": "$:/core/ui/ViewTemplate/title",
"tags": "$:/tags/ViewTemplate",
"text": "\\define title-styles()\nfill:$(foregroundColor)$;\n\\end\n\\define config-title()\n$:/config/ViewToolbarButtons/Visibility/$(listItem)$\n\\end\n<div class=\"tc-tiddler-title\">\n<div class=\"tc-titlebar\">\n<span class=\"tc-tiddler-controls\">\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/ViewToolbar]!has[draft.of]]\" variable=\"listItem\"><$reveal type=\"nomatch\" state=<<config-title>> text=\"hide\"><$transclude tiddler=<<listItem>>/></$reveal></$list>\n</span>\n<$set name=\"tv-wikilinks\" value={{$:/config/Tiddlers/TitleLinks}}>\n<$link>\n<$set name=\"foregroundColor\" value={{!!color}}>\n<span class=\"tc-tiddler-title-icon\" style=<<title-styles>>>\n<$transclude tiddler={{!!icon}}/>\n</span>\n</$set>\n<$list filter=\"[all[current]removeprefix[$:/]]\">\n<h2 class=\"tc-title\" title={{$:/language/SystemTiddler/Tooltip}}>\n<span class=\"tc-system-title-prefix\">$:/</span><$text text=<<currentTiddler>>/>\n</h2>\n</$list>\n<$list filter=\"[all[current]!prefix[$:/]]\">\n<h2 class=\"tc-title\">\n<$view field=\"title\"/>\n</h2>\n</$list>\n</$link>\n</$set>\n</div>\n\n<$reveal type=\"nomatch\" text=\"\" default=\"\" state=<<tiddlerInfoState>> class=\"tc-tiddler-info tc-popup-handle\" animate=\"yes\" retain=\"yes\">\n\n<$transclude tiddler=\"$:/core/ui/TiddlerInfo\"/>\n\n</$reveal>\n</div>"
},
"$:/core/ui/ViewTemplate/unfold": {
"title": "$:/core/ui/ViewTemplate/unfold",
"tags": "$:/tags/ViewTemplate",
"text": "<$reveal tag=\"div\" type=\"nomatch\" state=\"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/fold-bar\" text=\"hide\">\n<$reveal tag=\"div\" type=\"nomatch\" state=<<folded-state>> text=\"hide\" default=\"show\" retain=\"yes\" animate=\"yes\">\n<$button tooltip={{$:/language/Buttons/Fold/Hint}} aria-label={{$:/language/Buttons/Fold/Caption}} class=\"tc-fold-banner\">\n<$action-sendmessage $message=\"tm-fold-tiddler\" $param=<<currentTiddler>> foldedState=<<folded-state>>/>\n{{$:/core/images/chevron-up}}\n</$button>\n</$reveal>\n<$reveal tag=\"div\" type=\"nomatch\" state=<<folded-state>> text=\"show\" default=\"show\" retain=\"yes\" animate=\"yes\">\n<$button tooltip={{$:/language/Buttons/Unfold/Hint}} aria-label={{$:/language/Buttons/Unfold/Caption}} class=\"tc-unfold-banner\">\n<$action-sendmessage $message=\"tm-fold-tiddler\" $param=<<currentTiddler>> foldedState=<<folded-state>>/>\n{{$:/core/images/chevron-down}}\n</$button>\n</$reveal>\n</$reveal>\n"
},
"$:/core/ui/ViewTemplate": {
"title": "$:/core/ui/ViewTemplate",
"text": "\\define frame-classes()\ntc-tiddler-frame tc-tiddler-view-frame $(missingTiddlerClass)$ $(shadowTiddlerClass)$ $(systemTiddlerClass)$ $(tiddlerTagClasses)$\n\\end\n\\define folded-state()\n$:/state/folded/$(currentTiddler)$\n\\end\n<$set name=\"storyTiddler\" value=<<currentTiddler>>><$set name=\"tiddlerInfoState\" value=<<qualify \"$:/state/popup/tiddler-info\">>><$tiddler tiddler=<<currentTiddler>>><div class=<<frame-classes>>><$list filter=\"[all[shadows+tiddlers]tag[$:/tags/ViewTemplate]!has[draft.of]]\" variable=\"listItem\"><$transclude tiddler=<<listItem>>/></$list>\n</div>\n</$tiddler></$set></$set>\n"
},
"$:/core/ui/Buttons/clone": {
"title": "$:/core/ui/Buttons/clone",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/clone-button}} {{$:/language/Buttons/Clone/Caption}}",
"description": "{{$:/language/Buttons/Clone/Hint}}",
"text": "<$button message=\"tm-new-tiddler\" param=<<currentTiddler>> tooltip={{$:/language/Buttons/Clone/Hint}} aria-label={{$:/language/Buttons/Clone/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/clone-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Clone/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/close-others": {
"title": "$:/core/ui/Buttons/close-others",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/close-others-button}} {{$:/language/Buttons/CloseOthers/Caption}}",
"description": "{{$:/language/Buttons/CloseOthers/Hint}}",
"text": "<$button message=\"tm-close-other-tiddlers\" param=<<currentTiddler>> tooltip={{$:/language/Buttons/CloseOthers/Hint}} aria-label={{$:/language/Buttons/CloseOthers/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/close-others-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/CloseOthers/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/close": {
"title": "$:/core/ui/Buttons/close",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/close-button}} {{$:/language/Buttons/Close/Caption}}",
"description": "{{$:/language/Buttons/Close/Hint}}",
"text": "<$button message=\"tm-close-tiddler\" tooltip={{$:/language/Buttons/Close/Hint}} aria-label={{$:/language/Buttons/Close/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/close-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Close/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/edit": {
"title": "$:/core/ui/Buttons/edit",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/edit-button}} {{$:/language/Buttons/Edit/Caption}}",
"description": "{{$:/language/Buttons/Edit/Hint}}",
"text": "<$button message=\"tm-edit-tiddler\" tooltip={{$:/language/Buttons/Edit/Hint}} aria-label={{$:/language/Buttons/Edit/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/edit-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Edit/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/export-tiddler": {
"title": "$:/core/ui/Buttons/export-tiddler",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/export-button}} {{$:/language/Buttons/ExportTiddler/Caption}}",
"description": "{{$:/language/Buttons/ExportTiddler/Hint}}",
"text": "\\define makeExportFilter()\n[[$(currentTiddler)$]]\n\\end\n<$macrocall $name=\"exportButton\" exportFilter=<<makeExportFilter>> lingoBase=\"$:/language/Buttons/ExportTiddler/\" baseFilename=<<currentTiddler>>/>"
},
"$:/core/ui/Buttons/fold-bar": {
"title": "$:/core/ui/Buttons/fold-bar",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/language/Buttons/Fold/FoldBar/Caption}}",
"description": "{{$:/language/Buttons/Fold/FoldBar/Hint}}",
"text": "<!-- This dummy toolbar button is here to allow visibility of the fold-bar to be controlled as if it were a toolbar button -->"
},
"$:/core/ui/Buttons/fold-others": {
"title": "$:/core/ui/Buttons/fold-others",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/fold-others-button}} {{$:/language/Buttons/FoldOthers/Caption}}",
"description": "{{$:/language/Buttons/FoldOthers/Hint}}",
"text": "<$button tooltip={{$:/language/Buttons/FoldOthers/Hint}} aria-label={{$:/language/Buttons/FoldOthers/Caption}} class=<<tv-config-toolbar-class>>>\n<$action-sendmessage $message=\"tm-fold-other-tiddlers\" $param=<<currentTiddler>> foldedStatePrefix=\"$:/state/folded/\"/>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\" variable=\"listItem\">\n{{$:/core/images/fold-others-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/FoldOthers/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/fold": {
"title": "$:/core/ui/Buttons/fold",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/fold-button}} {{$:/language/Buttons/Fold/Caption}}",
"description": "{{$:/language/Buttons/Fold/Hint}}",
"text": "<$reveal type=\"nomatch\" state=<<folded-state>> text=\"hide\" default=\"show\"><$button tooltip={{$:/language/Buttons/Fold/Hint}} aria-label={{$:/language/Buttons/Fold/Caption}} class=<<tv-config-toolbar-class>>>\n<$action-sendmessage $message=\"tm-fold-tiddler\" $param=<<currentTiddler>> foldedState=<<folded-state>>/>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\" variable=\"listItem\">\n{{$:/core/images/fold-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\">\n<$text text={{$:/language/Buttons/Fold/Caption}}/>\n</span>\n</$list>\n</$button></$reveal><$reveal type=\"match\" state=<<folded-state>> text=\"hide\" default=\"show\"><$button tooltip={{$:/language/Buttons/Unfold/Hint}} aria-label={{$:/language/Buttons/Unfold/Caption}} class=<<tv-config-toolbar-class>>>\n<$action-sendmessage $message=\"tm-fold-tiddler\" $param=<<currentTiddler>> foldedState=<<folded-state>>/>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\" variable=\"listItem\">\n{{$:/core/images/unfold-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\">\n<$text text={{$:/language/Buttons/Unfold/Caption}}/>\n</span>\n</$list>\n</$button></$reveal>"
},
"$:/core/ui/Buttons/info": {
"title": "$:/core/ui/Buttons/info",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/info-button}} {{$:/language/Buttons/Info/Caption}}",
"description": "{{$:/language/Buttons/Info/Hint}}",
"text": "<$button popup=<<tiddlerInfoState>> tooltip={{$:/language/Buttons/Info/Hint}} aria-label={{$:/language/Buttons/Info/Caption}} class=<<tv-config-toolbar-class>> selectedClass=\"tc-selected\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/info-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Info/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/more-tiddler-actions": {
"title": "$:/core/ui/Buttons/more-tiddler-actions",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/down-arrow}} {{$:/language/Buttons/More/Caption}}",
"description": "{{$:/language/Buttons/More/Hint}}",
"text": "\\define config-title()\n$:/config/ViewToolbarButtons/Visibility/$(listItem)$\n\\end\n<$button popup=<<qualify \"$:/state/popup/more\">> tooltip={{$:/language/Buttons/More/Hint}} aria-label={{$:/language/Buttons/More/Caption}} class=<<tv-config-toolbar-class>> selectedClass=\"tc-selected\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/down-arrow}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/More/Caption}}/></span>\n</$list>\n</$button><$reveal state=<<qualify \"$:/state/popup/more\">> type=\"popup\" position=\"below\" animate=\"yes\">\n\n<div class=\"tc-drop-down\">\n\n<$set name=\"tv-config-toolbar-icons\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-text\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-class\" value=\"tc-btn-invisible\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/ViewToolbar]!has[draft.of]] -[[$:/core/ui/Buttons/more-tiddler-actions]]\" variable=\"listItem\">\n\n<$reveal type=\"match\" state=<<config-title>> text=\"hide\">\n\n<$transclude tiddler=<<listItem>> mode=\"inline\"/>\n\n</$reveal>\n\n</$list>\n\n</$set>\n\n</$set>\n\n</$set>\n\n</div>\n\n</$reveal>"
},
"$:/core/ui/Buttons/new-here": {
"title": "$:/core/ui/Buttons/new-here",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/new-here-button}} {{$:/language/Buttons/NewHere/Caption}}",
"description": "{{$:/language/Buttons/NewHere/Hint}}",
"text": "\\define newHereButtonTags()\n[[$(currentTiddler)$]]\n\\end\n\\define newHereButton()\n<$button tooltip={{$:/language/Buttons/NewHere/Hint}} aria-label={{$:/language/Buttons/NewHere/Caption}} class=<<tv-config-toolbar-class>>>\n<$action-sendmessage $message=\"tm-new-tiddler\" tags=<<newHereButtonTags>>/>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/new-here-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/NewHere/Caption}}/></span>\n</$list>\n</$button>\n\\end\n<<newHereButton>>"
},
"$:/core/ui/Buttons/new-journal-here": {
"title": "$:/core/ui/Buttons/new-journal-here",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/new-journal-button}} {{$:/language/Buttons/NewJournalHere/Caption}}",
"description": "{{$:/language/Buttons/NewJournalHere/Hint}}",
"text": "\\define journalButtonTags()\n[[$(currentTiddlerTag)$]] $(journalTags)$\n\\end\n\\define journalButton()\n<$button tooltip={{$:/language/Buttons/NewJournalHere/Hint}} aria-label={{$:/language/Buttons/NewJournalHere/Caption}} class=<<tv-config-toolbar-class>>>\n<$action-sendmessage $message=\"tm-new-tiddler\" title=<<now \"$(journalTitleTemplate)$\">> tags=<<journalButtonTags>>/>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/new-journal-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/NewJournalHere/Caption}}/></span>\n</$list>\n</$button>\n\\end\n<$set name=\"journalTitleTemplate\" value={{$:/config/NewJournal/Title}}>\n<$set name=\"journalTags\" value={{$:/config/NewJournal/Tags}}>\n<$set name=\"currentTiddlerTag\" value=<<currentTiddler>>>\n<<journalButton>>\n</$set></$set></$set>"
},
"$:/core/ui/Buttons/open-window": {
"title": "$:/core/ui/Buttons/open-window",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/open-window}} {{$:/language/Buttons/OpenWindow/Caption}}",
"description": "{{$:/language/Buttons/OpenWindow/Hint}}",
"text": "<$button message=\"tm-open-window\" tooltip={{$:/language/Buttons/OpenWindow/Hint}} aria-label={{$:/language/Buttons/OpenWindow/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/open-window}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/OpenWindow/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/permalink": {
"title": "$:/core/ui/Buttons/permalink",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/permalink-button}} {{$:/language/Buttons/Permalink/Caption}}",
"description": "{{$:/language/Buttons/Permalink/Hint}}",
"text": "<$button message=\"tm-permalink\" tooltip={{$:/language/Buttons/Permalink/Hint}} aria-label={{$:/language/Buttons/Permalink/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/permalink-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Permalink/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/permaview": {
"title": "$:/core/ui/Buttons/permaview",
"tags": "$:/tags/ViewToolbar $:/tags/PageControls",
"caption": "{{$:/core/images/permaview-button}} {{$:/language/Buttons/Permaview/Caption}}",
"description": "{{$:/language/Buttons/Permaview/Hint}}",
"text": "<$button message=\"tm-permaview\" tooltip={{$:/language/Buttons/Permaview/Hint}} aria-label={{$:/language/Buttons/Permaview/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/permaview-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Permaview/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/temp/advancedsearch": {
"title": "$:/temp/advancedsearch",
"text": ""
},
"$:/snippets/allfields": {
"title": "$:/snippets/allfields",
"text": "\\define renderfield(title)\n<tr class=\"tc-view-field\"><td class=\"tc-view-field-name\">''$title$'':</td><td class=\"tc-view-field-value\">//{{$:/language/Docs/Fields/$title$}}//</td></tr>\n\\end\n<table class=\"tc-view-field-table\"><tbody><$list filter=\"[fields[]sort[title]]\" variable=\"listItem\"><$macrocall $name=\"renderfield\" title=<<listItem>>/></$list>\n</tbody></table>\n"
},
"$:/config/AnimationDuration": {
"title": "$:/config/AnimationDuration",
"text": "400"
},
"$:/config/AutoSave": {
"title": "$:/config/AutoSave",
"text": "yes"
},
"$:/config/BitmapEditor/Colour": {
"title": "$:/config/BitmapEditor/Colour",
"text": "#444"
},
"$:/config/BitmapEditor/ImageSizes": {
"title": "$:/config/BitmapEditor/ImageSizes",
"text": "[[62px 100px]] [[100px 62px]] [[124px 200px]] [[200px 124px]] [[248px 400px]] [[371px 600px]] [[400px 248px]] [[556px 900px]] [[600px 371px]] [[742px 1200px]] [[900px 556px]] [[1200px 742px]]"
},
"$:/config/BitmapEditor/LineWidth": {
"title": "$:/config/BitmapEditor/LineWidth",
"text": "3px"
},
"$:/config/BitmapEditor/LineWidths": {
"title": "$:/config/BitmapEditor/LineWidths",
"text": "0.25px 0.5px 1px 2px 3px 4px 6px 8px 10px 16px 20px 28px 40px 56px 80px"
},
"$:/config/BitmapEditor/Opacities": {
"title": "$:/config/BitmapEditor/Opacities",
"text": "0.01 0.025 0.05 0.075 0.1 0.15 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 1.0"
},
"$:/config/BitmapEditor/Opacity": {
"title": "$:/config/BitmapEditor/Opacity",
"text": "1.0"
},
"$:/config/DefaultSidebarTab": {
"title": "$:/config/DefaultSidebarTab",
"text": "$:/core/ui/SideBar/Open"
},
"$:/config/Drafts/TypingTimeout": {
"title": "$:/config/Drafts/TypingTimeout",
"text": "400"
},
"$:/config/EditorToolbarButtons/Visibility/$:/core/ui/EditorToolbar/heading-4": {
"title": "$:/config/EditorToolbarButtons/Visibility/$:/core/ui/EditorToolbar/heading-4",
"text": "hide"
},
"$:/config/EditorToolbarButtons/Visibility/$:/core/ui/EditorToolbar/heading-5": {
"title": "$:/config/EditorToolbarButtons/Visibility/$:/core/ui/EditorToolbar/heading-5",
"text": "hide"
},
"$:/config/EditorToolbarButtons/Visibility/$:/core/ui/EditorToolbar/heading-6": {
"title": "$:/config/EditorToolbarButtons/Visibility/$:/core/ui/EditorToolbar/heading-6",
"text": "hide"
},
"$:/config/EditorTypeMappings/image/gif": {
"title": "$:/config/EditorTypeMappings/image/gif",
"text": "bitmap"
},
"$:/config/EditorTypeMappings/image/jpeg": {
"title": "$:/config/EditorTypeMappings/image/jpeg",
"text": "bitmap"
},
"$:/config/EditorTypeMappings/image/jpg": {
"title": "$:/config/EditorTypeMappings/image/jpg",
"text": "bitmap"
},
"$:/config/EditorTypeMappings/image/png": {
"title": "$:/config/EditorTypeMappings/image/png",
"text": "bitmap"
},
"$:/config/EditorTypeMappings/image/x-icon": {
"title": "$:/config/EditorTypeMappings/image/x-icon",
"text": "bitmap"
},
"$:/config/EditorTypeMappings/text/vnd.tiddlywiki": {
"title": "$:/config/EditorTypeMappings/text/vnd.tiddlywiki",
"text": "text"
},
"$:/config/EditTemplateFields/Visibility/title": {
"title": "$:/config/EditTemplateFields/Visibility/title",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/tags": {
"title": "$:/config/EditTemplateFields/Visibility/tags",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/text": {
"title": "$:/config/EditTemplateFields/Visibility/text",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/creator": {
"title": "$:/config/EditTemplateFields/Visibility/creator",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/created": {
"title": "$:/config/EditTemplateFields/Visibility/created",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/modified": {
"title": "$:/config/EditTemplateFields/Visibility/modified",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/modifier": {
"title": "$:/config/EditTemplateFields/Visibility/modifier",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/type": {
"title": "$:/config/EditTemplateFields/Visibility/type",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/draft.title": {
"title": "$:/config/EditTemplateFields/Visibility/draft.title",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/draft.of": {
"title": "$:/config/EditTemplateFields/Visibility/draft.of",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/revision": {
"title": "$:/config/EditTemplateFields/Visibility/revision",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/bag": {
"title": "$:/config/EditTemplateFields/Visibility/bag",
"text": "hide"
},
"$:/config/MissingLinks": {
"title": "$:/config/MissingLinks",
"text": "yes"
},
"$:/config/Navigation/UpdateAddressBar": {
"title": "$:/config/Navigation/UpdateAddressBar",
"text": "no"
},
"$:/config/Navigation/UpdateHistory": {
"title": "$:/config/Navigation/UpdateHistory",
"text": "no"
},
"$:/config/OfficialPluginLibrary": {
"title": "$:/config/OfficialPluginLibrary",
"tags": "$:/tags/PluginLibrary",
"url": "http://tiddlywiki.com/library/v5.1.13/index.html",
"caption": "{{$:/language/OfficialPluginLibrary}}",
"text": "{{$:/language/OfficialPluginLibrary/Hint}}\n"
},
"$:/config/Navigation/openLinkFromInsideRiver": {
"title": "$:/config/Navigation/openLinkFromInsideRiver",
"text": "below"
},
"$:/config/Navigation/openLinkFromOutsideRiver": {
"title": "$:/config/Navigation/openLinkFromOutsideRiver",
"text": "top"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/advanced-search": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/advanced-search",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/close-all": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/close-all",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/encryption": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/encryption",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/export-page": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/export-page",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/fold-all": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/fold-all",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/full-screen": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/full-screen",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/home": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/home",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/refresh": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/refresh",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/import": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/import",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/language": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/language",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/tag-manager": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/tag-manager",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/more-page-actions": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/more-page-actions",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/new-journal": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/new-journal",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/new-image": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/new-image",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/palette": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/palette",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/permaview": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/permaview",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/storyview": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/storyview",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/theme": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/theme",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/unfold-all": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/unfold-all",
"text": "hide"
},
"$:/config/Performance/Instrumentation": {
"title": "$:/config/Performance/Instrumentation",
"text": "no"
},
"$:/config/SaverFilter": {
"title": "$:/config/SaverFilter",
"text": "[all[]] -[[$:/HistoryList]] -[[$:/StoryList]] -[[$:/Import]] -[[$:/isEncrypted]] -[[$:/UploadName]] -[prefix[$:/state/]] -[prefix[$:/temp/]]"
},
"$:/config/SaveWikiButton/Template": {
"title": "$:/config/SaveWikiButton/Template",
"text": "$:/core/save/all"
},
"$:/config/Search/AutoFocus": {
"title": "$:/config/Search/AutoFocus",
"text": "true"
},
"$:/config/SearchResults/Default": {
"title": "$:/config/SearchResults/Default",
"text": "$:/core/ui/DefaultSearchResultList"
},
"$:/config/ShortcutInfo/bold": {
"title": "$:/config/ShortcutInfo/bold",
"text": "{{$:/language/Buttons/Bold/Hint}}"
},
"$:/config/ShortcutInfo/cancel-edit-tiddler": {
"title": "$:/config/ShortcutInfo/cancel-edit-tiddler",
"text": "{{$:/language/Buttons/Cancel/Hint}}"
},
"$:/config/ShortcutInfo/excise": {
"title": "$:/config/ShortcutInfo/excise",
"text": "{{$:/language/Buttons/Excise/Hint}}"
},
"$:/config/ShortcutInfo/heading-1": {
"title": "$:/config/ShortcutInfo/heading-1",
"text": "{{$:/language/Buttons/Heading1/Hint}}"
},
"$:/config/ShortcutInfo/heading-2": {
"title": "$:/config/ShortcutInfo/heading-2",
"text": "{{$:/language/Buttons/Heading2/Hint}}"
},
"$:/config/ShortcutInfo/heading-3": {
"title": "$:/config/ShortcutInfo/heading-3",
"text": "{{$:/language/Buttons/Heading3/Hint}}"
},
"$:/config/ShortcutInfo/heading-4": {
"title": "$:/config/ShortcutInfo/heading-4",
"text": "{{$:/language/Buttons/Heading4/Hint}}"
},
"$:/config/ShortcutInfo/heading-5": {
"title": "$:/config/ShortcutInfo/heading-5",
"text": "{{$:/language/Buttons/Heading5/Hint}}"
},
"$:/config/ShortcutInfo/heading-6": {
"title": "$:/config/ShortcutInfo/heading-6",
"text": "{{$:/language/Buttons/Heading6/Hint}}"
},
"$:/config/ShortcutInfo/italic": {
"title": "$:/config/ShortcutInfo/italic",
"text": "{{$:/language/Buttons/Italic/Hint}}"
},
"$:/config/ShortcutInfo/link": {
"title": "$:/config/ShortcutInfo/link",
"text": "{{$:/language/Buttons/Link/Hint}}"
},
"$:/config/ShortcutInfo/list-bullet": {
"title": "$:/config/ShortcutInfo/list-bullet",
"text": "{{$:/language/Buttons/ListBullet/Hint}}"
},
"$:/config/ShortcutInfo/list-number": {
"title": "$:/config/ShortcutInfo/list-number",
"text": "{{$:/language/Buttons/ListNumber/Hint}}"
},
"$:/config/ShortcutInfo/mono-block": {
"title": "$:/config/ShortcutInfo/mono-block",
"text": "{{$:/language/Buttons/MonoBlock/Hint}}"
},
"$:/config/ShortcutInfo/mono-line": {
"title": "$:/config/ShortcutInfo/mono-line",
"text": "{{$:/language/Buttons/MonoLine/Hint}}"
},
"$:/config/ShortcutInfo/picture": {
"title": "$:/config/ShortcutInfo/picture",
"text": "{{$:/language/Buttons/Picture/Hint}}"
},
"$:/config/ShortcutInfo/preview": {
"title": "$:/config/ShortcutInfo/preview",
"text": "{{$:/language/Buttons/Preview/Hint}}"
},
"$:/config/ShortcutInfo/quote": {
"title": "$:/config/ShortcutInfo/quote",
"text": "{{$:/language/Buttons/Quote/Hint}}"
},
"$:/config/ShortcutInfo/save-tiddler": {
"title": "$:/config/ShortcutInfo/save-tiddler",
"text": "{{$:/language/Buttons/Save/Hint}}"
},
"$:/config/ShortcutInfo/stamp": {
"title": "$:/config/ShortcutInfo/stamp",
"text": "{{$:/language/Buttons/Stamp/Hint}}"
},
"$:/config/ShortcutInfo/strikethrough": {
"title": "$:/config/ShortcutInfo/strikethrough",
"text": "{{$:/language/Buttons/Strikethrough/Hint}}"
},
"$:/config/ShortcutInfo/subscript": {
"title": "$:/config/ShortcutInfo/subscript",
"text": "{{$:/language/Buttons/Subscript/Hint}}"
},
"$:/config/ShortcutInfo/superscript": {
"title": "$:/config/ShortcutInfo/superscript",
"text": "{{$:/language/Buttons/Superscript/Hint}}"
},
"$:/config/ShortcutInfo/underline": {
"title": "$:/config/ShortcutInfo/underline",
"text": "{{$:/language/Buttons/Underline/Hint}}"
},
"$:/config/shortcuts-mac/bold": {
"title": "$:/config/shortcuts-mac/bold",
"text": "meta-B"
},
"$:/config/shortcuts-mac/italic": {
"title": "$:/config/shortcuts-mac/italic",
"text": "meta-I"
},
"$:/config/shortcuts-mac/underline": {
"title": "$:/config/shortcuts-mac/underline",
"text": "meta-U"
},
"$:/config/shortcuts-not-mac/bold": {
"title": "$:/config/shortcuts-not-mac/bold",
"text": "ctrl-B"
},
"$:/config/shortcuts-not-mac/italic": {
"title": "$:/config/shortcuts-not-mac/italic",
"text": "ctrl-I"
},
"$:/config/shortcuts-not-mac/underline": {
"title": "$:/config/shortcuts-not-mac/underline",
"text": "ctrl-U"
},
"$:/config/shortcuts/cancel-edit-tiddler": {
"title": "$:/config/shortcuts/cancel-edit-tiddler",
"text": "escape"
},
"$:/config/shortcuts/excise": {
"title": "$:/config/shortcuts/excise",
"text": "ctrl-E"
},
"$:/config/shortcuts/heading-1": {
"title": "$:/config/shortcuts/heading-1",
"text": "ctrl-1"
},
"$:/config/shortcuts/heading-2": {
"title": "$:/config/shortcuts/heading-2",
"text": "ctrl-2"
},
"$:/config/shortcuts/heading-3": {
"title": "$:/config/shortcuts/heading-3",
"text": "ctrl-3"
},
"$:/config/shortcuts/heading-4": {
"title": "$:/config/shortcuts/heading-4",
"text": "ctrl-4"
},
"$:/config/shortcuts/heading-5": {
"title": "$:/config/shortcuts/heading-5",
"text": "ctrl-5"
},
"$:/config/shortcuts/heading-6": {
"title": "$:/config/shortcuts/heading-6",
"text": "ctrl-6"
},
"$:/config/shortcuts/link": {
"title": "$:/config/shortcuts/link",
"text": "ctrl-L"
},
"$:/config/shortcuts/list-bullet": {
"title": "$:/config/shortcuts/list-bullet",
"text": "ctrl-shift-L"
},
"$:/config/shortcuts/list-number": {
"title": "$:/config/shortcuts/list-number",
"text": "ctrl-shift-N"
},
"$:/config/shortcuts/mono-block": {
"title": "$:/config/shortcuts/mono-block",
"text": "ctrl-shift-M"
},
"$:/config/shortcuts/mono-line": {
"title": "$:/config/shortcuts/mono-line",
"text": "ctrl-M"
},
"$:/config/shortcuts/picture": {
"title": "$:/config/shortcuts/picture",
"text": "ctrl-shift-I"
},
"$:/config/shortcuts/preview": {
"title": "$:/config/shortcuts/preview",
"text": "alt-P"
},
"$:/config/shortcuts/quote": {
"title": "$:/config/shortcuts/quote",
"text": "ctrl-Q"
},
"$:/config/shortcuts/save-tiddler": {
"title": "$:/config/shortcuts/save-tiddler",
"text": "ctrl+enter"
},
"$:/config/shortcuts/stamp": {
"title": "$:/config/shortcuts/stamp",
"text": "ctrl-S"
},
"$:/config/shortcuts/strikethrough": {
"title": "$:/config/shortcuts/strikethrough",
"text": "ctrl-T"
},
"$:/config/shortcuts/subscript": {
"title": "$:/config/shortcuts/subscript",
"text": "ctrl-shift-B"
},
"$:/config/shortcuts/superscript": {
"title": "$:/config/shortcuts/superscript",
"text": "ctrl-shift-P"
},
"$:/config/SyncFilter": {
"title": "$:/config/SyncFilter",
"text": "[is[tiddler]] -[[$:/HistoryList]] -[[$:/Import]] -[[$:/isEncrypted]] -[prefix[$:/status/]] -[prefix[$:/state/]] -[prefix[$:/temp/]]"
},
"$:/config/TextEditor/EditorHeight/Height": {
"title": "$:/config/TextEditor/EditorHeight/Height",
"text": "400px"
},
"$:/config/TextEditor/EditorHeight/Mode": {
"title": "$:/config/TextEditor/EditorHeight/Mode",
"text": "auto"
},
"$:/config/TiddlerInfo/Default": {
"title": "$:/config/TiddlerInfo/Default",
"text": "$:/core/ui/TiddlerInfo/Fields"
},
"$:/config/Tiddlers/TitleLinks": {
"title": "$:/config/Tiddlers/TitleLinks",
"text": "no"
},
"$:/config/Toolbar/ButtonClass": {
"title": "$:/config/Toolbar/ButtonClass",
"text": "tc-btn-invisible"
},
"$:/config/Toolbar/Icons": {
"title": "$:/config/Toolbar/Icons",
"text": "yes"
},
"$:/config/Toolbar/Text": {
"title": "$:/config/Toolbar/Text",
"text": "no"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/clone": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/clone",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/close-others": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/close-others",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/export-tiddler": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/export-tiddler",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/info": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/info",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/more-tiddler-actions": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/more-tiddler-actions",
"text": "show"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/new-here": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/new-here",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/new-journal-here": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/new-journal-here",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/open-window": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/open-window",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/permalink": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/permalink",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/permaview": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/permaview",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/delete": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/delete",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/fold": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/fold",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/fold-bar": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/fold-bar",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/fold-others": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/fold-others",
"text": "hide"
},
"$:/config/WikiParserRules/Inline/wikilink": {
"title": "$:/config/WikiParserRules/Inline/wikilink",
"text": "enable"
},
"$:/snippets/currpalettepreview": {
"title": "$:/snippets/currpalettepreview",
"text": "\\define swatchStyle()\nbackground-color: $(swatchColour)$;\n\\end\n\\define swatch(colour)\n<$set name=\"swatchColour\" value={{##$colour$}}>\n<div class=\"tc-swatch\" style=<<swatchStyle>>/>\n</$set>\n\\end\n<div class=\"tc-swatches-horiz\">\n<<swatch foreground>>\n<<swatch background>>\n<<swatch muted-foreground>>\n<<swatch primary>>\n<<swatch page-background>>\n<<swatch tab-background>>\n<<swatch tiddler-info-background>>\n</div>\n"
},
"$:/DefaultTiddlers": {
"title": "$:/DefaultTiddlers",
"text": "GettingStarted\n"
},
"$:/snippets/download-wiki-button": {
"title": "$:/snippets/download-wiki-button",
"text": "\\define lingo-base() $:/language/ControlPanel/Tools/Download/\n<$button class=\"tc-btn-big-green\">\n<$action-sendmessage $message=\"tm-download-file\" $param=\"$:/core/save/all\" filename=\"index.html\"/>\n<<lingo Full/Caption>> {{$:/core/images/save-button}}\n</$button>"
},
"$:/language": {
"title": "$:/language",
"text": "$:/languages/en-GB"
},
"$:/snippets/languageswitcher": {
"title": "$:/snippets/languageswitcher",
"text": "{{$:/language/ControlPanel/Basics/Language/Prompt}} <$select tiddler=\"$:/language\">\n<$list filter=\"[[$:/languages/en-GB]] [plugin-type[language]sort[description]]\">\n<option value=<<currentTiddler>>><$view field=\"description\"><$view field=\"name\"><$view field=\"title\"/></$view></$view></option>\n</$list>\n</$select>"
},
"$:/core/macros/colour-picker": {
"title": "$:/core/macros/colour-picker",
"tags": "$:/tags/Macro",
"text": "\\define colour-picker-update-recent()\n<$action-listops\n\t$tiddler=\"$:/config/ColourPicker/Recent\"\n\t$subfilter=\"$(colour-picker-value)$ [list[$:/config/ColourPicker/Recent]remove[$(colour-picker-value)$]] +[limit[8]]\"\n/>\n\\end\n\n\\define colour-picker-inner(actions)\n<$button tag=\"a\" tooltip=\"\"\"$(colour-picker-value)$\"\"\">\n\n$(colour-picker-update-recent)$\n\n$actions$\n\n<div style=\"background-color: $(colour-picker-value)$; width: 100%; height: 100%; border-radius: 50%;\"/>\n\n</$button>\n\\end\n\n\\define colour-picker-recent-inner(actions)\n<$set name=\"colour-picker-value\" value=\"$(recentColour)$\">\n<$macrocall $name=\"colour-picker-inner\" actions=\"\"\"$actions$\"\"\"/>\n</$set>\n\\end\n\n\\define colour-picker-recent(actions)\n{{$:/language/ColourPicker/Recent}} <$list filter=\"[list[$:/config/ColourPicker/Recent]]\" variable=\"recentColour\">\n<$macrocall $name=\"colour-picker-recent-inner\" actions=\"\"\"$actions$\"\"\"/></$list>\n\\end\n\n\\define colour-picker(actions)\n<div class=\"tc-colour-chooser\">\n\n<$macrocall $name=\"colour-picker-recent\" actions=\"\"\"$actions$\"\"\"/>\n\n---\n\n<$list filter=\"LightPink Pink Crimson LavenderBlush PaleVioletRed HotPink DeepPink MediumVioletRed Orchid Thistle Plum Violet Magenta Fuchsia DarkMagenta Purple MediumOrchid DarkViolet DarkOrchid Indigo BlueViolet MediumPurple MediumSlateBlue SlateBlue DarkSlateBlue Lavender GhostWhite Blue MediumBlue MidnightBlue DarkBlue Navy RoyalBlue CornflowerBlue LightSteelBlue LightSlateGrey SlateGrey DodgerBlue AliceBlue SteelBlue LightSkyBlue SkyBlue DeepSkyBlue LightBlue PowderBlue CadetBlue Azure LightCyan PaleTurquoise Cyan Aqua DarkTurquoise DarkSlateGrey DarkCyan Teal MediumTurquoise LightSeaGreen Turquoise Aquamarine MediumAquamarine MediumSpringGreen MintCream SpringGreen MediumSeaGreen SeaGreen Honeydew LightGreen PaleGreen DarkSeaGreen LimeGreen Lime ForestGreen Green DarkGreen Chartreuse LawnGreen GreenYellow DarkOliveGreen 
YellowGreen OliveDrab Beige LightGoldenrodYellow Ivory LightYellow Yellow Olive DarkKhaki LemonChiffon PaleGoldenrod Khaki Gold Cornsilk Goldenrod DarkGoldenrod FloralWhite OldLace Wheat Moccasin Orange PapayaWhip BlanchedAlmond NavajoWhite AntiqueWhite Tan BurlyWood Bisque DarkOrange Linen Peru PeachPuff SandyBrown Chocolate SaddleBrown Seashell Sienna LightSalmon Coral OrangeRed DarkSalmon Tomato MistyRose Salmon Snow LightCoral RosyBrown IndianRed Red Brown FireBrick DarkRed Maroon White WhiteSmoke Gainsboro LightGrey Silver DarkGrey Grey DimGrey Black\" variable=\"colour-picker-value\">\n<$macrocall $name=\"colour-picker-inner\" actions=\"\"\"$actions$\"\"\"/>\n</$list>\n\n---\n\n<$edit-text tiddler=\"$:/config/ColourPicker/New\" tag=\"input\" default=\"\" placeholder=\"\"/> \n<$edit-text tiddler=\"$:/config/ColourPicker/New\" type=\"color\" tag=\"input\"/>\n<$set name=\"colour-picker-value\" value={{$:/config/ColourPicker/New}}>\n<$macrocall $name=\"colour-picker-inner\" actions=\"\"\"$actions$\"\"\"/>\n</$set>\n\n</div>\n\n\\end\n"
},
"$:/core/macros/CSS": {
"title": "$:/core/macros/CSS",
"tags": "$:/tags/Macro",
"text": "\\define colour(name)\n<$transclude tiddler={{$:/palette}} index=\"$name$\"><$transclude tiddler=\"$:/palettes/Vanilla\" index=\"$name$\"/></$transclude>\n\\end\n\n\\define color(name)\n<<colour $name$>>\n\\end\n\n\\define box-shadow(shadow)\n``\n -webkit-box-shadow: $shadow$;\n -moz-box-shadow: $shadow$;\n box-shadow: $shadow$;\n``\n\\end\n\n\\define filter(filter)\n``\n -webkit-filter: $filter$;\n -moz-filter: $filter$;\n filter: $filter$;\n``\n\\end\n\n\\define transition(transition)\n``\n -webkit-transition: $transition$;\n -moz-transition: $transition$;\n transition: $transition$;\n``\n\\end\n\n\\define transform-origin(origin)\n``\n -webkit-transform-origin: $origin$;\n -moz-transform-origin: $origin$;\n transform-origin: $origin$;\n``\n\\end\n\n\\define background-linear-gradient(gradient)\n``\nbackground-image: linear-gradient($gradient$);\nbackground-image: -o-linear-gradient($gradient$);\nbackground-image: -moz-linear-gradient($gradient$);\nbackground-image: -webkit-linear-gradient($gradient$);\nbackground-image: -ms-linear-gradient($gradient$);\n``\n\\end\n\n\\define datauri(title)\n<$macrocall $name=\"makedatauri\" type={{$title$!!type}} text={{$title$}}/>\n\\end\n\n\\define if-sidebar(text)\n<$reveal state=\"$:/state/sidebar\" type=\"match\" text=\"yes\" default=\"yes\">$text$</$reveal>\n\\end\n\n\\define if-no-sidebar(text)\n<$reveal state=\"$:/state/sidebar\" type=\"nomatch\" text=\"yes\" default=\"yes\">$text$</$reveal>\n\\end\n"
},
"$:/core/macros/export": {
"title": "$:/core/macros/export",
"tags": "$:/tags/Macro",
"text": "\\define exportButtonFilename(baseFilename)\n$baseFilename$$(extension)$\n\\end\n\n\\define exportButton(exportFilter:\"[!is[system]sort[title]]\",lingoBase,baseFilename:\"tiddlers\")\n<span class=\"tc-popup-keep\">\n<$button popup=<<qualify \"$:/state/popup/export\">> tooltip={{$lingoBase$Hint}} aria-label={{$lingoBase$Caption}} class=<<tv-config-toolbar-class>> selectedClass=\"tc-selected\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/export-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$lingoBase$Caption}}/></span>\n</$list>\n</$button>\n</span>\n<$reveal state=<<qualify \"$:/state/popup/export\">> type=\"popup\" position=\"below\" animate=\"yes\">\n<div class=\"tc-drop-down\">\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/Exporter]]\">\n<$set name=\"extension\" value={{!!extension}}>\n<$button class=\"tc-btn-invisible\">\n<$action-sendmessage $message=\"tm-download-file\" $param=<<currentTiddler>> exportFilter=\"\"\"$exportFilter$\"\"\" filename=<<exportButtonFilename \"\"\"$baseFilename$\"\"\">>/>\n<$action-deletetiddler $tiddler=<<qualify \"$:/state/popup/export\">>/>\n<$transclude field=\"description\"/>\n</$button>\n</$set>\n</$list>\n</div>\n</$reveal>\n\\end\n"
},
"$:/core/macros/image-picker": {
"title": "$:/core/macros/image-picker",
"tags": "$:/tags/Macro",
"text": "\\define image-picker-inner(actions)\n<$button tag=\"a\" tooltip=\"\"\"$(imageTitle)$\"\"\">\n\n$actions$\n\n<$transclude tiddler=<<imageTitle>>/>\n\n</$button>\n\\end\n\n\\define image-picker(actions,subfilter:\"\")\n<div class=\"tc-image-chooser\">\n\n<$list filter=\"[all[shadows+tiddlers]is[image]$subfilter$!has[draft.of]] -[type[application/pdf]] +[sort[title]]\" variable=\"imageTitle\">\n\n<$macrocall $name=\"image-picker-inner\" actions=\"\"\"$actions$\"\"\"/>\n\n</$list>\n\n</div>\n\n\\end\n\n"
},
"$:/core/macros/lingo": {
"title": "$:/core/macros/lingo",
"tags": "$:/tags/Macro",
"text": "\\define lingo-base()\n$:/language/\n\\end\n\n\\define lingo(title)\n{{$(lingo-base)$$title$}}\n\\end\n"
},
"$:/core/macros/list": {
"title": "$:/core/macros/list",
"tags": "$:/tags/Macro",
"text": "\\define list-links(filter,type:\"ul\",subtype:\"li\",class:\"\")\n<$type$ class=\"$class$\">\n<$list filter=\"$filter$\">\n<$subtype$>\n<$link to={{!!title}}>\n<$transclude field=\"caption\">\n<$view field=\"title\"/>\n</$transclude>\n</$link>\n</$subtype$>\n</$list>\n</$type$>\n\\end\n"
},
"$:/core/macros/tabs": {
"title": "$:/core/macros/tabs",
"tags": "$:/tags/Macro",
"text": "\\define tabs(tabsList,default,state:\"$:/state/tab\",class,template)\n<div class=\"tc-tab-set $class$\">\n<div class=\"tc-tab-buttons $class$\">\n<$list filter=\"$tabsList$\" variable=\"currentTab\"><$set name=\"save-currentTiddler\" value=<<currentTiddler>>><$tiddler tiddler=<<currentTab>>><$button set=<<qualify \"$state$\">> setTo=<<currentTab>> default=\"$default$\" selectedClass=\"tc-tab-selected\" tooltip={{!!tooltip}}>\n<$tiddler tiddler=<<save-currentTiddler>>>\n<$set name=\"tv-wikilinks\" value=\"no\">\n<$transclude tiddler=<<currentTab>> field=\"caption\">\n<$macrocall $name=\"currentTab\" $type=\"text/plain\" $output=\"text/plain\"/>\n</$transclude>\n</$set></$tiddler></$button></$tiddler></$set></$list>\n</div>\n<div class=\"tc-tab-divider $class$\"/>\n<div class=\"tc-tab-content $class$\">\n<$list filter=\"$tabsList$\" variable=\"currentTab\">\n\n<$reveal type=\"match\" state=<<qualify \"$state$\">> text=<<currentTab>> default=\"$default$\">\n\n<$transclude tiddler=\"$template$\" mode=\"block\">\n\n<$transclude tiddler=<<currentTab>> mode=\"block\"/>\n\n</$transclude>\n\n</$reveal>\n\n</$list>\n</div>\n</div>\n\\end\n"
},
"$:/core/macros/tag": {
"title": "$:/core/macros/tag",
"tags": "$:/tags/Macro",
"text": "\\define tag(tag)\n{{$tag$||$:/core/ui/TagTemplate}}\n\\end\n"
},
"$:/core/macros/thumbnails": {
"title": "$:/core/macros/thumbnails",
"tags": "$:/tags/Macro",
"text": "\\define thumbnail(link,icon,color,background-color,image,caption,width:\"280\",height:\"157\")\n<$link to=\"\"\"$link$\"\"\"><div class=\"tc-thumbnail-wrapper\">\n<div class=\"tc-thumbnail-image\" style=\"width:$width$px;height:$height$px;\"><$reveal type=\"nomatch\" text=\"\" default=\"\"\"$image$\"\"\" tag=\"div\" style=\"width:$width$px;height:$height$px;\">\n[img[$image$]]\n</$reveal><$reveal type=\"match\" text=\"\" default=\"\"\"$image$\"\"\" tag=\"div\" class=\"tc-thumbnail-background\" style=\"width:$width$px;height:$height$px;background-color:$background-color$;\"></$reveal></div><div class=\"tc-thumbnail-icon\" style=\"fill:$color$;color:$color$;\">\n$icon$\n</div><div class=\"tc-thumbnail-caption\">\n$caption$\n</div>\n</div></$link>\n\\end\n\n\\define thumbnail-right(link,icon,color,background-color,image,caption,width:\"280\",height:\"157\")\n<div class=\"tc-thumbnail-right-wrapper\"><<thumbnail \"\"\"$link$\"\"\" \"\"\"$icon$\"\"\" \"\"\"$color$\"\"\" \"\"\"$background-color$\"\"\" \"\"\"$image$\"\"\" \"\"\"$caption$\"\"\" \"\"\"$width$\"\"\" \"\"\"$height$\"\"\">></div>\n\\end\n\n\\define list-thumbnails(filter,width:\"280\",height:\"157\")\n<$list filter=\"\"\"$filter$\"\"\"><$macrocall $name=\"thumbnail\" link={{!!link}} icon={{!!icon}} color={{!!color}} background-color={{!!background-color}} image={{!!image}} caption={{!!caption}} width=\"\"\"$width$\"\"\" height=\"\"\"$height$\"\"\"/></$list>\n\\end\n"
},
"$:/core/macros/timeline": {
"created": "20141212105914482",
"modified": "20141212110330815",
"tags": "$:/tags/Macro",
"title": "$:/core/macros/timeline",
"type": "text/vnd.tiddlywiki",
"text": "\\define timeline-title()\n<!-- Override this macro with a global macro \n of the same name if you need to change \n how titles are displayed on the timeline \n -->\n<$view field=\"title\"/>\n\\end\n\\define timeline(limit:\"100\",format:\"DDth MMM YYYY\",subfilter:\"\",dateField:\"modified\")\n<div class=\"tc-timeline\">\n<$list filter=\"[!is[system]$subfilter$has[$dateField$]!sort[$dateField$]limit[$limit$]eachday[$dateField$]]\">\n<div class=\"tc-menu-list-item\">\n<$view field=\"$dateField$\" format=\"date\" template=\"$format$\"/>\n<$list filter=\"[sameday:$dateField${!!$dateField$}!is[system]$subfilter$!sort[$dateField$]]\">\n<div class=\"tc-menu-list-subitem\">\n<$link to={{!!title}}>\n<<timeline-title>>\n</$link>\n</div>\n</$list>\n</div>\n</$list>\n</div>\n\\end\n"
},
"$:/core/macros/toc": {
"title": "$:/core/macros/toc",
"tags": "$:/tags/Macro",
"text": "\\define toc-caption()\n<$set name=\"tv-wikilinks\" value=\"no\">\n<$transclude field=\"caption\">\n<$view field=\"title\"/>\n</$transclude>\n</$set>\n\\end\n\n\\define toc-body(rootTag,tag,sort:\"\",itemClassFilter)\n<ol class=\"tc-toc\">\n<$list filter=\"\"\"[all[shadows+tiddlers]tag[$tag$]!has[draft.of]$sort$]\"\"\">\n<$set name=\"toc-item-class\" filter=\"\"\"$itemClassFilter$\"\"\" value=\"toc-item-selected\" emptyValue=\"toc-item\">\n<li class=<<toc-item-class>>>\n<$list filter=\"[all[current]toc-link[no]]\" emptyMessage=\"<$link><$view field='caption'><$view field='title'/></$view></$link>\">\n<<toc-caption>>\n</$list>\n<$list filter=\"\"\"[all[current]] -[[$rootTag$]]\"\"\">\n<$macrocall $name=\"toc-body\" rootTag=\"\"\"$rootTag$\"\"\" tag=<<currentTiddler>> sort=\"\"\"$sort$\"\"\" itemClassFilter=\"\"\"$itemClassFilter$\"\"\"/>\n</$list>\n</li>\n</$set>\n</$list>\n</ol>\n\\end\n\n\\define toc(tag,sort:\"\",itemClassFilter)\n<<toc-body rootTag:\"\"\"$tag$\"\"\" tag:\"\"\"$tag$\"\"\" sort:\"\"\"$sort$\"\"\" itemClassFilter:\"\"\"itemClassFilter\"\"\">>\n\\end\n\n\\define toc-linked-expandable-body(tag,sort:\"\",itemClassFilter)\n<$set name=\"toc-state\" value=<<qualify \"\"\"$:/state/toc/$tag$-$(currentTiddler)$\"\"\">>>\n<$set name=\"toc-item-class\" filter=\"\"\"$itemClassFilter$\"\"\" value=\"toc-item-selected\" emptyValue=\"toc-item\">\n<li class=<<toc-item-class>>>\n<$link>\n<$reveal type=\"nomatch\" state=<<toc-state>> text=\"open\">\n<$button set=<<toc-state>> setTo=\"open\" class=\"tc-btn-invisible\">\n{{$:/core/images/right-arrow}}\n</$button>\n</$reveal>\n<$reveal type=\"match\" state=<<toc-state>> text=\"open\">\n<$button set=<<toc-state>> setTo=\"close\" class=\"tc-btn-invisible\">\n{{$:/core/images/down-arrow}}\n</$button>\n</$reveal>\n<<toc-caption>>\n</$link>\n<$reveal type=\"match\" state=<<toc-state>> text=\"open\">\n<$macrocall $name=\"toc-expandable\" tag=<<currentTiddler>> sort=\"\"\"$sort$\"\"\" 
itemClassFilter=\"\"\"$itemClassFilter$\"\"\"/>\n</$reveal>\n</li>\n</$set>\n</$set>\n\\end\n\n\\define toc-unlinked-expandable-body(tag,sort:\"\",itemClassFilter)\n<$set name=\"toc-state\" value=<<qualify \"\"\"$:/state/toc/$tag$-$(currentTiddler)$\"\"\">>>\n<$set name=\"toc-item-class\" filter=\"\"\"$itemClassFilter$\"\"\" value=\"toc-item-selected\" emptyValue=\"toc-item\">\n<li class=<<toc-item-class>>>\n<$reveal type=\"nomatch\" state=<<toc-state>> text=\"open\">\n<$button set=<<toc-state>> setTo=\"open\" class=\"tc-btn-invisible\">\n{{$:/core/images/right-arrow}}\n<<toc-caption>>\n</$button>\n</$reveal>\n<$reveal type=\"match\" state=<<toc-state>> text=\"open\">\n<$button set=<<toc-state>> setTo=\"close\" class=\"tc-btn-invisible\">\n{{$:/core/images/down-arrow}}\n<<toc-caption>>\n</$button>\n</$reveal>\n<$reveal type=\"match\" state=<<toc-state>> text=\"open\">\n<$macrocall $name=\"toc-expandable\" tag=<<currentTiddler>> sort=\"\"\"$sort$\"\"\" itemClassFilter=\"\"\"$itemClassFilter$\"\"\"/>\n</$reveal>\n</li>\n</$set>\n</$set>\n\\end\n\n\\define toc-expandable-empty-message()\n<<toc-linked-expandable-body tag:\"\"\"$(tag)$\"\"\" sort:\"\"\"$(sort)$\"\"\" itemClassFilter:\"\"\"$(itemClassFilter)$\"\"\">>\n\\end\n\n\\define toc-expandable(tag,sort:\"\",itemClassFilter)\n<$vars tag=\"\"\"$tag$\"\"\" sort=\"\"\"$sort$\"\"\" itemClassFilter=\"\"\"$itemClassFilter$\"\"\">\n<ol class=\"tc-toc toc-expandable\">\n<$list filter=\"[all[shadows+tiddlers]tag[$tag$]!has[draft.of]$sort$]\">\n<$list filter=\"[all[current]toc-link[no]]\" emptyMessage=<<toc-expandable-empty-message>>>\n<<toc-unlinked-expandable-body tag:\"\"\"$tag$\"\"\" sort:\"\"\"$sort$\"\"\" itemClassFilter:\"\"\"itemClassFilter\"\"\">>\n</$list>\n</$list>\n</ol>\n</$vars>\n\\end\n\n\\define toc-linked-selective-expandable-body(tag,sort:\"\",itemClassFilter)\n<$set name=\"toc-state\" value=<<qualify \"\"\"$:/state/toc/$tag$-$(currentTiddler)$\"\"\">>>\n<$set name=\"toc-item-class\" 
filter=\"\"\"$itemClassFilter$\"\"\" value=\"toc-item-selected\" emptyValue=\"toc-item\">\n<li class=<<toc-item-class>>>\n<$link>\n<$list filter=\"[all[current]tagging[]limit[1]]\" variable=\"ignore\" emptyMessage=\"<$button class='tc-btn-invisible'>{{$:/core/images/blank}}</$button>\">\n<$reveal type=\"nomatch\" state=<<toc-state>> text=\"open\">\n<$button set=<<toc-state>> setTo=\"open\" class=\"tc-btn-invisible\">\n{{$:/core/images/right-arrow}}\n</$button>\n</$reveal>\n<$reveal type=\"match\" state=<<toc-state>> text=\"open\">\n<$button set=<<toc-state>> setTo=\"close\" class=\"tc-btn-invisible\">\n{{$:/core/images/down-arrow}}\n</$button>\n</$reveal>\n</$list>\n<<toc-caption>>\n</$link>\n<$reveal type=\"match\" state=<<toc-state>> text=\"open\">\n<$macrocall $name=\"toc-selective-expandable\" tag=<<currentTiddler>> sort=\"\"\"$sort$\"\"\" itemClassFilter=\"\"\"$itemClassFilter$\"\"\"/>\n</$reveal>\n</li>\n</$set>\n</$set>\n\\end\n\n\\define toc-unlinked-selective-expandable-body(tag,sort:\"\",itemClassFilter)\n<$set name=\"toc-state\" value=<<qualify \"\"\"$:/state/toc/$tag$-$(currentTiddler)$\"\"\">>>\n<$set name=\"toc-item-class\" filter=\"\"\"$itemClassFilter$\"\"\" value=\"toc-item-selected\" emptyValue=\"toc-item\">\n<li class=<<toc-item-class>>>\n<$list filter=\"[all[current]tagging[]limit[1]]\" variable=\"ignore\" emptyMessage=\"<$button class='tc-btn-invisible'>{{$:/core/images/blank}}</$button> <$view field='caption'><$view field='title'/></$view>\">\n<$reveal type=\"nomatch\" state=<<toc-state>> text=\"open\">\n<$button set=<<toc-state>> setTo=\"open\" class=\"tc-btn-invisible\">\n{{$:/core/images/right-arrow}}\n<<toc-caption>>\n</$button>\n</$reveal>\n<$reveal type=\"match\" state=<<toc-state>> text=\"open\">\n<$button set=<<toc-state>> setTo=\"close\" class=\"tc-btn-invisible\">\n{{$:/core/images/down-arrow}}\n<<toc-caption>>\n</$button>\n</$reveal>\n</$list>\n<$reveal type=\"match\" state=<<toc-state>> text=\"open\">\n<$macrocall 
$name=\"\"\"toc-selective-expandable\"\"\" tag=<<currentTiddler>> sort=\"\"\"$sort$\"\"\" itemClassFilter=\"\"\"$itemClassFilter$\"\"\"/>\n</$reveal>\n</li>\n</$set>\n</$set>\n\\end\n\n\\define toc-selective-expandable-empty-message()\n<<toc-linked-selective-expandable-body tag:\"\"\"$(tag)$\"\"\" sort:\"\"\"$(sort)$\"\"\" itemClassFilter:\"\"\"$(itemClassFilter)$\"\"\">>\n\\end\n\n\\define toc-selective-expandable(tag,sort:\"\",itemClassFilter)\n<$vars tag=\"\"\"$tag$\"\"\" sort=\"\"\"$sort$\"\"\" itemClassFilter=\"\"\"$itemClassFilter$\"\"\">\n<ol class=\"tc-toc toc-selective-expandable\">\n<$list filter=\"[all[shadows+tiddlers]tag[$tag$]!has[draft.of]$sort$]\">\n<$list filter=\"[all[current]toc-link[no]]\" variable=\"ignore\" emptyMessage=<<toc-selective-expandable-empty-message>>>\n<<toc-unlinked-selective-expandable-body tag:\"\"\"$tag$\"\"\" sort:\"\"\"$sort$\"\"\" itemClassFilter:\"\"\"$itemClassFilter$\"\"\">>\n</$list>\n</$list>\n</ol>\n</$vars>\n\\end\n\n\\define toc-tabbed-selected-item-filter(selectedTiddler)\n[all[current]field:title{$selectedTiddler$}]\n\\end\n\n\\define toc-tabbed-external-nav(tag,sort:\"\",selectedTiddler:\"$:/temp/toc/selectedTiddler\",unselectedText,missingText,template:\"\")\n<$tiddler tiddler={{$selectedTiddler$}}>\n<div class=\"tc-tabbed-table-of-contents\">\n<$linkcatcher to=\"$selectedTiddler$\">\n<div class=\"tc-table-of-contents\">\n<$macrocall $name=\"toc-selective-expandable\" tag=\"\"\"$tag$\"\"\" sort=\"\"\"$sort$\"\"\" itemClassFilter=<<toc-tabbed-selected-item-filter selectedTiddler:\"\"\"$selectedTiddler$\"\"\">>/>\n</div>\n</$linkcatcher>\n<div class=\"tc-tabbed-table-of-contents-content\">\n<$reveal state=\"\"\"$selectedTiddler$\"\"\" type=\"nomatch\" text=\"\">\n<$transclude mode=\"block\" tiddler=\"$template$\">\n<h1><<toc-caption>></h1>\n<$transclude mode=\"block\">$missingText$</$transclude>\n</$transclude>\n</$reveal>\n<$reveal state=\"\"\"$selectedTiddler$\"\"\" type=\"match\" 
text=\"\">\n$unselectedText$\n</$reveal>\n</div>\n</div>\n</$tiddler>\n\\end\n\n\\define toc-tabbed-internal-nav(tag,sort:\"\",selectedTiddler:\"$:/temp/toc/selectedTiddler\",unselectedText,missingText,template:\"\")\n<$linkcatcher to=\"\"\"$selectedTiddler$\"\"\">\n<$macrocall $name=\"toc-tabbed-external-nav\" tag=\"\"\"$tag$\"\"\" sort=\"\"\"$sort$\"\"\" selectedTiddler=\"\"\"$selectedTiddler$\"\"\" unselectedText=\"\"\"$unselectedText$\"\"\" missingText=\"\"\"$missingText$\"\"\" template=\"\"\"$template$\"\"\"/>\n</$linkcatcher>\n\\end\n\n"
},
"$:/core/macros/translink": {
"title": "$:/core/macros/translink",
"tags": "$:/tags/Macro",
"text": "\\define translink(title,mode:\"block\")\n<div style=\"border:1px solid #ccc; padding: 0.5em; background: black; foreground; white;\">\n<$link to=\"\"\"$title$\"\"\">\n<$text text=\"\"\"$title$\"\"\"/>\n</$link>\n<div style=\"border:1px solid #ccc; padding: 0.5em; background: white; foreground; black;\">\n<$transclude tiddler=\"\"\"$title$\"\"\" mode=\"$mode$\">\n\"<$text text=\"\"\"$title$\"\"\"/>\" is missing\n</$transclude>\n</div>\n</div>\n\\end\n"
},
"$:/snippets/minilanguageswitcher": {
"title": "$:/snippets/minilanguageswitcher",
"text": "<$select tiddler=\"$:/language\">\n<$list filter=\"[[$:/languages/en-GB]] [plugin-type[language]sort[title]]\">\n<option value=<<currentTiddler>>><$view field=\"description\"><$view field=\"name\"><$view field=\"title\"/></$view></$view></option>\n</$list>\n</$select>"
},
"$:/snippets/minithemeswitcher": {
"title": "$:/snippets/minithemeswitcher",
"text": "\\define lingo-base() $:/language/ControlPanel/Theme/\n<<lingo Prompt>> <$select tiddler=\"$:/theme\">\n<$list filter=\"[plugin-type[theme]sort[title]]\">\n<option value=<<currentTiddler>>><$view field=\"name\"><$view field=\"title\"/></$view></option>\n</$list>\n</$select>"
},
"$:/snippets/modules": {
"title": "$:/snippets/modules",
"text": "\\define describeModuleType(type)\n{{$:/language/Docs/ModuleTypes/$type$}}\n\\end\n<$list filter=\"[moduletypes[]]\">\n\n!! <$macrocall $name=\"currentTiddler\" $type=\"text/plain\" $output=\"text/plain\"/>\n\n<$macrocall $name=\"describeModuleType\" type=<<currentTiddler>>/>\n\n<ul><$list filter=\"[all[current]modules[]]\"><li><$link><<currentTiddler>></$link>\n</li>\n</$list>\n</ul>\n</$list>\n"
},
"$:/palette": {
"title": "$:/palette",
"text": "$:/palettes/Vanilla"
},
"$:/snippets/paletteeditor": {
"title": "$:/snippets/paletteeditor",
"text": "\\define lingo-base() $:/language/ControlPanel/Palette/Editor/\n\\define describePaletteColour(colour)\n<$transclude tiddler=\"$:/language/Docs/PaletteColours/$colour$\"><$text text=\"$colour$\"/></$transclude>\n\\end\n<$set name=\"currentTiddler\" value={{$:/palette}}>\n\n<<lingo Prompt>> <$link to={{$:/palette}}><$macrocall $name=\"currentTiddler\" $output=\"text/plain\"/></$link>\n\n<$list filter=\"[all[current]is[shadow]is[tiddler]]\" variable=\"listItem\">\n<<lingo Prompt/Modified>>\n<$button message=\"tm-delete-tiddler\" param={{$:/palette}}><<lingo Reset/Caption>></$button>\n</$list>\n\n<$list filter=\"[all[current]is[shadow]!is[tiddler]]\" variable=\"listItem\">\n<<lingo Clone/Prompt>>\n</$list>\n\n<$button message=\"tm-new-tiddler\" param={{$:/palette}}><<lingo Clone/Caption>></$button>\n\n<table>\n<tbody>\n<$list filter=\"[all[current]indexes[]]\" variable=\"colourName\">\n<tr>\n<td>\n''<$macrocall $name=\"describePaletteColour\" colour=<<colourName>>/>''<br/>\n<$macrocall $name=\"colourName\" $output=\"text/plain\"/>\n</td>\n<td>\n<$edit-text index=<<colourName>> tag=\"input\"/>\n<br>\n<$edit-text index=<<colourName>> type=\"color\" tag=\"input\"/>\n</td>\n</tr>\n</$list>\n</tbody>\n</table>\n</$set>\n"
},
"$:/snippets/palettepreview": {
"title": "$:/snippets/palettepreview",
"text": "<$set name=\"currentTiddler\" value={{$:/palette}}>\n<$transclude tiddler=\"$:/snippets/currpalettepreview\"/>\n</$set>\n"
},
"$:/snippets/paletteswitcher": {
"title": "$:/snippets/paletteswitcher",
"text": "\\define lingo-base() $:/language/ControlPanel/Palette/\n<div class=\"tc-prompt\">\n<<lingo Prompt>> <$view tiddler={{$:/palette}} field=\"name\"/>\n</div>\n\n<$linkcatcher to=\"$:/palette\">\n<div class=\"tc-chooser\"><$list filter=\"[all[shadows+tiddlers]tag[$:/tags/Palette]sort[description]]\"><div class=\"tc-chooser-item\"><$link to={{!!title}}><div><$reveal state=\"$:/palette\" type=\"match\" text={{!!title}}>•</$reveal><$reveal state=\"$:/palette\" type=\"nomatch\" text={{!!title}}> </$reveal> ''<$view field=\"name\" format=\"text\"/>'' - <$view field=\"description\" format=\"text\"/></div><$transclude tiddler=\"$:/snippets/currpalettepreview\"/></$link></div>\n</$list>\n</div>\n</$linkcatcher>"
},
"$:/temp/search": {
"title": "$:/temp/search",
"text": ""
},
"$:/tags/AdvancedSearch": {
"title": "$:/tags/AdvancedSearch",
"list": "[[$:/core/ui/AdvancedSearch/Standard]] [[$:/core/ui/AdvancedSearch/System]] [[$:/core/ui/AdvancedSearch/Shadows]] [[$:/core/ui/AdvancedSearch/Filter]]"
},
"$:/tags/AdvancedSearch/FilterButton": {
"title": "$:/tags/AdvancedSearch/FilterButton",
"list": "$:/core/ui/AdvancedSearch/Filter/FilterButtons/dropdown $:/core/ui/AdvancedSearch/Filter/FilterButtons/clear $:/core/ui/AdvancedSearch/Filter/FilterButtons/export $:/core/ui/AdvancedSearch/Filter/FilterButtons/delete"
},
"$:/tags/ControlPanel": {
"title": "$:/tags/ControlPanel",
"list": "$:/core/ui/ControlPanel/Info $:/core/ui/ControlPanel/Appearance $:/core/ui/ControlPanel/Settings $:/core/ui/ControlPanel/Saving $:/core/ui/ControlPanel/Plugins $:/core/ui/ControlPanel/Tools $:/core/ui/ControlPanel/Internals"
},
"$:/tags/ControlPanel/Info": {
"title": "$:/tags/ControlPanel/Info",
"list": "$:/core/ui/ControlPanel/Basics $:/core/ui/ControlPanel/Advanced"
},
"$:/tags/ControlPanel/Plugins": {
"title": "$:/tags/ControlPanel/Plugins",
"list": "[[$:/core/ui/ControlPanel/Plugins/Installed]] [[$:/core/ui/ControlPanel/Plugins/Add]]"
},
"$:/tags/EditorToolbar": {
"title": "$:/tags/EditorToolbar",
"list": "$:/core/ui/EditorToolbar/paint $:/core/ui/EditorToolbar/opacity $:/core/ui/EditorToolbar/line-width $:/core/ui/EditorToolbar/clear $:/core/ui/EditorToolbar/bold $:/core/ui/EditorToolbar/italic $:/core/ui/EditorToolbar/strikethrough $:/core/ui/EditorToolbar/underline $:/core/ui/EditorToolbar/superscript $:/core/ui/EditorToolbar/subscript $:/core/ui/EditorToolbar/mono-line $:/core/ui/EditorToolbar/mono-block $:/core/ui/EditorToolbar/quote $:/core/ui/EditorToolbar/list-bullet $:/core/ui/EditorToolbar/list-number $:/core/ui/EditorToolbar/heading-1 $:/core/ui/EditorToolbar/heading-2 $:/core/ui/EditorToolbar/heading-3 $:/core/ui/EditorToolbar/heading-4 $:/core/ui/EditorToolbar/heading-5 $:/core/ui/EditorToolbar/heading-6 $:/core/ui/EditorToolbar/link $:/core/ui/EditorToolbar/excise $:/core/ui/EditorToolbar/picture $:/core/ui/EditorToolbar/stamp $:/core/ui/EditorToolbar/size $:/core/ui/EditorToolbar/editor-height $:/core/ui/EditorToolbar/more $:/core/ui/EditorToolbar/preview $:/core/ui/EditorToolbar/preview-type"
},
"$:/tags/EditTemplate": {
"title": "$:/tags/EditTemplate",
"list": "[[$:/core/ui/EditTemplate/controls]] [[$:/core/ui/EditTemplate/title]] [[$:/core/ui/EditTemplate/tags]] [[$:/core/ui/EditTemplate/shadow]] [[$:/core/ui/ViewTemplate/classic]] [[$:/core/ui/EditTemplate/body]] [[$:/core/ui/EditTemplate/type]] [[$:/core/ui/EditTemplate/fields]]"
},
"$:/tags/EditToolbar": {
"title": "$:/tags/EditToolbar",
"list": "[[$:/core/ui/Buttons/delete]] [[$:/core/ui/Buttons/cancel]] [[$:/core/ui/Buttons/save]]"
},
"$:/tags/MoreSideBar": {
"title": "$:/tags/MoreSideBar",
"list": "[[$:/core/ui/MoreSideBar/All]] [[$:/core/ui/MoreSideBar/Recent]] [[$:/core/ui/MoreSideBar/Tags]] [[$:/core/ui/MoreSideBar/Missing]] [[$:/core/ui/MoreSideBar/Drafts]] [[$:/core/ui/MoreSideBar/Orphans]] [[$:/core/ui/MoreSideBar/Types]] [[$:/core/ui/MoreSideBar/System]] [[$:/core/ui/MoreSideBar/Shadows]]",
"text": ""
},
"$:/tags/PageControls": {
"title": "$:/tags/PageControls",
"list": "[[$:/core/ui/Buttons/home]] [[$:/core/ui/Buttons/close-all]] [[$:/core/ui/Buttons/fold-all]] [[$:/core/ui/Buttons/unfold-all]] [[$:/core/ui/Buttons/permaview]] [[$:/core/ui/Buttons/new-tiddler]] [[$:/core/ui/Buttons/new-journal]] [[$:/core/ui/Buttons/new-image]] [[$:/core/ui/Buttons/import]] [[$:/core/ui/Buttons/export-page]] [[$:/core/ui/Buttons/control-panel]] [[$:/core/ui/Buttons/advanced-search]] [[$:/core/ui/Buttons/tag-manager]] [[$:/core/ui/Buttons/language]] [[$:/core/ui/Buttons/palette]] [[$:/core/ui/Buttons/theme]] [[$:/core/ui/Buttons/storyview]] [[$:/core/ui/Buttons/encryption]] [[$:/core/ui/Buttons/full-screen]] [[$:/core/ui/Buttons/save-wiki]] [[$:/core/ui/Buttons/refresh]] [[$:/core/ui/Buttons/more-page-actions]]"
},
"$:/tags/PageTemplate": {
"title": "$:/tags/PageTemplate",
"list": "[[$:/core/ui/PageTemplate/topleftbar]] [[$:/core/ui/PageTemplate/toprightbar]] [[$:/core/ui/PageTemplate/sidebar]] [[$:/core/ui/PageTemplate/story]] [[$:/core/ui/PageTemplate/alerts]]",
"text": ""
},
"$:/tags/SideBar": {
"title": "$:/tags/SideBar",
"list": "[[$:/core/ui/SideBar/Open]] [[$:/core/ui/SideBar/Recent]] [[$:/core/ui/SideBar/Tools]] [[$:/core/ui/SideBar/More]]",
"text": ""
},
"$:/tags/TiddlerInfo": {
"title": "$:/tags/TiddlerInfo",
"list": "[[$:/core/ui/TiddlerInfo/Tools]] [[$:/core/ui/TiddlerInfo/References]] [[$:/core/ui/TiddlerInfo/Tagging]] [[$:/core/ui/TiddlerInfo/List]] [[$:/core/ui/TiddlerInfo/Listed]] [[$:/core/ui/TiddlerInfo/Fields]]",
"text": ""
},
"$:/tags/TiddlerInfo/Advanced": {
"title": "$:/tags/TiddlerInfo/Advanced",
"list": "[[$:/core/ui/TiddlerInfo/Advanced/ShadowInfo]] [[$:/core/ui/TiddlerInfo/Advanced/PluginInfo]]"
},
"$:/tags/ViewTemplate": {
"title": "$:/tags/ViewTemplate",
"list": "[[$:/core/ui/ViewTemplate/title]] [[$:/core/ui/ViewTemplate/unfold]] [[$:/core/ui/ViewTemplate/subtitle]] [[$:/core/ui/ViewTemplate/tags]] [[$:/core/ui/ViewTemplate/classic]] [[$:/core/ui/ViewTemplate/body]]"
},
"$:/tags/ViewToolbar": {
"title": "$:/tags/ViewToolbar",
"list": "[[$:/core/ui/Buttons/more-tiddler-actions]] [[$:/core/ui/Buttons/info]] [[$:/core/ui/Buttons/new-here]] [[$:/core/ui/Buttons/new-journal-here]] [[$:/core/ui/Buttons/clone]] [[$:/core/ui/Buttons/export-tiddler]] [[$:/core/ui/Buttons/edit]] [[$:/core/ui/Buttons/delete]] [[$:/core/ui/Buttons/permalink]] [[$:/core/ui/Buttons/permaview]] [[$:/core/ui/Buttons/open-window]] [[$:/core/ui/Buttons/close-others]] [[$:/core/ui/Buttons/close]] [[$:/core/ui/Buttons/fold-others]] [[$:/core/ui/Buttons/fold]]"
},
"$:/snippets/themeswitcher": {
"title": "$:/snippets/themeswitcher",
"text": "\\define lingo-base() $:/language/ControlPanel/Theme/\n<<lingo Prompt>> <$view tiddler={{$:/theme}} field=\"name\"/>\n\n<$linkcatcher to=\"$:/theme\">\n<$list filter=\"[plugin-type[theme]sort[title]]\"><div><$reveal state=\"$:/theme\" type=\"match\" text={{!!title}}>•</$reveal><$reveal state=\"$:/theme\" type=\"nomatch\" text={{!!title}}> </$reveal> <$link to={{!!title}}>''<$view field=\"name\" format=\"text\"/>'' <$view field=\"description\" format=\"text\"/></$link></div>\n</$list>\n</$linkcatcher>"
},
"$:/core/wiki/title": {
"title": "$:/core/wiki/title",
"type": "text/vnd.tiddlywiki",
"text": "{{$:/SiteTitle}} --- {{$:/SiteSubtitle}}"
},
"$:/view": {
"title": "$:/view",
"text": "classic"
},
"$:/snippets/viewswitcher": {
"title": "$:/snippets/viewswitcher",
"text": "\\define lingo-base() $:/language/ControlPanel/StoryView/\n<<lingo Prompt>> <$select tiddler=\"$:/view\">\n<$list filter=\"[storyviews[]]\">\n<option><$view field=\"title\"/></option>\n</$list>\n</$select>"
}
}
}
<div class="tc-more-sidebar">
<<tabs "[all[shadows+tiddlers]tag[$:/tags/MoreSideBar]!has[draft.of]]" "$:/core/ui/MoreSideBar/Tags" "$:/state/tab/moresidebar" "tc-vertical">>
</div>
<$macrocall $name="timeline" format={{$:/language/RecentChanges/DateFormat}}/>
\define lingo-base() $:/language/ControlPanel/
\define config-title()
$:/config/PageControlButtons/Visibility/$(listItem)$
\end
<<lingo Basics/Version/Prompt>> <<version>>
<$set name="tv-config-toolbar-icons" value="yes">
<$set name="tv-config-toolbar-text" value="yes">
<$set name="tv-config-toolbar-class" value="">
<$list filter="[all[shadows+tiddlers]tag[$:/tags/PageControls]!has[draft.of]]" variable="listItem">
<div style="position:relative;">
<$checkbox tiddler=<<config-title>> field="text" checked="show" unchecked="hide" default="show"/> <$transclude tiddler=<<listItem>>/> <i class="tc-muted"><$transclude tiddler=<<listItem>> field="description"/></i>
</div>
</$list>
</$set>
</$set>
</$set>
{
"tiddlers": {
"$:/plugins/tiddlywiki/highlight/highlight.js": {
"type": "application/javascript",
"title": "$:/plugins/tiddlywiki/highlight/highlight.js",
"module-type": "library",
"text": "var hljs = require(\"$:/plugins/tiddlywiki/highlight/highlight.js\");\n!function(e){\"undefined\"!=typeof exports?e(exports):(window.hljs=e({}),\"function\"==typeof define&&define.amd&&define(\"hljs\",[],function(){return window.hljs}))}(function(e){function n(e){return e.replace(/&/gm,\"&\").replace(/</gm,\"<\").replace(/>/gm,\">\")}function t(e){return e.nodeName.toLowerCase()}function r(e,n){var t=e&&e.exec(n);return t&&0==t.index}function a(e){return/^(no-?highlight|plain|text)$/i.test(e)}function i(e){var n,t,r,i=e.className+\" \";if(i+=e.parentNode?e.parentNode.className:\"\",t=/\\blang(?:uage)?-([\\w-]+)\\b/i.exec(i))return w(t[1])?t[1]:\"no-highlight\";for(i=i.split(/\\s+/),n=0,r=i.length;r>n;n++)if(w(i[n])||a(i[n]))return i[n]}function o(e,n){var t,r={};for(t in e)r[t]=e[t];if(n)for(t in n)r[t]=n[t];return r}function u(e){var n=[];return function r(e,a){for(var i=e.firstChild;i;i=i.nextSibling)3==i.nodeType?a+=i.nodeValue.length:1==i.nodeType&&(n.push({event:\"start\",offset:a,node:i}),a=r(i,a),t(i).match(/br|hr|img|input/)||n.push({event:\"stop\",offset:a,node:i}));return a}(e,0),n}function c(e,r,a){function i(){return e.length&&r.length?e[0].offset!=r[0].offset?e[0].offset<r[0].offset?e:r:\"start\"==r[0].event?e:r:e.length?e:r}function o(e){function r(e){return\" \"+e.nodeName+'=\"'+n(e.value)+'\"'}f+=\"<\"+t(e)+Array.prototype.map.call(e.attributes,r).join(\"\")+\">\"}function u(e){f+=\"</\"+t(e)+\">\"}function c(e){(\"start\"==e.event?o:u)(e.node)}for(var s=0,f=\"\",l=[];e.length||r.length;){var g=i();if(f+=n(a.substr(s,g[0].offset-s)),s=g[0].offset,g==e){l.reverse().forEach(u);do c(g.splice(0,1)[0]),g=i();while(g==e&&g.length&&g[0].offset==s);l.reverse().forEach(o)}else\"start\"==g[0].event?l.push(g[0].node):l.pop(),c(g.splice(0,1)[0])}return f+n(a.substr(s))}function s(e){function n(e){return e&&e.source||e}function t(t,r){return new RegExp(n(t),\"m\"+(e.cI?\"i\":\"\")+(r?\"g\":\"\"))}function 
r(a,i){if(!a.compiled){if(a.compiled=!0,a.k=a.k||a.bK,a.k){var u={},c=function(n,t){e.cI&&(t=t.toLowerCase()),t.split(\" \").forEach(function(e){var t=e.split(\"|\");u[t[0]]=[n,t[1]?Number(t[1]):1]})};\"string\"==typeof a.k?c(\"keyword\",a.k):Object.keys(a.k).forEach(function(e){c(e,a.k[e])}),a.k=u}a.lR=t(a.l||/\\b\\w+\\b/,!0),i&&(a.bK&&(a.b=\"\\\\b(\"+a.bK.split(\" \").join(\"|\")+\")\\\\b\"),a.b||(a.b=/\\B|\\b/),a.bR=t(a.b),a.e||a.eW||(a.e=/\\B|\\b/),a.e&&(a.eR=t(a.e)),a.tE=n(a.e)||\"\",a.eW&&i.tE&&(a.tE+=(a.e?\"|\":\"\")+i.tE)),a.i&&(a.iR=t(a.i)),void 0===a.r&&(a.r=1),a.c||(a.c=[]);var s=[];a.c.forEach(function(e){e.v?e.v.forEach(function(n){s.push(o(e,n))}):s.push(\"self\"==e?a:e)}),a.c=s,a.c.forEach(function(e){r(e,a)}),a.starts&&r(a.starts,i);var f=a.c.map(function(e){return e.bK?\"\\\\.?(\"+e.b+\")\\\\.?\":e.b}).concat([a.tE,a.i]).map(n).filter(Boolean);a.t=f.length?t(f.join(\"|\"),!0):{exec:function(){return null}}}}r(e)}function f(e,t,a,i){function o(e,n){for(var t=0;t<n.c.length;t++)if(r(n.c[t].bR,e))return n.c[t]}function u(e,n){if(r(e.eR,n)){for(;e.endsParent&&e.parent;)e=e.parent;return e}return e.eW?u(e.parent,n):void 0}function c(e,n){return!a&&r(n.iR,e)}function g(e,n){var t=N.cI?n[0].toLowerCase():n[0];return e.k.hasOwnProperty(t)&&e.k[t]}function h(e,n,t,r){var a=r?\"\":E.classPrefix,i='<span class=\"'+a,o=t?\"\":\"</span>\";return i+=e+'\">',i+n+o}function p(){if(!L.k)return n(y);var e=\"\",t=0;L.lR.lastIndex=0;for(var r=L.lR.exec(y);r;){e+=n(y.substr(t,r.index-t));var a=g(L,r);a?(B+=a[1],e+=h(a[0],n(r[0]))):e+=n(r[0]),t=L.lR.lastIndex,r=L.lR.exec(y)}return e+n(y.substr(t))}function d(){var e=\"string\"==typeof L.sL;if(e&&!x[L.sL])return n(y);var t=e?f(L.sL,y,!0,M[L.sL]):l(y,L.sL.length?L.sL:void 0);return L.r>0&&(B+=t.r),e&&(M[L.sL]=t.top),h(t.language,t.value,!1,!0)}function b(){return void 0!==L.sL?d():p()}function v(e,t){var 
r=e.cN?h(e.cN,\"\",!0):\"\";e.rB?(k+=r,y=\"\"):e.eB?(k+=n(t)+r,y=\"\"):(k+=r,y=t),L=Object.create(e,{parent:{value:L}})}function m(e,t){if(y+=e,void 0===t)return k+=b(),0;var r=o(t,L);if(r)return k+=b(),v(r,t),r.rB?0:t.length;var a=u(L,t);if(a){var i=L;i.rE||i.eE||(y+=t),k+=b();do L.cN&&(k+=\"</span>\"),B+=L.r,L=L.parent;while(L!=a.parent);return i.eE&&(k+=n(t)),y=\"\",a.starts&&v(a.starts,\"\"),i.rE?0:t.length}if(c(t,L))throw new Error('Illegal lexeme \"'+t+'\" for mode \"'+(L.cN||\"<unnamed>\")+'\"');return y+=t,t.length||1}var N=w(e);if(!N)throw new Error('Unknown language: \"'+e+'\"');s(N);var R,L=i||N,M={},k=\"\";for(R=L;R!=N;R=R.parent)R.cN&&(k=h(R.cN,\"\",!0)+k);var y=\"\",B=0;try{for(var C,j,I=0;;){if(L.t.lastIndex=I,C=L.t.exec(t),!C)break;j=m(t.substr(I,C.index-I),C[0]),I=C.index+j}for(m(t.substr(I)),R=L;R.parent;R=R.parent)R.cN&&(k+=\"</span>\");return{r:B,value:k,language:e,top:L}}catch(O){if(-1!=O.message.indexOf(\"Illegal\"))return{r:0,value:n(t)};throw O}}function l(e,t){t=t||E.languages||Object.keys(x);var r={r:0,value:n(e)},a=r;return t.forEach(function(n){if(w(n)){var t=f(n,e,!1);t.language=n,t.r>a.r&&(a=t),t.r>r.r&&(a=r,r=t)}}),a.language&&(r.second_best=a),r}function g(e){return E.tabReplace&&(e=e.replace(/^((<[^>]+>|\\t)+)/gm,function(e,n){return n.replace(/\\t/g,E.tabReplace)})),E.useBR&&(e=e.replace(/\\n/g,\"<br>\")),e}function h(e,n,t){var r=n?R[n]:t,a=[e.trim()];return e.match(/\\bhljs\\b/)||a.push(\"hljs\"),-1===e.indexOf(r)&&a.push(r),a.join(\" \").trim()}function p(e){var n=i(e);if(!a(n)){var t;E.useBR?(t=document.createElementNS(\"http://www.w3.org/1999/xhtml\",\"div\"),t.innerHTML=e.innerHTML.replace(/\\n/g,\"\").replace(/<br[ \\/]*>/g,\"\\n\")):t=e;var r=t.textContent,o=n?f(n,r,!0):l(r),s=u(t);if(s.length){var 
p=document.createElementNS(\"http://www.w3.org/1999/xhtml\",\"div\");p.innerHTML=o.value,o.value=c(s,u(p),r)}o.value=g(o.value),e.innerHTML=o.value,e.className=h(e.className,n,o.language),e.result={language:o.language,re:o.r},o.second_best&&(e.second_best={language:o.second_best.language,re:o.second_best.r})}}function d(e){E=o(E,e)}function b(){if(!b.called){b.called=!0;var e=document.querySelectorAll(\"pre code\");Array.prototype.forEach.call(e,p)}}function v(){addEventListener(\"DOMContentLoaded\",b,!1),addEventListener(\"load\",b,!1)}function m(n,t){var r=x[n]=t(e);r.aliases&&r.aliases.forEach(function(e){R[e]=n})}function N(){return Object.keys(x)}function w(e){return e=e.toLowerCase(),x[e]||x[R[e]]}var E={classPrefix:\"hljs-\",tabReplace:null,useBR:!1,languages:void 0},x={},R={};return e.highlight=f,e.highlightAuto=l,e.fixMarkup=g,e.highlightBlock=p,e.configure=d,e.initHighlighting=b,e.initHighlightingOnLoad=v,e.registerLanguage=m,e.listLanguages=N,e.getLanguage=w,e.inherit=o,e.IR=\"[a-zA-Z]\\\\w*\",e.UIR=\"[a-zA-Z_]\\\\w*\",e.NR=\"\\\\b\\\\d+(\\\\.\\\\d+)?\",e.CNR=\"(\\\\b0[xX][a-fA-F0-9]+|(\\\\b\\\\d+(\\\\.\\\\d*)?|\\\\.\\\\d+)([eE][-+]?\\\\d+)?)\",e.BNR=\"\\\\b(0b[01]+)\",e.RSR=\"!|!=|!==|%|%=|&|&&|&=|\\\\*|\\\\*=|\\\\+|\\\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\\\?|\\\\[|\\\\{|\\\\(|\\\\^|\\\\^=|\\\\||\\\\|=|\\\\|\\\\||~\",e.BE={b:\"\\\\\\\\[\\\\s\\\\S]\",r:0},e.ASM={cN:\"string\",b:\"'\",e:\"'\",i:\"\\\\n\",c:[e.BE]},e.QSM={cN:\"string\",b:'\"',e:'\"',i:\"\\\\n\",c:[e.BE]},e.PWM={b:/\\b(a|an|the|are|I|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such)\\b/},e.C=function(n,t,r){var a=e.inherit({cN:\"comment\",b:n,e:t,c:[]},r||{});return 
a.c.push(e.PWM),a.c.push({cN:\"doctag\",b:\"(?:TODO|FIXME|NOTE|BUG|XXX):\",r:0}),a},e.CLCM=e.C(\"//\",\"$\"),e.CBCM=e.C(\"/\\\\*\",\"\\\\*/\"),e.HCM=e.C(\"#\",\"$\"),e.NM={cN:\"number\",b:e.NR,r:0},e.CNM={cN:\"number\",b:e.CNR,r:0},e.BNM={cN:\"number\",b:e.BNR,r:0},e.CSSNM={cN:\"number\",b:e.NR+\"(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?\",r:0},e.RM={cN:\"regexp\",b:/\\//,e:/\\/[gimuy]*/,i:/\\n/,c:[e.BE,{b:/\\[/,e:/\\]/,r:0,c:[e.BE]}]},e.TM={cN:\"title\",b:e.IR,r:0},e.UTM={cN:\"title\",b:e.UIR,r:0},e});hljs.registerLanguage(\"markdown\",function(e){return{aliases:[\"md\",\"mkdown\",\"mkd\"],c:[{cN:\"header\",v:[{b:\"^#{1,6}\",e:\"$\"},{b:\"^.+?\\\\n[=-]{2,}$\"}]},{b:\"<\",e:\">\",sL:\"xml\",r:0},{cN:\"bullet\",b:\"^([*+-]|(\\\\d+\\\\.))\\\\s+\"},{cN:\"strong\",b:\"[*_]{2}.+?[*_]{2}\"},{cN:\"emphasis\",v:[{b:\"\\\\*.+?\\\\*\"},{b:\"_.+?_\",r:0}]},{cN:\"blockquote\",b:\"^>\\\\s+\",e:\"$\"},{cN:\"code\",v:[{b:\"`.+?`\"},{b:\"^( {4}|\t)\",e:\"$\",r:0}]},{cN:\"horizontal_rule\",b:\"^[-\\\\*]{3,}\",e:\"$\"},{b:\"\\\\[.+?\\\\][\\\\(\\\\[].*?[\\\\)\\\\]]\",rB:!0,c:[{cN:\"link_label\",b:\"\\\\[\",e:\"\\\\]\",eB:!0,rE:!0,r:0},{cN:\"link_url\",b:\"\\\\]\\\\(\",e:\"\\\\)\",eB:!0,eE:!0},{cN:\"link_reference\",b:\"\\\\]\\\\[\",e:\"\\\\]\",eB:!0,eE:!0}],r:10},{b:\"^\\\\[.+\\\\]:\",rB:!0,c:[{cN:\"link_reference\",b:\"\\\\[\",e:\"\\\\]:\",eB:!0,eE:!0,starts:{cN:\"link_url\",e:\"$\"}}]}]}});hljs.registerLanguage(\"ruby\",function(e){var c=\"[a-zA-Z_]\\\\w*[!?=]?|[-+~]\\\\@|<<|>>|=~|===?|<=>|[<>]=?|\\\\*\\\\*|[-/+%^&*~`|]|\\\\[\\\\]=?\",r=\"and false then defined module in return redo if BEGIN retry end for true self when next until do begin unless END rescue nil else break undef not super class case require yield alias while ensure elsif or include attr_reader attr_writer 
attr_accessor\",b={cN:\"doctag\",b:\"@[A-Za-z]+\"},a={cN:\"value\",b:\"#<\",e:\">\"},n=[e.C(\"#\",\"$\",{c:[b]}),e.C(\"^\\\\=begin\",\"^\\\\=end\",{c:[b],r:10}),e.C(\"^__END__\",\"\\\\n$\")],s={cN:\"subst\",b:\"#\\\\{\",e:\"}\",k:r},t={cN:\"string\",c:[e.BE,s],v:[{b:/'/,e:/'/},{b:/\"/,e:/\"/},{b:/`/,e:/`/},{b:\"%[qQwWx]?\\\\(\",e:\"\\\\)\"},{b:\"%[qQwWx]?\\\\[\",e:\"\\\\]\"},{b:\"%[qQwWx]?{\",e:\"}\"},{b:\"%[qQwWx]?<\",e:\">\"},{b:\"%[qQwWx]?/\",e:\"/\"},{b:\"%[qQwWx]?%\",e:\"%\"},{b:\"%[qQwWx]?-\",e:\"-\"},{b:\"%[qQwWx]?\\\\|\",e:\"\\\\|\"},{b:/\\B\\?(\\\\\\d{1,3}|\\\\x[A-Fa-f0-9]{1,2}|\\\\u[A-Fa-f0-9]{4}|\\\\?\\S)\\b/}]},i={cN:\"params\",b:\"\\\\(\",e:\"\\\\)\",k:r},d=[t,a,{cN:\"class\",bK:\"class module\",e:\"$|;\",i:/=/,c:[e.inherit(e.TM,{b:\"[A-Za-z_]\\\\w*(::\\\\w+)*(\\\\?|\\\\!)?\"}),{cN:\"inheritance\",b:\"<\\\\s*\",c:[{cN:\"parent\",b:\"(\"+e.IR+\"::)?\"+e.IR}]}].concat(n)},{cN:\"function\",bK:\"def\",e:\"$|;\",c:[e.inherit(e.TM,{b:c}),i].concat(n)},{cN:\"constant\",b:\"(::)?(\\\\b[A-Z]\\\\w*(::)?)+\",r:0},{cN:\"symbol\",b:e.UIR+\"(\\\\!|\\\\?)?:\",r:0},{cN:\"symbol\",b:\":\",c:[t,{b:c}],r:0},{cN:\"number\",b:\"(\\\\b0[0-7_]+)|(\\\\b0x[0-9a-fA-F_]+)|(\\\\b[1-9][0-9_]*(\\\\.[0-9_]+)?)|[0_]\\\\b\",r:0},{cN:\"variable\",b:\"(\\\\$\\\\W)|((\\\\$|\\\\@\\\\@?)(\\\\w+))\"},{b:\"(\"+e.RSR+\")\\\\s*\",c:[a,{cN:\"regexp\",c:[e.BE,s],i:/\\n/,v:[{b:\"/\",e:\"/[a-z]*\"},{b:\"%r{\",e:\"}[a-z]*\"},{b:\"%r\\\\(\",e:\"\\\\)[a-z]*\"},{b:\"%r!\",e:\"![a-z]*\"},{b:\"%r\\\\[\",e:\"\\\\][a-z]*\"}]}].concat(n),r:0}].concat(n);s.c=d,i.c=d;var o=\"[>?]>\",l=\"[\\\\w#]+\\\\(\\\\w+\\\\):\\\\d+:\\\\d+>\",u=\"(\\\\w+-)?\\\\d+\\\\.\\\\d+\\\\.\\\\d(p\\\\d+)?[^>]+>\",N=[{b:/^\\s*=>/,cN:\"status\",starts:{e:\"$\",c:d}},{cN:\"prompt\",b:\"^(\"+o+\"|\"+l+\"|\"+u+\")\",starts:{e:\"$\",c:d}}];return{aliases:[\"rb\",\"gemspec\",\"podspec\",\"thor\",\"irb\"],k:r,c:n.concat(N).concat(d)}});hljs.registerLanguage(\"makefile\",function(e){var 
a={cN:\"variable\",b:/\\$\\(/,e:/\\)/,c:[e.BE]};return{aliases:[\"mk\",\"mak\"],c:[e.HCM,{b:/^\\w+\\s*\\W*=/,rB:!0,r:0,starts:{cN:\"constant\",e:/\\s*\\W*=/,eE:!0,starts:{e:/$/,r:0,c:[a]}}},{cN:\"title\",b:/^[\\w]+:\\s*$/},{cN:\"phony\",b:/^\\.PHONY:/,e:/$/,k:\".PHONY\",l:/[\\.\\w]+/},{b:/^\\t+/,e:/$/,r:0,c:[e.QSM,a]}]}});hljs.registerLanguage(\"json\",function(e){var t={literal:\"true false null\"},i=[e.QSM,e.CNM],l={cN:\"value\",e:\",\",eW:!0,eE:!0,c:i,k:t},c={b:\"{\",e:\"}\",c:[{cN:\"attribute\",b:'\\\\s*\"',e:'\"\\\\s*:\\\\s*',eB:!0,eE:!0,c:[e.BE],i:\"\\\\n\",starts:l}],i:\"\\\\S\"},n={b:\"\\\\[\",e:\"\\\\]\",c:[e.inherit(l,{cN:null})],i:\"\\\\S\"};return i.splice(i.length,0,c,n),{c:i,k:t,i:\"\\\\S\"}});hljs.registerLanguage(\"xml\",function(t){var s=\"[A-Za-z0-9\\\\._:-]+\",c={b:/<\\?(php)?(?!\\w)/,e:/\\?>/,sL:\"php\"},e={eW:!0,i:/</,r:0,c:[c,{cN:\"attribute\",b:s,r:0},{b:\"=\",r:0,c:[{cN:\"value\",c:[c],v:[{b:/\"/,e:/\"/},{b:/'/,e:/'/},{b:/[^\\s\\/>]+/}]}]}]};return{aliases:[\"html\",\"xhtml\",\"rss\",\"atom\",\"xsl\",\"plist\"],cI:!0,c:[{cN:\"doctype\",b:\"<!DOCTYPE\",e:\">\",r:10,c:[{b:\"\\\\[\",e:\"\\\\]\"}]},t.C(\"<!--\",\"-->\",{r:10}),{cN:\"cdata\",b:\"<\\\\!\\\\[CDATA\\\\[\",e:\"\\\\]\\\\]>\",r:10},{cN:\"tag\",b:\"<style(?=\\\\s|>|$)\",e:\">\",k:{title:\"style\"},c:[e],starts:{e:\"</style>\",rE:!0,sL:\"css\"}},{cN:\"tag\",b:\"<script(?=\\\\s|>|$)\",e:\">\",k:{title:\"script\"},c:[e],starts:{e:\"</script>\",rE:!0,sL:[\"actionscript\",\"javascript\",\"handlebars\"]}},c,{cN:\"pi\",b:/<\\?\\w+/,e:/\\?>/,r:10},{cN:\"tag\",b:\"</?\",e:\"/?>\",c:[{cN:\"title\",b:/[^ \\/><\\n\\t]+/,r:0},e]}]}});hljs.registerLanguage(\"css\",function(e){var 
c=\"[a-zA-Z-][a-zA-Z0-9_-]*\",a={cN:\"function\",b:c+\"\\\\(\",rB:!0,eE:!0,e:\"\\\\(\"},r={cN:\"rule\",b:/[A-Z\\_\\.\\-]+\\s*:/,rB:!0,e:\";\",eW:!0,c:[{cN:\"attribute\",b:/\\S/,e:\":\",eE:!0,starts:{cN:\"value\",eW:!0,eE:!0,c:[a,e.CSSNM,e.QSM,e.ASM,e.CBCM,{cN:\"hexcolor\",b:\"#[0-9A-Fa-f]+\"},{cN:\"important\",b:\"!important\"}]}}]};return{cI:!0,i:/[=\\/|'\\$]/,c:[e.CBCM,r,{cN:\"id\",b:/\\#[A-Za-z0-9_-]+/},{cN:\"class\",b:/\\.[A-Za-z0-9_-]+/},{cN:\"attr_selector\",b:/\\[/,e:/\\]/,i:\"$\"},{cN:\"pseudo\",b:/:(:)?[a-zA-Z0-9\\_\\-\\+\\(\\)\"']+/},{cN:\"at_rule\",b:\"@(font-face|page)\",l:\"[a-z-]+\",k:\"font-face page\"},{cN:\"at_rule\",b:\"@\",e:\"[{;]\",c:[{cN:\"keyword\",b:/\\S+/},{b:/\\s/,eW:!0,eE:!0,r:0,c:[a,e.ASM,e.QSM,e.CSSNM]}]},{cN:\"tag\",b:c,r:0},{cN:\"rules\",b:\"{\",e:\"}\",i:/\\S/,c:[e.CBCM,r]}]}});hljs.registerLanguage(\"perl\",function(e){var t=\"getpwent getservent quotemeta msgrcv scalar kill dbmclose undef lc ma syswrite tr send umask sysopen shmwrite vec qx utime local oct semctl localtime readpipe do return format read sprintf dbmopen pop getpgrp not getpwnam rewinddir qqfileno qw endprotoent wait sethostent bless s|0 opendir continue each sleep endgrent shutdown dump chomp connect getsockname die socketpair close flock exists index shmgetsub for endpwent redo lstat msgctl setpgrp abs exit select print ref gethostbyaddr unshift fcntl syscall goto getnetbyaddr join gmtime symlink semget splice x|0 getpeername recv log setsockopt cos last reverse gethostbyname getgrnam study formline endhostent times chop length gethostent getnetent pack getprotoent getservbyname rand mkdir pos chmod y|0 substr endnetent printf next open msgsnd readdir use unlink getsockopt getpriority rindex wantarray hex system getservbyport endservent int chr untie rmdir prototype tell listen fork shmread ucfirst setprotoent else sysseek link getgrgid shmctl waitpid unpack getnetbyname reset chdir grep split require caller lcfirst until warn while values shift telldir getpwuid my 
getprotobynumber delete and sort uc defined srand accept package seekdir getprotobyname semop our rename seek if q|0 chroot sysread setpwent no crypt getc chown sqrt write setnetent setpriority foreach tie sin msgget map stat getlogin unless elsif truncate exec keys glob tied closedirioctl socket readlink eval xor readline binmode setservent eof ord bind alarm pipe atan2 getgrent exp time push setgrent gt lt or ne m|0 break given say state when\",r={cN:\"subst\",b:\"[$@]\\\\{\",e:\"\\\\}\",k:t},s={b:\"->{\",e:\"}\"},n={cN:\"variable\",v:[{b:/\\$\\d/},{b:/[\\$%@](\\^\\w\\b|#\\w+(::\\w+)*|{\\w+}|\\w+(::\\w*)*)/},{b:/[\\$%@][^\\s\\w{]/,r:0}]},o=[e.BE,r,n],i=[n,e.HCM,e.C(\"^\\\\=\\\\w\",\"\\\\=cut\",{eW:!0}),s,{cN:\"string\",c:o,v:[{b:\"q[qwxr]?\\\\s*\\\\(\",e:\"\\\\)\",r:5},{b:\"q[qwxr]?\\\\s*\\\\[\",e:\"\\\\]\",r:5},{b:\"q[qwxr]?\\\\s*\\\\{\",e:\"\\\\}\",r:5},{b:\"q[qwxr]?\\\\s*\\\\|\",e:\"\\\\|\",r:5},{b:\"q[qwxr]?\\\\s*\\\\<\",e:\"\\\\>\",r:5},{b:\"qw\\\\s+q\",e:\"q\",r:5},{b:\"'\",e:\"'\",c:[e.BE]},{b:'\"',e:'\"'},{b:\"`\",e:\"`\",c:[e.BE]},{b:\"{\\\\w+}\",c:[],r:0},{b:\"-?\\\\w+\\\\s*\\\\=\\\\>\",c:[],r:0}]},{cN:\"number\",b:\"(\\\\b0[0-7_]+)|(\\\\b0x[0-9a-fA-F_]+)|(\\\\b[1-9][0-9_]*(\\\\.[0-9_]+)?)|[0_]\\\\b\",r:0},{b:\"(\\\\/\\\\/|\"+e.RSR+\"|\\\\b(split|return|print|reverse|grep)\\\\b)\\\\s*\",k:\"split return print reverse grep\",r:0,c:[e.HCM,{cN:\"regexp\",b:\"(s|tr|y)/(\\\\\\\\.|[^/])*/(\\\\\\\\.|[^/])*/[a-z]*\",r:10},{cN:\"regexp\",b:\"(m|qr)?/\",e:\"/[a-z]*\",c:[e.BE],r:0}]},{cN:\"sub\",bK:\"sub\",e:\"(\\\\s*\\\\(.*?\\\\))?[;{]\",r:5},{cN:\"operator\",b:\"-\\\\w\\\\b\",r:0},{b:\"^__DATA__$\",e:\"^__END__$\",sL:\"mojolicious\",c:[{b:\"^@@.*\",e:\"$\",cN:\"comment\"}]}];return r.c=i,s.c=i,{aliases:[\"pl\"],k:t,c:i}});hljs.registerLanguage(\"cs\",function(e){var r=\"abstract as base bool break byte case catch char checked const continue decimal dynamic default delegate do double else enum event explicit extern false finally fixed float for foreach goto if 
implicit in int interface internal is lock long null when object operator out override params private protected public readonly ref sbyte sealed short sizeof stackalloc static string struct switch this true try typeof uint ulong unchecked unsafe ushort using virtual volatile void while async protected public private internal ascending descending from get group into join let orderby partial select set value var where yield\",t=e.IR+\"(<\"+e.IR+\">)?\";return{aliases:[\"csharp\"],k:r,i:/::/,c:[e.C(\"///\",\"$\",{rB:!0,c:[{cN:\"xmlDocTag\",v:[{b:\"///\",r:0},{b:\"<!--|-->\"},{b:\"</?\",e:\">\"}]}]}),e.CLCM,e.CBCM,{cN:\"preprocessor\",b:\"#\",e:\"$\",k:\"if else elif endif define undef warning error line region endregion pragma checksum\"},{cN:\"string\",b:'@\"',e:'\"',c:[{b:'\"\"'}]},e.ASM,e.QSM,e.CNM,{bK:\"class interface\",e:/[{;=]/,i:/[^\\s:]/,c:[e.TM,e.CLCM,e.CBCM]},{bK:\"namespace\",e:/[{;=]/,i:/[^\\s:]/,c:[{cN:\"title\",b:\"[a-zA-Z](\\\\.?\\\\w)*\",r:0},e.CLCM,e.CBCM]},{bK:\"new return throw await\",r:0},{cN:\"function\",b:\"(\"+t+\"\\\\s+)+\"+e.IR+\"\\\\s*\\\\(\",rB:!0,e:/[{;=]/,eE:!0,k:r,c:[{b:e.IR+\"\\\\s*\\\\(\",rB:!0,c:[e.TM],r:0},{cN:\"params\",b:/\\(/,e:/\\)/,eB:!0,eE:!0,k:r,r:0,c:[e.ASM,e.QSM,e.CNM,e.CBCM]},e.CLCM,e.CBCM]}]}});hljs.registerLanguage(\"apache\",function(e){var r={cN:\"number\",b:\"[\\\\$%]\\\\d+\"};return{aliases:[\"apacheconf\"],cI:!0,c:[e.HCM,{cN:\"tag\",b:\"</?\",e:\">\"},{cN:\"keyword\",b:/\\w+/,r:0,k:{common:\"order deny allow setenv rewriterule rewriteengine rewritecond documentroot sethandler errordocument loadmodule options header listen serverroot servername\"},starts:{e:/$/,r:0,k:{literal:\"on off 
all\"},c:[{cN:\"sqbracket\",b:\"\\\\s\\\\[\",e:\"\\\\]$\"},{cN:\"cbracket\",b:\"[\\\\$%]\\\\{\",e:\"\\\\}\",c:[\"self\",r]},r,e.QSM]}}],i:/\\S/}});hljs.registerLanguage(\"http\",function(t){return{aliases:[\"https\"],i:\"\\\\S\",c:[{cN:\"status\",b:\"^HTTP/[0-9\\\\.]+\",e:\"$\",c:[{cN:\"number\",b:\"\\\\b\\\\d{3}\\\\b\"}]},{cN:\"request\",b:\"^[A-Z]+ (.*?) HTTP/[0-9\\\\.]+$\",rB:!0,e:\"$\",c:[{cN:\"string\",b:\" \",e:\" \",eB:!0,eE:!0}]},{cN:\"attribute\",b:\"^\\\\w\",e:\": \",eE:!0,i:\"\\\\n|\\\\s|=\",starts:{cN:\"string\",e:\"$\"}},{b:\"\\\\n\\\\n\",starts:{sL:[],eW:!0}}]}});hljs.registerLanguage(\"objectivec\",function(e){var t={cN:\"built_in\",b:\"(AV|CA|CF|CG|CI|MK|MP|NS|UI)\\\\w+\"},i={keyword:\"int float while char export sizeof typedef const struct for union unsigned long volatile static bool mutable if do return goto void enum else break extern asm case short default double register explicit signed typename this switch continue wchar_t inline readonly assign readwrite self @synchronized id typeof nonatomic super unichar IBOutlet IBAction strong weak copy in out inout bycopy byref oneway __strong __weak __block __autoreleasing @private @protected @public @try @property @end @throw @catch @finally @autoreleasepool @synthesize @dynamic @selector @optional @required\",literal:\"false true FALSE TRUE nil YES NO NULL\",built_in:\"BOOL dispatch_once_t dispatch_queue_t dispatch_sync dispatch_async dispatch_once\"},o=/[a-zA-Z@][a-zA-Z0-9_]*/,n=\"@interface @class @protocol @implementation\";return{aliases:[\"mm\",\"objc\",\"obj-c\"],k:i,l:o,i:\"</\",c:[t,e.CLCM,e.CBCM,e.CNM,e.QSM,{cN:\"string\",v:[{b:'@\"',e:'\"',i:\"\\\\n\",c:[e.BE]},{b:\"'\",e:\"[^\\\\\\\\]'\",i:\"[^\\\\\\\\][^']\"}]},{cN:\"preprocessor\",b:\"#\",e:\"$\",c:[{cN:\"title\",v:[{b:'\"',e:'\"'},{b:\"<\",e:\">\"}]}]},{cN:\"class\",b:\"(\"+n.split(\" 
\").join(\"|\")+\")\\\\b\",e:\"({|$)\",eE:!0,k:n,l:o,c:[e.UTM]},{cN:\"variable\",b:\"\\\\.\"+e.UIR,r:0}]}});hljs.registerLanguage(\"python\",function(e){var r={cN:\"prompt\",b:/^(>>>|\\.\\.\\.) /},b={cN:\"string\",c:[e.BE],v:[{b:/(u|b)?r?'''/,e:/'''/,c:[r],r:10},{b:/(u|b)?r?\"\"\"/,e:/\"\"\"/,c:[r],r:10},{b:/(u|r|ur)'/,e:/'/,r:10},{b:/(u|r|ur)\"/,e:/\"/,r:10},{b:/(b|br)'/,e:/'/},{b:/(b|br)\"/,e:/\"/},e.ASM,e.QSM]},a={cN:\"number\",r:0,v:[{b:e.BNR+\"[lLjJ]?\"},{b:\"\\\\b(0o[0-7]+)[lLjJ]?\"},{b:e.CNR+\"[lLjJ]?\"}]},l={cN:\"params\",b:/\\(/,e:/\\)/,c:[\"self\",r,a,b]};return{aliases:[\"py\",\"gyp\"],k:{keyword:\"and elif is global as in if from raise for except finally print import pass return exec else break not with class assert yield try while continue del or def lambda async await nonlocal|10 None True False\",built_in:\"Ellipsis NotImplemented\"},i:/(<\\/|->|\\?)/,c:[r,a,b,e.HCM,{v:[{cN:\"function\",bK:\"def\",r:10},{cN:\"class\",bK:\"class\"}],e:/:/,i:/[${=;\\n,]/,c:[e.UTM,l]},{cN:\"decorator\",b:/^[\\t ]*@/,e:/$/},{b:/\\b(print|exec)\\(/}]}});hljs.registerLanguage(\"java\",function(e){var a=e.UIR+\"(<\"+e.UIR+\">)?\",t=\"false synchronized int abstract float private char boolean static null if const for true while long strictfp finally protected import native final void enum else break transient catch instanceof byte super volatile case assert short package default double public try this switch continue throws protected public private\",c=\"\\\\b(0[bB]([01]+[01_]+[01]+|[01]+)|0[xX]([a-fA-F0-9]+[a-fA-F0-9_]+[a-fA-F0-9]+|[a-fA-F0-9]+)|(([\\\\d]+[\\\\d_]+[\\\\d]+|[\\\\d]+)(\\\\.([\\\\d]+[\\\\d_]+[\\\\d]+|[\\\\d]+))?|\\\\.([\\\\d]+[\\\\d_]+[\\\\d]+|[\\\\d]+))([eE][-+]?\\\\d+)?)[lLfF]?\",r={cN:\"number\",b:c,r:0};return{aliases:[\"jsp\"],k:t,i:/<\\/|#/,c:[e.C(\"/\\\\*\\\\*\",\"\\\\*/\",{r:0,c:[{cN:\"doctag\",b:\"@[A-Za-z]+\"}]}),e.CLCM,e.CBCM,e.ASM,e.QSM,{cN:\"class\",bK:\"class interface\",e:/[{;=]/,eE:!0,k:\"class interface\",i:/[:\"\\[\\]]/,c:[{bK:\"extends 
implements\"},e.UTM]},{bK:\"new throw return else\",r:0},{cN:\"function\",b:\"(\"+a+\"\\\\s+)+\"+e.UIR+\"\\\\s*\\\\(\",rB:!0,e:/[{;=]/,eE:!0,k:t,c:[{b:e.UIR+\"\\\\s*\\\\(\",rB:!0,r:0,c:[e.UTM]},{cN:\"params\",b:/\\(/,e:/\\)/,k:t,r:0,c:[e.ASM,e.QSM,e.CNM,e.CBCM]},e.CLCM,e.CBCM]},r,{cN:\"annotation\",b:\"@[A-Za-z]+\"}]}});hljs.registerLanguage(\"bash\",function(e){var t={cN:\"variable\",v:[{b:/\\$[\\w\\d#@][\\w\\d_]*/},{b:/\\$\\{(.*?)}/}]},s={cN:\"string\",b:/\"/,e:/\"/,c:[e.BE,t,{cN:\"variable\",b:/\\$\\(/,e:/\\)/,c:[e.BE]}]},a={cN:\"string\",b:/'/,e:/'/};return{aliases:[\"sh\",\"zsh\"],l:/-?[a-z\\.]+/,k:{keyword:\"if then else elif fi for while in do done case esac function\",literal:\"true false\",built_in:\"break cd continue eval exec exit export getopts hash pwd readonly return shift test times trap umask unset alias bind builtin caller command declare echo enable help let local logout mapfile printf read readarray source type typeset ulimit unalias set shopt autoload bg bindkey bye cap chdir clone comparguments compcall compctl compdescribe compfiles compgroups compquote comptags comptry compvalues dirs disable disown echotc echoti emulate fc fg float functions getcap getln history integer jobs kill limit log noglob popd print pushd pushln rehash sched setcap setopt stat suspend ttyctl unfunction unhash unlimit unsetopt vared wait whence where which zcompile zformat zftp zle zmodload zparseopts zprof zpty zregexparse zsocket zstyle ztcp\",operator:\"-ne -eq -lt -gt -f -d -e -s -l -a\"},c:[{cN:\"shebang\",b:/^#![^\\n]+sh\\s*$/,r:10},{cN:\"function\",b:/\\w[\\w\\d_]*\\s*\\(\\s*\\)\\s*\\{/,rB:!0,c:[e.inherit(e.TM,{b:/\\w[\\w\\d_]*/})],r:0},e.HCM,e.NM,s,a,t]}});hljs.registerLanguage(\"sql\",function(e){var t=e.C(\"--\",\"$\");return{cI:!0,i:/[<>{}*]/,c:[{cN:\"operator\",bK:\"begin end start commit rollback savepoint lock alter create drop rename call delete do handler insert load replace select truncate update set show pragma grant merge describe use explain help 
declare prepare execute deallocate release unlock purge reset change stop analyze cache flush optimize repair kill install uninstall checksum restore check backup revoke\",e:/;/,eW:!0,k:{keyword:\"abort abs absolute acc acce accep accept access accessed accessible account acos action activate add addtime admin administer advanced advise aes_decrypt aes_encrypt after agent aggregate ali alia alias allocate allow alter always analyze ancillary and any anydata anydataset anyschema anytype apply archive archived archivelog are as asc ascii asin assembly assertion associate asynchronous at atan atn2 attr attri attrib attribu attribut attribute attributes audit authenticated authentication authid authors auto autoallocate autodblink autoextend automatic availability avg backup badfile basicfile before begin beginning benchmark between bfile bfile_base big bigfile bin binary_double binary_float binlog bit_and bit_count bit_length bit_or bit_xor bitmap blob_base block blocksize body both bound buffer_cache buffer_pool build bulk by byte byteordermark bytes c cache caching call calling cancel capacity cascade cascaded case cast catalog category ceil ceiling chain change changed char_base char_length character_length characters characterset charindex charset charsetform charsetid check checksum checksum_agg child choose chr chunk class cleanup clear client clob clob_base clone close cluster_id cluster_probability cluster_set clustering coalesce coercibility col collate collation collect colu colum column column_value columns columns_updated comment commit compact compatibility compiled complete composite_limit compound compress compute concat concat_ws concurrent confirm conn connec connect connect_by_iscycle connect_by_isleaf connect_by_root connect_time connection consider consistent constant constraint constraints constructor container content contents context contributors controlfile conv convert convert_tz corr corr_k corr_s corresponding corruption cos cost count 
count_big counted covar_pop covar_samp cpu_per_call cpu_per_session crc32 create creation critical cross cube cume_dist curdate current current_date current_time current_timestamp current_user cursor curtime customdatum cycle d data database databases datafile datafiles datalength date_add date_cache date_format date_sub dateadd datediff datefromparts datename datepart datetime2fromparts day day_to_second dayname dayofmonth dayofweek dayofyear days db_role_change dbtimezone ddl deallocate declare decode decompose decrement decrypt deduplicate def defa defau defaul default defaults deferred defi defin define degrees delayed delegate delete delete_all delimited demand dense_rank depth dequeue des_decrypt des_encrypt des_key_file desc descr descri describ describe descriptor deterministic diagnostics difference dimension direct_load directory disable disable_all disallow disassociate discardfile disconnect diskgroup distinct distinctrow distribute distributed div do document domain dotnet double downgrade drop dumpfile duplicate duration e each edition editionable editions element ellipsis else elsif elt empty enable enable_all enclosed encode encoding encrypt end end-exec endian enforced engine engines enqueue enterprise entityescaping eomonth error errors escaped evalname evaluate event eventdata events except exception exceptions exchange exclude excluding execu execut execute exempt exists exit exp expire explain export export_set extended extent external external_1 external_2 externally extract f failed failed_login_attempts failover failure far fast feature_set feature_value fetch field fields file file_name_convert filesystem_like_logging final finish first first_value fixed flash_cache flashback floor flush following follows for forall force form forma format found found_rows freelist freelists freepools fresh from from_base64 from_days ftp full function g general generated get get_format get_lock getdate getutcdate global global_name globally go goto grant 
grants greatest group group_concat group_id grouping grouping_id groups gtid_subtract guarantee guard handler hash hashkeys having hea head headi headin heading heap help hex hierarchy high high_priority hosts hour http i id ident_current ident_incr ident_seed identified identity idle_time if ifnull ignore iif ilike ilm immediate import in include including increment index indexes indexing indextype indicator indices inet6_aton inet6_ntoa inet_aton inet_ntoa infile initial initialized initially initrans inmemory inner innodb input insert install instance instantiable instr interface interleaved intersect into invalidate invisible is is_free_lock is_ipv4 is_ipv4_compat is_not is_not_null is_used_lock isdate isnull isolation iterate java join json json_exists k keep keep_duplicates key keys kill l language large last last_day last_insert_id last_value lax lcase lead leading least leaves left len lenght length less level levels library like like2 like4 likec limit lines link list listagg little ln load load_file lob lobs local localtime localtimestamp locate locator lock locked log log10 log2 logfile logfiles logging logical logical_reads_per_call logoff logon logs long loop low low_priority lower lpad lrtrim ltrim m main make_set makedate maketime managed management manual map mapping mask master master_pos_wait match matched materialized max maxextents maximize maxinstances maxlen maxlogfiles maxloghistory maxlogmembers maxsize maxtrans md5 measures median medium member memcompress memory merge microsecond mid migration min minextents minimum mining minus minute minvalue missing mod mode model modification modify module monitoring month months mount move movement multiset mutex n name name_const names nan national native natural nav nchar nclob nested never new newline next nextval no no_write_to_binlog noarchivelog noaudit nobadfile nocheck nocompress nocopy nocycle nodelay nodiscardfile noentityescaping noguarantee nokeep nologfile nomapping nomaxvalue nominimize 
nominvalue nomonitoring none noneditionable nonschema noorder nopr nopro noprom nopromp noprompt norely noresetlogs noreverse normal norowdependencies noschemacheck noswitch not nothing notice notrim novalidate now nowait nth_value nullif nulls num numb numbe nvarchar nvarchar2 object ocicoll ocidate ocidatetime ociduration ociinterval ociloblocator ocinumber ociref ocirefcursor ocirowid ocistring ocitype oct octet_length of off offline offset oid oidindex old on online only opaque open operations operator optimal optimize option optionally or oracle oracle_date oradata ord ordaudio orddicom orddoc order ordimage ordinality ordvideo organization orlany orlvary out outer outfile outline output over overflow overriding p package pad parallel parallel_enable parameters parent parse partial partition partitions pascal passing password password_grace_time password_lock_time password_reuse_max password_reuse_time password_verify_function patch path patindex pctincrease pctthreshold pctused pctversion percent percent_rank percentile_cont percentile_disc performance period period_add period_diff permanent physical pi pipe pipelined pivot pluggable plugin policy position post_transaction pow power pragma prebuilt precedes preceding precision prediction prediction_cost prediction_details prediction_probability prediction_set prepare present preserve prior priority private private_sga privileges procedural procedure procedure_analyze processlist profiles project prompt protection public publishingservername purge quarter query quick quiesce quota quotename radians raise rand range rank raw read reads readsize rebuild record records recover recovery recursive recycle redo reduced ref reference referenced references referencing refresh regexp_like register regr_avgx regr_avgy regr_count regr_intercept regr_r2 regr_slope regr_sxx regr_sxy reject rekey relational relative relaylog release release_lock relies_on relocate rely rem remainder rename repair repeat replace replicate 
replication required reset resetlogs resize resource respect restore restricted result result_cache resumable resume retention return returning returns reuse reverse revoke right rlike role roles rollback rolling rollup round row row_count rowdependencies rowid rownum rows rtrim rules safe salt sample save savepoint sb1 sb2 sb4 scan schema schemacheck scn scope scroll sdo_georaster sdo_topo_geometry search sec_to_time second section securefile security seed segment select self sequence sequential serializable server servererror session session_user sessions_per_user set sets settings sha sha1 sha2 share shared shared_pool short show shrink shutdown si_averagecolor si_colorhistogram si_featurelist si_positionalcolor si_stillimage si_texture siblings sid sign sin size size_t sizes skip slave sleep smalldatetimefromparts smallfile snapshot some soname sort soundex source space sparse spfile split sql sql_big_result sql_buffer_result sql_cache sql_calc_found_rows sql_small_result sql_variant_property sqlcode sqldata sqlerror sqlname sqlstate sqrt square standalone standby start starting startup statement static statistics stats_binomial_test stats_crosstab stats_ks_test stats_mode stats_mw_test stats_one_way_anova stats_t_test_ stats_t_test_indep stats_t_test_one stats_t_test_paired stats_wsr_test status std stddev stddev_pop stddev_samp stdev stop storage store stored str str_to_date straight_join strcmp strict string struct stuff style subdate subpartition subpartitions substitutable substr substring subtime subtring_index subtype success sum suspend switch switchoffset switchover sync synchronous synonym sys sys_xmlagg sysasm sysaux sysdate sysdatetimeoffset sysdba sysoper system system_user sysutcdatetime t table tables tablespace tan tdo template temporary terminated tertiary_weights test than then thread through tier ties time time_format time_zone timediff timefromparts timeout timestamp timestampadd timestampdiff timezone_abbr timezone_minute timezone_region to 
to_base64 to_date to_days to_seconds todatetimeoffset trace tracking transaction transactional translate translation treat trigger trigger_nestlevel triggers trim truncate try_cast try_convert try_parse type ub1 ub2 ub4 ucase unarchived unbounded uncompress under undo unhex unicode uniform uninstall union unique unix_timestamp unknown unlimited unlock unpivot unrecoverable unsafe unsigned until untrusted unusable unused update updated upgrade upped upper upsert url urowid usable usage use use_stored_outlines user user_data user_resources users using utc_date utc_timestamp uuid uuid_short validate validate_password_strength validation valist value values var var_samp varcharc vari varia variab variabl variable variables variance varp varraw varrawc varray verify version versions view virtual visible void wait wallet warning warnings week weekday weekofyear wellformed when whene whenev wheneve whenever where while whitespace with within without work wrapped xdb xml xmlagg xmlattributes xmlcast xmlcolattval xmlelement xmlexists xmlforest xmlindex xmlnamespaces xmlpi xmlquery xmlroot xmlschema xmlserialize xmltable xmltype xor year year_to_month years yearweek\",literal:\"true false null\",built_in:\"array bigint binary bit blob boolean char character date dec decimal float int int8 integer interval number numeric real record serial serial8 smallint text varchar varying void\"},c:[{cN:\"string\",b:\"'\",e:\"'\",c:[e.BE,{b:\"''\"}]},{cN:\"string\",b:'\"',e:'\"',c:[e.BE,{b:'\"\"'}]},{cN:\"string\",b:\"`\",e:\"`\",c:[e.BE]},e.CNM,e.CBCM,t]},e.CBCM,t]}});hljs.registerLanguage(\"nginx\",function(e){var r={cN:\"variable\",v:[{b:/\\$\\d+/},{b:/\\$\\{/,e:/}/},{b:\"[\\\\$\\\\@]\"+e.UIR}]},b={eW:!0,l:\"[a-z/_]+\",k:{built_in:\"on off yes no true false none blocked debug info notice warn error crit select break last permanent redirect kqueue rtsig epoll poll 
/dev/poll\"},r:0,i:\"=>\",c:[e.HCM,{cN:\"string\",c:[e.BE,r],v:[{b:/\"/,e:/\"/},{b:/'/,e:/'/}]},{cN:\"url\",b:\"([a-z]+):/\",e:\"\\\\s\",eW:!0,eE:!0,c:[r]},{cN:\"regexp\",c:[e.BE,r],v:[{b:\"\\\\s\\\\^\",e:\"\\\\s|{|;\",rE:!0},{b:\"~\\\\*?\\\\s+\",e:\"\\\\s|{|;\",rE:!0},{b:\"\\\\*(\\\\.[a-z\\\\-]+)+\"},{b:\"([a-z\\\\-]+\\\\.)+\\\\*\"}]},{cN:\"number\",b:\"\\\\b\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}(:\\\\d{1,5})?\\\\b\"},{cN:\"number\",b:\"\\\\b\\\\d+[kKmMgGdshdwy]*\\\\b\",r:0},r]};return{aliases:[\"nginxconf\"],c:[e.HCM,{b:e.UIR+\"\\\\s\",e:\";|{\",rB:!0,c:[{cN:\"title\",b:e.UIR,starts:b}],r:0}],i:\"[^\\\\s\\\\}]\"}});hljs.registerLanguage(\"cpp\",function(t){var e={cN:\"keyword\",b:\"\\\\b[a-z\\\\d_]*_t\\\\b\"},r={cN:\"string\",v:[t.inherit(t.QSM,{b:'((u8?|U)|L)?\"'}),{b:'(u8?|U)?R\"',e:'\"',c:[t.BE]},{b:\"'\\\\\\\\?.\",e:\"'\",i:\".\"}]},s={cN:\"number\",v:[{b:\"\\\\b(\\\\d+(\\\\.\\\\d*)?|\\\\.\\\\d+)(u|U|l|L|ul|UL|f|F)\"},{b:t.CNR}]},i={cN:\"preprocessor\",b:\"#\",e:\"$\",k:\"if else elif endif define undef warning error line pragma ifdef ifndef\",c:[{b:/\\\\\\n/,r:0},{bK:\"include\",e:\"$\",c:[r,{cN:\"string\",b:\"<\",e:\">\",i:\"\\\\n\"}]},r,s,t.CLCM,t.CBCM]},a=t.IR+\"\\\\s*\\\\(\",c={keyword:\"int float while private char catch export virtual operator sizeof dynamic_cast|10 typedef const_cast|10 const struct for static_cast|10 union namespace unsigned long volatile static protected bool template mutable if public friend do goto auto void enum else break extern using class asm case typeid short reinterpret_cast|10 default double register explicit signed typename try this switch continue inline delete alignof constexpr decltype noexcept static_assert thread_local restrict _Bool complex _Complex _Imaginary atomic_bool atomic_char atomic_schar atomic_uchar atomic_short atomic_ushort atomic_int atomic_uint atomic_long atomic_ulong atomic_llong atomic_ullong\",built_in:\"std string cin cout cerr clog stdin stdout stderr stringstream istringstream 
ostringstream auto_ptr deque list queue stack vector map set bitset multiset multimap unordered_set unordered_map unordered_multiset unordered_multimap array shared_ptr abort abs acos asin atan2 atan calloc ceil cosh cos exit exp fabs floor fmod fprintf fputs free frexp fscanf isalnum isalpha iscntrl isdigit isgraph islower isprint ispunct isspace isupper isxdigit tolower toupper labs ldexp log10 log malloc realloc memchr memcmp memcpy memset modf pow printf putchar puts scanf sinh sin snprintf sprintf sqrt sscanf strcat strchr strcmp strcpy strcspn strlen strncat strncmp strncpy strpbrk strrchr strspn strstr tanh tan vfprintf vprintf vsprintf\",literal:\"true false nullptr NULL\"};return{aliases:[\"c\",\"cc\",\"h\",\"c++\",\"h++\",\"hpp\"],k:c,i:\"</\",c:[e,t.CLCM,t.CBCM,s,r,i,{b:\"\\\\b(deque|list|queue|stack|vector|map|set|bitset|multiset|multimap|unordered_map|unordered_set|unordered_multiset|unordered_multimap|array)\\\\s*<\",e:\">\",k:c,c:[\"self\",e]},{b:t.IR+\"::\",k:c},{bK:\"new throw return else\",r:0},{cN:\"function\",b:\"(\"+t.IR+\"[\\\\*&\\\\s]+)+\"+a,rB:!0,e:/[{;=]/,eE:!0,k:c,i:/[^\\w\\s\\*&]/,c:[{b:a,rB:!0,c:[t.TM],r:0},{cN:\"params\",b:/\\(/,e:/\\)/,k:c,r:0,c:[t.CLCM,t.CBCM,r,s]},t.CLCM,t.CBCM,i]}]}});hljs.registerLanguage(\"php\",function(e){var c={cN:\"variable\",b:\"\\\\$+[a-zA-Z_-ÿ][a-zA-Z0-9_-ÿ]*\"},a={cN:\"preprocessor\",b:/<\\?(php)?|\\?>/},i={cN:\"string\",c:[e.BE,a],v:[{b:'b\"',e:'\"'},{b:\"b'\",e:\"'\"},e.inherit(e.ASM,{i:null}),e.inherit(e.QSM,{i:null})]},t={v:[e.BNM,e.CNM]};return{aliases:[\"php3\",\"php4\",\"php5\",\"php6\"],cI:!0,k:\"and include_once list abstract global private echo interface as static endswitch array null if endwhile or const for endforeach self var while isset public protected exit foreach throw elseif include __FILE__ empty require_once do xor return parent clone use __CLASS__ __LINE__ else break print eval new catch __METHOD__ case exception default die require __FUNCTION__ enddeclare final try switch continue 
endfor endif declare unset true false trait goto instanceof insteadof __DIR__ __NAMESPACE__ yield finally\",c:[e.CLCM,e.HCM,e.C(\"/\\\\*\",\"\\\\*/\",{c:[{cN:\"doctag\",b:\"@[A-Za-z]+\"},a]}),e.C(\"__halt_compiler.+?;\",!1,{eW:!0,k:\"__halt_compiler\",l:e.UIR}),{cN:\"string\",b:/<<<['\"]?\\w+['\"]?$/,e:/^\\w+;?$/,c:[e.BE,{cN:\"subst\",v:[{b:/\\$\\w+/},{b:/\\{\\$/,e:/\\}/}]}]},a,c,{b:/(::|->)+[a-zA-Z_\\x7f-\\xff][a-zA-Z0-9_\\x7f-\\xff]*/},{cN:\"function\",bK:\"function\",e:/[;{]/,eE:!0,i:\"\\\\$|\\\\[|%\",c:[e.UTM,{cN:\"params\",b:\"\\\\(\",e:\"\\\\)\",c:[\"self\",c,e.CBCM,i,t]}]},{cN:\"class\",bK:\"class interface\",e:\"{\",eE:!0,i:/[:\\(\\$\"]/,c:[{bK:\"extends implements\"},e.UTM]},{bK:\"namespace\",e:\";\",i:/[\\.']/,c:[e.UTM]},{bK:\"use\",e:\";\",c:[e.UTM]},{b:\"=>\"},i,t]}});hljs.registerLanguage(\"coffeescript\",function(e){var c={keyword:\"in if for while finally new do return else break catch instanceof throw try this switch continue typeof delete debugger super then unless until loop of by when and or is isnt not\",literal:\"true false null undefined yes no on off\",built_in:\"npm require console print module global window document\"},n=\"[A-Za-z$_][0-9A-Za-z$_]*\",r={cN:\"subst\",b:/#\\{/,e:/}/,k:c},t=[e.BNM,e.inherit(e.CNM,{starts:{e:\"(\\\\s*/)?\",r:0}}),{cN:\"string\",v:[{b:/'''/,e:/'''/,c:[e.BE]},{b:/'/,e:/'/,c:[e.BE]},{b:/\"\"\"/,e:/\"\"\"/,c:[e.BE,r]},{b:/\"/,e:/\"/,c:[e.BE,r]}]},{cN:\"regexp\",v:[{b:\"///\",e:\"///\",c:[r,e.HCM]},{b:\"//[gim]*\",r:0},{b:/\\/(?![ *])(\\\\\\/|.)*?\\/[gim]*(?=\\W|$)/}]},{cN:\"property\",b:\"@\"+n},{b:\"`\",e:\"`\",eB:!0,eE:!0,sL:\"javascript\"}];r.c=t;var 
s=e.inherit(e.TM,{b:n}),i=\"(\\\\(.*\\\\))?\\\\s*\\\\B[-=]>\",o={cN:\"params\",b:\"\\\\([^\\\\(]\",rB:!0,c:[{b:/\\(/,e:/\\)/,k:c,c:[\"self\"].concat(t)}]};return{aliases:[\"coffee\",\"cson\",\"iced\"],k:c,i:/\\/\\*/,c:t.concat([e.C(\"###\",\"###\"),e.HCM,{cN:\"function\",b:\"^\\\\s*\"+n+\"\\\\s*=\\\\s*\"+i,e:\"[-=]>\",rB:!0,c:[s,o]},{b:/[:\\(,=]\\s*/,r:0,c:[{cN:\"function\",b:i,e:\"[-=]>\",rB:!0,c:[o]}]},{cN:\"class\",bK:\"class\",e:\"$\",i:/[:=\"\\[\\]]/,c:[{bK:\"extends\",eW:!0,i:/[:=\"\\[\\]]/,c:[s]},s]},{cN:\"attribute\",b:n+\":\",e:\":\",rB:!0,rE:!0,r:0}])}});hljs.registerLanguage(\"javascript\",function(e){return{aliases:[\"js\"],k:{keyword:\"in of if for while finally var new function do return void else break catch instanceof with throw case default try this switch continue typeof delete let yield const export super debugger as async await\",literal:\"true false null undefined NaN Infinity\",built_in:\"eval isFinite isNaN parseFloat parseInt decodeURI decodeURIComponent encodeURI encodeURIComponent escape unescape Object Function Boolean Error EvalError InternalError RangeError ReferenceError StopIteration SyntaxError TypeError URIError Number Math Date String RegExp Array Float32Array Float64Array Int16Array Int32Array Int8Array Uint16Array Uint32Array Uint8Array Uint8ClampedArray ArrayBuffer DataView JSON Intl arguments require module console window document Symbol Set Map WeakSet WeakMap Proxy Reflect Promise\"},c:[{cN:\"pi\",r:10,b:/^\\s*['\"]use (strict|asm)['\"]/},e.ASM,e.QSM,{cN:\"string\",b:\"`\",e:\"`\",c:[e.BE,{cN:\"subst\",b:\"\\\\$\\\\{\",e:\"\\\\}\"}]},e.CLCM,e.CBCM,{cN:\"number\",v:[{b:\"\\\\b(0[bB][01]+)\"},{b:\"\\\\b(0[oO][0-7]+)\"},{b:e.CNR}],r:0},{b:\"(\"+e.RSR+\"|\\\\b(case|return|throw)\\\\b)\\\\s*\",k:\"return throw 
case\",c:[e.CLCM,e.CBCM,e.RM,{b:/</,e:/>\\s*[);\\]]/,r:0,sL:\"xml\"}],r:0},{cN:\"function\",bK:\"function\",e:/\\{/,eE:!0,c:[e.inherit(e.TM,{b:/[A-Za-z$_][0-9A-Za-z$_]*/}),{cN:\"params\",b:/\\(/,e:/\\)/,eB:!0,eE:!0,c:[e.CLCM,e.CBCM]}],i:/\\[|%/},{b:/\\$[(.]/},{b:\"\\\\.\"+e.IR,r:0},{bK:\"import\",e:\"[;$]\",k:\"import from as\",c:[e.ASM,e.QSM]},{cN:\"class\",bK:\"class\",e:/[{;=]/,eE:!0,i:/[:\"\\[\\]]/,c:[{bK:\"extends\"},e.UTM]}],i:/#/}});hljs.registerLanguage(\"ini\",function(e){var c={cN:\"string\",c:[e.BE],v:[{b:\"'''\",e:\"'''\",r:10},{b:'\"\"\"',e:'\"\"\"',r:10},{b:'\"',e:'\"'},{b:\"'\",e:\"'\"}]};return{aliases:[\"toml\"],cI:!0,i:/\\S/,c:[e.C(\";\",\"$\"),e.HCM,{cN:\"title\",b:/^\\s*\\[+/,e:/\\]+/},{cN:\"setting\",b:/^[a-z0-9\\[\\]_-]+\\s*=\\s*/,e:\"$\",c:[{cN:\"value\",eW:!0,k:\"on off true false yes no\",c:[{cN:\"variable\",v:[{b:/\\$[\\w\\d\"][\\w\\d_]*/},{b:/\\$\\{(.*?)}/}]},c,{cN:\"number\",b:/([\\+\\-]+)?[\\d]+_[\\d_]+/},e.NM],r:0}]}]}});hljs.registerLanguage(\"diff\",function(e){return{aliases:[\"patch\"],c:[{cN:\"chunk\",r:10,v:[{b:/^@@ +\\-\\d+,\\d+ +\\+\\d+,\\d+ +@@$/},{b:/^\\*\\*\\* +\\d+,\\d+ +\\*\\*\\*\\*$/},{b:/^\\-\\-\\- +\\d+,\\d+ +\\-\\-\\-\\-$/}]},{cN:\"header\",v:[{b:/Index: /,e:/$/},{b:/=====/,e:/=====$/},{b:/^\\-\\-\\-/,e:/$/},{b:/^\\*{3} /,e:/$/},{b:/^\\+\\+\\+/,e:/$/},{b:/\\*{5}/,e:/\\*{5}$/}]},{cN:\"addition\",b:\"^\\\\+\",e:\"$\"},{cN:\"deletion\",b:\"^\\\\-\",e:\"$\"},{cN:\"change\",b:\"^\\\\!\",e:\"$\"}]}});\nexports.hljs = hljs;\n"
},
"$:/plugins/tiddlywiki/highlight/highlight.css": {
"type": "text/css",
"title": "$:/plugins/tiddlywiki/highlight/highlight.css",
"tags": "[[$:/tags/Stylesheet]]",
"text": "/*\n\nOriginal style from softwaremaniacs.org (c) Ivan Sagalaev <Maniac@SoftwareManiacs.Org>\n\n*/\n\n.hljs {\n display: block;\n overflow-x: auto;\n padding: 0.5em;\n background: #f0f0f0;\n -webkit-text-size-adjust: none;\n}\n\n.hljs,\n.hljs-subst,\n.hljs-tag .hljs-title,\n.nginx .hljs-title {\n color: black;\n}\n\n.hljs-string,\n.hljs-title,\n.hljs-constant,\n.hljs-parent,\n.hljs-tag .hljs-value,\n.hljs-rule .hljs-value,\n.hljs-preprocessor,\n.hljs-pragma,\n.hljs-name,\n.haml .hljs-symbol,\n.ruby .hljs-symbol,\n.ruby .hljs-symbol .hljs-string,\n.hljs-template_tag,\n.django .hljs-variable,\n.smalltalk .hljs-class,\n.hljs-addition,\n.hljs-flow,\n.hljs-stream,\n.bash .hljs-variable,\n.pf .hljs-variable,\n.apache .hljs-tag,\n.apache .hljs-cbracket,\n.tex .hljs-command,\n.tex .hljs-special,\n.erlang_repl .hljs-function_or_atom,\n.asciidoc .hljs-header,\n.markdown .hljs-header,\n.coffeescript .hljs-attribute,\n.tp .hljs-variable {\n color: #800;\n}\n\n.smartquote,\n.hljs-comment,\n.hljs-annotation,\n.diff .hljs-header,\n.hljs-chunk,\n.asciidoc .hljs-blockquote,\n.markdown .hljs-blockquote {\n color: #888;\n}\n\n.hljs-number,\n.hljs-date,\n.hljs-regexp,\n.hljs-literal,\n.hljs-hexcolor,\n.smalltalk .hljs-symbol,\n.smalltalk .hljs-char,\n.go .hljs-constant,\n.hljs-change,\n.lasso .hljs-variable,\n.makefile .hljs-variable,\n.asciidoc .hljs-bullet,\n.markdown .hljs-bullet,\n.asciidoc .hljs-link_url,\n.markdown .hljs-link_url {\n color: #080;\n}\n\n.hljs-label,\n.ruby .hljs-string,\n.hljs-decorator,\n.hljs-filter .hljs-argument,\n.hljs-localvars,\n.hljs-array,\n.hljs-attr_selector,\n.hljs-important,\n.hljs-pseudo,\n.hljs-pi,\n.haml .hljs-bullet,\n.hljs-doctype,\n.hljs-deletion,\n.hljs-envvar,\n.hljs-shebang,\n.apache .hljs-sqbracket,\n.nginx .hljs-built_in,\n.tex .hljs-formula,\n.erlang_repl .hljs-reserved,\n.hljs-prompt,\n.asciidoc .hljs-link_label,\n.markdown .hljs-link_label,\n.vhdl .hljs-attribute,\n.clojure .hljs-attribute,\n.asciidoc .hljs-attribute,\n.lasso 
.hljs-attribute,\n.coffeescript .hljs-property,\n.hljs-phony {\n color: #88f;\n}\n\n.hljs-keyword,\n.hljs-id,\n.hljs-title,\n.hljs-built_in,\n.css .hljs-tag,\n.hljs-doctag,\n.smalltalk .hljs-class,\n.hljs-winutils,\n.bash .hljs-variable,\n.pf .hljs-variable,\n.apache .hljs-tag,\n.hljs-type,\n.hljs-typename,\n.tex .hljs-command,\n.asciidoc .hljs-strong,\n.markdown .hljs-strong,\n.hljs-request,\n.hljs-status,\n.tp .hljs-data,\n.tp .hljs-io {\n font-weight: bold;\n}\n\n.asciidoc .hljs-emphasis,\n.markdown .hljs-emphasis,\n.tp .hljs-units {\n font-style: italic;\n}\n\n.nginx .hljs-built_in {\n font-weight: normal;\n}\n\n.coffeescript .javascript,\n.javascript .xml,\n.lasso .markup,\n.tex .hljs-formula,\n.xml .javascript,\n.xml .vbscript,\n.xml .css,\n.xml .hljs-cdata {\n opacity: 0.5;\n}\n"
},
"$:/plugins/tiddlywiki/highlight/highlightblock.js": {
"text": "/*\\\ntitle: $:/plugins/tiddlywiki/highlight/highlightblock.js\ntype: application/javascript\nmodule-type: widget\n\nWraps up the fenced code blocks parser for highlight and use in TiddlyWiki5\n\n\\*/\n(function() {\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar CodeBlockWidget = require(\"$:/core/modules/widgets/codeblock.js\").codeblock;\n\nvar hljs = require(\"$:/plugins/tiddlywiki/highlight/highlight.js\");\n\nhljs.configure({tabReplace: \" \"});\t\n\nCodeBlockWidget.prototype.postRender = function() {\n\tvar domNode = this.domNodes[0];\n\tif($tw.browser && this.document !== $tw.fakeDocument && this.language) {\n\t\tdomNode.className = this.language.toLowerCase();\n\t\thljs.highlightBlock(domNode);\n\t} else if(!$tw.browser && this.language && this.language.indexOf(\"/\") === -1 ){\n\t\ttry {\n\t\t\tdomNode.className = this.language.toLowerCase() + \" hljs\";\n\t\t\tdomNode.children[0].innerHTML = hljs.fixMarkup(hljs.highlight(this.language, this.getAttribute(\"code\")).value);\n\t\t}\n\t\tcatch(err) {\n\t\t\t// Can't easily tell if a language is registered or not in the packed version of hightlight.js,\n\t\t\t// so we silently fail and the codeblock remains unchanged\n\t\t}\n\t}\t\n};\n\n})();\n",
"title": "$:/plugins/tiddlywiki/highlight/highlightblock.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/plugins/tiddlywiki/highlight/license": {
"title": "$:/plugins/tiddlywiki/highlight/license",
"type": "text/plain",
"text": "Copyright (c) 2006, Ivan Sagalaev\nAll rights reserved.\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are met:\n\n * Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in the\n documentation and/or other materials provided with the distribution.\n * Neither the name of highlight.js nor the names of its contributors\n may be used to endorse or promote products derived from this software\n without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY\nEXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED\nWARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\nDISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY\nDIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES\n(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;\nLOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND\nON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\nSOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
"$:/plugins/tiddlywiki/highlight/readme": {
"title": "$:/plugins/tiddlywiki/highlight/readme",
"text": "This plugin provides syntax highlighting of code blocks using v8.8.0 of [[highlight.js|https://github.com/isagalaev/highlight.js]] from Ivan Sagalaev.\n\n! Usage\n\nWhen the plugin is installed it automatically applies highlighting to all codeblocks defined with triple backticks or with the CodeBlockWidget.\n\nThe language can optionally be specified after the opening triple braces:\n\n<$codeblock code=\"\"\"```css\n * { margin: 0; padding: 0; } /* micro reset */\n\nhtml { font-size: 62.5%; }\nbody { font-size: 14px; font-size: 1.4rem; } /* =14px */\nh1 { font-size: 24px; font-size: 2.4rem; } /* =24px */\n```\"\"\"/>\n\nIf no language is specified highlight.js will attempt to automatically detect the language.\n\n! Built-in Language Brushes\n\nThe plugin includes support for the following languages (referred to as \"brushes\" by highlight.js):\n\n* apache\n* bash\n* coffeescript\n* cpp\n* cs\n* css\n* diff\n* http\n* ini\n* java\n* javascript\n* json\n* makefile\n* markdown\n* nginx\n* objectivec\n* perl\n* php\n* python\n* ruby\n* sql\n* xml\n\n"
},
"$:/plugins/tiddlywiki/highlight/styles": {
"title": "$:/plugins/tiddlywiki/highlight/styles",
"tags": "[[$:/tags/Stylesheet]]",
"text": ".hljs{display:block;overflow-x:auto;padding:.5em;color:#333;background:#f8f8f8;-webkit-text-size-adjust:none}.hljs-comment,.diff .hljs-header,.hljs-javadoc{color:#998;font-style:italic}.hljs-keyword,.css .rule .hljs-keyword,.hljs-winutils,.nginx .hljs-title,.hljs-subst,.hljs-request,.hljs-status{color:#333;font-weight:bold}.hljs-number,.hljs-hexcolor,.ruby .hljs-constant{color:teal}.hljs-string,.hljs-tag .hljs-value,.hljs-phpdoc,.hljs-dartdoc,.tex .hljs-formula{color:#d14}.hljs-title,.hljs-id,.scss .hljs-preprocessor{color:#900;font-weight:bold}.hljs-list .hljs-keyword,.hljs-subst{font-weight:normal}.hljs-class .hljs-title,.hljs-type,.vhdl .hljs-literal,.tex .hljs-command{color:#458;font-weight:bold}.hljs-tag,.hljs-tag .hljs-title,.hljs-rule .hljs-property,.django .hljs-tag .hljs-keyword{color:navy;font-weight:normal}.hljs-attribute,.hljs-variable,.lisp .hljs-body,.hljs-name{color:teal}.hljs-regexp{color:#009926}.hljs-symbol,.ruby .hljs-symbol .hljs-string,.lisp .hljs-keyword,.clojure .hljs-keyword,.scheme .hljs-keyword,.tex .hljs-special,.hljs-prompt{color:#990073}.hljs-built_in{color:#0086b3}.hljs-preprocessor,.hljs-pragma,.hljs-pi,.hljs-doctype,.hljs-shebang,.hljs-cdata{color:#999;font-weight:bold}.hljs-deletion{background:#fdd}.hljs-addition{background:#dfd}.diff .hljs-change{background:#0086b3}.hljs-chunk{color:#aaa}"
},
"$:/plugins/tiddlywiki/highlight/usage": {
"title": "$:/plugins/tiddlywiki/highlight/usage",
"text": "! Usage\n\nFenced code blocks can have a language specifier added to trigger highlighting in a specific language. Otherwise heuristics are used to detect the language.\n\n```\n ```js\n var a = b + c; // Highlighted as JavaScript\n ```\n```\n! Adding Themes\n\nYou can add themes from highlight.js by copying the CSS to a new tiddler and tagging it with [[$:/tags/Stylesheet]]. The available themes can be found on GitHub:\n\nhttps://github.com/isagalaev/highlight.js/tree/master/src/styles\n"
}
}
}
{
"tiddlers": {
"$:/plugins/wimmoermans/fhistory.js": {
"text": "/*\\\r\ntitle: $:/plugins/wimmoermans/fhistory.js\r\ntype: application/javascript\r\nmodule-type: filteroperator\r\n\r\na filter to generate ALL tiddler titles from historylist, \r\nrepairs escaped characters \" \\\r\n\r\nassumptions format of historylist \r\n \"title\":\\s\"(.*)\" where .* is the title of the visited tiddler\r\n\r\n@preserve\r\n\\*/\r\n\r\n (function() {\r\n\t\"use strict\";\r\n\texports.fullhistory = function(e, t, i) {\r\n\t\tvar o = [],\r\n\t\tmatch=\"\",\r\n\t\tregexp= \"\",\r\n\t\tttt=\"\",\r\n\t\ttext=\"\";\r\n\t\tregexp = new RegExp(\"\\\"title\\\": \\\"(.+)\\\"\", \"ig\");\r\n\t\ttext = $tw.wiki.getTiddlerText(\"$:/HistoryList\");\r\n\t\twhile (match = regexp.exec(text)) {\r\n\t\t\tttt=match[1].replace(/\\\\\\\"/g,\"\\\"\");\r\n\t\t\tttt=ttt.replace(/\\\\\\\\/g,\"\\\\\");\r\n\t\t\to.push(ttt); /* oldest first */\r\n\t}; /*while*/\r\n\treturn o;\r\n}; /* export */\r\n\r\n} )();\r\n",
"title": "$:/plugins/wimmoermans/fhistory.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/plugins/wimmoermans/history.js": {
"text": "/*\\\r\ntitle: $:/plugins/wimmoermans/history.js\r\ntype: application/javascript\r\nmodule-type: filteroperator\r\n\r\na filter to generate tiddler titles from historylist, reverse order, no duplicates (only most recent), no drafts.\r\n\r\nassumptions\r\n \"title\":\\s\"(.*)\" where .* is the title\r\n \"Draft of '\" is the prefix for tiddler in edit mode\r\n\r\n@preserve\r\n\\*/\r\n\r\n(function() {\r\n\t\"use strict\";\r\n\texports.history = function(e, t, i) {\r\n\t\tvar results = new Array(),\r\n\t\to = [],\r\n\t\tmatch=\"\",\r\n\t\tregexp= \"\",\r\n\t\ttext=\"\",\r\n\t\tttt=\"\",\r\n\t\ti=0,\r\n\t\tj=0,\r\n\t\tentries=0,\r\n\t\tfound=0;\r\n\t\tregexp = new RegExp(\"\\\"title\\\": \\\"(.+)\\\"\", \"ig\");\r\n\t\ttext = $tw.wiki.getTiddlerText(\"$:/HistoryList\");\r\n\t\twhile (match = regexp.exec(text)) {\r\n\t\t\tttt=match[1].replace(/\\\\\\\"/g,\"\\\"\");\r\n\t\t\tttt=ttt.replace(/\\\\\\\\/g,\"\\\\\");\r\n\t\t\tif (ttt.substr(0, 10) !== \"Draft of '\") {\r\n\t\t\t\tresults.push(ttt); /* oldest first */\r\n\t\t\t\tentries = entries + 1;\r\n\t\t\t}\r\n\t}\r\n\ti = entries-1;\r\n\twhile (i >= 0) {\r\n\t\tj = i + 1;\r\n\t\tfound = 0;\r\n\t\twhile ((j <= entries) && (found === 0)) {\r\n\t\t\tif (results[i] === results[j]) {\r\n\t\t\t\tfound = 1;\r\n\t\t\t}\r\n\t\t\tj = j + 1;\r\n\t\t}\r\n\t\tif (found === 0) {\r\n\r\n\t\t\tif( results[i] !== \"\"){\r\n\t\t\t\to.push(results[i]);\r\n\t\t\t}\r\n\t\t}\r\n\t\ti = i - 1;\r\n\t};\r\n\treturn o;\r\n}\r\n\r\n} )();\r\n",
"title": "$:/plugins/wimmoermans/history.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/plugins/wimmoermans/history/HistoryTab": {
"caption": "History",
"created": "20160504135142490",
"creator": "Wim Moermans",
"modified": "20170615000000000",
"modifier": "Causal Productions",
"tags": "$:/tags/SideBar historyplugin",
"title": "$:/plugins/wimmoermans/history/HistoryTab",
"text": "<small>Recently viewed:</small>\n\n{{{ [history[]] }}}\n\n\n"
},
"$:/plugins/wimmoermans/history/HistoryTab2": {
"caption": "History2",
"created": "20160505094007336",
"creator": "Wim Moermans",
"modified": "20160507171948465",
"modifier": "wjam",
"tags": "$:/tags/AdvancedSearch historyplugin",
"title": "$:/plugins/wimmoermans/history/HistoryTab2",
"text": "<$linkcatcher to=\"$:/temp/advancedsearch\">\n\n<<lingo Shadows/Hint>>\n\n<div class=\"tc-search\">\n<$edit-text tiddler=\"$:/temp/advancedsearch\" type=\"search\" tag=\"input\"/>\n<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n<$button class=\"tc-btn-invisible\">\n<$action-setfield $tiddler=\"$:/temp/advancedsearch\" $field=\"text\" $value=\"\"/>\n{{$:/core/images/close-button}}\n</$button>\n</$reveal>\n</div>\n\n</$linkcatcher>\n{{{ [history[]search{$:/temp/advancedsearch}limit[26]] }}}\n"
},
"$:/plugins/wimmoermans/history/icon": {
"created": "20160508110003253",
"modified": "20160508110047926",
"title": "$:/plugins/wimmoermans/history/icon",
"type": "image/svg+xml",
"text": "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n<!-- Created with Inkscape (http://www.inkscape.org/) -->\n\n<svg\n xmlns:dc=\"http://purl.org/dc/elements/1.1/\"\n xmlns:cc=\"http://creativecommons.org/ns#\"\n xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\"\n xmlns:svg=\"http://www.w3.org/2000/svg\"\n xmlns=\"http://www.w3.org/2000/svg\"\n xmlns:xlink=\"http://www.w3.org/1999/xlink\"\n xmlns:sodipodi=\"http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd\"\n xmlns:inkscape=\"http://www.inkscape.org/namespaces/inkscape\"\n width=\"22\"\n height=\"21\"\n id=\"svg4046\"\n version=\"1.1\"\n inkscape:version=\"0.47 r22583\"\n sodipodi:docname=\"cat backtrack red sign 22x21.svg\">\n <defs\n id=\"defs4048\">\n <linearGradient\n inkscape:collect=\"always\"\n id=\"linearGradient3600\">\n <stop\n style=\"stop-color:#ff0024;stop-opacity:1;\"\n offset=\"0\"\n id=\"stop3602\" />\n <stop\n style=\"stop-color:#ff0024;stop-opacity:0;\"\n offset=\"1\"\n id=\"stop3604\" />\n </linearGradient>\n <inkscape:perspective\n sodipodi:type=\"inkscape:persp3d\"\n inkscape:vp_x=\"0 : 526.18109 : 1\"\n inkscape:vp_y=\"0 : 1000 : 0\"\n inkscape:vp_z=\"744.09448 : 526.18109 : 1\"\n inkscape:persp3d-origin=\"372.04724 : 350.78739 : 1\"\n id=\"perspective4054\" />\n <inkscape:perspective\n id=\"perspective4064\"\n inkscape:persp3d-origin=\"0.5 : 0.33333333 : 1\"\n inkscape:vp_z=\"1 : 0.5 : 1\"\n inkscape:vp_y=\"0 : 1000 : 0\"\n inkscape:vp_x=\"0 : 0.5 : 1\"\n sodipodi:type=\"inkscape:persp3d\" />\n <linearGradient\n inkscape:collect=\"always\"\n xlink:href=\"#linearGradient3600\"\n id=\"linearGradient3606\"\n x1=\"-17.230652\"\n y1=\"4.6165885\"\n x2=\"-3.4143419\"\n y2=\"4.6165885\"\n gradientUnits=\"userSpaceOnUse\" />\n </defs>\n <sodipodi:namedview\n id=\"base\"\n pagecolor=\"#ffffff\"\n bordercolor=\"#666666\"\n borderopacity=\"1.0\"\n inkscape:pageopacity=\"0.0\"\n inkscape:pageshadow=\"2\"\n inkscape:zoom=\"31.678384\"\n inkscape:cx=\"9.633971\"\n 
inkscape:cy=\"9.3724875\"\n inkscape:document-units=\"px\"\n inkscape:current-layer=\"layer1\"\n showgrid=\"false\"\n inkscape:window-width=\"1690\"\n inkscape:window-height=\"1181\"\n inkscape:window-x=\"-5\"\n inkscape:window-y=\"-5\"\n inkscape:window-maximized=\"1\" />\n <metadata\n id=\"metadata4051\">\n <rdf:RDF>\n <cc:Work\n rdf:about=\"\">\n <dc:format>image/svg+xml</dc:format>\n <dc:type\n rdf:resource=\"http://purl.org/dc/dcmitype/StillImage\" />\n <dc:title></dc:title>\n </cc:Work>\n </rdf:RDF>\n </metadata>\n <g\n inkscape:label=\"Layer 1\"\n inkscape:groupmode=\"layer\"\n id=\"layer1\"\n transform=\"translate(-12.564828,-228.71506)\">\n <path\n sodipodi:type=\"arc\"\n style=\"fill:#fcfcfc;fill-opacity:1;fill-rule:nonzero\"\n id=\"path6042\"\n sodipodi:cx=\"-1.4836615\"\n sodipodi:cy=\"-1.6968651\"\n sodipodi:rx=\"6.976366\"\n sodipodi:ry=\"6.8500967\"\n d=\"m 5.4927045,-1.6968651 a 6.976366,6.8500967 0 1 1 -13.9527321,0 6.976366,6.8500967 0 1 1 13.9527321,0 z\"\n transform=\"matrix(1.2556561,0,0,1.2788018,25.334287,241.26263)\" />\n <path\n id=\"path6044\"\n d=\"m 30.4446,244.31397 c 0.310834,-0.28767 0.606444,-0.65004 0.656841,-0.80533 0.226308,-0.69733 -1.75153,-1.35182 -2.563323,-0.84824 -0.640438,0.39727 -1.154161,1.973 -0.807158,2.47583 0.257232,0.37275 0.420332,0.39322 1.137559,0.14288 0.460496,-0.16076 0.876334,-0.32601 0.924074,-0.36721 0.04729,-0.042 0.341159,-0.31027 0.65198,-0.59797 l 2.7e-5,4e-5 z m 0.597108,-2.74293 c 0.09612,-0.164 0.0099,-0.46244 -0.199577,-0.69068 -0.46117,-0.50252 -1.166755,-0.22586 -1.371622,0.53779 -0.138492,0.51626 -0.124003,0.53781 0.418457,0.62237 0.608375,0.0949 0.889192,-0.0195 1.152742,-0.46948 z m -3.686825,2.07878 c 0.168572,-0.62841 -0.06485,-0.93373 -0.745912,-0.97577 -0.770729,-0.0477 -1.241044,0.64384 -0.836604,1.22992 0.512291,0.74232 1.35136,0.60756 1.582532,-0.25415 l -1.6e-5,0 z m 1.462533,-2.12446 0.185272,-0.64054 -0.625699,-0.0677 c -0.706134,-0.0764 -0.924717,0.0207 -1.305369,0.57977 
-0.335314,0.49243 -0.04392,0.93382 0.644496,0.97629 0.707662,0.0437 0.882331,-0.0906 1.101289,-0.84784 l 1.1e-5,-4e-5 z m -7.797022,1.15185 c 0.76937,-0.85185 0.741916,-1.28981 -0.106461,-1.69843 -0.998166,-0.48078 -1.914981,-0.37475 -2.454339,0.28389 -0.516439,0.63069 -0.583894,1.63687 -0.151704,2.26314 0.51397,0.74476 1.572512,0.41361 2.712495,-0.8486 z m -3.48099,-0.42697 c 0.0896,-0.69621 -0.04686,-0.87565 -0.696238,-0.91572 -1.139297,-0.0703 -1.566432,0.84984 -0.702808,1.51406 0.586816,0.4513 1.303444,0.14483 1.399073,-0.59834 l -2.7e-5,0 z m 3.354628,-2.52461 c 0.149115,-0.45951 -0.275478,-0.99883 -0.833864,-1.05921 -0.603977,-0.0653 -0.7421,0.0289 -0.89905,0.61367 -0.166828,0.62185 0.06374,0.9337 0.720441,0.97418 0.628634,0.0389 0.868921,-0.0867 1.012367,-0.52882 l 1.06e-4,1.8e-4 z m -2.408088,0.34458 c 0.112063,-0.75445 -0.0033,-0.89128 -0.721233,-0.85538 -0.828289,0.0413 -1.07332,0.23945 -1.137099,0.9192 -0.05268,0.56122 -0.02343,0.59189 0.653277,0.68515 0.878304,0.12109 1.095906,-0.0141 1.204881,-0.74921 l 1.74e-4,2.4e-4 z m 5.888163,-5.33851 c 0.142599,-0.43933 -0.245444,-0.96317 -1.034761,-1.39674 -0.659415,-0.36226 -1.526134,-0.27635 -1.956444,0.1939 -0.468183,0.51161 -0.852424,1.97658 -0.610417,2.32725 0.48829,0.70756 3.291025,-0.16736 3.601586,-1.12433 l 3.6e-5,-8e-5 z m 0.05327,-2.11052 c 0.567019,-0.52796 -0.337185,-1.89786 -1.117088,-1.69249 -0.480085,0.12648 -0.794832,1.02942 -0.505121,1.44923 0.309844,0.44897 1.249847,0.58994 1.622222,0.24325 l -1.3e-5,1e-5 z m -3.840095,1.12289 c 0.05032,-0.53627 0.0115,-0.59251 -0.526932,-0.76354 -0.319703,-0.10149 -0.703975,-0.10859 -0.853942,-0.0154 -0.412123,0.25566 -0.580704,0.98457 -0.316321,1.36768 0.511143,0.74066 1.608153,0.36021 1.697198,-0.58862 l -3e-6,-7e-5 z m 1.399399,-1.72835 c 0.13752,-0.4755 0.08353,-0.73271 -0.201357,-0.9592 -0.777497,-0.6182 -2.043348,0.0734 -1.830727,1.00011 0.08032,0.34992 1.408324,0.87902 1.720388,0.68544 0.06804,-0.0423 0.208269,-0.3691 0.311631,-0.72643 l 6.5e-5,8e-5 
z\"\n style=\"fill:#000000\"\n sodipodi:nodetypes=\"cssssscccsssccsscccccsscccsssccsscccssscccssscccssscccsscccssscccssscc\" />\n </g>\n</svg>\n"
},
"$:/plugins/wimmoermans/history/readme": {
"created": "20160505113313287",
"creator": "wjam",
"modified": "20160514063831746",
"modifier": "wjam",
"tags": "historyplugin sh",
"title": "$:/plugins/wimmoermans/history/readme",
"text": "!!history filter\nTom Tumb (Dutch: Klein Duimpje).used breadcrumbs because he didn't want to get lost in the woods. \n\nWhen reading or editing a large ~TiddlyWiki you sometimes get lost and revisit tiddlers over and over. This plugin ''automatically creates a list of all visited tiddlers'', and allows you to answer questions like \"Where did I read that?\", \"Did I update tiddler XXYY already?\", \"Which system tiddlers did I view/edit?\" \"Which tiddlers did I rename/delete?\". \n\n!!functionality \n\n*The ''plugin/filter'' generates the tiddlers which you visited since loading the ~TiddlyWiki. \n*Like ~OpenTab all* tiddlers from the story river are shown in the history. When you ''close a tiddler'' it is removed from the ~OpenTab but is ''still present in the the history''. \n*Tiddler titles which were opened using tiddlers like $:/DefaultTiddlers are not present in the history.\n*Like ~RecentTab, the history list includes the tiddlers you created or modified during this session. When you ''delete or rename'' a tiddler during your session the old tiddler title will be in ''//italics//''. \n\ncompare ''history[] and ''fullhistory[]\n\n| |''history[]''|fullhistory[]|\n| most recent visited tiddlers|''most recent visited appear first''|most recent appear last|\n| Draft titles|''drafts not included ''|all drafts ^^dangerous[1]^^|\n| visited multiple times|''no duplicates, only most recent title''|includes all duplicates|\n| usage|normal use|advanced use only|\n\n!!examples\n\nTo display all visited tiddlers so far use\n\n ``{{{[history[]]}}}`` \n\nYou can sort the list alphabetically, ''search'' the tiddlers and ''limit'' the number of results to 25. e.g.\n\n``{{{[history[]search{$:/temp/search}limit[25]]}}}``\n\nif you want to exclude system tiddlers from the list:\n\n``{{{[history[]!is[system]]}}}``\n\nIf you want modified but possibly not yet saved tiddlers (incl renamed, deleted but excluding Draft. 
\n\n``{{{[history[]haschanged[]]}}}``\n\n''fullhistory[]'' is only included for //advanced users//. To generate the same list as history[] you would have to write \n``{{{[fullhistory[]!prefix[Draft]reverse[]unique[]]}}}`` ^^[2]^^\n\n!!how to install \n\n''Drag'' the link $:/plugins/wimmoermans/history to your wiki, ''import'' the tiddler and ''save'' your wiki, then ''LOAD'' the newly saved wiki.\nOr ''open'' the history tiddler in this wiki and use the top right icon ''V'', ''export tiddler'', ''JSON file'' to save the tiddler to disk, then in your wiki in the sidebar use ''Tools'', ''import (paperclip)'' to import the JSON file you just saved, ''save'' your wiki, ''LOAD'' the saved wiki.\n\n# history filter <br>[[$:/plugins/wimmoermans/history/history.js]]\n\n#fullhistory filter <br>[[$:/plugins/wimmoermans/history/fhistory.js]]\n\n#History tab in the Sidebar.<br>[[$:/plugins/wimmoermans/history/HistoryTab]]<br><small>(to disable remove the ~$:/tags/SideBar tag)</small>\n# History2 tab for advanced search tiddler <br>[[$:/plugins/wimmoermans/history/HistoryTab2]]<br><small>(to disable remove the ~$:/tags/AdvancedSearch tag)</small>\n#$:/plugins/wimmoermans/history/readme this tiddler\n# $:/plugins/wimmoermans/history/icon three cat paw prints (by Motovun ?)\n\n!!Google plus forum to discuss the history filters\nhttps://groups.google.com/forum/#!topic/tiddlywiki/u4lN-olqnPc\n\n\n!! ~TiddlyWiki version compatibility [3]\nhistory and fullhistory were tested on version 5.1.12 pre-release, 5.1.11, 5.1.9, 5.0.8-beta. 
For 5.0.8-beta the tab-example tiddlers require manually adding the field named 'caption' value 'History' and 'History2' to present the Tab captions.\n\n!!notes/warning\n[1] clicking on ''Draft'' titles in the history is ''dangerous'' especially when the tiddler is already open.\n\n[2] ''unique[]'' is an undocumented filter present in ~TiddlyWiki boot.js.\n\n[3] history scans the $:/HistoryList tiddler for \"title\"://single space//\"//tiddler title//\" and displays the //tiddler title// value. It correctly handles double quote and backslash in tiddler titles.\n"
}
}
}
\rules except wikilink
Proceedings of INTERSPEECH 2017
\rules except wikilink
INTERSPEECH 2017
$:/core/ui/MoreSideBar/Missing
$:/plugins/wimmoermans/history/HistoryTab
{
"tiddlers": {
"$:/info/browser": {
"title": "$:/info/browser",
"text": "yes"
},
"$:/info/node": {
"title": "$:/info/node",
"text": "no"
}
}
}
{
"tiddlers": {
"$:/themes/tiddlywiki/snowwhite/base": {
"title": "$:/themes/tiddlywiki/snowwhite/base",
"tags": "[[$:/tags/Stylesheet]]",
"text": "\\rules only filteredtranscludeinline transcludeinline macrodef macrocallinline\n\n.tc-sidebar-header {\n\ttext-shadow: 0 1px 0 <<colour sidebar-foreground-shadow>>;\n}\n\n.tc-tiddler-info {\n\t<<box-shadow \"inset 1px 2px 3px rgba(0,0,0,0.1)\">>\n}\n\n@media screen {\n\t.tc-tiddler-frame {\n\t\t<<box-shadow \"1px 1px 5px rgba(0, 0, 0, 0.3)\">>\n\t}\n}\n\n@media (max-width: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint}}) {\n\t.tc-tiddler-frame {\n\t\t<<box-shadow none>>\n\t}\n}\n\n.tc-page-controls button svg, .tc-tiddler-controls button svg, .tc-topbar button svg {\n\t<<transition \"fill 150ms ease-in-out\">>\n}\n\n.tc-tiddler-controls button.tc-selected,\n.tc-page-controls button.tc-selected {\n\t<<filter \"drop-shadow(0px -1px 2px rgba(0,0,0,0.25))\">>\n}\n\n.tc-tiddler-frame input.tc-edit-texteditor {\n\t<<box-shadow \"inset 0 1px 8px rgba(0, 0, 0, 0.15)\">>\n}\n\n.tc-edit-tags {\n\t<<box-shadow \"inset 0 1px 8px rgba(0, 0, 0, 0.15)\">>\n}\n\n.tc-tiddler-frame .tc-edit-tags input.tc-edit-texteditor {\n\t<<box-shadow \"none\">>\n\tborder: none;\n\toutline: none;\n}\n\ncanvas.tc-edit-bitmapeditor {\n\t<<box-shadow \"2px 2px 5px rgba(0, 0, 0, 0.5)\">>\n}\n\n.tc-drop-down {\n\tborder-radius: 4px;\n\t<<box-shadow \"2px 2px 10px rgba(0, 0, 0, 0.5)\">>\n}\n\n.tc-block-dropdown {\n\tborder-radius: 4px;\n\t<<box-shadow \"2px 2px 10px rgba(0, 0, 0, 0.5)\">>\n}\n\n.tc-modal {\n\tborder-radius: 6px;\n\t<<box-shadow \"0 3px 7px rgba(0,0,0,0.3)\">>\n}\n\n.tc-modal-footer {\n\tborder-radius: 0 0 6px 6px;\n\t<<box-shadow \"inset 0 1px 0 #fff\">>;\n}\n\n\n.tc-alert {\n\tborder-radius: 6px;\n\t<<box-shadow \"0 3px 7px rgba(0,0,0,0.6)\">>\n}\n\n.tc-notification {\n\tborder-radius: 6px;\n\t<<box-shadow \"0 3px 7px rgba(0,0,0,0.3)\">>\n\ttext-shadow: 0 1px 0 rgba(255,255,255, 0.8);\n}\n\n.tc-sidebar-lists .tc-tab-set .tc-tab-divider {\n\tborder-top: none;\n\theight: 1px;\n\t<<background-linear-gradient \"left, rgba(0,0,0,0.15) 0%, rgba(0,0,0,0.0) 
100%\">>\n}\n\n.tc-more-sidebar .tc-tab-buttons button {\n\t<<background-linear-gradient \"left, rgba(0,0,0,0.01) 0%, rgba(0,0,0,0.1) 100%\">>\n}\n\n.tc-more-sidebar .tc-tab-buttons button.tc-tab-selected {\n\t<<background-linear-gradient \"left, rgba(0,0,0,0.05) 0%, rgba(255,255,255,0.05) 100%\">>\n}\n\n.tc-message-box img {\n\t<<box-shadow \"1px 1px 3px rgba(0,0,0,0.5)\">>\n}\n\n.tc-plugin-info {\n\t<<box-shadow \"1px 1px 3px rgba(0,0,0,0.5)\">>\n}\n"
}
}
}
{
"tiddlers": {
"$:/themes/tiddlywiki/vanilla/base": {
"title": "$:/themes/tiddlywiki/vanilla/base",
"tags": "[[$:/tags/Stylesheet]]",
"text": "\\define custom-background-datauri()\n<$set name=\"background\" value={{$:/themes/tiddlywiki/vanilla/settings/backgroundimage}}>\n<$list filter=\"[<background>is[image]]\">\n`background: url(`\n<$list filter=\"[<background>!has[_canonical_uri]]\">\n<$macrocall $name=\"datauri\" title={{$:/themes/tiddlywiki/vanilla/settings/backgroundimage}}/>\n</$list>\n<$list filter=\"[<background>has[_canonical_uri]]\">\n<$view tiddler={{$:/themes/tiddlywiki/vanilla/settings/backgroundimage}} field=\"_canonical_uri\"/>\n</$list>\n`) center center;`\n`background-attachment: `{{$:/themes/tiddlywiki/vanilla/settings/backgroundimageattachment}}`;\n-webkit-background-size:` {{$:/themes/tiddlywiki/vanilla/settings/backgroundimagesize}}`;\n-moz-background-size:` {{$:/themes/tiddlywiki/vanilla/settings/backgroundimagesize}}`;\n-o-background-size:` {{$:/themes/tiddlywiki/vanilla/settings/backgroundimagesize}}`;\nbackground-size:` {{$:/themes/tiddlywiki/vanilla/settings/backgroundimagesize}}`;`\n</$list>\n</$set>\n\\end\n\n\\define if-fluid-fixed(text,hiddenSidebarText)\n<$reveal state=\"$:/themes/tiddlywiki/vanilla/options/sidebarlayout\" type=\"match\" text=\"fluid-fixed\">\n$text$\n<$reveal state=\"$:/state/sidebar\" type=\"nomatch\" text=\"yes\" default=\"yes\">\n$hiddenSidebarText$\n</$reveal>\n</$reveal>\n\\end\n\n\\rules only filteredtranscludeinline transcludeinline macrodef macrocallinline macrocallblock\n\n/*\n** Start with the normalize CSS reset, and then belay some of its effects\n*/\n\n{{$:/themes/tiddlywiki/vanilla/reset}}\n\n*, input[type=\"search\"] {\n\tbox-sizing: border-box;\n\t-moz-box-sizing: border-box;\n\t-webkit-box-sizing: border-box;\n}\n\nhtml button {\n\tline-height: 1.2;\n\tcolor: <<colour button-foreground>>;\n\tbackground: <<colour button-background>>;\n\tborder-color: <<colour button-border>>;\n}\n\n/*\n** Basic element styles\n*/\n\nhtml {\n\tfont-family: {{$:/themes/tiddlywiki/vanilla/settings/fontfamily}};\n\ttext-rendering: optimizeLegibility; 
/* Enables kerning and ligatures etc. */\n\t-webkit-font-smoothing: antialiased;\n\t-moz-osx-font-smoothing: grayscale;\n}\n\nhtml:-webkit-full-screen {\n\tbackground-color: <<colour page-background>>;\n}\n\nbody.tc-body {\n\tfont-size: {{$:/themes/tiddlywiki/vanilla/metrics/fontsize}};\n\tline-height: {{$:/themes/tiddlywiki/vanilla/metrics/lineheight}};\n\tcolor: <<colour foreground>>;\n\tbackground-color: <<colour page-background>>;\n\tfill: <<colour foreground>>;\n\tword-wrap: break-word;\n\t<<custom-background-datauri>>\n}\n\nh1, h2, h3, h4, h5, h6 {\n\tline-height: 1.2;\n\tfont-weight: 300;\n}\n\npre {\n\tdisplay: block;\n\tpadding: 14px;\n\tmargin-top: 1em;\n\tmargin-bottom: 1em;\n\tword-break: normal;\n\tword-wrap: break-word;\n\twhite-space: {{$:/themes/tiddlywiki/vanilla/options/codewrapping}};\n\tbackground-color: <<colour pre-background>>;\n\tborder: 1px solid <<colour pre-border>>;\n\tpadding: 0 3px 2px;\n\tborder-radius: 3px;\n\tfont-family: {{$:/themes/tiddlywiki/vanilla/settings/codefontfamily}};\n}\n\ncode {\n\tcolor: <<colour code-foreground>>;\n\tbackground-color: <<colour code-background>>;\n\tborder: 1px solid <<colour code-border>>;\n\twhite-space: {{$:/themes/tiddlywiki/vanilla/options/codewrapping}};\n\tpadding: 0 3px 2px;\n\tborder-radius: 3px;\n\tfont-family: {{$:/themes/tiddlywiki/vanilla/settings/codefontfamily}};\n}\n\nblockquote {\n\tborder-left: 5px solid <<colour blockquote-bar>>;\n\tmargin-left: 25px;\n\tpadding-left: 10px;\n}\n\ndl dt {\n\tfont-weight: bold;\n\tmargin-top: 6px;\n}\n\ntextarea,\ninput[type=text],\ninput[type=search],\ninput[type=\"\"],\ninput:not([type]) {\n\tcolor: <<colour foreground>>;\n\tbackground: <<colour background>>;\n}\n\n.tc-muted {\n\tcolor: <<colour muted-foreground>>;\n}\n\nsvg.tc-image-button {\n\tpadding: 0px 1px 1px 0px;\n}\n\nkbd {\n\tdisplay: inline-block;\n\tpadding: 3px 5px;\n\tfont-size: 0.8em;\n\tline-height: 1.2;\n\tcolor: <<colour foreground>>;\n\tvertical-align: middle;\n\tbackground-color: 
<<colour background>>;\n\tborder: solid 1px <<colour muted-foreground>>;\n\tborder-bottom-color: <<colour muted-foreground>>;\n\tborder-radius: 3px;\n\tbox-shadow: inset 0 -1px 0 <<colour muted-foreground>>;\n}\n\n/*\nMarkdown likes putting code elements inside pre elements\n*/\npre > code {\n\tpadding: 0;\n\tborder: none;\n\tbackground-color: inherit;\n\tcolor: inherit;\n}\n\ntable {\n\tborder: 1px solid <<colour table-border>>;\n\twidth: auto;\n\tmax-width: 100%;\n\tcaption-side: bottom;\n\tmargin-top: 1em;\n\tmargin-bottom: 1em;\n}\n\ntable th, table td {\n\tpadding: 0 7px 0 7px;\n\tborder-top: 1px solid <<colour table-border>>;\n\tborder-left: 1px solid <<colour table-border>>;\n}\n\ntable thead tr td, table th {\n\tbackground-color: <<colour table-header-background>>;\n\tfont-weight: bold;\n}\n\ntable tfoot tr td {\n\tbackground-color: <<colour table-footer-background>>;\n}\n\n.tc-csv-table {\n\twhite-space: nowrap;\n}\n\n.tc-tiddler-frame img,\n.tc-tiddler-frame svg,\n.tc-tiddler-frame canvas,\n.tc-tiddler-frame embed,\n.tc-tiddler-frame iframe {\n\tmax-width: 100%;\n}\n\n.tc-tiddler-body > embed,\n.tc-tiddler-body > iframe {\n\twidth: 100%;\n\theight: 600px;\n}\n\n/*\n** Links\n*/\n\nbutton.tc-tiddlylink,\na.tc-tiddlylink {\n\ttext-decoration: none;\n\tfont-weight: normal;\n\tcolor: <<colour tiddler-link-foreground>>;\n\t-webkit-user-select: inherit; /* Otherwise the draggable attribute makes links impossible to select */\n}\n\n.tc-sidebar-lists a.tc-tiddlylink {\n\tcolor: <<colour sidebar-tiddler-link-foreground>>;\n}\n\n.tc-sidebar-lists a.tc-tiddlylink:hover {\n\tcolor: <<colour sidebar-tiddler-link-foreground-hover>>;\n}\n\nbutton.tc-tiddlylink:hover,\na.tc-tiddlylink:hover {\n\ttext-decoration: underline;\n}\n\na.tc-tiddlylink-resolves {\n}\n\na.tc-tiddlylink-shadow {\n\tfont-weight: bold;\n}\n\na.tc-tiddlylink-shadow.tc-tiddlylink-resolves {\n\tfont-weight: normal;\n}\n\na.tc-tiddlylink-missing {\n\tfont-style: italic;\n}\n\na.tc-tiddlylink-external 
{\n\ttext-decoration: underline;\n\tcolor: <<colour external-link-foreground>>;\n\tbackground-color: <<colour external-link-background>>;\n}\n\na.tc-tiddlylink-external:visited {\n\tcolor: <<colour external-link-foreground-visited>>;\n\tbackground-color: <<colour external-link-background-visited>>;\n}\n\na.tc-tiddlylink-external:hover {\n\tcolor: <<colour external-link-foreground-hover>>;\n\tbackground-color: <<colour external-link-background-hover>>;\n}\n\n/*\n** Drag and drop styles\n*/\n\n.tc-tiddler-dragger {\n\tposition: relative;\n\tz-index: -10000;\n}\n\n.tc-tiddler-dragger-inner {\n\tposition: absolute;\n\tdisplay: inline-block;\n\tpadding: 8px 20px;\n\tfont-size: 16.9px;\n\tfont-weight: bold;\n\tline-height: 20px;\n\tcolor: <<colour dragger-foreground>>;\n\ttext-shadow: 0 1px 0 rgba(0, 0, 0, 1);\n\twhite-space: nowrap;\n\tvertical-align: baseline;\n\tbackground-color: <<colour dragger-background>>;\n\tborder-radius: 20px;\n}\n\n.tc-tiddler-dragger-cover {\n\tposition: absolute;\n\tbackground-color: <<colour page-background>>;\n}\n\n.tc-dropzone {\n\tposition: relative;\n}\n\n.tc-dropzone.tc-dragover:before {\n\tz-index: 10000;\n\tdisplay: block;\n\tposition: fixed;\n\ttop: 0;\n\tleft: 0;\n\tright: 0;\n\tbackground: <<colour dropzone-background>>;\n\ttext-align: center;\n\tcontent: \"<<lingo DropMessage>>\";\n}\n\n/*\n** Plugin reload warning\n*/\n\n.tc-plugin-reload-warning {\n\tz-index: 1000;\n\tdisplay: block;\n\tposition: fixed;\n\ttop: 0;\n\tleft: 0;\n\tright: 0;\n\tbackground: <<colour alert-background>>;\n\ttext-align: center;\n}\n\n/*\n** Buttons\n*/\n\nbutton svg, button img, label svg, label img {\n\tvertical-align: middle;\n}\n\n.tc-btn-invisible {\n\tpadding: 0;\n\tmargin: 0;\n\tbackground: none;\n\tborder: none;\n}\n\n.tc-btn-boxed {\n\tfont-size: 0.6em;\n\tpadding: 0.2em;\n\tmargin: 1px;\n\tbackground: none;\n\tborder: 1px solid <<colour tiddler-controls-foreground>>;\n\tborder-radius: 0.25em;\n}\n\nhtml body.tc-body .tc-btn-boxed svg 
{\n\tfont-size: 1.6666em;\n}\n\n.tc-btn-boxed:hover {\n\tbackground: <<colour muted-foreground>>;\n\tcolor: <<colour background>>;\n}\n\nhtml body.tc-body .tc-btn-boxed:hover svg {\n\tfill: <<colour background>>;\n}\n\n.tc-btn-rounded {\n\tfont-size: 0.5em;\n\tline-height: 2;\n\tpadding: 0em 0.3em 0.2em 0.4em;\n\tmargin: 1px;\n\tborder: 1px solid <<colour muted-foreground>>;\n\tbackground: <<colour muted-foreground>>;\n\tcolor: <<colour background>>;\n\tborder-radius: 2em;\n}\n\nhtml body.tc-body .tc-btn-rounded svg {\n\tfont-size: 1.6666em;\n\tfill: <<colour background>>;\n}\n\n.tc-btn-rounded:hover {\n\tborder: 1px solid <<colour muted-foreground>>;\n\tbackground: <<colour background>>;\n\tcolor: <<colour muted-foreground>>;\n}\n\nhtml body.tc-body .tc-btn-rounded:hover svg {\n\tfill: <<colour muted-foreground>>;\n}\n\n.tc-btn-icon svg {\n\theight: 1em;\n\twidth: 1em;\n\tfill: <<colour muted-foreground>>;\n}\n\n.tc-btn-text {\n\tpadding: 0;\n\tmargin: 0;\n}\n\n.tc-btn-big-green {\n\tdisplay: inline-block;\n\tpadding: 8px;\n\tmargin: 4px 8px 4px 8px;\n\tbackground: <<colour download-background>>;\n\tcolor: <<colour download-foreground>>;\n\tfill: <<colour download-foreground>>;\n\tborder: none;\n\tfont-size: 1.2em;\n\tline-height: 1.4em;\n\ttext-decoration: none;\n}\n\n.tc-btn-big-green svg,\n.tc-btn-big-green img {\n\theight: 2em;\n\twidth: 2em;\n\tvertical-align: middle;\n\tfill: <<colour download-foreground>>;\n}\n\n.tc-sidebar-lists input {\n\tcolor: <<colour foreground>>;\n}\n\n.tc-sidebar-lists button {\n\tcolor: <<colour sidebar-button-foreground>>;\n\tfill: <<colour sidebar-button-foreground>>;\n}\n\n.tc-sidebar-lists button.tc-btn-mini {\n\tcolor: <<colour sidebar-muted-foreground>>;\n}\n\n.tc-sidebar-lists button.tc-btn-mini:hover {\n\tcolor: <<colour sidebar-muted-foreground-hover>>;\n}\n\nbutton svg.tc-image-button, button .tc-image-button img {\n\theight: 1em;\n\twidth: 1em;\n}\n\n.tc-unfold-banner {\n\tposition: absolute;\n\tpadding: 0;\n\tmargin: 
0;\n\tbackground: none;\n\tborder: none;\n\twidth: 100%;\n\twidth: calc(100% + 2px);\n\tmargin-left: -43px;\n\ttext-align: center;\n\tborder-top: 2px solid <<colour tiddler-info-background>>;\n\tmargin-top: 4px;\n}\n\n.tc-unfold-banner:hover {\n\tbackground: <<colour tiddler-info-background>>;\n\tborder-top: 2px solid <<colour tiddler-info-border>>;\n}\n\n.tc-unfold-banner svg, .tc-fold-banner svg {\n\theight: 0.75em;\n\tfill: <<colour tiddler-controls-foreground>>;\n}\n\n.tc-unfold-banner:hover svg, .tc-fold-banner:hover svg {\n\tfill: <<colour tiddler-controls-foreground-hover>>;\n}\n\n.tc-fold-banner {\n\tposition: absolute;\n\tpadding: 0;\n\tmargin: 0;\n\tbackground: none;\n\tborder: none;\n\twidth: 23px;\n\ttext-align: center;\n\tmargin-left: -35px;\n\ttop: 6px;\n\tbottom: 6px;\n}\n\n.tc-fold-banner:hover {\n\tbackground: <<colour tiddler-info-background>>;\n}\n\n@media (max-width: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint}}) {\n\n\t.tc-unfold-banner {\n\t\tposition: static;\n\t\twidth: calc(100% + 59px);\n\t}\n\n\t.tc-fold-banner {\n\t\twidth: 16px;\n\t\tmargin-left: -16px;\n\t\tfont-size: 0.75em;\n\t}\n\n}\n\n/*\n** Tags and missing tiddlers\n*/\n\n.tc-tag-list-item {\n\tposition: relative;\n\tdisplay: inline-block;\n\tmargin-right: 7px;\n}\n\n.tc-tags-wrapper {\n\tmargin: 4px 0 14px 0;\n}\n\n.tc-missing-tiddler-label {\n\tfont-style: italic;\n\tfont-weight: normal;\n\tdisplay: inline-block;\n\tfont-size: 11.844px;\n\tline-height: 14px;\n\twhite-space: nowrap;\n\tvertical-align: baseline;\n}\n\nbutton.tc-tag-label, span.tc-tag-label {\n\tdisplay: inline-block;\n\tpadding: 0.16em 0.7em;\n\tfont-size: 0.9em;\n\tfont-weight: 300;\n\tline-height: 1.2em;\n\tcolor: <<colour tag-foreground>>;\n\twhite-space: nowrap;\n\tvertical-align: baseline;\n\tbackground-color: <<colour tag-background>>;\n\tborder-radius: 1em;\n}\n\n.tc-untagged-separator {\n\twidth: 10em;\n\tleft: 0;\n\tmargin-left: 0;\n\tborder: 0;\n\theight: 1px;\n\tbackground: <<colour 
tab-divider>>;\n}\n\nbutton.tc-untagged-label {\n\tbackground-color: <<colour untagged-background>>;\n}\n\n.tc-tag-label svg, .tc-tag-label img {\n\theight: 1em;\n\twidth: 1em;\n\tfill: <<colour tag-foreground>>;\n}\n\n.tc-tag-manager-table .tc-tag-label {\n\twhite-space: normal;\n}\n\n.tc-tag-manager-tag {\n\twidth: 100%;\n}\n\n/*\n** Page layout\n*/\n\n.tc-topbar {\n\tposition: fixed;\n\tz-index: 1200;\n}\n\n.tc-topbar-left {\n\tleft: 29px;\n\ttop: 5px;\n}\n\n.tc-topbar-right {\n\ttop: 5px;\n\tright: 29px;\n}\n\n.tc-topbar button {\n\tpadding: 8px;\n}\n\n.tc-topbar svg {\n\tfill: <<colour muted-foreground>>;\n}\n\n.tc-topbar button:hover svg {\n\tfill: <<colour foreground>>;\n}\n\n.tc-sidebar-header {\n\tcolor: <<colour sidebar-foreground>>;\n\tfill: <<colour sidebar-foreground>>;\n}\n\n.tc-sidebar-header .tc-title a.tc-tiddlylink-resolves {\n\tfont-weight: 300;\n}\n\n.tc-sidebar-header .tc-sidebar-lists p {\n\tmargin-top: 3px;\n\tmargin-bottom: 3px;\n}\n\n.tc-sidebar-header .tc-missing-tiddler-label {\n\tcolor: <<colour sidebar-foreground>>;\n}\n\n.tc-advanced-search input {\n\twidth: 60%;\n}\n\n.tc-search a svg {\n\twidth: 1.2em;\n\theight: 1.2em;\n\tvertical-align: middle;\n}\n\n.tc-page-controls {\n\tmargin-top: 14px;\n\tfont-size: 1.5em;\n}\n\n.tc-page-controls button {\n\tmargin-right: 0.5em;\n}\n\n.tc-page-controls a.tc-tiddlylink:hover {\n\ttext-decoration: none;\n}\n\n.tc-page-controls img {\n\twidth: 1em;\n}\n\n.tc-page-controls svg {\n\tfill: <<colour sidebar-controls-foreground>>;\n}\n\n.tc-page-controls button:hover svg, .tc-page-controls a:hover svg {\n\tfill: <<colour sidebar-controls-foreground-hover>>;\n}\n\n.tc-menu-list-item {\n\twhite-space: nowrap;\n}\n\n.tc-menu-list-count {\n\tfont-weight: bold;\n}\n\n.tc-menu-list-subitem {\n\tpadding-left: 7px;\n}\n\n.tc-story-river {\n\tposition: relative;\n}\n\n@media (max-width: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint}}) {\n\n\t.tc-sidebar-header {\n\t\tpadding: 14px;\n\t\tmin-height: 
32px;\n\t\tmargin-top: {{$:/themes/tiddlywiki/vanilla/metrics/storytop}};\n\t}\n\n\t.tc-story-river {\n\t\tposition: relative;\n\t\tpadding: 0;\n\t}\n}\n\n@media (min-width: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint}}) {\n\n\t.tc-message-box {\n\t\tmargin: 21px -21px 21px -21px;\n\t}\n\n\t.tc-sidebar-scrollable {\n\t\tposition: fixed;\n\t\ttop: {{$:/themes/tiddlywiki/vanilla/metrics/storytop}};\n\t\tleft: {{$:/themes/tiddlywiki/vanilla/metrics/storyright}};\n\t\tbottom: 0;\n\t\tright: 0;\n\t\toverflow-y: auto;\n\t\toverflow-x: auto;\n\t\t-webkit-overflow-scrolling: touch;\n\t\tmargin: 0 0 0 -42px;\n\t\tpadding: 71px 0 28px 42px;\n\t}\n\n\t.tc-story-river {\n\t\tposition: relative;\n\t\tleft: {{$:/themes/tiddlywiki/vanilla/metrics/storyleft}};\n\t\ttop: {{$:/themes/tiddlywiki/vanilla/metrics/storytop}};\n\t\twidth: {{$:/themes/tiddlywiki/vanilla/metrics/storywidth}};\n\t\tpadding: 42px 42px 42px 42px;\n\t}\n\n<<if-no-sidebar \"\n\n\t.tc-story-river {\n\t\twidth: calc(100% - {{$:/themes/tiddlywiki/vanilla/metrics/storyleft}});\n\t}\n\n\">>\n\n}\n\n@media print {\n\n\tbody.tc-body {\n\t\tbackground-color: transparent;\n\t}\n\n\t.tc-sidebar-header, .tc-topbar {\n\t\tdisplay: none;\n\t}\n\n\t.tc-story-river {\n\t\tmargin: 0;\n\t\tpadding: 0;\n\t}\n\n\t.tc-story-river .tc-tiddler-frame {\n\t\tmargin: 0;\n\t\tborder: none;\n\t\tpadding: 0;\n\t}\n}\n\n/*\n** Tiddler styles\n*/\n\n.tc-tiddler-frame {\n\tposition: relative;\n\tmargin-bottom: 28px;\n\tbackground-color: <<colour tiddler-background>>;\n\tborder: 1px solid <<colour tiddler-border>>;\n}\n\n{{$:/themes/tiddlywiki/vanilla/sticky}}\n\n.tc-tiddler-info {\n\tpadding: 14px 42px 14px 42px;\n\tbackground-color: <<colour tiddler-info-background>>;\n\tborder-top: 1px solid <<colour tiddler-info-border>>;\n\tborder-bottom: 1px solid <<colour tiddler-info-border>>;\n}\n\n.tc-tiddler-info p {\n\tmargin-top: 3px;\n\tmargin-bottom: 3px;\n}\n\n.tc-tiddler-info .tc-tab-buttons button.tc-tab-selected 
{\n\tbackground-color: <<colour tiddler-info-tab-background>>;\n\tborder-bottom: 1px solid <<colour tiddler-info-tab-background>>;\n}\n\n.tc-view-field-table {\n\twidth: 100%;\n}\n\n.tc-view-field-name {\n\twidth: 1%; /* Makes this column be as narrow as possible */\n\ttext-align: right;\n\tfont-style: italic;\n\tfont-weight: 200;\n}\n\n.tc-view-field-value {\n}\n\n@media (max-width: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint}}) {\n\t.tc-tiddler-frame {\n\t\tpadding: 14px 14px 14px 14px;\n\t}\n\n\t.tc-tiddler-info {\n\t\tmargin: 0 -14px 0 -14px;\n\t}\n}\n\n@media (min-width: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint}}) {\n\t.tc-tiddler-frame {\n\t\tpadding: 28px 42px 42px 42px;\n\t\twidth: {{$:/themes/tiddlywiki/vanilla/metrics/tiddlerwidth}};\n\t\tborder-radius: 2px;\n\t}\n\n<<if-no-sidebar \"\n\n\t.tc-tiddler-frame {\n\t\twidth: 100%;\n\t}\n\n\">>\n\n\t.tc-tiddler-info {\n\t\tmargin: 0 -42px 0 -42px;\n\t}\n}\n\n.tc-site-title,\n.tc-titlebar {\n\tfont-weight: 300;\n\tfont-size: 2.35em;\n\tline-height: 1.2em;\n\tcolor: <<colour tiddler-title-foreground>>;\n\tmargin: 0;\n}\n\n.tc-site-title {\n\tcolor: <<colour site-title-foreground>>;\n}\n\n.tc-tiddler-title-icon {\n\tvertical-align: middle;\n}\n\n.tc-system-title-prefix {\n\tcolor: <<colour muted-foreground>>;\n}\n\n.tc-titlebar h2 {\n\tfont-size: 1em;\n\tdisplay: inline;\n}\n\n.tc-titlebar img {\n\theight: 1em;\n}\n\n.tc-subtitle {\n\tfont-size: 0.9em;\n\tcolor: <<colour tiddler-subtitle-foreground>>;\n\tfont-weight: 300;\n}\n\n.tc-tiddler-missing .tc-title {\n font-style: italic;\n font-weight: normal;\n}\n\n.tc-tiddler-frame .tc-tiddler-controls {\n\tfloat: right;\n}\n\n.tc-tiddler-controls .tc-drop-down {\n\tfont-size: 0.6em;\n}\n\n.tc-tiddler-controls .tc-drop-down .tc-drop-down {\n\tfont-size: 1em;\n}\n\n.tc-tiddler-controls > span > button {\n\tvertical-align: baseline;\n\tmargin-left:5px;\n}\n\n.tc-tiddler-controls button svg, .tc-tiddler-controls button img,\n.tc-search button 
svg, .tc-search a svg {\n\theight: 0.75em;\n\tfill: <<colour tiddler-controls-foreground>>;\n}\n\n.tc-tiddler-controls button.tc-selected svg,\n.tc-page-controls button.tc-selected svg {\n\tfill: <<colour tiddler-controls-foreground-selected>>;\n}\n\n.tc-tiddler-controls button.tc-btn-invisible:hover svg,\n.tc-search button:hover svg, .tc-search a:hover svg {\n\tfill: <<colour tiddler-controls-foreground-hover>>;\n}\n\n@media print {\n\t.tc-tiddler-controls {\n\t\tdisplay: none;\n\t}\n}\n\n.tc-tiddler-help { /* Help prompts within tiddler template */\n\tcolor: <<colour muted-foreground>>;\n\tmargin-top: 14px;\n}\n\n.tc-tiddler-help a.tc-tiddlylink {\n\tcolor: <<colour very-muted-foreground>>;\n}\n\n.tc-tiddler-frame .tc-edit-texteditor {\n\twidth: 100%;\n\tmargin: 4px 0 4px 0;\n}\n\n.tc-tiddler-frame input.tc-edit-texteditor,\n.tc-tiddler-frame textarea.tc-edit-texteditor,\n.tc-tiddler-frame iframe.tc-edit-texteditor {\n\tpadding: 3px 3px 3px 3px;\n\tborder: 1px solid <<colour tiddler-editor-border>>;\n\tline-height: 1.3em;\n\t-webkit-appearance: none;\n}\n\n.tc-tiddler-frame .tc-binary-warning {\n\twidth: 100%;\n\theight: 5em;\n\ttext-align: center;\n\tpadding: 3em 3em 6em 3em;\n\tbackground: <<colour alert-background>>;\n\tborder: 1px solid <<colour alert-border>>;\n}\n\n.tc-tiddler-frame input.tc-edit-texteditor {\n\tbackground-color: <<colour tiddler-editor-background>>;\n}\n\ncanvas.tc-edit-bitmapeditor {\n\tborder: 6px solid <<colour tiddler-editor-border-image>>;\n\tcursor: crosshair;\n\t-moz-user-select: none;\n\t-webkit-user-select: none;\n\t-ms-user-select: none;\n\tmargin-top: 6px;\n\tmargin-bottom: 6px;\n}\n\n.tc-edit-bitmapeditor-width {\n\tdisplay: block;\n}\n\n.tc-edit-bitmapeditor-height {\n\tdisplay: block;\n}\n\n.tc-tiddler-body {\n\tclear: both;\n}\n\n.tc-tiddler-frame .tc-tiddler-body {\n\tfont-size: {{$:/themes/tiddlywiki/vanilla/metrics/bodyfontsize}};\n\tline-height: {{$:/themes/tiddlywiki/vanilla/metrics/bodylineheight}};\n}\n\n.tc-titlebar, 
.tc-tiddler-edit-title {\n\toverflow: hidden; /* https://github.com/Jermolene/TiddlyWiki5/issues/282 */\n}\n\nhtml body.tc-body.tc-single-tiddler-window {\n\tmargin: 1em;\n\tbackground: <<colour tiddler-background>>;\n}\n\n.tc-single-tiddler-window img,\n.tc-single-tiddler-window svg,\n.tc-single-tiddler-window canvas,\n.tc-single-tiddler-window embed,\n.tc-single-tiddler-window iframe {\n\tmax-width: 100%;\n}\n\n/*\n** Editor\n*/\n\n.tc-editor-toolbar {\n\tmargin-top: 8px;\n}\n\n.tc-editor-toolbar button {\n\tvertical-align: middle;\n\tbackground-color: <<colour tiddler-controls-foreground>>;\n\tfill: <<colour tiddler-controls-foreground-selected>>;\n\tborder-radius: 4px;\n\tpadding: 3px;\n\tmargin: 2px 0 2px 4px;\n}\n\n.tc-editor-toolbar button.tc-text-editor-toolbar-item-adjunct {\n\tmargin-left: 1px;\n\twidth: 1em;\n\tborder-radius: 8px;\n}\n\n.tc-editor-toolbar button.tc-text-editor-toolbar-item-start-group {\n\tmargin-left: 11px;\n}\n\n.tc-editor-toolbar button.tc-selected {\n\tbackground-color: <<colour primary>>;\n}\n\n.tc-editor-toolbar button svg {\n\twidth: 1.6em;\n\theight: 1.2em;\n}\n\n.tc-editor-toolbar button:hover {\n\tbackground-color: <<colour tiddler-controls-foreground-selected>>;\n\tfill: <<colour background>>;\n}\n\n.tc-editor-toolbar .tc-text-editor-toolbar-more {\n\twhite-space: normal;\n}\n\n.tc-editor-toolbar .tc-text-editor-toolbar-more button {\n\tdisplay: inline-block;\n\tpadding: 3px;\n\twidth: auto;\n}\n\n.tc-editor-toolbar .tc-search-results {\n\tpadding: 0;\n}\n\n/*\n** Adjustments for fluid-fixed mode\n*/\n\n@media (min-width: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint}}) {\n\n<<if-fluid-fixed text:\"\"\"\n\n\t.tc-story-river {\n\t\tpadding-right: 0;\n\t\tposition: relative;\n\t\twidth: auto;\n\t\tleft: 0;\n\t\tmargin-left: {{$:/themes/tiddlywiki/vanilla/metrics/storyleft}};\n\t\tmargin-right: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarwidth}};\n\t}\n\n\t.tc-tiddler-frame {\n\t\twidth: 
100%;\n\t}\n\n\t.tc-sidebar-scrollable {\n\t\tleft: auto;\n\t\tbottom: 0;\n\t\tright: 0;\n\t\twidth: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarwidth}};\n\t}\n\n\tbody.tc-body .tc-storyview-zoomin-tiddler {\n\t\twidth: 100%;\n\t\twidth: calc(100% - 42px);\n\t}\n\n\"\"\" hiddenSidebarText:\"\"\"\n\n\t.tc-story-river {\n\t\tpadding-right: 3em;\n\t\tmargin-right: 0;\n\t}\n\n\tbody.tc-body .tc-storyview-zoomin-tiddler {\n\t\twidth: 100%;\n\t\twidth: calc(100% - 84px);\n\t}\n\n\"\"\">>\n\n}\n\n/*\n** Toolbar buttons\n*/\n\n.tc-page-controls svg.tc-image-new-button {\n fill: <<colour toolbar-new-button>>;\n}\n\n.tc-page-controls svg.tc-image-options-button {\n fill: <<colour toolbar-options-button>>;\n}\n\n.tc-page-controls svg.tc-image-save-button {\n fill: <<colour toolbar-save-button>>;\n}\n\n.tc-tiddler-controls button svg.tc-image-info-button {\n fill: <<colour toolbar-info-button>>;\n}\n\n.tc-tiddler-controls button svg.tc-image-edit-button {\n fill: <<colour toolbar-edit-button>>;\n}\n\n.tc-tiddler-controls button svg.tc-image-close-button {\n fill: <<colour toolbar-close-button>>;\n}\n\n.tc-tiddler-controls button svg.tc-image-delete-button {\n fill: <<colour toolbar-delete-button>>;\n}\n\n.tc-tiddler-controls button svg.tc-image-cancel-button {\n fill: <<colour toolbar-cancel-button>>;\n}\n\n.tc-tiddler-controls button svg.tc-image-done-button {\n fill: <<colour toolbar-done-button>>;\n}\n\n/*\n** Tiddler edit mode\n*/\n\n.tc-tiddler-edit-frame em.tc-edit {\n\tcolor: <<colour muted-foreground>>;\n\tfont-style: normal;\n}\n\n.tc-edit-type-dropdown a.tc-tiddlylink-missing {\n\tfont-style: normal;\n}\n\n.tc-edit-tags {\n\tborder: 1px solid <<colour tiddler-editor-border>>;\n\tpadding: 4px 8px 4px 8px;\n}\n\n.tc-edit-add-tag {\n\tdisplay: inline-block;\n}\n\n.tc-edit-add-tag .tc-add-tag-name input {\n\twidth: 50%;\n}\n\n.tc-edit-tags .tc-tag-label {\n\tdisplay: inline-block;\n}\n\n.tc-edit-tags-list {\n\tmargin: 14px 0 14px 0;\n}\n\n.tc-remove-tag-button 
{\n\tpadding-left: 4px;\n}\n\n.tc-tiddler-preview {\n\toverflow: auto;\n}\n\n.tc-tiddler-preview-preview {\n\tfloat: right;\n\twidth: 49%;\n\tborder: 1px solid <<colour tiddler-editor-border>>;\n\tmargin: 4px 3px 3px 3px;\n\tpadding: 3px 3px 3px 3px;\n}\n\n.tc-tiddler-frame .tc-tiddler-preview .tc-edit-texteditor {\n\twidth: 49%;\n}\n\n.tc-tiddler-frame .tc-tiddler-preview canvas.tc-edit-bitmapeditor {\n\tmax-width: 49%;\n}\n\n.tc-edit-fields {\n\twidth: 100%;\n}\n\n\n.tc-edit-fields table, .tc-edit-fields tr, .tc-edit-fields td {\n\tborder: none;\n\tpadding: 4px;\n}\n\n.tc-edit-fields > tbody > .tc-edit-field:nth-child(odd) {\n\tbackground-color: <<colour tiddler-editor-fields-odd>>;\n}\n\n.tc-edit-fields > tbody > .tc-edit-field:nth-child(even) {\n\tbackground-color: <<colour tiddler-editor-fields-even>>;\n}\n\n.tc-edit-field-name {\n\ttext-align: right;\n}\n\n.tc-edit-field-value input {\n\twidth: 100%;\n}\n\n.tc-edit-field-remove {\n}\n\n.tc-edit-field-remove svg {\n\theight: 1em;\n\twidth: 1em;\n\tfill: <<colour muted-foreground>>;\n\tvertical-align: middle;\n}\n\n.tc-edit-field-add-name {\n\tdisplay: inline-block;\n\twidth: 15%;\n}\n\n.tc-edit-field-add-value {\n\tdisplay: inline-block;\n\twidth: 40%;\n}\n\n.tc-edit-field-add-button {\n\tdisplay: inline-block;\n\twidth: 10%;\n}\n\n/*\n** Storyview Classes\n*/\n\n.tc-storyview-zoomin-tiddler {\n\tposition: absolute;\n\tdisplay: block;\n\twidth: 100%;\n}\n\n@media (min-width: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint}}) {\n\n\t.tc-storyview-zoomin-tiddler {\n\t\twidth: calc(100% - 84px);\n\t}\n\n}\n\n/*\n** Dropdowns\n*/\n\n.tc-btn-dropdown {\n\ttext-align: left;\n}\n\n.tc-btn-dropdown svg, .tc-btn-dropdown img {\n\theight: 1em;\n\twidth: 1em;\n\tfill: <<colour muted-foreground>>;\n}\n\n.tc-drop-down-wrapper {\n\tposition: relative;\n}\n\n.tc-drop-down {\n\tmin-width: 380px;\n\tborder: 1px solid <<colour dropdown-border>>;\n\tbackground-color: <<colour dropdown-background>>;\n\tpadding: 7px 0 7px 
0;\n\tmargin: 4px 0 0 0;\n\twhite-space: nowrap;\n\ttext-shadow: none;\n\tline-height: 1.4;\n}\n\n.tc-drop-down .tc-drop-down {\n\tmargin-left: 14px;\n}\n\n.tc-drop-down button svg, .tc-drop-down a svg {\n\tfill: <<colour foreground>>;\n}\n\n.tc-drop-down button.tc-btn-invisible:hover svg {\n\tfill: <<colour foreground>>;\n}\n\n.tc-drop-down p {\n\tpadding: 0 14px 0 14px;\n}\n\n.tc-drop-down svg {\n\twidth: 1em;\n\theight: 1em;\n}\n\n.tc-drop-down img {\n\twidth: 1em;\n}\n\n.tc-drop-down-language-chooser img {\n\twidth: 2em;\n\tvertical-align: baseline;\n}\n\n.tc-drop-down a, .tc-drop-down button {\n\tdisplay: block;\n\tpadding: 0 14px 0 14px;\n\twidth: 100%;\n\ttext-align: left;\n\tcolor: <<colour foreground>>;\n\tline-height: 1.4;\n}\n\n.tc-drop-down .tc-tab-set .tc-tab-buttons button {\n\tdisplay: inline-block;\n width: auto;\n margin-bottom: 0px;\n border-bottom-left-radius: 0;\n border-bottom-right-radius: 0;\n}\n\n.tc-drop-down .tc-prompt {\n\tpadding: 0 14px;\n}\n\n.tc-drop-down .tc-chooser {\n\tborder: none;\n}\n\n.tc-drop-down .tc-chooser .tc-swatches-horiz {\n\tfont-size: 0.4em;\n\tpadding-left: 1.2em;\n}\n\n.tc-drop-down .tc-file-input-wrapper {\n\twidth: 100%;\n}\n\n.tc-drop-down .tc-file-input-wrapper button {\n\tcolor: <<colour foreground>>;\n}\n\n.tc-drop-down a:hover, .tc-drop-down button:hover, .tc-drop-down .tc-file-input-wrapper:hover button {\n\tcolor: <<colour tiddler-link-background>>;\n\tbackground-color: <<colour tiddler-link-foreground>>;\n\ttext-decoration: none;\n}\n\n.tc-drop-down .tc-tab-buttons button {\n\tbackground-color: <<colour dropdown-tab-background>>;\n}\n\n.tc-drop-down .tc-tab-buttons button.tc-tab-selected {\n\tbackground-color: <<colour dropdown-tab-background-selected>>;\n\tborder-bottom: 1px solid <<colour dropdown-tab-background-selected>>;\n}\n\n.tc-drop-down-bullet {\n\tdisplay: inline-block;\n\twidth: 0.5em;\n}\n\n.tc-drop-down .tc-tab-contents a {\n\tpadding: 0 0.5em 0 0.5em;\n}\n\n.tc-block-dropdown-wrapper 
{\n\tposition: relative;\n}\n\n.tc-block-dropdown {\n\tposition: absolute;\n\tmin-width: 220px;\n\tborder: 1px solid <<colour dropdown-border>>;\n\tbackground-color: <<colour dropdown-background>>;\n\tpadding: 7px 0;\n\tmargin: 4px 0 0 0;\n\twhite-space: nowrap;\n\tz-index: 1000;\n\ttext-shadow: none;\n}\n\n.tc-block-dropdown.tc-search-drop-down {\n\tmargin-left: -12px;\n}\n\n.tc-block-dropdown a {\n\tdisplay: block;\n\tpadding: 4px 14px 4px 14px;\n}\n\n.tc-block-dropdown.tc-search-drop-down a {\n\tdisplay: block;\n\tpadding: 0px 10px 0px 10px;\n}\n\n.tc-drop-down .tc-dropdown-item-plain,\n.tc-block-dropdown .tc-dropdown-item-plain {\n\tpadding: 4px 14px 4px 7px;\n}\n\n.tc-drop-down .tc-dropdown-item,\n.tc-block-dropdown .tc-dropdown-item {\n\tpadding: 4px 14px 4px 7px;\n\tcolor: <<colour muted-foreground>>;\n}\n\n.tc-block-dropdown a:hover {\n\tcolor: <<colour tiddler-link-background>>;\n\tbackground-color: <<colour tiddler-link-foreground>>;\n\ttext-decoration: none;\n}\n\n.tc-search-results {\n\tpadding: 0 7px 0 7px;\n}\n\n.tc-image-chooser, .tc-colour-chooser {\n\twhite-space: normal;\n}\n\n.tc-image-chooser a,\n.tc-colour-chooser a {\n\tdisplay: inline-block;\n\tvertical-align: top;\n\ttext-align: center;\n\tposition: relative;\n}\n\n.tc-image-chooser a {\n\tborder: 1px solid <<colour muted-foreground>>;\n\tpadding: 2px;\n\tmargin: 2px;\n\twidth: 4em;\n\theight: 4em;\n}\n\n.tc-colour-chooser a {\n\tpadding: 3px;\n\twidth: 2em;\n\theight: 2em;\n\tvertical-align: middle;\n}\n\n.tc-image-chooser a:hover,\n.tc-colour-chooser a:hover {\n\tbackground: <<colour primary>>;\n\tpadding: 0px;\n\tborder: 3px solid <<colour primary>>;\n}\n\n.tc-image-chooser a svg,\n.tc-image-chooser a img {\n\tdisplay: inline-block;\n\twidth: auto;\n\theight: auto;\n\tmax-width: 3.5em;\n\tmax-height: 3.5em;\n\tposition: absolute;\n\ttop: 0;\n\tbottom: 0;\n\tleft: 0;\n\tright: 0;\n\tmargin: auto;\n}\n\n/*\n** Modals\n*/\n\n.tc-modal-wrapper {\n\tposition: fixed;\n\toverflow: 
auto;\n\toverflow-y: scroll;\n\ttop: 0;\n\tright: 0;\n\tbottom: 0;\n\tleft: 0;\n\tz-index: 900;\n}\n\n.tc-modal-backdrop {\n\tposition: fixed;\n\ttop: 0;\n\tright: 0;\n\tbottom: 0;\n\tleft: 0;\n\tz-index: 1000;\n\tbackground-color: <<colour modal-backdrop>>;\n}\n\n.tc-modal {\n\tz-index: 1100;\n\tbackground-color: <<colour modal-background>>;\n\tborder: 1px solid <<colour modal-border>>;\n}\n\n@media (max-width: 55em) {\n\t.tc-modal {\n\t\tposition: fixed;\n\t\ttop: 1em;\n\t\tleft: 1em;\n\t\tright: 1em;\n\t}\n\n\t.tc-modal-body {\n\t\toverflow-y: auto;\n\t\tmax-height: 400px;\n\t\tmax-height: 60vh;\n\t}\n}\n\n@media (min-width: 55em) {\n\t.tc-modal {\n\t\tposition: fixed;\n\t\ttop: 2em;\n\t\tleft: 25%;\n\t\twidth: 50%;\n\t}\n\n\t.tc-modal-body {\n\t\toverflow-y: auto;\n\t\tmax-height: 400px;\n\t\tmax-height: 60vh;\n\t}\n}\n\n.tc-modal-header {\n\tpadding: 9px 15px;\n\tborder-bottom: 1px solid <<colour modal-header-border>>;\n}\n\n.tc-modal-header h3 {\n\tmargin: 0;\n\tline-height: 30px;\n}\n\n.tc-modal-header img, .tc-modal-header svg {\n\twidth: 1em;\n\theight: 1em;\n}\n\n.tc-modal-body {\n\tpadding: 15px;\n}\n\n.tc-modal-footer {\n\tpadding: 14px 15px 15px;\n\tmargin-bottom: 0;\n\ttext-align: right;\n\tbackground-color: <<colour modal-footer-background>>;\n\tborder-top: 1px solid <<colour modal-footer-border>>;\n}\n\n/*\n** Notifications\n*/\n\n.tc-notification {\n\tposition: fixed;\n\ttop: 14px;\n\tright: 42px;\n\tz-index: 1300;\n\tmax-width: 280px;\n\tpadding: 0 14px 0 14px;\n\tbackground-color: <<colour notification-background>>;\n\tborder: 1px solid <<colour notification-border>>;\n}\n\n/*\n** Tabs\n*/\n\n.tc-tab-set.tc-vertical {\n\tdisplay: -webkit-flex;\n\tdisplay: flex;\n}\n\n.tc-tab-buttons {\n\tfont-size: 0.85em;\n\tpadding-top: 1em;\n\tmargin-bottom: -2px;\n}\n\n.tc-tab-buttons.tc-vertical {\n\tz-index: 100;\n\tdisplay: block;\n\tpadding-top: 14px;\n\tvertical-align: top;\n\ttext-align: right;\n\tmargin-bottom: inherit;\n\tmargin-right: 
-1px;\n\tmax-width: 33%;\n\t-webkit-flex: 0 0 auto;\n\tflex: 0 0 auto;\n}\n\n.tc-tab-buttons button.tc-tab-selected {\n\tcolor: <<colour tab-foreground-selected>>;\n\tbackground-color: <<colour tab-background-selected>>;\n\tborder-left: 1px solid <<colour tab-border-selected>>;\n\tborder-top: 1px solid <<colour tab-border-selected>>;\n\tborder-right: 1px solid <<colour tab-border-selected>>;\n}\n\n.tc-tab-buttons button {\n\tcolor: <<colour tab-foreground>>;\n\tpadding: 3px 5px 3px 5px;\n\tmargin-right: 0.3em;\n\tfont-weight: 300;\n\tborder: none;\n\tbackground: inherit;\n\tbackground-color: <<colour tab-background>>;\n\tborder-left: 1px solid <<colour tab-border>>;\n\tborder-top: 1px solid <<colour tab-border>>;\n\tborder-right: 1px solid <<colour tab-border>>;\n\tborder-top-left-radius: 2px;\n\tborder-top-right-radius: 2px;\n}\n\n.tc-tab-buttons.tc-vertical button {\n\tdisplay: block;\n\twidth: 100%;\n\tmargin-top: 3px;\n\tmargin-right: 0;\n\ttext-align: right;\n\tbackground-color: <<colour tab-background>>;\n\tborder-left: 1px solid <<colour tab-border>>;\n\tborder-bottom: 1px solid <<colour tab-border>>;\n\tborder-right: none;\n\tborder-top-left-radius: 2px;\n\tborder-bottom-left-radius: 2px;\n}\n\n.tc-tab-buttons.tc-vertical button.tc-tab-selected {\n\tbackground-color: <<colour tab-background-selected>>;\n\tborder-right: 1px solid <<colour tab-background-selected>>;\n}\n\n.tc-tab-divider {\n\tborder-top: 1px solid <<colour tab-divider>>;\n}\n\n.tc-tab-divider.tc-vertical {\n\tdisplay: none;\n}\n\n.tc-tab-content {\n\tmargin-top: 14px;\n}\n\n.tc-tab-content.tc-vertical {\n\tdisplay: inline-block;\n\tvertical-align: top;\n\tpadding-top: 0;\n\tpadding-left: 14px;\n\tborder-left: 1px solid <<colour tab-border>>;\n\t-webkit-flex: 1 0 70%;\n\tflex: 1 0 70%;\n}\n\n.tc-sidebar-lists .tc-tab-buttons {\n\tmargin-bottom: -1px;\n}\n\n.tc-sidebar-lists .tc-tab-buttons button.tc-tab-selected {\n\tbackground-color: <<colour sidebar-tab-background-selected>>;\n\tcolor: 
<<colour sidebar-tab-foreground-selected>>;\n\tborder-left: 1px solid <<colour sidebar-tab-border-selected>>;\n\tborder-top: 1px solid <<colour sidebar-tab-border-selected>>;\n\tborder-right: 1px solid <<colour sidebar-tab-border-selected>>;\n}\n\n.tc-sidebar-lists .tc-tab-buttons button {\n\tbackground-color: <<colour sidebar-tab-background>>;\n\tcolor: <<colour sidebar-tab-foreground>>;\n\tborder-left: 1px solid <<colour sidebar-tab-border>>;\n\tborder-top: 1px solid <<colour sidebar-tab-border>>;\n\tborder-right: 1px solid <<colour sidebar-tab-border>>;\n}\n\n.tc-sidebar-lists .tc-tab-divider {\n\tborder-top: 1px solid <<colour sidebar-tab-divider>>;\n}\n\n.tc-more-sidebar .tc-tab-buttons button {\n\tdisplay: block;\n\twidth: 100%;\n\tbackground-color: <<colour sidebar-tab-background>>;\n\tborder-top: none;\n\tborder-left: none;\n\tborder-bottom: none;\n\tborder-right: 1px solid #ccc;\n\tmargin-bottom: inherit;\n}\n\n.tc-more-sidebar .tc-tab-buttons button.tc-tab-selected {\n\tbackground-color: <<colour sidebar-tab-background-selected>>;\n\tborder: none;\n}\n\n/*\n** Alerts\n*/\n\n.tc-alerts {\n\tposition: fixed;\n\ttop: 0;\n\tleft: 0;\n\tmax-width: 500px;\n\tz-index: 20000;\n}\n\n.tc-alert {\n\tposition: relative;\n\tmargin: 28px;\n\tpadding: 14px 14px 14px 14px;\n\tborder: 2px solid <<colour alert-border>>;\n\tbackground-color: <<colour alert-background>>;\n}\n\n.tc-alert-toolbar {\n\tposition: absolute;\n\ttop: 14px;\n\tright: 14px;\n}\n\n.tc-alert-toolbar svg {\n\tfill: <<colour alert-muted-foreground>>;\n}\n\n.tc-alert-subtitle {\n\tcolor: <<colour alert-muted-foreground>>;\n\tfont-weight: bold;\n}\n\n.tc-alert-highlight {\n\tcolor: <<colour alert-highlight>>;\n}\n\n@media (min-width: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint}}) {\n\n\t.tc-static-alert {\n\t\tposition: relative;\n\t}\n\n\t.tc-static-alert-inner {\n\t\tposition: absolute;\n\t\tz-index: 100;\n\t}\n\n}\n\n.tc-static-alert-inner {\n\tpadding: 0 2px 2px 42px;\n\tcolor: <<colour 
static-alert-foreground>>;\n}\n\n/*\n** Control panel\n*/\n\n.tc-control-panel td {\n\tpadding: 4px;\n}\n\n.tc-control-panel table, .tc-control-panel table input, .tc-control-panel table textarea {\n\twidth: 100%;\n}\n\n.tc-plugin-info {\n\tdisplay: block;\n\tborder: 1px solid <<colour muted-foreground>>;\n\tbackground-color: <<colour background>>;\n\tmargin: 0.5em 0 0.5em 0;\n\tpadding: 4px;\n}\n\n.tc-plugin-info-disabled {\n\tbackground: -webkit-repeating-linear-gradient(45deg, #ff0, #ff0 10px, #eee 10px, #eee 20px);\n\tbackground: repeating-linear-gradient(45deg, #ff0, #ff0 10px, #eee 10px, #eee 20px);\n}\n\n.tc-plugin-info-disabled:hover {\n\tbackground: -webkit-repeating-linear-gradient(45deg, #aa0, #aa0 10px, #888 10px, #888 20px);\n\tbackground: repeating-linear-gradient(45deg, #aa0, #aa0 10px, #888 10px, #888 20px);\n}\n\na.tc-tiddlylink.tc-plugin-info:hover {\n\ttext-decoration: none;\n\tbackground-color: <<colour primary>>;\n\tcolor: <<colour background>>;\n\tfill: <<colour foreground>>;\n}\n\na.tc-tiddlylink.tc-plugin-info:hover .tc-plugin-info > .tc-plugin-info-chunk > svg {\n\tfill: <<colour foreground>>;\n}\n\n.tc-plugin-info-chunk {\n\tdisplay: inline-block;\n\tvertical-align: middle;\n}\n\n.tc-plugin-info-chunk h1 {\n\tfont-size: 1em;\n\tmargin: 2px 0 2px 0;\n}\n\n.tc-plugin-info-chunk h2 {\n\tfont-size: 0.8em;\n\tmargin: 2px 0 2px 0;\n}\n\n.tc-plugin-info-chunk div {\n\tfont-size: 0.7em;\n\tmargin: 2px 0 2px 0;\n}\n\n.tc-plugin-info:hover > .tc-plugin-info-chunk > img, .tc-plugin-info:hover > .tc-plugin-info-chunk > svg {\n\twidth: 2em;\n\theight: 2em;\n\tfill: <<colour foreground>>;\n}\n\n.tc-plugin-info > .tc-plugin-info-chunk > img, .tc-plugin-info > .tc-plugin-info-chunk > svg {\n\twidth: 2em;\n\theight: 2em;\n\tfill: <<colour muted-foreground>>;\n}\n\n.tc-plugin-info.tc-small-icon > .tc-plugin-info-chunk > img, .tc-plugin-info.tc-small-icon > .tc-plugin-info-chunk > svg {\n\twidth: 1em;\n\theight: 1em;\n}\n\n.tc-plugin-info-dropdown 
{\n\tborder: 1px solid <<colour muted-foreground>>;\n\tmargin-top: -8px;\n}\n\n.tc-plugin-info-dropdown-message {\n\tbackground: <<colour message-background>>;\n\tpadding: 0.5em 1em 0.5em 1em;\n\tfont-weight: bold;\n\tfont-size: 0.8em;\n}\n\n.tc-plugin-info-dropdown-body {\n\tpadding: 1em 1em 1em 1em;\n}\n\n/*\n** Message boxes\n*/\n\n.tc-message-box {\n\tborder: 1px solid <<colour message-border>>;\n\tbackground: <<colour message-background>>;\n\tpadding: 0px 21px 0px 21px;\n\tfont-size: 12px;\n\tline-height: 18px;\n\tcolor: <<colour message-foreground>>;\n}\n\n/*\n** Pictures\n*/\n\n.tc-bordered-image {\n\tborder: 1px solid <<colour muted-foreground>>;\n\tpadding: 5px;\n\tmargin: 5px;\n}\n\n/*\n** Floats\n*/\n\n.tc-float-right {\n\tfloat: right;\n}\n\n/*\n** Chooser\n*/\n\n.tc-chooser {\n\tborder: 1px solid <<colour table-border>>;\n}\n\n.tc-chooser-item {\n\tborder: 8px;\n\tpadding: 2px 4px;\n}\n\n.tc-chooser-item a.tc-tiddlylink {\n\tdisplay: block;\n\ttext-decoration: none;\n\tcolor: <<colour tiddler-link-foreground>>;\n\tbackground-color: <<colour tiddler-link-background>>;\n}\n\n.tc-chooser-item a.tc-tiddlylink:hover {\n\ttext-decoration: none;\n\tcolor: <<colour tiddler-link-background>>;\n\tbackground-color: <<colour tiddler-link-foreground>>;\n}\n\n/*\n** Palette swatches\n*/\n\n.tc-swatches-horiz {\n}\n\n.tc-swatches-horiz .tc-swatch {\n\tdisplay: inline-block;\n}\n\n.tc-swatch {\n\twidth: 2em;\n\theight: 2em;\n\tmargin: 0.4em;\n\tborder: 1px solid #888;\n}\n\n/*\n** Table of contents\n*/\n\n.tc-sidebar-lists .tc-table-of-contents {\n\twhite-space: nowrap;\n}\n\n.tc-table-of-contents button {\n\tcolor: <<colour sidebar-foreground>>;\n}\n\n.tc-table-of-contents svg {\n\twidth: 0.7em;\n\theight: 0.7em;\n\tvertical-align: middle;\n\tfill: <<colour sidebar-foreground>>;\n}\n\n.tc-table-of-contents ol {\n\tlist-style-type: none;\n\tpadding-left: 0;\n}\n\n.tc-table-of-contents ol ol {\n\tpadding-left: 1em;\n}\n\n.tc-table-of-contents li {\n\tfont-size: 
1.0em;\n\tfont-weight: bold;\n}\n\n.tc-table-of-contents li a {\n\tfont-weight: bold;\n}\n\n.tc-table-of-contents li li {\n\tfont-size: 0.95em;\n\tfont-weight: normal;\n\tline-height: 1.4;\n}\n\n.tc-table-of-contents li li a {\n\tfont-weight: normal;\n}\n\n.tc-table-of-contents li li li {\n\tfont-size: 0.95em;\n\tfont-weight: 200;\n\tline-height: 1.5;\n}\n\n.tc-table-of-contents li li li a {\n\tfont-weight: bold;\n}\n\n.tc-table-of-contents li li li li {\n\tfont-size: 0.95em;\n\tfont-weight: 200;\n}\n\n.tc-tabbed-table-of-contents {\n\tdisplay: -webkit-flex;\n\tdisplay: flex;\n}\n\n.tc-tabbed-table-of-contents .tc-table-of-contents {\n\tz-index: 100;\n\tdisplay: inline-block;\n\tpadding-left: 1em;\n\tmax-width: 50%;\n\t-webkit-flex: 0 0 auto;\n\tflex: 0 0 auto;\n\tbackground: <<colour tab-background>>;\n\tborder-left: 1px solid <<colour tab-border>>;\n\tborder-top: 1px solid <<colour tab-border>>;\n\tborder-bottom: 1px solid <<colour tab-border>>;\n}\n\n.tc-tabbed-table-of-contents .tc-table-of-contents .toc-item > a,\n.tc-tabbed-table-of-contents .tc-table-of-contents .toc-item-selected > a {\n\tdisplay: block;\n\tpadding: 0.12em 1em 0.12em 0.25em;\n}\n\n.tc-tabbed-table-of-contents .tc-table-of-contents .toc-item > a {\n\tborder-top: 1px solid <<colour tab-background>>;\n\tborder-left: 1px solid <<colour tab-background>>;\n\tborder-bottom: 1px solid <<colour tab-background>>;\n}\n\n.tc-tabbed-table-of-contents .tc-table-of-contents .toc-item > a:hover {\n\ttext-decoration: none;\n\tborder-top: 1px solid <<colour tab-border>>;\n\tborder-left: 1px solid <<colour tab-border>>;\n\tborder-bottom: 1px solid <<colour tab-border>>;\n\tbackground: <<colour tab-border>>;\n}\n\n.tc-tabbed-table-of-contents .tc-table-of-contents .toc-item-selected > a {\n\tborder-top: 1px solid <<colour tab-border>>;\n\tborder-left: 1px solid <<colour tab-border>>;\n\tborder-bottom: 1px solid <<colour tab-border>>;\n\tbackground: <<colour background>>;\n\tmargin-right: 
-1px;\n}\n\n.tc-tabbed-table-of-contents .tc-table-of-contents .toc-item-selected > a:hover {\n\ttext-decoration: none;\n}\n\n.tc-tabbed-table-of-contents .tc-tabbed-table-of-contents-content {\n\tdisplay: inline-block;\n\tvertical-align: top;\n\tpadding-left: 1.5em;\n\tpadding-right: 1.5em;\n\tborder: 1px solid <<colour tab-border>>;\n\t-webkit-flex: 1 0 50%;\n\tflex: 1 0 50%;\n}\n\n/*\n** Dirty indicator\n*/\n\nbody.tc-dirty span.tc-dirty-indicator, body.tc-dirty span.tc-dirty-indicator svg {\n\tfill: <<colour dirty-indicator>>;\n\tcolor: <<colour dirty-indicator>>;\n}\n\n/*\n** File inputs\n*/\n\n.tc-file-input-wrapper {\n\tposition: relative;\n\toverflow: hidden;\n\tdisplay: inline-block;\n\tvertical-align: middle;\n}\n\n.tc-file-input-wrapper input[type=file] {\n\tposition: absolute;\n\ttop: 0;\n\tleft: 0;\n\tright: 0;\n\tbottom: 0;\n\tfont-size: 999px;\n\tmax-width: 100%;\n\tmax-height: 100%;\n\tfilter: alpha(opacity=0);\n\topacity: 0;\n\toutline: none;\n\tbackground: white;\n\tcursor: pointer;\n\tdisplay: inline-block;\n}\n\n/*\n** Thumbnail macros\n*/\n\n.tc-thumbnail-wrapper {\n\tposition: relative;\n\tdisplay: inline-block;\n\tmargin: 6px;\n\tvertical-align: top;\n}\n\n.tc-thumbnail-right-wrapper {\n\tfloat:right;\n\tmargin: 0.5em 0 0.5em 0.5em;\n}\n\n.tc-thumbnail-image {\n\ttext-align: center;\n\toverflow: hidden;\n\tborder-radius: 3px;\n}\n\n.tc-thumbnail-image svg,\n.tc-thumbnail-image img {\n\tfilter: alpha(opacity=1);\n\topacity: 1;\n\tmin-width: 100%;\n\tmin-height: 100%;\n\tmax-width: 100%;\n}\n\n.tc-thumbnail-wrapper:hover .tc-thumbnail-image svg,\n.tc-thumbnail-wrapper:hover .tc-thumbnail-image img {\n\tfilter: alpha(opacity=0.8);\n\topacity: 0.8;\n}\n\n.tc-thumbnail-background {\n\tposition: absolute;\n\tborder-radius: 3px;\n}\n\n.tc-thumbnail-icon svg,\n.tc-thumbnail-icon img {\n\twidth: 3em;\n\theight: 3em;\n\t<<filter \"drop-shadow(2px 2px 4px rgba(0,0,0,0.3))\">>\n}\n\n.tc-thumbnail-wrapper:hover .tc-thumbnail-icon 
svg,\n.tc-thumbnail-wrapper:hover .tc-thumbnail-icon img {\n\tfill: #fff;\n\t<<filter \"drop-shadow(3px 3px 4px rgba(0,0,0,0.6))\">>\n}\n\n.tc-thumbnail-icon {\n\tposition: absolute;\n\ttop: 0;\n\tleft: 0;\n\tright: 0;\n\tbottom: 0;\n\tdisplay: -webkit-flex;\n\t-webkit-align-items: center;\n\t-webkit-justify-content: center;\n\tdisplay: flex;\n\talign-items: center;\n\tjustify-content: center;\n}\n\n.tc-thumbnail-caption {\n\tposition: absolute;\n\tbackground-color: #777;\n\tcolor: #fff;\n\ttext-align: center;\n\tbottom: 0;\n\twidth: 100%;\n\tfilter: alpha(opacity=0.9);\n\topacity: 0.9;\n\tline-height: 1.4;\n\tborder-bottom-left-radius: 3px;\n\tborder-bottom-right-radius: 3px;\n}\n\n.tc-thumbnail-wrapper:hover .tc-thumbnail-caption {\n\tfilter: alpha(opacity=1);\n\topacity: 1;\n}\n\n/*\n** Errors\n*/\n\n.tc-error {\n\tbackground: #f00;\n\tcolor: #fff;\n}\n"
},
"$:/themes/tiddlywiki/vanilla/metrics/bodyfontsize": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/bodyfontsize",
"text": "15px"
},
"$:/themes/tiddlywiki/vanilla/metrics/bodylineheight": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/bodylineheight",
"text": "22px"
},
"$:/themes/tiddlywiki/vanilla/metrics/fontsize": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/fontsize",
"text": "14px"
},
"$:/themes/tiddlywiki/vanilla/metrics/lineheight": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/lineheight",
"text": "20px"
},
"$:/themes/tiddlywiki/vanilla/metrics/storyleft": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/storyleft",
"text": "0px"
},
"$:/themes/tiddlywiki/vanilla/metrics/storytop": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/storytop",
"text": "0px"
},
"$:/themes/tiddlywiki/vanilla/metrics/storyright": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/storyright",
"text": "770px"
},
"$:/themes/tiddlywiki/vanilla/metrics/storywidth": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/storywidth",
"text": "770px"
},
"$:/themes/tiddlywiki/vanilla/metrics/tiddlerwidth": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/tiddlerwidth",
"text": "686px"
},
"$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint",
"text": "960px"
},
"$:/themes/tiddlywiki/vanilla/metrics/sidebarwidth": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/sidebarwidth",
"text": "350px"
},
"$:/themes/tiddlywiki/vanilla/options/stickytitles": {
"title": "$:/themes/tiddlywiki/vanilla/options/stickytitles",
"text": "no"
},
"$:/themes/tiddlywiki/vanilla/options/sidebarlayout": {
"title": "$:/themes/tiddlywiki/vanilla/options/sidebarlayout",
"text": "fixed-fluid"
},
"$:/themes/tiddlywiki/vanilla/options/codewrapping": {
"title": "$:/themes/tiddlywiki/vanilla/options/codewrapping",
"text": "pre-wrap"
},
"$:/themes/tiddlywiki/vanilla/reset": {
"title": "$:/themes/tiddlywiki/vanilla/reset",
"type": "text/plain",
"text": "/*! normalize.css v3.0.0 | MIT License | git.io/normalize */\n\n/**\n * 1. Set default font family to sans-serif.\n * 2. Prevent iOS text size adjust after orientation change, without disabling\n * user zoom.\n */\n\nhtml {\n font-family: sans-serif; /* 1 */\n -ms-text-size-adjust: 100%; /* 2 */\n -webkit-text-size-adjust: 100%; /* 2 */\n}\n\n/**\n * Remove default margin.\n */\n\nbody {\n margin: 0;\n}\n\n/* HTML5 display definitions\n ========================================================================== */\n\n/**\n * Correct `block` display not defined in IE 8/9.\n */\n\narticle,\naside,\ndetails,\nfigcaption,\nfigure,\nfooter,\nheader,\nhgroup,\nmain,\nnav,\nsection,\nsummary {\n display: block;\n}\n\n/**\n * 1. Correct `inline-block` display not defined in IE 8/9.\n * 2. Normalize vertical alignment of `progress` in Chrome, Firefox, and Opera.\n */\n\naudio,\ncanvas,\nprogress,\nvideo {\n display: inline-block; /* 1 */\n vertical-align: baseline; /* 2 */\n}\n\n/**\n * Prevent modern browsers from displaying `audio` without controls.\n * Remove excess height in iOS 5 devices.\n */\n\naudio:not([controls]) {\n display: none;\n height: 0;\n}\n\n/**\n * Address `[hidden]` styling not present in IE 8/9.\n * Hide the `template` element in IE, Safari, and Firefox < 22.\n */\n\n[hidden],\ntemplate {\n display: none;\n}\n\n/* Links\n ========================================================================== */\n\n/**\n * Remove the gray background color from active links in IE 10.\n */\n\na {\n background: transparent;\n}\n\n/**\n * Improve readability when focused and also mouse hovered in all browsers.\n */\n\na:active,\na:hover {\n outline: 0;\n}\n\n/* Text-level semantics\n ========================================================================== */\n\n/**\n * Address styling not present in IE 8/9, Safari 5, and Chrome.\n */\n\nabbr[title] {\n border-bottom: 1px dotted;\n}\n\n/**\n * Address style set to `bolder` in Firefox 4+, Safari 5, and Chrome.\n 
*/\n\nb,\nstrong {\n font-weight: bold;\n}\n\n/**\n * Address styling not present in Safari 5 and Chrome.\n */\n\ndfn {\n font-style: italic;\n}\n\n/**\n * Address variable `h1` font-size and margin within `section` and `article`\n * contexts in Firefox 4+, Safari 5, and Chrome.\n */\n\nh1 {\n font-size: 2em;\n margin: 0.67em 0;\n}\n\n/**\n * Address styling not present in IE 8/9.\n */\n\nmark {\n background: #ff0;\n color: #000;\n}\n\n/**\n * Address inconsistent and variable font size in all browsers.\n */\n\nsmall {\n font-size: 80%;\n}\n\n/**\n * Prevent `sub` and `sup` affecting `line-height` in all browsers.\n */\n\nsub,\nsup {\n font-size: 75%;\n line-height: 0;\n position: relative;\n vertical-align: baseline;\n}\n\nsup {\n top: -0.5em;\n}\n\nsub {\n bottom: -0.25em;\n}\n\n/* Embedded content\n ========================================================================== */\n\n/**\n * Remove border when inside `a` element in IE 8/9.\n */\n\nimg {\n border: 0;\n}\n\n/**\n * Correct overflow displayed oddly in IE 9.\n */\n\nsvg:not(:root) {\n overflow: hidden;\n}\n\n/* Grouping content\n ========================================================================== */\n\n/**\n * Address margin not present in IE 8/9 and Safari 5.\n */\n\nfigure {\n margin: 1em 40px;\n}\n\n/**\n * Address differences between Firefox and other browsers.\n */\n\nhr {\n -moz-box-sizing: content-box;\n box-sizing: content-box;\n height: 0;\n}\n\n/**\n * Contain overflow in all browsers.\n */\n\npre {\n overflow: auto;\n}\n\n/**\n * Address odd `em`-unit font size rendering in all browsers.\n */\n\ncode,\nkbd,\npre,\nsamp {\n font-family: monospace, monospace;\n font-size: 1em;\n}\n\n/* Forms\n ========================================================================== */\n\n/**\n * Known limitation: by default, Chrome and Safari on OS X allow very limited\n * styling of `select`, unless a `border` property is set.\n */\n\n/**\n * 1. 
Correct color not being inherited.\n * Known issue: affects color of disabled elements.\n * 2. Correct font properties not being inherited.\n * 3. Address margins set differently in Firefox 4+, Safari 5, and Chrome.\n */\n\nbutton,\ninput,\noptgroup,\nselect,\ntextarea {\n color: inherit; /* 1 */\n font: inherit; /* 2 */\n margin: 0; /* 3 */\n}\n\n/**\n * Address `overflow` set to `hidden` in IE 8/9/10.\n */\n\nbutton {\n overflow: visible;\n}\n\n/**\n * Address inconsistent `text-transform` inheritance for `button` and `select`.\n * All other form control elements do not inherit `text-transform` values.\n * Correct `button` style inheritance in Firefox, IE 8+, and Opera\n * Correct `select` style inheritance in Firefox.\n */\n\nbutton,\nselect {\n text-transform: none;\n}\n\n/**\n * 1. Avoid the WebKit bug in Android 4.0.* where (2) destroys native `audio`\n * and `video` controls.\n * 2. Correct inability to style clickable `input` types in iOS.\n * 3. Improve usability and consistency of cursor style between image-type\n * `input` and others.\n */\n\nbutton,\nhtml input[type=\"button\"], /* 1 */\ninput[type=\"reset\"],\ninput[type=\"submit\"] {\n -webkit-appearance: button; /* 2 */\n cursor: pointer; /* 3 */\n}\n\n/**\n * Re-set default cursor for disabled elements.\n */\n\nbutton[disabled],\nhtml input[disabled] {\n cursor: default;\n}\n\n/**\n * Remove inner padding and border in Firefox 4+.\n */\n\nbutton::-moz-focus-inner,\ninput::-moz-focus-inner {\n border: 0;\n padding: 0;\n}\n\n/**\n * Address Firefox 4+ setting `line-height` on `input` using `!important` in\n * the UA stylesheet.\n */\n\ninput {\n line-height: normal;\n}\n\n/**\n * It's recommended that you don't attempt to style these elements.\n * Firefox's implementation doesn't respect box-sizing, padding, or width.\n *\n * 1. Address box sizing set to `content-box` in IE 8/9/10.\n * 2. 
Remove excess padding in IE 8/9/10.\n */\n\ninput[type=\"checkbox\"],\ninput[type=\"radio\"] {\n box-sizing: border-box; /* 1 */\n padding: 0; /* 2 */\n}\n\n/**\n * Fix the cursor style for Chrome's increment/decrement buttons. For certain\n * `font-size` values of the `input`, it causes the cursor style of the\n * decrement button to change from `default` to `text`.\n */\n\ninput[type=\"number\"]::-webkit-inner-spin-button,\ninput[type=\"number\"]::-webkit-outer-spin-button {\n height: auto;\n}\n\n/**\n * 1. Address `appearance` set to `searchfield` in Safari 5 and Chrome.\n * 2. Address `box-sizing` set to `border-box` in Safari 5 and Chrome\n * (include `-moz` to future-proof).\n */\n\ninput[type=\"search\"] {\n -webkit-appearance: textfield; /* 1 */\n -moz-box-sizing: content-box;\n -webkit-box-sizing: content-box; /* 2 */\n box-sizing: content-box;\n}\n\n/**\n * Remove inner padding and search cancel button in Safari and Chrome on OS X.\n * Safari (but not Chrome) clips the cancel button when the search input has\n * padding (and `textfield` appearance).\n */\n\ninput[type=\"search\"]::-webkit-search-cancel-button,\ninput[type=\"search\"]::-webkit-search-decoration {\n -webkit-appearance: none;\n}\n\n/**\n * Define consistent border, margin, and padding.\n */\n\nfieldset {\n border: 1px solid #c0c0c0;\n margin: 0 2px;\n padding: 0.35em 0.625em 0.75em;\n}\n\n/**\n * 1. Correct `color` not being inherited in IE 8/9.\n * 2. 
Remove padding so people aren't caught out if they zero out fieldsets.\n */\n\nlegend {\n border: 0; /* 1 */\n padding: 0; /* 2 */\n}\n\n/**\n * Remove default vertical scrollbar in IE 8/9.\n */\n\ntextarea {\n overflow: auto;\n}\n\n/**\n * Don't inherit the `font-weight` (applied by a rule above).\n * NOTE: the default cannot safely be changed in Chrome and Safari on OS X.\n */\n\noptgroup {\n font-weight: bold;\n}\n\n/* Tables\n ========================================================================== */\n\n/**\n * Remove most spacing between table cells.\n */\n\ntable {\n border-collapse: collapse;\n border-spacing: 0;\n}\n\ntd,\nth {\n padding: 0;\n}\n"
},
"$:/themes/tiddlywiki/vanilla/settings/fontfamily": {
"title": "$:/themes/tiddlywiki/vanilla/settings/fontfamily",
"text": "\"Helvetica Neue\", Helvetica, Arial, \"Lucida Grande\", \"DejaVu Sans\", sans-serif"
},
"$:/themes/tiddlywiki/vanilla/settings/codefontfamily": {
"title": "$:/themes/tiddlywiki/vanilla/settings/codefontfamily",
"text": "Monaco, Consolas, \"Lucida Console\", \"DejaVu Sans Mono\", monospace"
},
"$:/themes/tiddlywiki/vanilla/settings/backgroundimageattachment": {
"title": "$:/themes/tiddlywiki/vanilla/settings/backgroundimageattachment",
"text": "fixed"
},
"$:/themes/tiddlywiki/vanilla/settings/backgroundimagesize": {
"title": "$:/themes/tiddlywiki/vanilla/settings/backgroundimagesize",
"text": "auto"
},
"$:/themes/tiddlywiki/vanilla/sticky": {
"title": "$:/themes/tiddlywiki/vanilla/sticky",
"text": "<$reveal state=\"$:/themes/tiddlywiki/vanilla/options/stickytitles\" type=\"match\" text=\"yes\">\n``\n.tc-tiddler-title {\n\tposition: -webkit-sticky;\n\tposition: -moz-sticky;\n\tposition: -o-sticky;\n\tposition: -ms-sticky;\n\tposition: sticky;\n\ttop: 0px;\n\tbackground: ``<<colour tiddler-background>>``;\n\tz-index: 500;\n}\n``\n</$reveal>\n"
},
"$:/themes/tiddlywiki/vanilla/themetweaks": {
"title": "$:/themes/tiddlywiki/vanilla/themetweaks",
"tags": "$:/tags/ControlPanel/Appearance",
"caption": "{{$:/language/ThemeTweaks/ThemeTweaks}}",
"text": "\\define lingo-base() $:/language/ThemeTweaks/\n\n\\define replacement-text()\n[img[$(imageTitle)$]]\n\\end\n\n\\define backgroundimage-dropdown()\n<div class=\"tc-drop-down-wrapper\">\n<$button popup=<<qualify \"$:/state/popup/themetweaks/backgroundimage\">> class=\"tc-btn-invisible tc-btn-dropdown\">{{$:/core/images/down-arrow}}</$button>\n<$reveal state=<<qualify \"$:/state/popup/themetweaks/backgroundimage\">> type=\"popup\" position=\"belowleft\" text=\"\" default=\"\">\n<div class=\"tc-drop-down\">\n<$macrocall $name=\"image-picker\" actions=\"\"\"\n\n<$action-setfield\n\t$tiddler=\"$:/themes/tiddlywiki/vanilla/settings/backgroundimage\"\n\t$value=<<imageTitle>>\n/>\n\n\"\"\"/>\n</div>\n</$reveal>\n</div>\n\\end\n\n\\define backgroundimageattachment-dropdown()\n<$select tiddler=\"$:/themes/tiddlywiki/vanilla/settings/backgroundimageattachment\" default=\"scroll\">\n<option value=\"scroll\"><<lingo Settings/BackgroundImageAttachment/Scroll>></option>\n<option value=\"fixed\"><<lingo Settings/BackgroundImageAttachment/Fixed>></option>\n</$select>\n\\end\n\n\\define backgroundimagesize-dropdown()\n<$select tiddler=\"$:/themes/tiddlywiki/vanilla/settings/backgroundimagesize\" default=\"scroll\">\n<option value=\"auto\"><<lingo Settings/BackgroundImageSize/Auto>></option>\n<option value=\"cover\"><<lingo Settings/BackgroundImageSize/Cover>></option>\n<option value=\"contain\"><<lingo Settings/BackgroundImageSize/Contain>></option>\n</$select>\n\\end\n\n<<lingo ThemeTweaks/Hint>>\n\n! 
<<lingo Options>>\n\n|<$link to=\"$:/themes/tiddlywiki/vanilla/options/sidebarlayout\"><<lingo Options/SidebarLayout>></$link> |<$select tiddler=\"$:/themes/tiddlywiki/vanilla/options/sidebarlayout\"><option value=\"fixed-fluid\"><<lingo Options/SidebarLayout/Fixed-Fluid>></option><option value=\"fluid-fixed\"><<lingo Options/SidebarLayout/Fluid-Fixed>></option></$select> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/options/stickytitles\"><<lingo Options/StickyTitles>></$link><br>//<<lingo Options/StickyTitles/Hint>>// |<$select tiddler=\"$:/themes/tiddlywiki/vanilla/options/stickytitles\"><option value=\"no\">{{$:/language/No}}</option><option value=\"yes\">{{$:/language/Yes}}</option></$select> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/options/codewrapping\"><<lingo Options/CodeWrapping>></$link> |<$select tiddler=\"$:/themes/tiddlywiki/vanilla/options/codewrapping\"><option value=\"pre\">{{$:/language/No}}</option><option value=\"pre-wrap\">{{$:/language/Yes}}</option></$select> |\n\n! 
<<lingo Settings>>\n\n|<$link to=\"$:/themes/tiddlywiki/vanilla/settings/fontfamily\"><<lingo Settings/FontFamily>></$link> |<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/settings/fontfamily\" default=\"\" tag=\"input\"/> | |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/settings/codefontfamily\"><<lingo Settings/CodeFontFamily>></$link> |<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/settings/codefontfamily\" default=\"\" tag=\"input\"/> | |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/settings/backgroundimage\"><<lingo Settings/BackgroundImage>></$link> |<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/settings/backgroundimage\" default=\"\" tag=\"input\"/> |<<backgroundimage-dropdown>> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/settings/backgroundimageattachment\"><<lingo Settings/BackgroundImageAttachment>></$link> |<<backgroundimageattachment-dropdown>> | |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/settings/backgroundimagesize\"><<lingo Settings/BackgroundImageSize>></$link> |<<backgroundimagesize-dropdown>> | |\n\n! 
<<lingo Metrics>>\n\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/fontsize\"><<lingo Metrics/FontSize>></$link> |<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/fontsize\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/lineheight\"><<lingo Metrics/LineHeight>></$link> |<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/lineheight\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/bodyfontsize\"><<lingo Metrics/BodyFontSize>></$link> |<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/bodyfontsize\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/bodylineheight\"><<lingo Metrics/BodyLineHeight>></$link> |<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/bodylineheight\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/storyleft\"><<lingo Metrics/StoryLeft>></$link><br>//<<lingo Metrics/StoryLeft/Hint>>// |^<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/storyleft\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/storytop\"><<lingo Metrics/StoryTop>></$link><br>//<<lingo Metrics/StoryTop/Hint>>// |^<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/storytop\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/storyright\"><<lingo Metrics/StoryRight>></$link><br>//<<lingo Metrics/StoryRight/Hint>>// |^<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/storyright\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/storywidth\"><<lingo Metrics/StoryWidth>></$link><br>//<<lingo Metrics/StoryWidth/Hint>>// |^<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/storywidth\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/tiddlerwidth\"><<lingo Metrics/TiddlerWidth>></$link><br>//<<lingo Metrics/TiddlerWidth/Hint>>//<br> |^<$edit-text 
tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/tiddlerwidth\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint\"><<lingo Metrics/SidebarBreakpoint>></$link><br>//<<lingo Metrics/SidebarBreakpoint/Hint>>// |^<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/sidebarwidth\"><<lingo Metrics/SidebarWidth>></$link><br>//<<lingo Metrics/SidebarWidth/Hint>>// |^<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/sidebarwidth\" default=\"\" tag=\"input\"/> |\n"
}
}
}
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170513.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-1-3|PAPER Mon-O-2-1-3 — Investigating Bidirectional Recurrent Neural Network Language Models for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Bidirectional Recurrent Neural Network Language Models for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-8|PAPER Wed-P-6-1-8 — Use of Graphemic Lexicons for Spoken Language Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Use of Graphemic Lexicons for Spoken Language Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170302.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-2|PAPER Wed-O-6-10-2 — Improving Children’s Speech Recognition Through Explicit Pitch Scaling Based on Iterative Spectrogram Inversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Children’s Speech Recognition Through Explicit Pitch Scaling Based on Iterative Spectrogram Inversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170201.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-4|PAPER Mon-P-2-2-4 — An Objective Critical Distance Measure Based on the Relative Level of Spectral Valley]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Objective Critical Distance Measure Based on the Relative Level of Spectral Valley</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170530.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-1|PAPER Wed-P-6-2-1 — Calibration Approaches for Language Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Calibration Approaches for Language Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170605.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-2-6|PAPER Thu-O-10-2-6 — Improving Robustness of Speaker Recognition to New Conditions Using Unlabeled Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Robustness of Speaker Recognition to New Conditions Using Unlabeled Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170829.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-4-5|PAPER Tue-O-3-4-5 — Nonparametrically Trained Probabilistic Linear Discriminant Analysis for i-Vector Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nonparametrically Trained Probabilistic Linear Discriminant Analysis for i-Vector Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171231.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-11|PAPER Wed-P-6-3-11 — Evaluating Automatic Topic Segmentation as a Segment Retrieval Task]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluating Automatic Topic Segmentation as a Segment Retrieval Task</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170440.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-3|PAPER Mon-P-1-2-3 — Attention Based CLDNNs for Short-Duration Acoustic Scene Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention Based CLDNNs for Short-Duration Acoustic Scene Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170157.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-10|PAPER Tue-P-3-1-10 — Using Voice Quality Features to Improve Short-Utterance, Text-Independent Speaker Verification Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Voice Quality Features to Improve Short-Utterance, Text-Independent Speaker Verification Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170430.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-2-1|PAPER Thu-O-10-2-1 — CNN-Based Joint Mapping of Short and Long Utterance i-Vectors for Speaker Verification Using Short Utterances]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CNN-Based Joint Mapping of Short and Long Utterance i-Vectors for Speaker Verification Using Short Utterances</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171778.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-13|PAPER Wed-P-6-1-13 — Multi-Channel Apollo Mission Speech Transcripts Calibration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Channel Apollo Mission Speech Transcripts Calibration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-13|PAPER Wed-P-6-2-13 — Locally Weighted Linear Discriminant Analysis for Robust Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Locally Weighted Linear Discriminant Analysis for Robust Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171199.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-2-2|PAPER Thu-O-10-2-2 — Curriculum Learning Based Probabilistic Linear Discriminant Analysis for Noise Robust Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Curriculum Learning Based Probabilistic Linear Discriminant Analysis for Noise Robust Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170564.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-1-4|PAPER Mon-O-2-1-4 — Fast Neural Network Language Model Lookups at N-Gram Speeds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fast Neural Network Language Model Lookups at N-Gram Speeds</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-1-5|PAPER Mon-O-2-1-5 — Empirical Exploration of Novel Architectures and Objectives for Language Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Empirical Exploration of Novel Architectures and Objectives for Language Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170904.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-5|PAPER Thu-O-9-4-5 — Symbol Sequence Search from Telephone Conversation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Symbol Sequence Search from Telephone Conversation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170242.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-4-2|PAPER Thu-O-10-4-2 — PRAV: A Phonetically Rich Audio Visual Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PRAV: A Phonetically Rich Audio Visual Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-12|PAPER Thu-P-9-4-12 — Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171758.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-4|PAPER Wed-O-8-1-4 — Improving Speaker Verification Performance in Presence of Spoofing Attacks Using Out-of-Domain Spoofed Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Speaker Verification Performance in Presence of Spoofing Attacks Using Out-of-Domain Spoofed Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171211.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-1|PAPER Thu-SS-10-10-1 — A Dual Source-Filter Model of Snore Audio for Snorer Group Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Dual Source-Filter Model of Snore Audio for Snorer Group Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171737.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-5|PAPER Tue-P-4-1-5 — Convolutional Recurrent Neural Networks for Small-Footprint Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Convolutional Recurrent Neural Networks for Small-Footprint Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170608.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-5|PAPER Mon-P-2-2-5 — Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170419.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-2|PAPER Tue-O-3-8-2 — Phrase Break Prediction for Long-Form Reading TTS: Exploiting Text Structure Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phrase Break Prediction for Long-Form Reading TTS: Exploiting Text Structure Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171350.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-4|PAPER Tue-O-5-8-4 — Phonological Feature Based Mispronunciation Detection and Diagnosis Using Multi-Task DNNs and Active Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonological Feature Based Mispronunciation Detection and Diagnosis Using Multi-Task DNNs and Active Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171055.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-5|PAPER Tue-SS-5-11-5 — Perceptual and Acoustic Correlates of Gender in the Prepubertal Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perceptual and Acoustic Correlates of Gender in the Prepubertal Voice</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171394.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-7|PAPER Tue-SS-5-11-7 — Acoustic Correlates of Parental Role and Gender Identity in the Speech of Expecting Parents]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Correlates of Parental Role and Gender Identity in the Speech of Expecting Parents</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171735.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-8|PAPER Mon-P-2-1-8 — Predicting Epenthetic Vowel Quality from Acoustics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Epenthetic Vowel Quality from Acoustics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170369.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-11|PAPER Tue-P-5-1-11 — Lexical Adaptation to a Novel Accent in German: A Comparison Between German, Swedish, and Finnish Listeners]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lexical Adaptation to a Novel Accent in German: A Comparison Between German, Swedish, and Finnish Listeners</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172030.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-B-1|PAPER Tue-S&T-3-B-1 — Evolving Recurrent Neural Networks That Process and Classify Raw Audio in a Streaming Fashion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evolving Recurrent Neural Networks That Process and Classify Raw Audio in a Streaming Fashion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171311.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-8|PAPER Wed-P-6-2-8 — Acoustic Pairing of Original and Dubbed Voices in the Context of Video Game Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Pairing of Original and Dubbed Voices in the Context of Video Game Localization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171784.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-1-5|PAPER Thu-O-9-1-5 — Exploiting Eigenposteriors for Semi-Supervised Training of DNN Acoustic Models with Sequence Discrimination]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploiting Eigenposteriors for Semi-Supervised Training of DNN Acoustic Models with Sequence Discrimination</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171031.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-6-1|PAPER Mon-O-2-6-1 — End-to-End Acoustic Feedback in Language Learning for Correcting Devoiced French Final-Fricatives]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Acoustic Feedback in Language Learning for Correcting Devoiced French Final-Fricatives</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-4|PAPER Thu-SS-10-10-4 — Exploring Fusion Methods and Feature Space for the Classification of Paralinguistic Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Fusion Methods and Feature Space for the Classification of Paralinguistic Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170453.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-6-2|PAPER Wed-O-6-6-2 — Prosodic Facilitation and Interference While Judging on the Veracity of Synthesized Statements]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Facilitation and Interference While Judging on the Veracity of Synthesized Statements</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170124.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-6-6|PAPER Wed-O-6-6-6 — Cross-Linguistic Study of the Production of Turn-Taking Cues in American English and Argentine Spanish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Linguistic Study of the Production of Turn-Taking Cues in American English and Argentine Spanish</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170710.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-6|PAPER Wed-P-8-1-6 — Using Prosody to Classify Discourse Relations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Prosody to Classify Discourse Relations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170200.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-10-1|PAPER Tue-O-3-10-1 — Efficient Emotion Recognition from Speech Using Deep Learning on Spectrograms]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Efficient Emotion Recognition from Speech Using Deep Learning on Spectrograms</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171391.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-6|PAPER Wed-O-7-10-6 — QMDIS: QCRI-MIT Advanced Dialect Identification System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">QMDIS: QCRI-MIT Advanced Dialect Identification System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170799.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-8-4|PAPER Thu-O-9-8-4 — Turbo Decoders for Audio-Visual Continuous Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Turbo Decoders for Audio-Visual Continuous Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170860.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-4-3|PAPER Thu-O-10-4-3 — NTCD-TIMIT: A New Database and Baseline for Noise-Robust Audio-Visual Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">NTCD-TIMIT: A New Database and Baseline for Noise-Robust Audio-Visual Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170029.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-6-5|PAPER Tue-O-5-6-5 — On the Duration of Mandarin Tones]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Duration of Mandarin Tones</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170329.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-12|PAPER Tue-P-5-2-12 — The Frequency Range of “The Ling Six Sounds” in Standard Chinese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Frequency Range of “The Ling Six Sounds” in Standard Chinese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171181.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-5|PAPER Tue-O-4-8-5 — Voice-to-Affect Mapping: Inferences on Language Voice Baseline Settings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice-to-Affect Mapping: Inferences on Language Voice Baseline Settings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171446.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-5|PAPER Wed-O-6-10-5 — Semi-Supervised Learning with Semantic Knowledge Extraction for Improved Speech Recognition in Air Traffic Control]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised Learning with Semantic Knowledge Extraction for Improved Speech Recognition in Air Traffic Control</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170314.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-1-1|PAPER Tue-O-4-1-1 — Speaker-Dependent WaveNet Vocoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker-Dependent WaveNet Vocoder</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170986.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-1-5|PAPER Tue-O-4-1-5 — Statistical Voice Conversion with WaveNet-Based Waveform Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Statistical Voice Conversion with WaveNet-Based Waveform Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171583.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-7|PAPER Mon-P-1-4-7 — Hierarchical Constrained Bayesian Optimization for Feature, Acoustic Model and Decoder Parameter Optimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hierarchical Constrained Bayesian Optimization for Feature, Acoustic Model and Decoder Parameter Optimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171550.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-8|PAPER Thu-SS-9-10-8 — Phoneme State Posteriorgram Features for Speech Based Automatic Classification of Speakers in Cold and Healthy Condition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phoneme State Posteriorgram Features for Speech Based Automatic Classification of Speakers in Cold and Healthy Condition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172033.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-B-6|PAPER Tue-S&T-3-B-6 — Reading Validation for Pronunciation Evaluation in the Digitala Project]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reading Validation for Pronunciation Evaluation in the Digitala Project</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170934.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-5|PAPER Wed-O-7-1-5 — The Phonological Status of the French Initial Accent and its Role in Semantic Processing: An Event-Related Potentials Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Phonological Status of the French Initial Accent and its Role in Semantic Processing: An Event-Related Potentials Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-2|PAPER Tue-S&T-3-A-2 — Bob Speaks Kaldi]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bob Speaks Kaldi</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171586.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-4|PAPER Tue-P-3-2-4 — Extended Variability Modeling and Unsupervised Adaptation for PLDA Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extended Variability Modeling and Unsupervised Adaptation for PLDA Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171244.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-4|PAPER Mon-SS-1-11-4 — On Building Mixed Lingual Speech Synthesis Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Building Mixed Lingual Speech Synthesis Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-5|PAPER Mon-SS-1-11-5 — Speech Synthesis for Mixed-Language Navigation Instructions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Synthesis for Mixed-Language Navigation Instructions</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171538.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-11|PAPER Wed-P-8-4-11 — Segment Level Voice Conversion with Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Segment Level Voice Conversion with Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-1|PAPER Tue-P-5-2-1 — Float Like a Butterfly Sting Like a Bee: Changes in Speech Preceded Parkinsonism Diagnosis for Muhammad Ali]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Float Like a Butterfly Sting Like a Bee: Changes in Speech Preceded Parkinsonism Diagnosis for Muhammad Ali</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171498.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-5|PAPER Tue-O-5-2-5 — Analysis and Description of ABC Submission to NIST SRE 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Description of ABC Submission to NIST SRE 2016</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170137.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-1|PAPER Tue-P-3-1-1 — A Generative Model for Score Normalization in Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Generative Model for Score Normalization in Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171073.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-2|PAPER Tue-O-3-1-2 — CTC in the Context of Generalized Full-Sum HMM Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CTC in the Context of Generalized Full-Sum HMM Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171201.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-9|PAPER Wed-P-7-4-9 — Depression Detection Using Automatic Transcriptions of De-Identified Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Depression Detection Using Automatic Transcriptions of De-Identified Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171278.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-3|PAPER Mon-O-1-2-3 — A Stepwise Analysis of Aggregated Crowdsourced Labels Describing Multimodal Emotional Behaviors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Stepwise Analysis of Aggregated Crowdsourced Labels Describing Multimodal Emotional Behaviors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170743.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-12|PAPER Tue-P-5-1-12 — Qualitative Differences in L3 Learners’ Neurophysiological Response to L1 versus L2 Transfer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Qualitative Differences in L3 Learners’ Neurophysiological Response to L1 versus L2 Transfer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172051.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-A-4|PAPER Mon-S&T-2-A-4 — HomeBank: A Repository for Long-Form Real-World Audio Recordings of Children]]</div>|^<div class="cpauthorindexpersoncardpapertitle">HomeBank: A Repository for Long-Form Real-World Audio Recordings of Children</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172053.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-6|PAPER Tue-S&T-3-A-6 — MetaLab: A Repository for Meta-Analyses on Language Development, and More]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MetaLab: A Repository for Meta-Analyses on Language Development, and More</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171418.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-4|PAPER Wed-SS-6-11-4 — A New Workflow for Semi-Automatized Annotations: Tests with Long-Form Naturalistic Recordings of Children’s Language Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A New Workflow for Semi-Automatized Annotations: Tests with Long-Form Naturalistic Recordings of Children’s Language Environments</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171443.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-5|PAPER Wed-SS-6-11-5 — Top-Down versus Bottom-Up Theories of Phonological Acquisition: A Big Data Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Top-Down versus Bottom-Up Theories of Phonological Acquisition: A Big Data Approach</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171468.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-6|PAPER Wed-SS-6-11-6 — Which Acoustic and Phonological Factors Shape Infants’ Vowel Discrimination? Exploiting Natural Variation in InPhonDB]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Which Acoustic and Phonological Factors Shape Infants’ Vowel Discrimination? Exploiting Natural Variation in InPhonDB</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170937.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-11-2|PAPER Wed-SS-7-11-2 — Relating Unsupervised Word Segmentation to Reported Vocabulary Acquisition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Relating Unsupervised Word Segmentation to Reported Vocabulary Acquisition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-6|PAPER Wed-S&T-6-A-6 — SIAK — A Game for Foreign Language Pronunciation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SIAK — A Game for Foreign Language Pronunciation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171367.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-3|PAPER Wed-P-6-3-3 — Fast and Accurate OOV Decoder on High-Level Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fast and Accurate OOV Decoder on High-Level Features</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171212.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-3|PAPER Thu-O-9-4-3 — The STC Keyword Search System for OpenKWS 2016 Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The STC Keyword Search System for OpenKWS 2016 Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171287.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-2|PAPER Wed-SS-6-11-2 — The LENA System Applied to Swedish: Reliability of the Adult Word Count Estimate]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The LENA System Applied to Swedish: Reliability of the Adult Word Count Estimate</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170795.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-6-4|PAPER Wed-O-6-6-4 — The Relationship Between F0 Synchrony and Speech Convergence in Dyadic Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Relationship Between F0 Synchrony and Speech Convergence in Dyadic Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170755.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-5|PAPER Mon-P-2-3-5 — Optimizing DNN Adaptation for Recognition of Enhanced Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Optimizing DNN Adaptation for Recognition of Enhanced Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170335.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-2|PAPER Tue-P-5-2-2 — Cepstral and Entropy Analyses in Vowels Excerpted from Continuous Speech of Dysphonic and Control Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cepstral and Entropy Analyses in Vowels Excerpted from Continuous Speech of Dysphonic and Control Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172029.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-1|PAPER Wed-S&T-6-B-1 — Integrating the Talkamatic Dialogue Manager with Alexa]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Integrating the Talkamatic Dialogue Manager with Alexa</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-10|PAPER Tue-P-4-3-10 — Sequence to Sequence Modeling for User Simulation in Dialog Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sequence to Sequence Modeling for User Simulation in Dialog Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170455.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-3|PAPER Wed-O-8-8-3 — Improving Child Speech Disorder Assessment by Incorporating Out-of-Domain Adult Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Child Speech Disorder Assessment by Incorporating Out-of-Domain Adult Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171683.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-2|PAPER Mon-P-1-4-2 — Comparison of Decoding Strategies for CTC Acoustic Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Decoding Strategies for CTC Acoustic Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171606.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-4-11-4|PAPER Tue-SS-4-11-4 — Enhancing Backchannel Prediction Using Word Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enhancing Backchannel Prediction Using Word Embeddings</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171320.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-4-5|PAPER Wed-O-8-4-5 — NMT-Based Segmentation and Punctuation Insertion for Real-Time Spoken Language Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">NMT-Based Segmentation and Punctuation Insertion for Real-Time Spoken Language Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170160.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-6|PAPER Wed-SS-6-2-6 — Areal and Phylogenetic Features for Multilingual Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Areal and Phylogenetic Features for Multilingual Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170037.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-15|PAPER Wed-SS-7-1-15 — Uniform Multilingual Multi-Speaker Acoustic Model for Statistical Parametric Speech Synthesis of Low-Resourced Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Uniform Multilingual Multi-Speaker Acoustic Model for Statistical Parametric Speech Synthesis of Low-Resourced Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170936.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-3|PAPER Mon-O-1-10-3 — An HMM/DNN Comparison for Synchronized Text-to-Speech and Tongue Motion Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An HMM/DNN Comparison for Synchronized Text-to-Speech and Tongue Motion Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170567.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-2-1|PAPER Tue-O-4-2-1 — A Comparison of Sentence-Level Speech Intelligibility Metrics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Sentence-Level Speech Intelligibility Metrics</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-10-6|PAPER Tue-O-4-10-6 — Siamese Autoencoders for Speech Style Extraction and Switching Applied to Voice Identification and Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siamese Autoencoders for Speech Style Extraction and Switching Applied to Voice Identification and Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170360.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-1|PAPER Mon-SS-2-8-1 — Audio Replay Attack Detection with Deep Learning Frameworks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Replay Attack Detection with Deep Learning Frameworks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171202.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-4|PAPER Tue-O-5-4-4 — Semi Parametric Concatenative TTS with Instant Voice Modification Capabilities]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi Parametric Concatenative TTS with Instant Voice Modification Capabilities</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171212.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-3|PAPER Thu-O-9-4-3 — The STC Keyword Search System for OpenKWS 2016 Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The STC Keyword Search System for OpenKWS 2016 Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170939.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-8-5|PAPER Thu-O-9-8-5 — DNN-Based Ultrasound-to-Speech Conversion for a Silent Speech Interface]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DNN-Based Ultrasound-to-Speech Conversion for a Silent Speech Interface</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170501.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-6-4|PAPER Wed-O-7-6-4 — Speech and Text Analysis for Multimodal Addressee Detection in Human-Human-Computer Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech and Text Analysis for Multimodal Addressee Detection in Human-Human-Computer Interaction</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170653.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-6|PAPER Thu-SS-10-10-6 — Introducing Weighted Kernel Classifiers for Handling Imbalanced Paralinguistic Corpora: Snoring, Addressee and Cold]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Introducing Weighted Kernel Classifiers for Handling Imbalanced Paralinguistic Corpora: Snoring, Addressee and Cold</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171212.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-3|PAPER Thu-O-9-4-3 — The STC Keyword Search System for OpenKWS 2016 Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The STC Keyword Search System for OpenKWS 2016 Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170419.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-2|PAPER Tue-O-3-8-2 — Phrase Break Prediction for Long-Form Reading TTS: Exploiting Text Structure Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phrase Break Prediction for Long-Form Reading TTS: Exploiting Text Structure Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170316.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-4-4|PAPER Mon-O-2-4-4 — Musical Speech: A New Methodology for Transcribing Speech Prosody]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Musical Speech: A New Methodology for Transcribing Speech Prosody</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171314.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-4|PAPER Wed-P-6-2-4 — Tied Hidden Factors in Neural Networks for End-to-End Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tied Hidden Factors in Neural Networks for End-to-End Speaker Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170084.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-6|PAPER Wed-P-6-2-6 — Domain Adaptation of PLDA Models in Broadcast Diarization by Means of Unsupervised Speaker Clustering]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain Adaptation of PLDA Models in Broadcast Diarization by Means of Unsupervised Speaker Clustering</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170101.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-2|PAPER Wed-P-7-3-2 — Audio Scene Classification with Deep Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Scene Classification with Deep Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171574.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-6-3|PAPER Wed-O-7-6-3 — Towards End-to-End Spoken Dialogue Systems with Turn Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards End-to-End Spoken Dialogue Systems with Turn Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170730.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-4|PAPER Tue-SS-3-11-4 — Automatic Classification of Autistic Child Vocalisations: A Novel Database and Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Classification of Autistic Child Vocalisations: A Novel Database and Results</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170104.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-1|PAPER Wed-P-8-2-1 — The Perception of Emotions in Noisified Nonsense Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Perception of Emotions in Noisified Nonsense Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-3|PAPER Thu-SS-10-10-3 — Snore Sound Classification Using Image-Based Deep Spectrum Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Snore Sound Classification Using Image-Based Deep Spectrum Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170605.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-2-6|PAPER Thu-O-10-2-6 — Improving Robustness of Speaker Recognition to New Conditions Using Unlabeled Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Robustness of Speaker Recognition to New Conditions Using Unlabeled Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-4|PAPER Thu-P-9-4-4 — Utterance Selection for Optimizing Intelligibility of TTS Voices Trained on ASR Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Utterance Selection for Optimizing Intelligibility of TTS Voices Trained on ASR Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-12|PAPER Thu-P-9-4-12 — Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171429.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-7|PAPER Mon-SS-1-11-7 — Metrics for Modeling Code-Switching Across Corpora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Metrics for Modeling Code-Switching Across Corpora</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172034.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-A-1|PAPER Mon-S&T-2-A-1 — Prosograph: A Tool for Prosody Visualisation of Large Speech Corpora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosograph: A Tool for Prosody Visualisation of Large Speech Corpora</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170730.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-4|PAPER Tue-SS-3-11-4 — Automatic Classification of Autistic Child Vocalisations: A Novel Database and Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Classification of Autistic Child Vocalisations: A Novel Database and Results</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172057.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-5|PAPER Wed-S&T-6-B-5 — Modifying Amazon’s Alexa ASR Grammar and Lexicon — A Case Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modifying Amazon’s Alexa ASR Grammar and Lexicon — A Case Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171409.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-3|PAPER Wed-SS-6-11-3 — What do Babies Hear? Analyses of Child- and Adult-Directed Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">What do Babies Hear? Analyses of Child- and Adult-Directed Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-4|PAPER Thu-SS-9-10-4 — Description of the Homebank Child/Adult Addressee Corpus (HB-CHAAC)]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Description of the Homebank Child/Adult Addressee Corpus (HB-CHAAC)</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171430.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-4|PAPER Wed-P-8-1-4 — How Does the Absence of Shared Knowledge Between Interlocutors Affect the Production of French Prosodic Forms?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">How Does the Absence of Shared Knowledge Between Interlocutors Affect the Production of French Prosodic Forms?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171609.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-4|PAPER Wed-P-7-2-4 — Acoustic Cues to the Singleton-Geminate Contrast: The Case of Libyan Arabic Sonorants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Cues to the Singleton-Geminate Contrast: The Case of Libyan Arabic Sonorants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170900.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-2|PAPER Mon-O-1-10-2 — Articulatory Text-to-Speech Synthesis Using the Digital Waveguide Mesh Driven by a Deep Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulatory Text-to-Speech Synthesis Using the Digital Waveguide Mesh Driven by a Deep Neural Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171287.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-2|PAPER Wed-SS-6-11-2 — The LENA System Applied to Swedish: Reliability of the Adult Word Count Estimate]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The LENA System Applied to Swedish: Reliability of the Adult Word Count Estimate</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-1|PAPER Mon-O-2-2-1 — Dominant Distortion Classification for Pre-Processing of Vowels in Remote Biomedical Voice Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dominant Distortion Classification for Pre-Processing of Vowels in Remote Biomedical Voice Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-2|PAPER Tue-S&T-3-A-2 — Bob Speaks Kaldi]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bob Speaks Kaldi</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170582.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-5|PAPER Wed-SS-6-2-5 — Deep Auto-Encoder Based Multi-Task Learning Using Probabilistic Transcriptions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Auto-Encoder Based Multi-Task Learning Using Probabilistic Transcriptions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171732.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-8|PAPER Tue-SS-5-11-8 — A Semi-Supervised Learning Approach for Acoustic-Prosodic Personality Perception in Under-Resourced Domains]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Semi-Supervised Learning Approach for Acoustic-Prosodic Personality Perception in Under-Resourced Domains</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170400.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-2|PAPER Tue-O-5-4-2 — Speaking Style Conversion from Normal to Lombard Speech Using a Glottal Vocoder and Bayesian GMMs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaking Style Conversion from Normal to Lombard Speech Using a Glottal Vocoder and Bayesian GMMs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171510.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-1|PAPER Mon-O-2-10-1 — Generation of Large-Scale Simulated Utterances in Virtual Rooms to Train Deep-Neural Networks for Far-Field Speech Recognition in Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generation of Large-Scale Simulated Utterances in Virtual Rooms to Train Deep-Neural Networks for Far-Field Speech Recognition in Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-5|PAPER Mon-O-2-10-5 — Acoustic Modeling for Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Modeling for Google Home</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171251.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-5|PAPER Wed-O-8-8-5 — Glottal Source Features for Automatic Speech-Based Depression Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glottal Source Features for Automatic Speech-Based Depression Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170417.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-10|PAPER Wed-P-8-1-10 — The Acoustics of Word Stress in Czech as a Function of Speaking Style]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Acoustics of Word Stress in Czech as a Function of Speaking Style</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170449.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-4|PAPER Thu-P-9-3-4 — Effect of Language, Speaking Style and Speaker on Long-Term F0 Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effect of Language, Speaking Style and Speaker on Long-Term F0 Estimation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170713.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-10-2|PAPER Tue-O-3-10-2 — Interaction and Transition Model for Speech Emotion Recognition in Dialogue]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Interaction and Transition Model for Speech Emotion Recognition in Dialogue</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171558.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-2|PAPER Wed-SS-6-2-2 — First Results in Developing a Medieval Latin Language Charter Dictation System for the East-Central Europe Region]]</div>|^<div class="cpauthorindexpersoncardpapertitle">First Results in Developing a Medieval Latin Language Charter Dictation System for the East-Central Europe Region</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171261.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-5|PAPER Thu-SS-9-10-5 — It Sounds Like You Have a Cold! Testing Voice Features for the Interspeech 2017 Computational Paralinguistics Cold Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">It Sounds Like You Have a Cold! Testing Voice Features for the Interspeech 2017 Computational Paralinguistics Cold Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171552.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-6-5|PAPER Mon-O-1-6-5 — Vowels in the Barunga Variety of North Australian Kriol]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Vowels in the Barunga Variety of North Australian Kriol</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-2|PAPER Tue-S&T-3-A-2 — Bob Speaks Kaldi]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bob Speaks Kaldi</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171115.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-4-5|PAPER Thu-O-10-4-5 — Automatic Construction of the Finnish Parliament Speech Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Construction of the Finnish Parliament Speech Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171371.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-14|PAPER Mon-P-2-2-14 — Video-Based Tracking of Jaw Movements During Speech: Preliminary Results and Future Directions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Video-Based Tracking of Jaw Movements During Speech: Preliminary Results and Future Directions</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170478.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-3|PAPER Tue-P-5-2-3 — Classification of Bulbar ALS from Kinematic Features of the Jaw and Lips: Towards Computer-Mediated Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Classification of Bulbar ALS from Kinematic Features of the Jaw and Lips: Towards Computer-Mediated Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170544.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-6-1|PAPER Tue-O-5-6-1 — Similar Prosodic Structure Perceived Differently in German and English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Similar Prosodic Structure Perceived Differently in German and English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170455.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-3|PAPER Wed-O-8-8-3 — Improving Child Speech Disorder Assessment by Incorporating Out-of-Domain Adult Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Child Speech Disorder Assessment by Incorporating Out-of-Domain Adult Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172029.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-1|PAPER Wed-S&T-6-B-1 — Integrating the Talkamatic Dialogue Manager with Alexa]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Integrating the Talkamatic Dialogue Manager with Alexa</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171066.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-7|PAPER Thu-SS-9-10-7 — Infected Phonemes: How a Cold Impairs Speech on a Phonetic Level]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Infected Phonemes: How a Cold Impairs Speech on a Phonetic Level</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170335.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-2|PAPER Tue-P-5-2-2 — Cepstral and Entropy Analyses in Vowels Excerpted from Continuous Speech of Dysphonic and Control Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cepstral and Entropy Analyses in Vowels Excerpted from Continuous Speech of Dysphonic and Control Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171544.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-6|PAPER Mon-O-1-1-6 — Comparing Human and Machine Errors in Conversational Speech Transcription]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing Human and Machine Errors in Conversational Speech Transcription</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171522.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-5|PAPER Tue-O-5-8-5 — Detection of Mispronunciations and Disfluencies in Children Reading Aloud]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Mispronunciations and Disfluencies in Children Reading Aloud</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171541.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-3|PAPER Wed-P-6-1-3 — Automatic Evaluation of Children Reading Aloud on Sentences and Pseudowords]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Evaluation of Children Reading Aloud on Sentences and Pseudowords</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170159.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-10-1|PAPER Tue-O-5-10-1 — Inferring Stance from Prosody]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Inferring Stance from Prosody</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170087.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-8-1|PAPER Wed-O-6-8-1 — Emotional Features for Speech Overlaps Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotional Features for Speech Overlaps Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171409.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-3|PAPER Wed-SS-6-11-3 — What do Babies Hear? Analyses of Child- and Adult-Directed Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">What do Babies Hear? Analyses of Child- and Adult-Directed Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-4|PAPER Thu-SS-9-10-4 — Description of the Homebank Child/Adult Addressee Corpus (HB-CHAAC)]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Description of the Homebank Child/Adult Addressee Corpus (HB-CHAAC)</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170315.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-6|PAPER Wed-P-7-3-6 — Laryngeal Articulation During Trumpet Performance: An Exploratory Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Laryngeal Articulation During Trumpet Performance: An Exploratory Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171737.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-5|PAPER Tue-P-4-1-5 — Convolutional Recurrent Neural Networks for Small-Footprint Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Convolutional Recurrent Neural Networks for Small-Footprint Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170487.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-2|PAPER Mon-P-2-4-2 — Weakly-Supervised Phrase Assignment from Text in a Speech-Synthesis System Using Noisy Labels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weakly-Supervised Phrase Assignment from Text in a Speech-Synthesis System Using Noisy Labels</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170479.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-5|PAPER Thu-P-9-4-5 — Bias and Statistical Significance in Evaluating Speech Synthesis with Mean Opinion Scores]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bias and Statistical Significance in Evaluating Speech Synthesis with Mean Opinion Scores</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170950.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-5|PAPER Wed-O-8-1-5 — VoxCeleb: A Large-Scale Speaker Identification Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VoxCeleb: A Large-Scale Speaker Identification Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170928.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-12|PAPER Wed-SS-7-1-12 — Implementation of a Radiology Speech Recognition System for Estonian Using Open Source Software]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Implementation of a Radiology Speech Recognition System for Estonian Using Open Source Software</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171407.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-1|PAPER Wed-SS-7-1-1 — The ABAIR Initiative: Bringing Spoken Irish into the Digital Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The ABAIR Initiative: Bringing Spoken Irish into the Digital Space</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171448.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-8|PAPER Thu-P-9-3-8 — R,,d,, as a Control Parameter to Explore Affective Correlates of the Tense-Lax Continuum]]</div>|^<div class="cpauthorindexpersoncardpapertitle">R,,d,, as a Control Parameter to Explore Affective Correlates of the Tense-Lax Continuum</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172031.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-2|PAPER Wed-S&T-6-B-2 — A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170455.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-3|PAPER Wed-O-8-8-3 — Improving Child Speech Disorder Assessment by Incorporating Out-of-Domain Adult Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Child Speech Disorder Assessment by Incorporating Out-of-Domain Adult Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171593.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-4|PAPER Tue-P-4-3-4 — Towards Deep End-of-Turn Prediction for Situated Spoken Dialogue Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Deep End-of-Turn Prediction for Situated Spoken Dialogue Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172035.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-B-1|PAPER Mon-S&T-2-B-1 — An Apparatus to Investigate Western Opera Singing Skill Learning Using Performance and Result Biofeedback, and Measuring its Neural Correlates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Apparatus to Investigate Western Opera Singing Skill Learning Using Performance and Result Biofeedback, and Measuring its Neural Correlates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171514.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-6-3|PAPER Tue-O-5-6-3 — Acoustic Properties of Canonical and Non-Canonical Stress in French, Turkish, Armenian and Brazilian Portuguese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Properties of Canonical and Non-Canonical Stress in French, Turkish, Armenian and Brazilian Portuguese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172047.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-B-4|PAPER Mon-S&T-2-B-4 — Emojive! Collecting Emotion Data from Speech and Facial Expression Using Mobile Game App]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emojive! Collecting Emotion Data from Speech and Facial Expression Using Mobile Game App</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-4|PAPER Wed-S&T-6-B-4 — Nora the Empathetic Psychologist]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nora the Empathetic Psychologist</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-6|PAPER Mon-SS-2-8-6 — SFF Anti-Spoofer: IIIT-H Submission for Automatic Speaker Verification Spoofing and Countermeasures Challenge 2017]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SFF Anti-Spoofer: IIIT-H Submission for Automatic Speaker Verification Spoofing and Countermeasures Challenge 2017</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170256.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-1|PAPER Wed-O-8-1-1 — Detection of Replay Attacks Using Single Frequency Filtering Cepstral Coefficients]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Replay Attacks Using Single Frequency Filtering Cepstral Coefficients</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171247.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-9|PAPER Mon-P-1-4-9 — Joint Learning of Correlated Sequence Labeling Tasks Using Bidirectional Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Learning of Correlated Sequence Labeling Tasks Using Bidirectional Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171568.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-8|PAPER Tue-P-4-3-8 — Entrainment in Multi-Party Spoken Dialogues at Multiple Linguistic Levels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Entrainment in Multi-Party Spoken Dialogues at Multiple Linguistic Levels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172058.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-A-6|PAPER Mon-S&T-2-A-6 — MoPAReST — Mobile Phone Assisted Remote Speech Therapy Platform]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MoPAReST — Mobile Phone Assisted Remote Speech Therapy Platform</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171665.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-9|PAPER Thu-P-9-1-9 — Robust Speech Recognition Based on Binaural Auditory Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Speech Recognition Based on Binaural Auditory Processing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170516.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-9|PAPER Wed-P-6-3-9 — Zero-Shot Learning Across Heterogeneous Overlapping Domains]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Learning Across Heterogeneous Overlapping Domains</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170518.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-1|PAPER Wed-O-7-4-1 — Towards Zero-Shot Frame Semantic Parsing for Domain Scaling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Zero-Shot Frame Semantic Parsing for Domain Scaling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170544.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-6-1|PAPER Tue-O-5-6-1 — Similar Prosodic Structure Perceived Differently in German and English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Similar Prosodic Structure Perceived Differently in German and English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170903.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-11|PAPER Wed-SS-7-1-11 — Building an ASR Corpus Using Althingi’s Parliamentary Speeches]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Building an ASR Corpus Using Althingi’s Parliamentary Speeches</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171352.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-13|PAPER Wed-SS-7-1-13 — Building ASR Corpora Using Eyra]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Building ASR Corpora Using Eyra</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172048.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-A-2|PAPER Mon-S&T-2-A-2 — ChunkitApp: Investigating the Relevant Units of Online Speech Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ChunkitApp: Investigating the Relevant Units of Online Speech Processing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170204.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-11|PAPER Mon-P-1-4-11 — A Phonological Phrase Sequence Modelling Approach for Resource Efficient and Robust Real-Time Punctuation Recovery]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Phonological Phrase Sequence Modelling Approach for Resource Efficient and Robust Real-Time Punctuation Recovery</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171732.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-8|PAPER Tue-SS-5-11-8 — A Semi-Supervised Learning Approach for Acoustic-Prosodic Personality Perception in Under-Resourced Domains]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Semi-Supervised Learning Approach for Acoustic-Prosodic Personality Perception in Under-Resourced Domains</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171065.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-16|PAPER Mon-P-2-2-16 — The Acquisition of Focal Lengthening in Stockholm Swedish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Acquisition of Focal Lengthening in Stockholm Swedish</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171498.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-5|PAPER Tue-O-5-2-5 — Analysis and Description of ABC Submission to NIST SRE 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Description of ABC Submission to NIST SRE 2016</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-8|PAPER Tue-P-3-2-8 — Alternative Approaches to Neural Network Based Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alternative Approaches to Neural Network Based Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-6|PAPER Wed-S&T-6-A-6 — SIAK — A Game for Foreign Language Pronunciation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SIAK — A Game for Foreign Language Pronunciation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-7|PAPER Wed-P-7-4-7 — An Avatar-Based System for Identifying Individuals Likely to Develop Dementia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Avatar-Based System for Identifying Individuals Likely to Develop Dementia</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170971.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-12|PAPER Thu-P-9-3-12 — Automatic Labelling of Prosodic Prominence, Phrasing and Disfluencies in French Speech by Simulating the Perception of Naïve and Expert Listeners]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Labelling of Prosodic Prominence, Phrasing and Disfluencies in French Speech by Simulating the Perception of Naïve and Expert Listeners</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170618.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-4|PAPER Mon-P-2-1-4 — Lexically Guided Perceptual Learning in Mandarin Chinese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lexically Guided Perceptual Learning in Mandarin Chinese</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170264.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-6-3|PAPER Tue-O-4-6-3 — Intonation Facilitates Prediction of Focus Even in the Presence of Lexical Tones]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Intonation Facilitates Prediction of Focus Even in the Presence of Lexical Tones</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170544.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-6-1|PAPER Tue-O-5-6-1 — Similar Prosodic Structure Perceived Differently in German and English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Similar Prosodic Structure Perceived Differently in German and English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170073.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-1|PAPER Wed-O-7-1-1 — An Entrained Rhythm’s Frequency, Not Phase, Influences Temporal Sampling of Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Entrained Rhythm’s Frequency, Not Phase, Influences Temporal Sampling of Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172051.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-A-4|PAPER Mon-S&T-2-A-4 — HomeBank: A Repository for Long-Form Real-World Audio Recordings of Children]]</div>|^<div class="cpauthorindexpersoncardpapertitle">HomeBank: A Repository for Long-Form Real-World Audio Recordings of Children</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171409.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-3|PAPER Wed-SS-6-11-3 — What do Babies Hear? Analyses of Child- and Adult-Directed Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">What do Babies Hear? Analyses of Child- and Adult-Directed Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171418.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-4|PAPER Wed-SS-6-11-4 — A New Workflow for Semi-Automatized Annotations: Tests with Long-Form Naturalistic Recordings of Childrens Language Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A New Workflow for Semi-Automatized Annotations: Tests with Long-Form Naturalistic Recordings of Children’s Language Environments</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-4|PAPER Thu-SS-9-10-4 — Description of the Homebank Child/Adult Addressee Corpus (HB-CHAAC)]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Description of the Homebank Child/Adult Addressee Corpus (HB-CHAAC)</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-10-5|PAPER Tue-O-5-10-5 — The Sound of Deception — What Makes a Speaker Credible?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Sound of Deception — What Makes a Speaker Credible?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-7|PAPER Thu-P-9-3-7 — Developing an Embosi (Bantu C25) Speech Variant Dictionary to Model Vowel Elision and Morpheme Deletion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Developing an Embosi (Bantu C25) Speech Variant Dictionary to Model Vowel Elision and Morpheme Deletion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171167.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-12|PAPER Wed-P-8-1-12 — Focus Acoustics in Mandarin Nominals]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Focus Acoustics in Mandarin Nominals</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170316.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-4-4|PAPER Mon-O-2-4-4 — Musical Speech: A New Methodology for Transcribing Speech Prosody]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Musical Speech: A New Methodology for Transcribing Speech Prosody</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170270.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-2-3|PAPER Thu-O-9-2-3 — A Triplet Ranking-Based Neural Network for Speaker Diarization and Linking]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Triplet Ranking-Based Neural Network for Speaker Diarization and Linking</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171517.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-6|PAPER Mon-P-2-1-6 — Whether Long-Term Tracking of Speech Rate Affects Perception Depends on Who is Talking]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Whether Long-Term Tracking of Speech Rate Affects Perception Depends on Who is Talking</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171248.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-6|PAPER Tue-SS-5-11-6 — To See or not to See: Interlocutor Visibility and Likeability Influence Convergence in Intonation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">To See or not to See: Interlocutor Visibility and Likeability Influence Convergence in Intonation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170833.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-8-11-5|PAPER Wed-SS-8-11-5 — Social Attractiveness in Dialogs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Social Attractiveness in Dialogs</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170811.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-6-3|PAPER Wed-O-6-6-3 — An Investigation of Pitch Matching Across Adjacent Turns in a Corpus of Spontaneous German]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Investigation of Pitch Matching Across Adjacent Turns in a Corpus of Spontaneous German</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170047.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-5|PAPER Wed-O-7-8-5 — Pronunciation Learning with RNN-Transducers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Pronunciation Learning with RNN-Transducers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171126.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-2|PAPER Wed-O-6-1-2 — Inter-Speaker Variability: Speaker Normalisation and Quantitative Estimation of Articulatory Invariants in Speech Production for French]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Inter-Speaker Variability: Speaker Normalisation and Quantitative Estimation of Articulatory Invariants in Speech Production for French</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170730.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-4|PAPER Tue-SS-3-11-4 — Automatic Classification of Autistic Child Vocalisations: A Novel Database and Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Classification of Autistic Child Vocalisations: A Novel Database and Results</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170104.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-1|PAPER Wed-P-8-2-1 — The Perception of Emotions in Noisified Nonsense Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Perception of Emotions in Noisified Nonsense Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-2|PAPER Thu-SS-9-10-2 — Description of the Upper Respiratory Tract Infection Corpus (URTIC)]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Description of the Upper Respiratory Tract Infection Corpus (URTIC)</div> |
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-3|PAPER Thu-SS-9-10-3 — Description of the Munich-Passau Snore Sound Corpus (MPSSC)]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Description of the Munich-Passau Snore Sound Corpus (MPSSC)</div> |
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-8|PAPER Thu-SS-10-10-8 — Discussion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discussion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170335.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-2|PAPER Tue-P-5-2-2 — Cepstral and Entropy Analyses in Vowels Excerpted from Continuous Speech of Dysphonic and Control Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cepstral and Entropy Analyses in Vowels Excerpted from Continuous Speech of Dysphonic and Control Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171428.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-6-5|PAPER Thu-O-9-6-5 — SEGAN: Speech Enhancement Generative Adversarial Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SEGAN: Speech Enhancement Generative Adversarial Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170316.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-4-4|PAPER Mon-O-2-4-4 — Musical Speech: A New Methodology for Transcribing Speech Prosody]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Musical Speech: A New Methodology for Transcribing Speech Prosody</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171314.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-4|PAPER Wed-P-6-2-4 — Tied Hidden Factors in Neural Networks for End-to-End Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tied Hidden Factors in Neural Networks for End-to-End Speaker Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170084.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-6|PAPER Wed-P-6-2-6 — Domain Adaptation of PLDA Models in Broadcast Diarization by Means of Unsupervised Speaker Clustering]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain Adaptation of PLDA Models in Broadcast Diarization by Means of Unsupervised Speaker Clustering</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170742.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-1|PAPER Mon-P-2-2-1 — Critical Articulators Identification from RT-MRI of the Vocal Tract]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Critical Articulators Identification from RT-MRI of the Vocal Tract</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171044.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-6-2|PAPER Tue-O-4-6-2 — Comparing Languages Using Hierarchical Prosodic Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing Languages Using Hierarchical Prosodic Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171129.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-8|PAPER Wed-SS-7-1-8 — Improving DNN Bluetooth Narrowband Acoustic Models by Cross-Bandwidth and Cross-Lingual Initialization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving DNN Bluetooth Narrowband Acoustic Models by Cross-Bandwidth and Cross-Lingual Initialization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170040.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-1|PAPER Tue-P-5-3-1 — Audio Content Based Geotagging in Multimedia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Content Based Geotagging in Multimedia</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171065.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-16|PAPER Mon-P-2-2-16 — The Acquisition of Focal Lengthening in Stockholm Swedish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Acquisition of Focal Lengthening in Stockholm Swedish</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171349.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-8-11-6|PAPER Wed-SS-8-11-6 — A Gender Bias in the Acoustic-Melodic Features of Charismatic Speech?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Gender Bias in the Acoustic-Melodic Features of Charismatic Speech?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171371.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-14|PAPER Mon-P-2-2-14 — Video-Based Tracking of Jaw Movements During Speech: Preliminary Results and Future Directions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Video-Based Tracking of Jaw Movements During Speech: Preliminary Results and Future Directions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170424.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-6-5|PAPER Wed-O-6-6-5 — The Role of Linguistic and Prosodic Cues on the Prediction of Self-Reported Satisfaction in Contact Centre Phone Calls]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Role of Linguistic and Prosodic Cues on the Prediction of Self-Reported Satisfaction in Contact Centre Phone Calls</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170335.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-2|PAPER Tue-P-5-2-2 — Cepstral and Entropy Analyses in Vowels Excerpted from Continuous Speech of Dysphonic and Control Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cepstral and Entropy Analyses in Vowels Excerpted from Continuous Speech of Dysphonic and Control Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171650.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-2-2|PAPER Thu-O-9-2-2 — Speaker2Vec: Unsupervised Learning and Adaptation of a Speaker Manifold Using Deep Neural Networks with an Evaluation on Speaker Segmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker2Vec: Unsupervised Learning and Adaptation of a Speaker Manifold Using Deep Neural Networks with an Evaluation on Speaker Segmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171673.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-15|PAPER Tue-P-5-3-15 — Domain-Specific Utterance End-Point Detection for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Specific Utterance End-Point Detection for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170880.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-2|PAPER Wed-SS-7-1-2 — Very Low Resource Radio Browsing for Agile Developmental and Humanitarian Monitoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Very Low Resource Radio Browsing for Agile Developmental and Humanitarian Monitoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171129.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-8|PAPER Wed-SS-7-1-8 — Improving DNN Bluetooth Narrowband Acoustic Models by Cross-Bandwidth and Cross-Lingual Initialization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving DNN Bluetooth Narrowband Acoustic Models by Cross-Bandwidth and Cross-Lingual Initialization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171194.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-8|PAPER Wed-P-8-2-8 — Big Five vs. Prosodic Features as Cues to Detect Abnormality in SSPNET-Personality Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Big Five vs. Prosodic Features as Cues to Detect Abnormality in SSPNET-Personality Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170950.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-5|PAPER Wed-O-8-1-5 — VoxCeleb: A Large-Scale Speaker Identification Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VoxCeleb: A Large-Scale Speaker Identification Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-4-2|PAPER Mon-O-1-4-2 — Stepsize Control for Acoustic Feedback Cancellation Based on the Detection of Reverberant Signal Periods and the Estimated System Distance]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stepsize Control for Acoustic Feedback Cancellation Based on the Detection of Reverberant Signal Periods and the Estimated System Distance</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172042.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-5|PAPER Wed-S&T-6-A-5 — TBT (Toolkit to Build TTS): A High Performance Framework to Build Multiple Language HTS Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TBT (Toolkit to Build TTS): A High Performance Framework to Build Multiple Language HTS Voice</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170666.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-4|PAPER Thu-O-10-11-4 — Deep Learning Techniques in Tandem with Signal Processing Cues for Phonetic Segmentation for Text to Speech Synthesis in Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Learning Techniques in Tandem with Signal Processing Cues for Phonetic Segmentation for Text to Speech Synthesis in Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171510.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-1|PAPER Mon-O-2-10-1 — Generation of Large-Scale Simulated Utterances in Virtual Rooms to Train Deep-Neural Networks for Far-Field Speech Recognition in Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generation of Large-Scale Simulated Utterances in Virtual Rooms to Train Deep-Neural Networks for Far-Field Speech Recognition in Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-5|PAPER Mon-O-2-10-5 — Acoustic Modeling for Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Modeling for Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171557.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-4|PAPER Tue-P-4-2-4 — An Efficient Phone N-Gram Forward-Backward Computation Using Dense Matrix Multiplication]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Efficient Phone N-Gram Forward-Backward Computation Using Dense Matrix Multiplication</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170231.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-1-3|PAPER Thu-O-9-1-3 — Annealed f-Smoothing as a Mechanism to Speed up Neural Network Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Annealed f-Smoothing as a Mechanism to Speed up Neural Network Training</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171791.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-10|PAPER Thu-P-9-1-10 — Adaptive Multichannel Dereverberation for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Multichannel Dereverberation for Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170844.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-3|PAPER Thu-SS-9-11-3 — A Fast Robust 1D Flow Model for a Self-Oscillating Coupled 2D FEM Vocal Fold Simulation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Fast Robust 1D Flow Model for a Self-Oscillating Coupled 2D FEM Vocal Fold Simulation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170487.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-2|PAPER Mon-P-2-4-2 — Weakly-Supervised Phrase Assignment from Text in a Speech-Synthesis System Using Noisy Labels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weakly-Supervised Phrase Assignment from Text in a Speech-Synthesis System Using Noisy Labels</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171202.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-4|PAPER Tue-O-5-4-4 — Semi Parametric Concatenative TTS with Instant Voice Modification Capabilities]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi Parametric Concatenative TTS with Instant Voice Modification Capabilities</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-6|PAPER Wed-P-6-4-6 — On the Use of Band Importance Weighting in the Short-Time Objective Intelligibility Measure]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Use of Band Importance Weighting in the Short-Time Objective Intelligibility Measure</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172058.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-A-6|PAPER Mon-S&T-2-A-6 — MoPAReST — Mobile Phone Assisted Remote Speech Therapy Platform]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MoPAReST — Mobile Phone Assisted Remote Speech Therapy Platform</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172045.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-B-3|PAPER Mon-S&T-2-B-3 — System for Speech Transcription and Post-Editing in Microsoft Word]]</div>|^<div class="cpauthorindexpersoncardpapertitle">System for Speech Transcription and Post-Editing in Microsoft Word</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171410.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-4|PAPER Mon-O-1-10-4 — VCV Synthesis Using Task Dynamics to Animate a Factor-Based Articulatory Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VCV Synthesis Using Task Dynamics to Animate a Factor-Based Articulatory Model</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171580.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-2|PAPER Mon-P-2-2-2 — Semantic Edge Detection for Tracking Vocal Tract Air-Tissue Boundaries in Real-Time Magnetic Resonance Images]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Edge Detection for Tracking Vocal Tract Air-Tissue Boundaries in Real-Time Magnetic Resonance Images</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170608.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-5|PAPER Mon-P-2-2-5 — Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171488.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-2-6|PAPER Tue-O-3-2-6 — Test-Retest Repeatability of Articulatory Strategies Using Real-Time Magnetic Resonance Imaging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Test-Retest Repeatability of Articulatory Strategies Using Real-Time Magnetic Resonance Imaging</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172042.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-5|PAPER Wed-S&T-6-A-5 — TBT (Toolkit to Build TTS): A High Performance Framework to Build Multiple Language HTS Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TBT (Toolkit to Build TTS): A High Performance Framework to Build Multiple Language HTS Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172042.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-5|PAPER Wed-S&T-6-A-5 — TBT (Toolkit to Build TTS): A High Performance Framework to Build Multiple Language HTS Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TBT (Toolkit to Build TTS): A High Performance Framework to Build Multiple Language HTS Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170873.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-3|PAPER Tue-P-4-1-3 — Unfolded Deep Recurrent Convolutional Neural Network with Jump Ahead Connections for Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unfolded Deep Recurrent Convolutional Neural Network with Jump Ahead Connections for Acoustic Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170554.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-4|PAPER Tue-P-4-1-4 — Forward-Backward Convolutional LSTM for Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Forward-Backward Convolutional LSTM for Acoustic Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170543.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-3|PAPER Tue-P-5-4-3 — Improved Example-Based Speech Enhancement by Using Deep Neural Network Acoustic Model for Noise Robust Example Search]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Example-Based Speech Enhancement by Using Deep Neural Network Acoustic Model for Noise Robust Example Search</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170667.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-6-2|PAPER Wed-O-8-6-2 — Speaker-Aware Neural Network Based Beamformer for Speaker Extraction in Speech Mixtures]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker-Aware Neural Network Based Beamformer for Speaker Extraction in Speech Mixtures</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170793.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-5|PAPER Thu-P-9-1-5 — Uncertainty Decoding with Adaptive Sampling for Noise Robust DNN-Based Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Uncertainty Decoding with Adaptive Sampling for Noise Robust DNN-Based Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170725.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-12|PAPER Tue-P-4-3-12 — Hierarchical LSTMs with Joint Learning for Estimating Customer Satisfaction from Contact Center Calls]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hierarchical LSTMs with Joint Learning for Estimating Customer Satisfaction from Contact Center Calls</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170524.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-1|PAPER Thu-O-10-11-1 — Occupancy Detection in Commercial and Residential Environments Using Audio Signal]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Occupancy Detection in Commercial and Residential Environments Using Audio Signal</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170458.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-6|PAPER Tue-O-5-2-6 — The 2016 NIST Speaker Recognition Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The 2016 NIST Speaker Recognition Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172035.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-B-1|PAPER Mon-S&T-2-B-1 — An Apparatus to Investigate Western Opera Singing Skill Learning Using Performance and Result Biofeedback, and Measuring its Neural Correlates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Apparatus to Investigate Western Opera Singing Skill Learning Using Performance and Result Biofeedback, and Measuring its Neural Correlates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170624.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-5|PAPER Mon-P-1-1-5 — Non-Local Estimation of Speech Signal for Vowel Onset Point Detection in Varied Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Local Estimation of Speech Signal for Vowel Onset Point Detection in Varied Environments</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170135.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-3|PAPER Tue-P-5-3-3 — Excitation Source Features for Improving the Detection of Vowel Onset and Offset Points in a Speech Sequence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Excitation Source Features for Improving the Detection of Vowel Onset and Offset Points in a Speech Sequence</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171453.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-10-5|PAPER Tue-O-4-10-5 — A Mouth Opening Effect Based on Pole Modification for Expressive Singing Voice Transformation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Mouth Opening Effect Based on Pole Modification for Expressive Singing Voice Transformation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-4|PAPER Tue-S&T-3-A-4 — A Signal Processing Approach for Speaker Separation Using SFF Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Signal Processing Approach for Speaker Separation Using SFF Analysis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171172.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-4-1|PAPER Wed-O-6-4-1 — A Robust and Alternative Approach to Zero Frequency Filtering Method for Epoch Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Robust and Alternative Approach to Zero Frequency Filtering Method for Epoch Extraction</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171027.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-11|PAPER Wed-P-7-2-11 — Locating Burst Onsets Using SFF Envelope and Phase Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Locating Burst Onsets Using SFF Envelope and Phase Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-4|PAPER Tue-S&T-3-A-4 — A Signal Processing Approach for Speaker Separation Using SFF Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Signal Processing Approach for Speaker Separation Using SFF Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-7|PAPER Wed-P-7-4-7 — An Avatar-Based System for Identifying Individuals Likely to Develop Dementia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Avatar-Based System for Identifying Individuals Likely to Develop Dementia</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170848.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-3|PAPER Tue-O-5-4-3 — Reducing Mismatch in Training of DNN-Based Glottal Excitation Models in a Statistical Parametric Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reducing Mismatch in Training of DNN-Based Glottal Excitation Models in a Statistical Parametric Text-to-Speech System</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171288.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-7|PAPER Wed-P-8-4-7 — Generative Adversarial Network-Based Glottal Waveform Model for Statistical Parametric Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generative Adversarial Network-Based Glottal Waveform Model for Statistical Parametric Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171558.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-2|PAPER Wed-SS-6-2-2 — First Results in Developing a Medieval Latin Language Charter Dictation System for the East-Central Europe Region]]</div>|^<div class="cpauthorindexpersoncardpapertitle">First Results in Developing a Medieval Latin Language Charter Dictation System for the East-Central Europe Region</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170309.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-2|PAPER Mon-P-1-2-2 — Indoor/Outdoor Audio Classification Using Foreground Speech Segmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Indoor/Outdoor Audio Classification Using Foreground Speech Segmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170229.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-6|PAPER Tue-P-5-3-6 — Speaker Direction-of-Arrival Estimation Based on Frequency-Independent Beampattern]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Direction-of-Arrival Estimation Based on Frequency-Independent Beampattern</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171429.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-7|PAPER Mon-SS-1-11-7 — Metrics for Modeling Code-Switching Across Corpora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Metrics for Modeling Code-Switching Across Corpora</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170419.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-2|PAPER Tue-O-3-8-2 — Phrase Break Prediction for Long-Form Reading TTS: Exploiting Text Structure Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phrase Break Prediction for Long-Form Reading TTS: Exploiting Text Structure Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172002.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-S&T-9-A-1|PAPER Thu-S&T-9-A-1 — Soundtracing for Realtime Speech Adjustment to Environmental Conditions in 3D Simulations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Soundtracing for Realtime Speech Adjustment to Environmental Conditions in 3D Simulations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170874.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-8|PAPER Mon-P-2-3-8 — Generalized Distillation Framework for Speaker Normalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generalized Distillation Framework for Speaker Normalization</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-9|PAPER Wed-SS-7-1-9 — Joint Estimation of Articulatory Features and Acoustic Models for Low-Resource Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Estimation of Articulatory Features and Acoustic Models for Low-Resource Languages</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171009.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-10|PAPER Wed-SS-7-1-10 — Transfer Learning and Distillation Techniques to Improve the Acoustic Modeling of Low Resource Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning and Distillation Techniques to Improve the Acoustic Modeling of Low Resource Languages</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170878.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-4|PAPER Wed-O-8-8-4 — On Improving Acoustic Models for TORGO Dysarthric Speech Database]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Improving Acoustic Models for TORGO Dysarthric Speech Database</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-9|PAPER Wed-P-8-3-9 — Online Adaptation of an Attention-Based Neural Network for Natural Language Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online Adaptation of an Attention-Based Neural Network for Natural Language Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171273.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-6|PAPER Thu-O-9-4-6 — Similarity Learning Based Query Modeling for Keyword Search]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Similarity Learning Based Query Modeling for Keyword Search</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170316.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-4-4|PAPER Mon-O-2-4-4 — Musical Speech: A New Methodology for Transcribing Speech Prosody]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Musical Speech: A New Methodology for Transcribing Speech Prosody</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-12|PAPER Thu-P-9-4-12 — Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170143.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-6|PAPER Tue-P-5-1-6 — Mechanisms of Tone Sandhi Rule Application by Non-Native Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mechanisms of Tone Sandhi Rule Application by Non-Native Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171762.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-6|PAPER Mon-O-1-10-6 — Integrating Articulatory Information in Deep Learning-Based Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Integrating Articulatory Information in Deep Learning-Based Text-to-Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170952.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-7|PAPER Wed-P-6-1-7 — Multiview Representation Learning via Deep CCA for Silent Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multiview Representation Learning via Deep CCA for Silent Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171395.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-3|PAPER Tue-SS-3-11-3 — Analysis of Engagement and User Experience with a Laughter Responsive Social Robot]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of Engagement and User Experience with a Laughter Responsive Social Robot</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171504.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-11|PAPER Tue-P-5-4-11 — A Comparison of Perceptually Motivated Loss Functions for Binary Mask Estimation in Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Perceptually Motivated Loss Functions for Binary Mask Estimation in Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172022.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-1|PAPER Wed-S&T-6-A-1 — Creating a Voice for MiRo, the World’s First Commercial Biomimetic Robot]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Creating a Voice for MiRo, the World’s First Commercial Biomimetic Robot</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170500.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-5|PAPER Wed-P-6-4-5 — The Effect of Situation-Specific Non-Speech Acoustic Cues on the Intelligibility of Speech in Noise]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Effect of Situation-Specific Non-Speech Acoustic Cues on the Intelligibility of Speech in Noise</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171262.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-7|PAPER Wed-SS-7-1-7 — Leveraging Text Data for Word Segmentation for Underresourced Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging Text Data for Word Segmentation for Underresourced Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170438.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-5|PAPER Tue-P-3-2-5 — Improving the Effectiveness of Speaker Verification Domain Adaptation with Inadequate In-Domain Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving the Effectiveness of Speaker Verification Domain Adaptation with Inadequate In-Domain Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171039.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-6-2|PAPER Mon-O-1-6-2 — Glottal Opening and Strategies of Production of Fricatives]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glottal Opening and Strategies of Production of Fricatives</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170040.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-1|PAPER Tue-P-5-3-1 — Audio Content Based Geotagging in Multimedia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Content Based Geotagging in Multimedia</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171436.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-1|PAPER Wed-O-7-8-1 — Multitask Sequence-to-Sequence Models for Grapheme-to-Phoneme Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multitask Sequence-to-Sequence Models for Grapheme-to-Phoneme Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171607.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-4|PAPER Tue-P-5-1-4 — A Data-Driven Approach for Perceptually Validated Acoustic Features for Children’s Sibilant Fricative Productions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Data-Driven Approach for Perceptually Validated Acoustic Features for Children’s Sibilant Fricative Productions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170325.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-1|PAPER Mon-O-1-10-1 — The Influence of Synthetic Voice on the Evaluation of a Virtual Character]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Influence of Synthetic Voice on the Evaluation of a Virtual Character</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171329.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-6|PAPER Mon-O-1-2-6 — Co-Production of Speech and Pointing Gestures in Clear and Perturbed Interactive Tasks: Multimodal Designation Strategies]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Co-Production of Speech and Pointing Gestures in Clear and Perturbed Interactive Tasks: Multimodal Designation Strategies</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170328.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-3|PAPER Tue-SS-5-11-3 — Towards Speaker Characterization: Identifying and Predicting Dimensions of Person Attribution]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Speaker Characterization: Identifying and Predicting Dimensions of Person Attribution</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-5|PAPER Mon-O-1-1-5 — English Conversational Telephone Speech Recognition by Humans and Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">English Conversational Telephone Speech Recognition by Humans and Machines</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170612.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-4|PAPER Wed-P-6-3-4 — Exploring the Use of Significant Words Language Modeling for Spoken Document Retrieval]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring the Use of Significant Words Language Modeling for Spoken Document Retrieval</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170221.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-1-6|PAPER Thu-O-9-1-6 — Discriminative Autoencoders for Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discriminative Autoencoders for Acoustic Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170611.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-6-4|PAPER Thu-O-9-6-4 — Exploring Low-Dimensional Structures of Modulation Spectra for Robust Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Low-Dimensional Structures of Modulation Spectra for Robust Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171080.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-2|PAPER Thu-O-10-8-2 — Voice Disguise vs. Impersonation: Acoustic and Perceptual Measurements of Vocal Flexibility in Non Experts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Disguise vs. Impersonation: Acoustic and Perceptual Measurements of Vocal Flexibility in Non Experts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171042.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-4-11-5|PAPER Tue-SS-4-11-5 — A Computational Model for Phonetically Responsive Spoken Dialogue Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Computational Model for Phonetically Responsive Spoken Dialogue Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170838.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-5|PAPER Wed-P-7-2-5 — Mel-Cepstral Distortion of German Vowels in Different Information Density Contexts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mel-Cepstral Distortion of German Vowels in Different Information Density Contexts</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171433.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-6|PAPER Thu-O-10-8-6 — Shadowing Synthesized Speech — Segmental Analysis of Phonetic Convergence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Shadowing Synthesized Speech — Segmental Analysis of Phonetic Convergence</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171360.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-2-5|PAPER Tue-O-4-2-5 — Single-Ended Prediction of Listening Effort Based on Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Single-Ended Prediction of Listening Effort Based on Automatic Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171168.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-7|PAPER Wed-P-6-4-7 — Listening in the Dips: Comparing Relevant Features for Speech Recognition in Humans and Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Listening in the Dips: Comparing Relevant Features for Speech Recognition in Humans and Machines</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171214.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-6-2|PAPER Tue-O-5-6-2 — Disambiguate or not? — The Role of Prosody in Unambiguous and Potentially Ambiguous Anaphora Production in Strictly Mandarin Parallel Structures]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Disambiguate or not? — The Role of Prosody in Unambiguous and Potentially Ambiguous Anaphora Production in Strictly Mandarin Parallel Structures</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170839.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-6-4|PAPER Tue-O-4-6-4 — Mind the Peak: When Museum is Temporarily Understood as Musical in Australian English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mind the Peak: When Museum is Temporarily Understood as Musical in Australian English</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170544.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-6-1|PAPER Tue-O-5-6-1 — Similar Prosodic Structure Perceived Differently in German and English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Similar Prosodic Structure Perceived Differently in German and English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171027.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-11|PAPER Wed-P-7-2-11 — Locating Burst Onsets Using SFF Envelope and Phase Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Locating Burst Onsets Using SFF Envelope and Phase Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171274.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-10|PAPER Wed-P-6-1-10 — A Mostly Data-Driven Approach to Inverse Text Normalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Mostly Data-Driven Approach to Inverse Text Normalization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172058.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-A-6|PAPER Mon-S&T-2-A-6 — MoPAReST — Mobile Phone Assisted Remote Speech Therapy Platform]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MoPAReST — Mobile Phone Assisted Remote Speech Therapy Platform</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171318.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-10|PAPER Tue-P-5-2-10 — Deep Autoencoder Based Speech Features for Improved Dysarthric Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Autoencoder Based Speech Features for Improved Dysarthric Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171160.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-7|PAPER Mon-P-1-2-7 — Hidden Markov Model Variational Autoencoder for Acoustic Unit Discovery]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hidden Markov Model Variational Autoencoder for Acoustic Unit Discovery</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170040.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-1|PAPER Tue-P-5-3-1 — Audio Content Based Geotagging in Multimedia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Content Based Geotagging in Multimedia</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-5|PAPER Mon-O-1-1-5 — English Conversational Telephone Speech Recognition by Humans and Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">English Conversational Telephone Speech Recognition by Humans and Machines</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170564.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-1-4|PAPER Mon-O-2-1-4 — Fast Neural Network Language Model Lookups at N-Gram Speeds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fast Neural Network Language Model Lookups at N-Gram Speeds</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-1-5|PAPER Mon-O-2-1-5 — Empirical Exploration of Novel Architectures and Objectives for Language Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Empirical Exploration of Novel Architectures and Objectives for Language Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170487.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-2|PAPER Mon-P-2-4-2 — Weakly-Supervised Phrase Assignment from Text in a Speech-Synthesis System Using Noisy Labels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weakly-Supervised Phrase Assignment from Text in a Speech-Synthesis System Using Noisy Labels</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170546.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-5|PAPER Tue-O-3-1-5 — Direct Acoustics-to-Word Models for English Conversational Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Direct Acoustics-to-Word Models for English Conversational Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170904.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-5|PAPER Thu-O-9-4-5 — Symbol Sequence Search from Telephone Conversation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Symbol Sequence Search from Telephone Conversation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170614.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-4|PAPER Thu-O-10-1-4 — Efficient Knowledge Distillation from an Ensemble of Teachers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Efficient Knowledge Distillation from an Ensemble of Teachers</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170479.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-5|PAPER Thu-P-9-4-5 — Bias and Statistical Significance in Evaluating Speech Synthesis with Mean Opinion Scores]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bias and Statistical Significance in Evaluating Speech Synthesis with Mean Opinion Scores</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170611.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-6-4|PAPER Thu-O-9-6-4 — Exploring Low-Dimensional Structures of Modulation Spectra for Robust Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Low-Dimensional Structures of Modulation Spectra for Robust Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170790.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-8|PAPER Mon-P-1-1-8 — Vowel Onset Point Detection Using Sonority Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Vowel Onset Point Detection Using Sonority Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170590.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-5|PAPER Wed-O-7-4-5 — Minimum Semantic Error Cost Training of Deep Long Short-Term Memory Networks for Topic Spotting on Conversational Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Minimum Semantic Error Cost Training of Deep Long Short-Term Memory Networks for Topic Spotting on Conversational Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170583.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-1-4|PAPER Thu-O-9-1-4 — Non-Uniform MCE Training of Deep Long Short-Term Memory Recurrent Neural Networks for Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Uniform MCE Training of Deep Long Short-Term Memory Recurrent Neural Networks for Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-7|PAPER Mon-P-2-4-7 — Investigating Efficient Feature Representation Methods and Training Objective for BLSTM-Based Phone Duration Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Efficient Feature Representation Methods and Training Objective for BLSTM-Based Phone Duration Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170520.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-11-1|PAPER Wed-SS-7-11-1 — Multi-Task Learning for Mispronunciation Detection on Singapore Children’s Mandarin Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Task Learning for Mispronunciation Detection on Singapore Children’s Mandarin Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171794.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-9|PAPER Thu-SS-9-10-9 — An Integrated Solution for Snoring Sound Classification Using Bhattacharyya Distance Based GMM Supervectors with SVM, Feature Selection with Random Forest and Spectrogram with CNN]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Integrated Solution for Snoring Sound Classification Using Bhattacharyya Distance Based GMM Supervectors with SVM, Feature Selection with Random Forest and Spectrogram with CNN</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171741.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-6|PAPER Wed-O-7-1-6 — A Neuro-Experimental Evidence for the Motor Theory of Speech Perception]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Neuro-Experimental Evidence for the Motor Theory of Speech Perception</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170547.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-5|PAPER Mon-P-1-4-5 — Empirical Evaluation of Parallel Training Algorithms on Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Empirical Evaluation of Parallel Training Algorithms on Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171326.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-6-1|PAPER Wed-O-7-6-1 — An End-to-End Trainable Neural Network Model with Belief Tracking for Task-Oriented Dialog]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An End-to-End Trainable Neural Network Model with Belief Tracking for Task-Oriented Dialog</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171321.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-7|PAPER Wed-P-8-3-7 — Jointly Trained Sequential Labeling and Classification by Sparse Attention Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Jointly Trained Sequential Labeling and Classification by Sparse Attention Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170940.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-11|PAPER Tue-P-5-3-11 — Multiple Sound Source Counting and Localization Based on Spatial Principal Eigenvector]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multiple Sound Source Counting and Localization Based on Spatial Principal Eigenvector</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170838.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-5|PAPER Wed-P-7-2-5 — Mel-Cepstral Distortion of German Vowels in Different Information Density Contexts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mel-Cepstral Distortion of German Vowels in Different Information Density Contexts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171318.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-10|PAPER Tue-P-5-2-10 — Deep Autoencoder Based Speech Features for Improved Dysarthric Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Autoencoder Based Speech Features for Improved Dysarthric Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171673.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-15|PAPER Tue-P-5-3-15 — Domain-Specific Utterance End-Point Detection for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Specific Utterance End-Point Detection for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-6|PAPER Wed-O-7-2-6 — Robust Speech Recognition via Anchor Word Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Speech Recognition via Anchor Word Representations</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170516.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-9|PAPER Wed-P-6-3-9 — Zero-Shot Learning Across Heterogeneous Overlapping Domains]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Learning Across Heterogeneous Overlapping Domains</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS173004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-K4-1|PAPER Thu-K4-1 — Re-Inventing Speech — The Biological Way]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Re-Inventing Speech — The Biological Way</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-5|PAPER Mon-O-2-2-5 — Earlier Identification of Children with Autism Spectrum Disorder: An Automatic Vocalisation-Based Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Earlier Identification of Children with Autism Spectrum Disorder: An Automatic Vocalisation-Based Approach</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170730.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-4|PAPER Tue-SS-3-11-4 — Automatic Classification of Autistic Child Vocalisations: A Novel Database and Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Classification of Autistic Child Vocalisations: A Novel Database and Results</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172036.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-B-3|PAPER Tue-S&T-3-B-3 — “Did you laugh enough today?” — Deep Neural Networks for Mobile and Wearable Laughter Trackers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">“Did you laugh enough today?” — Deep Neural Networks for Mobile and Wearable Laughter Trackers</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170635.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-8-4|PAPER Wed-O-6-8-4 — Spotting Social Signals in Conversational Speech over IP: A Deep Learning Perspective]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spotting Social Signals in Conversational Speech over IP: A Deep Learning Perspective</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170409.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-5|PAPER Wed-P-7-4-5 — Emotional Speech of Mentally and Physically Disabled Individuals: Introducing the EmotAsS Database and First Findings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotional Speech of Mentally and Physically Disabled Individuals: Introducing the EmotAsS Database and First Findings</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171015.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-8|PAPER Wed-P-7-4-8 — Cross-Domain Classification of Drowsiness in Speech: The Case of Alcohol Intoxication and Sleep Deprivation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Domain Classification of Drowsiness in Speech: The Case of Alcohol Intoxication and Sleep Deprivation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170104.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-1|PAPER Wed-P-8-2-1 — The Perception of Emotions in Noisified Nonsense Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Perception of Emotions in Noisified Nonsense Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-5|PAPER Wed-P-8-2-5 — Implementing Gender-Dependent Vowel-Level Analysis for Boosting Speech-Based Depression Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Implementing Gender-Dependent Vowel-Level Analysis for Boosting Speech-Based Depression Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-2|PAPER Thu-SS-10-10-2 — An ‘End-to-Evolution’ Hybrid Approach for Snore Sound Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An ‘End-to-Evolution’ Hybrid Approach for Snore Sound Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-3|PAPER Thu-SS-10-10-3 — Snore Sound Classification Using Image-Based Deep Spectrum Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Snore Sound Classification Using Image-Based Deep Spectrum Features</div> |
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-8|PAPER Thu-SS-10-10-8 — Discussion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discussion</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170406.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-15|PAPER Thu-P-9-3-15 — Towards Intelligent Crowdsourcing for Audio Data Annotation: Integrating Active Learning in the Real World]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Intelligent Crowdsourcing for Audio Data Annotation: Integrating Active Learning in the Real World</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171250.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-9|PAPER Thu-P-9-4-9 — Real-Time Reactive Speech Synthesis: Incorporating Interruptions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Reactive Speech Synthesis: Incorporating Interruptions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171144.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-8|PAPER Mon-P-2-4-8 — Discrete Duration Model for Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discrete Duration Model for Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171152.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-9|PAPER Mon-P-2-4-9 — Comparison of Modeling Target in LSTM-RNN Duration Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Modeling Target in LSTM-RNN Duration Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-5|PAPER Mon-O-2-10-5 — Acoustic Modeling for Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Modeling for Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170233.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-1|PAPER Tue-O-3-1-1 — A Comparison of Sequence-to-Sequence Models for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Sequence-to-Sequence Models for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171164.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-6|PAPER Tue-O-3-1-6 — Reducing the Computational Complexity of Two-Dimensional LSTMs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reducing the Computational Complexity of Two-Dimensional LSTMs</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170232.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-5|PAPER Thu-O-10-1-5 — An Analysis of “Attention” in Sequence-to-Sequence Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Analysis of “Attention” in Sequence-to-Sequence Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170284.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-3|PAPER Thu-O-10-11-3 — Endpoint Detection Using Grid Long Short-Term Memory Networks for Streaming Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Endpoint Detection Using Grid Long Short-Term Memory Networks for Streaming Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170579.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-4|PAPER Thu-P-9-1-4 — Joint Training of Multi-Channel-Condition Dereverberation and Acoustic Modeling of Microphone Array Speech for Robust Distant Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Training of Multi-Channel-Condition Dereverberation and Acoustic Modeling of Microphone Array Speech for Robust Distant Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-1|PAPER Mon-P-2-3-1 — Multilingual Recurrent Neural Networks with Residual Learning for Low-Resource Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multilingual Recurrent Neural Networks with Residual Learning for Low-Resource Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170862.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-6|PAPER Wed-P-6-3-6 — Order-Preserving Abstractive Summarization for Spoken Content Based on Connectionist Temporal Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Order-Preserving Abstractive Summarization for Spoken Content Based on Connectionist Temporal Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171227.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-13|PAPER Tue-P-5-3-13 — Unmixing Convolutive Mixtures by Exploiting Amplitude Co-Modulation: Methods and Evaluation on Mandarin Speech Recordings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unmixing Convolutive Mixtures by Exploiting Amplitude Co-Modulation: Methods and Evaluation on Mandarin Speech Recordings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-5|PAPER Wed-P-8-2-5 — Implementing Gender-Dependent Vowel-Level Analysis for Boosting Speech-Based Depression Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Implementing Gender-Dependent Vowel-Level Analysis for Boosting Speech-Based Depression Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-1|PAPER Wed-SS-6-2-1 — Team ELISA System for DARPA LORELEI Speech Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team ELISA System for DARPA LORELEI Speech Evaluation 2016</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170788.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-7|PAPER Mon-P-2-3-7 — Multi-Task Learning Using Mismatched Transcription for Under-Resourced Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Task Learning Using Mismatched Transcription for Under-Resourced Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171567.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-11|PAPER Wed-P-6-1-11 — Mismatched Crowdsourcing from Multiple Annotator Languages for Recognizing Zero-Resourced Languages: A Nullspace Clustering Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mismatched Crowdsourcing from Multiple Annotator Languages for Recognizing Zero-Resourced Languages: A Nullspace Clustering Approach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171392.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-6|PAPER Thu-P-9-3-6 — Electrophysiological Correlates of Familiar Voice Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Electrophysiological Correlates of Familiar Voice Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171321.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-7|PAPER Wed-P-8-3-7 — Jointly Trained Sequential Labeling and Classification by Sparse Attention Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Jointly Trained Sequential Labeling and Classification by Sparse Attention Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171601.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-6-1|PAPER Mon-O-1-6-1 — Phonetic Correlates of Pharyngeal and Pharyngealized Consonants in Saudi, Lebanese, and Jordanian Arabic: An rt-MRI Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetic Correlates of Pharyngeal and Pharyngealized Consonants in Saudi, Lebanese, and Jordanian Arabic: An rt-MRI Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171621.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-10|PAPER Wed-P-8-2-10 — Approaching Human Performance in Behavior Estimation in Couples Therapy Using Deep Sentence Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Approaching Human Performance in Behavior Estimation in Couples Therapy Using Deep Sentence Embeddings</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171641.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-11|PAPER Wed-P-8-2-11 — Complexity in Speech and its Relation to Emotional Bond in Therapist-Patient Interactions During Suicide Risk Assessment Interviews]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Complexity in Speech and its Relation to Emotional Bond in Therapist-Patient Interactions During Suicide Risk Assessment Interviews</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-6|PAPER Wed-O-7-2-6 — Robust Speech Recognition via Anchor Word Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Speech Recognition via Anchor Word Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171136.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-9|PAPER Mon-P-2-3-9 — Learning Factorized Transforms for Unsupervised Adaptation of LSTM-RNN Acoustic Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Factorized Transforms for Unsupervised Adaptation of LSTM-RNN Acoustic Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171315.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-7|PAPER Thu-P-9-1-7 — To Improve the Robustness of LSTM-RNN Acoustic Models Using Higher-Order Feedback from Multiple Histories]]</div>|^<div class="cpauthorindexpersoncardpapertitle">To Improve the Robustness of LSTM-RNN Acoustic Models Using Higher-Order Feedback from Multiple Histories</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171223.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-1|PAPER Tue-SS-3-11-1 — Elicitation Design for Acoustic Depression Classification: An Investigation of Articulation Effort, Linguistic Complexity, and Word Affect]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Elicitation Design for Acoustic Depression Classification: An Investigation of Articulation Effort, Linguistic Complexity, and Word Affect</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170485.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-9|PAPER Wed-P-7-3-9 — An Environmental Feature Representation for Robust Speech Recognition and for Environment Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Environmental Feature Representation for Robust Speech Recognition and for Environment Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170315.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-6|PAPER Wed-P-7-3-6 — Laryngeal Articulation During Trumpet Performance: An Exploratory Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Laryngeal Articulation During Trumpet Performance: An Exploratory Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171695.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-1-6|PAPER Tue-O-5-1-6 — Deep Learning-Based Telephony Speech Recognition in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Learning-Based Telephony Speech Recognition in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170215.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-3|PAPER Wed-SS-6-2-3 — The Motivation and Development of MPAi, a Māori Pronunciation Aid]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Motivation and Development of MPAi, a Māori Pronunciation Aid</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-5|PAPER Tue-P-5-2-5 — Hypernasality Severity Analysis in Cleft Lip and Palate Speech Using Vowel Space Area]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hypernasality Severity Analysis in Cleft Lip and Palate Speech Using Vowel Space Area</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171031.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-6-1|PAPER Mon-O-2-6-1 — End-to-End Acoustic Feedback in Language Learning for Correcting Devoiced French Final-Fricatives]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Acoustic Feedback in Language Learning for Correcting Devoiced French Final-Fricatives</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171604.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-9|PAPER Tue-P-4-3-9 — Measuring Synchrony in Task-Based Dialogues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Measuring Synchrony in Task-Based Dialogues</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171633.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-8-6|PAPER Wed-O-6-8-6 — Visual, Laughter, Applause and Spoken Expression Features for Predicting Engagement Within TED Talks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visual, Laughter, Applause and Spoken Expression Features for Predicting Engagement Within TED Talks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171584.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-9|PAPER Wed-P-8-2-9 — Speech Rate Comparison When Talking to a System and Talking to a Human: A Study from a Speech-to-Speech, Machine Translation Mediated Map Task]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Rate Comparison When Talking to a System and Talking to a Human: A Study from a Speech-to-Speech, Machine Translation Mediated Map Task</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170621.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-6|PAPER Wed-P-7-4-6 — Phonological Markers of Oxytocin and MDMA Ingestion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonological Markers of Oxytocin and MDMA Ingestion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171522.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-5|PAPER Tue-O-5-8-5 — Detection of Mispronunciations and Disfluencies in Children Reading Aloud]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Mispronunciations and Disfluencies in Children Reading Aloud</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171541.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-3|PAPER Wed-P-6-1-3 — Automatic Evaluation of Children Reading Aloud on Sentences and Pseudowords]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Evaluation of Children Reading Aloud on Sentences and Pseudowords</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171278.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-3|PAPER Mon-O-1-2-3 — A Stepwise Analysis of Aggregated Crowdsourced Labels Describing Multimodal Emotional Behaviors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Stepwise Analysis of Aggregated Crowdsourced Labels Describing Multimodal Emotional Behaviors</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171494.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-10-4|PAPER Tue-O-3-10-4 — Jointly Predicting Arousal, Valence and Dominance with Multi-Task Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Jointly Predicting Arousal, Valence and Dominance with Multi-Task Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171573.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-14|PAPER Tue-P-5-3-14 — Bimodal Recurrent Neural Network for Audiovisual Voice Activity Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bimodal Recurrent Neural Network for Audiovisual Voice Activity Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170631.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-4-11-3|PAPER Tue-SS-4-11-3 — Motion Analysis in Vocalized Surprise Expressions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Motion Analysis in Vocalized Surprise Expressions</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170623.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-4|PAPER Tue-SS-5-11-4 — Prosodic Analysis of Attention-Drawing Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Analysis of Attention-Drawing Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-6|PAPER Tue-P-4-3-6 — Turn-Taking Estimation Model Based on Joint Embedding of Lexical and Prosodic Contents]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Turn-Taking Estimation Model Based on Joint Embedding of Lexical and Prosodic Contents</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170424.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-6-5|PAPER Wed-O-6-6-5 — The Role of Linguistic and Prosodic Cues on the Prediction of Self-Reported Satisfaction in Contact Centre Phone Calls]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Role of Linguistic and Prosodic Cues on the Prediction of Self-Reported Satisfaction in Contact Centre Phone Calls</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170275.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-10|PAPER Wed-P-8-3-10 — Spanish Sign Language Recognition with Different Topology Hidden Markov Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spanish Sign Language Recognition with Different Topology Hidden Markov Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171183.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-8|PAPER Wed-P-6-3-8 — Compensating Gender Variability in Query-by-Example Search on Speech Using Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Compensating Gender Variability in Query-by-Example Search on Speech Using Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171201.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-9|PAPER Wed-P-7-4-9 — Depression Detection Using Automatic Transcriptions of De-Identified Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Depression Detection Using Automatic Transcriptions of De-Identified Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170260.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-2-2|PAPER Tue-O-3-2-2 — Analysis of Acoustic-to-Articulatory Speech Inversion Across Different Accents and Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of Acoustic-to-Articulatory Speech Inversion Across Different Accents and Languages</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171421.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-2|PAPER Tue-O-4-8-2 — Adversarial Auto-Encoders for Speech Based Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Auto-Encoders for Speech Based Emotion Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170120.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-2|PAPER Wed-P-7-4-2 — An Affect Prediction Approach Through Depression Severity Parameter Incorporation in Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Affect Prediction Approach Through Depression Severity Parameter Incorporation in Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170496.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-8|PAPER Tue-P-5-3-8 — Improved End-of-Query Detection for Streaming Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved End-of-Query Detection for Streaming Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170284.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-3|PAPER Thu-O-10-11-3 — Endpoint Detection Using Grid Long Short-Term Memory Networks for Streaming Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Endpoint Detection Using Grid Long Short-Term Memory Networks for Streaming Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171552.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-6-5|PAPER Mon-O-1-6-5 — Vowels in the Barunga Variety of North Australian Kriol]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Vowels in the Barunga Variety of North Australian Kriol</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170532.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-10|PAPER Mon-P-2-1-10 — Misperceptions of the Emotional Content of Natural and Vocoded Speech in a Car]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Misperceptions of the Emotional Content of Natural and Vocoded Speech in a Car</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171647.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-6|PAPER Tue-O-5-4-6 — Direct Modelling of Magnitude and Phase Spectra for Statistical Parametric Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Direct Modelling of Magnitude and Phase Spectra for Statistical Parametric Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170105.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-2|PAPER Wed-P-6-4-2 — Speech Intelligibility in Cars: The Effect of Speaking Style, Noise and Listener Age]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Intelligibility in Cars: The Effect of Speaking Style, Noise and Listener Age</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170926.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-5|PAPER Tue-SS-3-11-5 — Crowd-Sourced Design of Artificial Attentive Listeners]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Crowd-Sourced Design of Artificial Attentive Listeners</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172052.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-A-5|PAPER Mon-S&T-2-A-5 — A System for Real Time Collaborative Transcription Correction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A System for Real Time Collaborative Transcription Correction</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170710.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-6|PAPER Wed-P-8-1-6 — Using Prosody to Classify Discourse Relations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Prosody to Classify Discourse Relations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS173003.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-K3-1|PAPER Wed-K3-1 — Conversing with Social Agents That Smile and Laugh]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Conversing with Social Agents That Smile and Laugh</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171292.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-6-3|PAPER Mon-O-1-6-3 — Acoustics and Articulation of Medial versus Final Coronal Stop Gemination Contrasts in Moroccan Arabic]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustics and Articulation of Medial versus Final Coronal Stop Gemination Contrasts in Moroccan Arabic</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170765.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-2|PAPER Wed-P-7-2-2 — Temporal Dynamics of Lateral Channel Formation in /l/: 3D EMA Data from Australian English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Temporal Dynamics of Lateral Channel Formation in /l/: 3D EMA Data from Australian English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170303.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-2|PAPER Wed-O-8-8-2 — Multi-Stage DNN Training for Automatic Recognition of Dysarthric Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Stage DNN Training for Automatic Recognition of Dysarthric Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171430.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-4|PAPER Wed-P-8-1-4 — How Does the Absence of Shared Knowledge Between Interlocutors Affect the Production of French Prosodic Forms?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">How Does the Absence of Shared Knowledge Between Interlocutors Affect the Production of French Prosodic Forms?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170470.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-3|PAPER Thu-O-10-8-3 — Schwa Realization in French: Using Automatic Speech Processing to Study Phonological and Socio-Linguistic Factors in Large Corpora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Schwa Realization in French: Using Automatic Speech Processing to Study Phonological and Socio-Linguistic Factors in Large Corpora</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171194.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-8|PAPER Wed-P-8-2-8 — Big Five vs. Prosodic Features as Cues to Detect Abnormality in SSPNET-Personality Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Big Five vs. Prosodic Features as Cues to Detect Abnormality in SSPNET-Personality Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170990.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-10|PAPER Thu-P-9-3-10 — Perception and Production of Word-Final /ʁ/ in French]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perception and Production of Word-Final /ʁ/ in French</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170366.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-6|PAPER Tue-O-5-8-6 — Automatic Assessment of Non-Native Prosody by Measuring Distances on Prosodic Label Sequences]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Assessment of Non-Native Prosody by Measuring Distances on Prosodic Label Sequences</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170836.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-3|PAPER Wed-O-8-1-3 — Independent Modelling of High and Low Energy Speech Frames for Spoofing Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Independent Modelling of High and Low Energy Speech Frames for Spoofing Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172037.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-B-4|PAPER Tue-S&T-3-B-4 — Low-Frequency Ultrasonic Communication for Speech Broadcasting in Public Transportation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low-Frequency Ultrasonic Communication for Speech Broadcasting in Public Transportation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170726.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-4-3|PAPER Mon-O-2-4-3 — Time-Frequency Coherence for Periodic-Aperiodic Decomposition of Speech Signals]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time-Frequency Coherence for Periodic-Aperiodic Decomposition of Speech Signals</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171138.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-4-3|PAPER Wed-O-6-4-3 — A Spectro-Temporal Demodulation Technique for Pitch Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Spectro-Temporal Demodulation Technique for Pitch Estimation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170271.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-7|PAPER Tue-P-5-3-7 — A Mask Estimation Method Integrating Data Field Model for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Mask Estimation Method Integrating Data Field Model for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170109.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-6-2|PAPER Thu-O-9-6-2 — Improved Codebook-Based Speech Enhancement Based on MBE Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Codebook-Based Speech Enhancement Based on MBE Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171510.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-1|PAPER Mon-O-2-10-1 — Generation of Large-Scale Simulated Utterances in Virtual Rooms to Train Deep-Neural Networks for Far-Field Speech Recognition in Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generation of Large-Scale Simulated Utterances in Virtual Rooms to Train Deep-Neural Networks for Far-Field Speech Recognition in Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-5|PAPER Mon-O-2-10-5 — Acoustic Modeling for Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Modeling for Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171665.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-9|PAPER Thu-P-9-1-9 — Robust Speech Recognition Based on Binaural Auditory Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Speech Recognition Based on Binaural Auditory Processing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-6|PAPER Tue-P-4-3-6 — Turn-Taking Estimation Model Based on Joint Embedding of Lexical and Prosodic Contents]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Turn-Taking Estimation Model Based on Joint Embedding of Lexical and Prosodic Contents</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171139.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-14|PAPER Wed-SS-7-1-14 — Rapid Development of TTS Corpora for Four South African Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rapid Development of TTS Corpora for Four South African Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170293.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-2|PAPER Tue-SS-5-11-2 — Relationships Between Speech Timing and Perceived Hostility in a French Corpus of Political Debates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Relationships Between Speech Timing and Perceived Hostility in a French Corpus of Political Debates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170948.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-5|PAPER Mon-P-2-1-5 — The Effect of Spectral Profile on the Intelligibility of Emotional Speech in Noise]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Effect of Spectral Profile on the Intelligibility of Emotional Speech in Noise</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171205.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-6-5|PAPER Wed-O-7-6-5 — Rushing to Judgement: How do Laypeople Rate Caller Engagement in Thin-Slice Videos of Human–Machine Dialog?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rushing to Judgement: How do Laypeople Rate Caller Engagement in Thin-Slice Videos of Human–Machine Dialog?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172031.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-2|PAPER Wed-S&T-6-B-2 — A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170633.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-1|PAPER Tue-P-3-2-1 — Speaker Verification via Estimating Total Variability Space Using Probabilistic Partial Least Squares]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Verification via Estimating Total Variability Space Using Probabilistic Partial Least Squares</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170856.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-1-4|PAPER Tue-O-5-1-4 — Stochastic Recurrent Neural Network for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stochastic Recurrent Neural Network for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170892.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-1|PAPER Thu-O-10-1-1 — Deep Neural Factorization for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Neural Factorization for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170940.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-11|PAPER Tue-P-5-3-11 — Multiple Sound Source Counting and Localization Based on Spatial Principal Eigenvector]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multiple Sound Source Counting and Localization Based on Spatial Principal Eigenvector</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170877.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-5|PAPER Thu-O-10-11-5 — Gate Activation Signal Analysis for Gated Recurrent Neural Networks and its Correlation with Phoneme Boundaries]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Gate Activation Signal Analysis for Gated Recurrent Neural Networks and its Correlation with Phoneme Boundaries</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170199.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-5|PAPER Tue-P-5-3-5 — Weighted Spatial Covariance Matrix Estimation for MUSIC Based TDOA Estimation of Speech Source]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weighted Spatial Covariance Matrix Estimation for MUSIC Based TDOA Estimation of Speech Source</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171691.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-8-11-8|PAPER Wed-SS-8-11-8 — Does Posh English Sound Attractive?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Does Posh English Sound Attractive?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170555.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-4|PAPER Tue-O-5-2-4 — UTD-CRSS Systems for 2016 NIST Speaker Recognition Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">UTD-CRSS Systems for 2016 NIST Speaker Recognition Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170563.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-8-2|PAPER Wed-O-6-8-2 — Computing Multimodal Dyadic Behaviors During Spontaneous Diagnosis Interviews Toward Automatic Categorization of Autism Spectrum Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Computing Multimodal Dyadic Behaviors During Spontaneous Diagnosis Interviews Toward Automatic Categorization of Autism Spectrum Disorder</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170569.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-8-3|PAPER Wed-O-6-8-3 — Deriving Dyad-Level Interaction Representation Using Interlocutors Structural and Expressive Multimodal Behavior Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deriving Dyad-Level Interaction Representation Using Interlocutors Structural and Expressive Multimodal Behavior Features</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170562.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-4|PAPER Wed-P-8-2-4 — Modeling Perceivers Neural-Responses Using Lobe-Dependent Convolutional Neural Network to Improve Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Perceivers Neural-Responses Using Lobe-Dependent Convolutional Neural Network to Improve Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170858.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-4-6|PAPER Mon-O-1-4-6 — Simulations of High-Frequency Vocoder on Mandarin Speech Recognition for Acoustic Hearing Preserved Cochlear Implant]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Simulations of High-Frequency Vocoder on Mandarin Speech Recognition for Acoustic Hearing Preserved Cochlear Implant</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170781.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-7|PAPER Mon-P-1-1-7 — Wavelet Speech Enhancement Based on Robust Principal Component Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Wavelet Speech Enhancement Based on Robust Principal Component Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-1|PAPER Tue-P-5-4-1 — A Post-Filtering Approach Based on Locally Linear Embedding Difference Compensation for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Post-Filtering Approach Based on Locally Linear Embedding Difference Compensation for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170063.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-1|PAPER Wed-P-8-4-1 — Voice Conversion from Unaligned Corpora Using Variational Autoencoding Wasserstein Generative Adversarial Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Conversion from Unaligned Corpora Using Variational Autoencoding Wasserstein Generative Adversarial Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170611.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-6-4|PAPER Thu-O-9-6-4 — Exploring Low-Dimensional Structures of Modulation Spectra for Robust Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Low-Dimensional Structures of Modulation Spectra for Robust Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170853.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-4|PAPER Mon-O-2-10-4 — On Design of Robust Deep Models for CHiME-4 Multi-Channel Speech Recognition with Multiple Configurations of Array Microphones]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Design of Robust Deep Models for CHiME-4 Multi-Channel Speech Recognition with Multiple Configurations of Array Microphones</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170830.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-4-1|PAPER Tue-O-4-4-1 — A Maximum Likelihood Approach to Deep Neural Network Based Nonlinear Spectral Mapping for Single-Channel Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Maximum Likelihood Approach to Deep Neural Network Based Nonlinear Spectral Mapping for Single-Channel Speech Separation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170464.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-5|PAPER Wed-P-6-1-5 — Improving Mispronunciation Detection for Non-Native Learners with Multisource Information and LSTM-Based Deep Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Mispronunciation Detection for Non-Native Learners with Multisource Information and LSTM-Based Deep Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170579.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-4|PAPER Thu-P-9-1-4 — Joint Training of Multi-Channel-Condition Dereverberation and Acoustic Modeling of Microphone Array Speech for Robust Distant Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Training of Multi-Channel-Condition Dereverberation and Acoustic Modeling of Microphone Array Speech for Robust Distant Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170563.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-8-2|PAPER Wed-O-6-8-2 — Computing Multimodal Dyadic Behaviors During Spontaneous Diagnosis Interviews Toward Automatic Categorization of Autism Spectrum Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Computing Multimodal Dyadic Behaviors During Spontaneous Diagnosis Interviews Toward Automatic Categorization of Autism Spectrum Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172058.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-A-6|PAPER Mon-S&T-2-A-6 — MoPAReST — Mobile Phone Assisted Remote Speech Therapy Platform]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MoPAReST — Mobile Phone Assisted Remote Speech Therapy Platform</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171318.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-10|PAPER Tue-P-5-2-10 — Deep Autoencoder Based Speech Features for Improved Dysarthric Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Autoencoder Based Speech Features for Improved Dysarthric Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171035.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-10-3|PAPER Tue-O-5-10-3 — Opinion Dynamics Modeling for Movie Review Transcripts Classification with Hidden Conditional Random Fields]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Opinion Dynamics Modeling for Movie Review Transcripts Classification with Hidden Conditional Random Fields</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171267.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-6|PAPER Mon-P-2-2-6 — The Influence on Realization and Perception of Lexical Tones from Affricate’s Aspiration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Influence on Realization and Perception of Lexical Tones from Affricate’s Aspiration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170250.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-1|PAPER Tue-O-5-8-1 — Bidirectional LSTM-RNN for Improving Automated Assessment of Non-Native Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bidirectional LSTM-RNN for Improving Automated Assessment of Non-Native Children’s Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171174.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-3|PAPER Tue-O-5-8-3 — Off-Topic Spoken Response Detection Using Siamese Convolutional Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Off-Topic Spoken Response Detection Using Siamese Convolutional Neural Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170388.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-4|PAPER Wed-P-6-1-4 — Off-Topic Spoken Response Detection with Word Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Off-Topic Spoken Response Detection with Word Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170948.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-5|PAPER Mon-P-2-1-5 — The Effect of Spectral Profile on the Intelligibility of Emotional Speech in Noise]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Effect of Spectral Profile on the Intelligibility of Emotional Speech in Noise</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170071.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-4|PAPER Tue-O-3-1-4 — Multitask Learning with CTC and Segmental CRF for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multitask Learning with CTC and Segmental CRF for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171737.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-5|PAPER Tue-P-4-1-5 — Convolutional Recurrent Neural Networks for Small-Footprint Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Convolutional Recurrent Neural Networks for Small-Footprint Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171611.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-2-6|PAPER Tue-O-4-2-6 — Modeling Categorical Perception with the Receptive Fields of Auditory Neurons]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Categorical Perception with the Receptive Fields of Auditory Neurons</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-5|PAPER Mon-O-2-2-5 — Earlier Identification of Children with Autism Spectrum Disorder: An Automatic Vocalisation-Based Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Earlier Identification of Children with Autism Spectrum Disorder: An Automatic Vocalisation-Based Approach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171535.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-6-2|PAPER Tue-O-3-6-2 — Cross-Speaker Variation in Voice Source Correlates of Focus and Deaccentuation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Speaker Variation in Voice Source Correlates of Focus and Deaccentuation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171181.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-5|PAPER Tue-O-4-8-5 — Voice-to-Affect Mapping: Inferences on Language Voice Baseline Settings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice-to-Affect Mapping: Inferences on Language Voice Baseline Settings</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171407.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-1|PAPER Wed-SS-7-1-1 — The ABAIR Initiative: Bringing Spoken Irish into the Digital Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The ABAIR Initiative: Bringing Spoken Irish into the Digital Space</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171140.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-8|PAPER Wed-P-7-2-8 — Reshaping the Transformed LF Model: Generating the Glottal Source from the Waveshape Parameter R,,d,,]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reshaping the Transformed LF Model: Generating the Glottal Source from the Waveshape Parameter R,,d,,</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171448.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-8|PAPER Thu-P-9-3-8 — R,,d,, as a Control Parameter to Explore Affective Correlates of the Tense-Lax Continuum]]</div>|^<div class="cpauthorindexpersoncardpapertitle">R,,d,, as a Control Parameter to Explore Affective Correlates of the Tense-Lax Continuum</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171681.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-10|PAPER Mon-P-1-1-10 — Learning the Mapping Function from Voltage Amplitudes to Sensor Positions in 3D-EMA Using Deep Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning the Mapping Function from Voltage Amplitudes to Sensor Positions in 3D-EMA Using Deep Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171480.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-4|PAPER Wed-O-7-4-4 — Label-Dependency Coding in Simple Recurrent Networks for Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Label-Dependency Coding in Simple Recurrent Networks for Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171126.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-2|PAPER Wed-O-6-1-2 — Inter-Speaker Variability: Speaker Normalisation and Quantitative Estimation of Articulatory Invariants in Speech Production for French]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Inter-Speaker Variability: Speaker Normalisation and Quantitative Estimation of Articulatory Invariants in Speech Production for French</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172053.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-6|PAPER Tue-S&T-3-A-6 — MetaLab: A Repository for Meta-Analyses on Language Development, and More]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MetaLab: A Repository for Meta-Analyses on Language Development, and More</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171443.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-5|PAPER Wed-SS-6-11-5 — Top-Down versus Bottom-Up Theories of Phonological Acquisition: A Big Data Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Top-Down versus Bottom-Up Theories of Phonological Acquisition: A Big Data Approach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171363.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-6|PAPER Tue-P-5-2-6 — Automatic Prediction of Speech Evaluation Metrics for Dysarthric Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Prediction of Speech Evaluation Metrics for Dysarthric Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172044.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-B-2|PAPER Mon-S&T-2-B-2 — PercyConfigurator — Perception Experiments as a Service]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PercyConfigurator — Perception Experiments as a Service</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-3|PAPER Thu-SS-9-10-3 — Description of the Munich-Passau Snore Sound Corpus (MPSSC)]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Description of the Munich-Passau Snore Sound Corpus (MPSSC)</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171436.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-1|PAPER Wed-O-7-8-1 — Multitask Sequence-to-Sequence Models for Grapheme-to-Phoneme Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multitask Sequence-to-Sequence Models for Grapheme-to-Phoneme Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171407.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-1|PAPER Wed-SS-7-1-1 — The ABAIR Initiative: Bringing Spoken Irish into the Digital Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The ABAIR Initiative: Bringing Spoken Irish into the Digital Space</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170396.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-4-11-1|PAPER Tue-SS-4-11-1 — Adjusting the Frame: Biphasic Performative Control of Speech Rhythm]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adjusting the Frame: Biphasic Performative Control of Speech Rhythm</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171292.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-6-3|PAPER Mon-O-1-6-3 — Acoustics and Articulation of Medial versus Final Coronal Stop Gemination Contrasts in Moroccan Arabic]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustics and Articulation of Medial versus Final Coronal Stop Gemination Contrasts in Moroccan Arabic</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170765.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-2|PAPER Wed-P-7-2-2 — Temporal Dynamics of Lateral Channel Formation in /l/: 3D EMA Data from Australian English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Temporal Dynamics of Lateral Channel Formation in /l/: 3D EMA Data from Australian English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170876.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-12|PAPER Wed-P-7-2-12 — A Preliminary Phonetic Investigation of Alphabetic Words in Mandarin Chinese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Preliminary Phonetic Investigation of Alphabetic Words in Mandarin Chinese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170143.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-6|PAPER Tue-P-5-1-6 — Mechanisms of Tone Sandhi Rule Application by Non-Native Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mechanisms of Tone Sandhi Rule Application by Non-Native Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170792.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-5|PAPER Mon-P-1-2-5 — Enhanced Feature Extraction for Speech Detection in Media Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enhanced Feature Extraction for Speech Detection in Media Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170555.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-4|PAPER Tue-O-5-2-4 — UTD-CRSS Systems for 2016 NIST Speaker Recognition Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">UTD-CRSS Systems for 2016 NIST Speaker Recognition Evaluation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171608.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-3|PAPER Tue-P-3-1-3 — End-to-End Text-Independent Speaker Verification with Triplet Loss on Short Utterances]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Text-Independent Speaker Verification with Triplet Loss on Short Utterances</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171093.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-6|PAPER Wed-O-7-4-6 — Topic Identification for Speech Without ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Topic Identification for Speech Without ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171233.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-6|PAPER Tue-P-4-1-6 — Deep Activation Mixture Model for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Activation Mixture Model for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170493.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-10-4|PAPER Wed-O-8-10-4 — Sparse Non-Negative Matrix Language Modeling: Maximum Entropy Flexibility on the Cheap]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sparse Non-Negative Matrix Language Modeling: Maximum Entropy Flexibility on the Cheap</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-2-5|PAPER Thu-O-9-2-5 — Combining Speaker Turn Embedding and Incremental Structure Prediction for Low-Latency Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Combining Speaker Turn Embedding and Incremental Structure Prediction for Low-Latency Speaker Diarization</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170065.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-6|PAPER Thu-O-10-11-6 — Speaker Change Detection in Broadcast TV Using Bidirectional Long Short-Term Memory Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Change Detection in Broadcast TV Using Bidirectional Long Short-Term Memory Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170797.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-3|PAPER Tue-O-5-2-3 — Nuance - Politecnico di Torino’s 2016 NIST Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nuance - Politecnico di Torino’s 2016 NIST Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170729.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-10|PAPER Mon-P-1-4-10 — Estimation of Gap Between Current Language Models and Human Performance]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Estimation of Gap Between Current Language Models and Human Performance</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-1|PAPER Wed-P-7-3-1 — Sinusoidal Partials Tracking for Singing Analysis Using the Heuristic of the Minimal Frequency and Magnitude Difference]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sinusoidal Partials Tracking for Singing Analysis Using the Heuristic of the Minimal Frequency and Magnitude Difference</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-1|PAPER Wed-SS-6-2-1 — Team ELISA System for DARPA LORELEI Speech Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team ELISA System for DARPA LORELEI Speech Evaluation 2016</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171508.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-3|PAPER Thu-P-9-3-3 — Mapping Across Feature Spaces in Forensic Voice Comparison: The Contribution of Auditory-Based Voice Quality to (Semi-)Automatic System Testing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mapping Across Feature Spaces in Forensic Voice Comparison: The Contribution of Auditory-Based Voice Quality to (Semi-)Automatic System Testing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171096.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-1|PAPER Thu-P-9-1-1 — Improved Automatic Speech Recognition Using Subband Temporal Envelope Features and Time-Delay Neural Network Denoising Autoencoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Automatic Speech Recognition Using Subband Temporal Envelope Features and Time-Delay Neural Network Denoising Autoencoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171746.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-9|PAPER Tue-SS-5-11-9 — Effects of Talker Dialect, Gender & Race on Accuracy of Bing Speech and YouTube Automatic Captions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Talker Dialect, Gender & Race on Accuracy of Bing Speech and YouTube Automatic Captions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171360.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-2-5|PAPER Tue-O-4-2-5 — Single-Ended Prediction of Listening Effort Based on Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Single-Ended Prediction of Listening Effort Based on Automatic Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171168.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-7|PAPER Wed-P-6-4-7 — Listening in the Dips: Comparing Relevant Features for Speech Recognition in Humans and Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Listening in the Dips: Comparing Relevant Features for Speech Recognition in Humans and Machines</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-3|PAPER Wed-S&T-6-B-3 — Towards an Autarkic Embedded Cognitive User Interface]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards an Autarkic Embedded Cognitive User Interface</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170934.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-5|PAPER Wed-O-7-1-5 — The Phonological Status of the French Initial Accent and its Role in Semantic Processing: An Event-Related Potentials Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Phonological Status of the French Initial Accent and its Role in Semantic Processing: An Event-Related Potentials Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171363.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-6|PAPER Tue-P-5-2-6 — Automatic Prediction of Speech Evaluation Metrics for Dysarthric Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Prediction of Speech Evaluation Metrics for Dysarthric Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170188.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-4-5|PAPER Tue-O-4-4-5 — Detecting Overlapped Speech on Short Timeframes Using Deep Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting Overlapped Speech on Short Timeframes Using Deep Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-4-11-2|PAPER Tue-SS-4-11-2 — Attentional Factors in Listeners’ Uptake of Gesture Cues During Speech Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attentional Factors in Listeners’ Uptake of Gesture Cues During Speech Processing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170458.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-6|PAPER Tue-O-5-2-6 — The 2016 NIST Speaker Recognition Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The 2016 NIST Speaker Recognition Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171093.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-6|PAPER Wed-O-7-4-6 — Topic Identification for Speech Without ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Topic Identification for Speech Without ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171641.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-11|PAPER Wed-P-8-2-11 — Complexity in Speech and its Relation to Emotional Bond in Therapist-Patient Interactions During Suicide Risk Assessment Interviews]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Complexity in Speech and its Relation to Emotional Bond in Therapist-Patient Interactions During Suicide Risk Assessment Interviews</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171691.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-8-11-8|PAPER Wed-SS-8-11-8 — Does Posh English Sound Attractive?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Does Posh English Sound Attractive?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170381.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-4|PAPER Wed-P-7-4-4 — Acoustic Evaluation of Nasality in Cerebellar Syndromes]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Evaluation of Nasality in Cerebellar Syndromes</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171088.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-5|PAPER Mon-O-1-2-5 — Multimodal Prediction of Affective Dimensions via Fusing Multiple Regression Techniques]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multimodal Prediction of Affective Dimensions via Fusing Multiple Regression Techniques</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170694.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-4|PAPER Wed-P-8-4-4 — Denoising Recurrent Neural Network for Deep Bidirectional LSTM Based Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Denoising Recurrent Neural Network for Deep Bidirectional LSTM Based Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170402.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-13|PAPER Thu-P-9-4-13 — An Expanded Taxonomy of Semiotic Classes for Text Normalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Expanded Taxonomy of Semiotic Classes for Text Normalization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171129.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-8|PAPER Wed-SS-7-1-8 — Improving DNN Bluetooth Narrowband Acoustic Models by Cross-Bandwidth and Cross-Lingual Initialization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving DNN Bluetooth Narrowband Acoustic Models by Cross-Bandwidth and Cross-Lingual Initialization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171081.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-3|PAPER Wed-O-7-8-3 — Semi-Supervised Learning of a Pronunciation Dictionary from Disjoint Phonemic Transcripts and Text]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised Learning of a Pronunciation Dictionary from Disjoint Phonemic Transcripts and Text</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170634.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-2|PAPER Wed-P-6-3-2 — Constructing Acoustic Distances Between Subwords and States Obtained from a Deep Neural Network for Spoken Term Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Constructing Acoustic Distances Between Subwords and States Obtained from a Deep Neural Network for Spoken Term Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170717.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-8|PAPER Mon-P-1-4-8 — Use of Global and Acoustic Features Associated with Contextual Factors to Adapt Language Models for Spontaneous Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Use of Global and Acoustic Features Associated with Contextual Factors to Adapt Language Models for Spontaneous Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171010.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-2-5|PAPER Tue-O-3-2-5 — Acoustic-to-Articulatory Mapping Based on Mixture of Probabilistic Canonical Correlation Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic-to-Articulatory Mapping Based on Mixture of Probabilistic Canonical Correlation Analysis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170961.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-10-3|PAPER Tue-O-4-10-3 — Parallel-Data-Free Many-to-Many Voice Conversion Based on DNN Integrated with Eigenspace Using a Non-Parallel Speech Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parallel-Data-Free Many-to-Many Voice Conversion Based on DNN Integrated with Eigenspace Using a Non-Parallel Speech Corpus</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170728.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-2|PAPER Tue-O-5-8-2 — Automatic Scoring of Shadowing Speech Based on DNN Posteriors and Their DTW]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Scoring of Shadowing Speech Based on DNN Posteriors and Their DTW</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-11|PAPER Thu-P-9-4-11 — Tacotron: Towards End-to-End Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tacotron: Towards End-to-End Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170900.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-2|PAPER Mon-O-1-10-2 — Articulatory Text-to-Speech Synthesis Using the Digital Waveguide Mesh Driven by a Deep Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulatory Text-to-Speech Synthesis Using the Digital Waveguide Mesh Driven by a Deep Neural Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171194.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-8|PAPER Wed-P-8-2-8 — Big Five vs. Prosodic Features as Cues to Detect Abnormality in SSPNET-Personality Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Big Five vs. Prosodic Features as Cues to Detect Abnormality in SSPNET-Personality Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-2|PAPER Mon-SS-2-8-2 — Ensemble Learning for Countermeasure of Audio Replay Spoofing Attack in ASVspoof2017]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ensemble Learning for Countermeasure of Audio Replay Spoofing Attack in ASVspoof2017</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170608.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-5|PAPER Mon-P-2-2-5 — Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-7|PAPER Wed-P-7-4-7 — An Avatar-Based System for Identifying Individuals Likely to Develop Dementia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Avatar-Based System for Identifying Individuals Likely to Develop Dementia</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170179.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-2|PAPER Mon-O-1-2-2 — Acoustic-Prosodic and Physiological Response to Stressful Interactions in Children with Autism Spectrum Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic-Prosodic and Physiological Response to Stressful Interactions in Children with Autism Spectrum Disorder</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170426.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-10-5|PAPER Wed-O-8-10-5 — Multi-Scale Context Adaptation for Improving Child Automatic Speech Recognition in Child-Adult Spoken Interactions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Scale Context Adaptation for Improving Child Automatic Speech Recognition in Child-Adult Spoken Interactions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170833.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-8-11-5|PAPER Wed-SS-8-11-5 — Social Attractiveness in Dialogs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Social Attractiveness in Dialogs</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170922.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-4|PAPER Thu-O-10-8-4 — The Social Life of Setswana Ejectives]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Social Life of Setswana Ejectives</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170620.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-4-1|PAPER Tue-O-3-4-1 — Deep Neural Network Embeddings for Text-Independent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Neural Network Embeddings for Text-Independent Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171586.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-4|PAPER Tue-P-3-2-4 — Extended Variability Modeling and Unsupervised Adaptation for PLDA Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extended Variability Modeling and Unsupervised Adaptation for PLDA Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170122.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-7|PAPER Mon-P-2-2-7 — Audiovisual Recalibration of Vowel Categories]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audiovisual Recalibration of Vowel Categories</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171620.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-12|PAPER Tue-P-5-4-12 — Conditional Generative Adversarial Networks for Speech Enhancement and Noise-Robust Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Conditional Generative Adversarial Networks for Speech Enhancement and Noise-Robust Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171719.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-7|PAPER Mon-P-2-1-7 — Emotional Thin-Slicing: A Proposal for a Short- and Long-Term Division of Emotional Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotional Thin-Slicing: A Proposal for a Short- and Long-Term Division of Emotional Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171680.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-3|PAPER Mon-P-1-4-3 — Phone Duration Modeling for LVCSR Using Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phone Duration Modeling for LVCSR Using Neural Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170620.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-4-1|PAPER Tue-O-3-4-1 — Deep Neural Network Embeddings for Text-Independent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Neural Network Embeddings for Text-Independent Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170129.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-1|PAPER Tue-P-4-1-1 — An Exploration of Dropout with LSTMs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Exploration of Dropout with LSTMs</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171323.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-1|PAPER Tue-P-4-2-1 — Backstitch: Counteracting Finite-Sample Bias via Negative Steps]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Backstitch: Counteracting Finite-Sample Bias via Negative Steps</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170588.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-2|PAPER Wed-O-7-8-2 — Acoustic Data-Driven Lexicon Learning Based on a Greedy Pronunciation Selection Framework]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Data-Driven Lexicon Learning Based on a Greedy Pronunciation Selection Framework</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170601.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-2|PAPER Thu-O-9-4-2 — The Kaldi OpenKWS System: Improving Low Resource Keyword Search]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Kaldi OpenKWS System: Improving Low Resource Keyword Search</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170455.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-3|PAPER Wed-O-8-8-3 — Improving Child Speech Disorder Assessment by Incorporating Out-of-Domain Adult Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Child Speech Disorder Assessment by Incorporating Out-of-Domain Adult Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170213.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-4|PAPER Wed-P-7-3-4 — Classification-Based Detection of Glottal Closure Instants from Speech Signals]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Classification-Based Detection of Glottal Closure Instants from Speech Signals</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172024.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-3|PAPER Wed-S&T-6-A-3 — WebSubDub — Experimental System for Creating High-Quality Alternative Audio Track for TV Broadcasting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WebSubDub — Experimental System for Creating High-Quality Alternative Audio Track for TV Broadcasting</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172026.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-4|PAPER Wed-S&T-6-A-4 — Voice Conservation and TTS System for People Facing Total Laryngectomy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Conservation and TTS System for People Facing Total Laryngectomy</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171139.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-14|PAPER Wed-SS-7-1-14 — Rapid Development of TTS Corpora for Four South African Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rapid Development of TTS Corpora for Four South African Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170797.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-3|PAPER Tue-O-5-2-3 — Nuance - Politecnico di Torino’s 2016 NIST Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nuance - Politecnico di Torino’s 2016 NIST Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170755.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-5|PAPER Mon-P-2-3-5 — Optimizing DNN Adaptation for Recognition of Enhanced Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Optimizing DNN Adaptation for Recognition of Enhanced Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170855.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-4|PAPER Wed-SS-7-1-4 — Eliciting Meaningful Units from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Eliciting Meaningful Units from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171504.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-11|PAPER Tue-P-5-4-11 — A Comparison of Perceptually Motivated Loss Functions for Binary Mask Estimation in Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Perceptually Motivated Loss Functions for Binary Mask Estimation in Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170047.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-5|PAPER Wed-O-7-8-5 — Pronunciation Learning with RNN-Transducers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Pronunciation Learning with RNN-Transducers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170906.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-4|PAPER Mon-SS-1-8-4 — Countermeasures for Automatic Speaker Verification Replay Spoofing Attack: On Data Augmentation, Feature Representation, Classification and Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Countermeasures for Automatic Speaker Verification Replay Spoofing Attack: On Data Augmentation, Feature Representation, Classification and Fusion</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171445.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-6|PAPER Thu-SS-9-10-6 — End-to-End Deep Learning Framework for Speech Paralinguistics Detection Based on Perception Aware Spectrum]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Deep Learning Framework for Speech Paralinguistics Detection Based on Perception Aware Spectrum</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172047.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-B-4|PAPER Mon-S&T-2-B-4 — Emojive! Collecting Emotion Data from Speech and Facial Expression Using Mobile Game App]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emojive! Collecting Emotion Data from Speech and Facial Expression Using Mobile Game App</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170048.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-13|PAPER Mon-P-2-2-13 — Wireless Neck-Surface Accelerometer and Microphone on Flex Circuit with Application to Noise-Robust Monitoring of Lombard Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Wireless Neck-Surface Accelerometer and Microphone on Flex Circuit with Application to Noise-Robust Monitoring of Lombard Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171250.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-9|PAPER Thu-P-9-4-9 — Real-Time Reactive Speech Synthesis: Incorporating Interruptions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Reactive Speech Synthesis: Incorporating Interruptions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-4-5|PAPER Mon-O-1-4-5 — Improving Speech Intelligibility in Binaural Hearing Aids by Estimating a Time-Frequency Mask with a Weighted Least Squares Classifier]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Speech Intelligibility in Binaural Hearing Aids by Estimating a Time-Frequency Mask with a Weighted Least Squares Classifier</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170218.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-2|PAPER Wed-P-8-2-2 — Attention Networks for Modeling Behaviors in Addiction Counseling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention Networks for Modeling Behaviors in Addiction Counseling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170668.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-10|PAPER Tue-P-3-2-10 — Domain Mismatch Modeling of Out-Domain i-Vectors for PLDA Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain Mismatch Modeling of Out-Domain i-Vectors for PLDA Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170366.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-6|PAPER Tue-O-5-8-6 — Automatic Assessment of Non-Native Prosody by Measuring Distances on Prosodic Label Sequences]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Assessment of Non-Native Prosody by Measuring Distances on Prosodic Label Sequences</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171521.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-6|PAPER Wed-O-8-1-6 — Call My Net Corpus: A Multilingual Corpus for Evaluation of Speaker Recognition Technology]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Call My Net Corpus: A Multilingual Corpus for Evaluation of Speaker Recognition Technology</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170894.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-8|PAPER Thu-P-9-4-8 — Predicting Head Pose from Speech with a Conditional Variational Autoencoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Head Pose from Speech with a Conditional Variational Autoencoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170746.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-4|PAPER Mon-P-1-2-4 — Frame-Wise Dynamic Threshold Based Polyphonic Acoustic Event Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Frame-Wise Dynamic Threshold Based Polyphonic Acoustic Event Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171555.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-4-4|PAPER Thu-O-10-4-4 — The Extended SPaRKy Restaurant Corpus: Designing a Corpus with Variable Information Density]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Extended SPaRKy Restaurant Corpus: Designing a Corpus with Variable Information Density</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170546.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-5|PAPER Tue-O-3-1-5 — Direct Acoustics-to-Word Models for English Conversational Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Direct Acoustics-to-Word Models for English Conversational Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171289.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-11-4|PAPER Wed-SS-7-11-4 — Computational Simulations of Temporal Vocalization Behavior in Adult-Child Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Computational Simulations of Temporal Vocalization Behavior in Adult-Child Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171593.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-4|PAPER Tue-P-4-3-4 — Towards Deep End-of-Turn Prediction for Situated Spoken Dialogue Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Deep End-of-Turn Prediction for Situated Spoken Dialogue Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170455.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-3|PAPER Wed-O-8-8-3 — Improving Child Speech Disorder Assessment by Incorporating Out-of-Domain Adult Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Child Speech Disorder Assessment by Incorporating Out-of-Domain Adult Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170620.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-4-1|PAPER Tue-O-3-4-1 — Deep Neural Network Embeddings for Text-Independent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Neural Network Embeddings for Text-Independent Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170480.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-4|PAPER Thu-O-9-4-4 — Compressed Time Delay Neural Network for Small-Footprint Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Compressed Time Delay Neural Network for Small-Footprint Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171198.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-3|PAPER Mon-SS-1-11-3 — Jee haan, I’d like both, por favor: Elicitation of a Code-Switched Corpus of Hindi–English and Spanish–English Human–Machine Dialog]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Jee haan, I’d like both, por favor: Elicitation of a Code-Switched Corpus of Hindi–English and Spanish–English Human–Machine Dialog</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171213.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-11|PAPER Tue-P-4-3-11 — Human and Automated Scoring of Fluency, Pronunciation and Intonation During Human–Machine Spoken Dialog Interactions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human and Automated Scoring of Fluency, Pronunciation and Intonation During Human–Machine Spoken Dialog Interactions</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171205.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-6-5|PAPER Wed-O-7-6-5 — Rushing to Judgement: How do Laypeople Rate Caller Engagement in Thin-Slice Videos of Human–Machine Dialog?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rushing to Judgement: How do Laypeople Rate Caller Engagement in Thin-Slice Videos of Human–Machine Dialog?</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-5|PAPER Wed-O-7-10-5 — Improving Sub-Phone Modeling for Better Native Language Identification with Non-Native English Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Sub-Phone Modeling for Better Native Language Identification with Non-Native English Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-4|PAPER Thu-SS-10-10-4 — Exploring Fusion Methods and Feature Space for the Classification of Paralinguistic Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Fusion Methods and Feature Space for the Classification of Paralinguistic Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171023.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-10|PAPER Wed-P-6-2-10 — Null-Hypothesis LLR: A Proposal for Forensic Automatic Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Null-Hypothesis LLR: A Proposal for Forensic Automatic Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170301.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-1|PAPER Mon-SS-1-11-1 — Longitudinal Speaker Clustering and Verification Corpus with Code-Switching Frisian-Dutch Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Longitudinal Speaker Clustering and Verification Corpus with Code-Switching Frisian-Dutch Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170391.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-2|PAPER Mon-SS-1-11-2 — Exploiting Untranscribed Broadcast Data for Improved Code-Switching Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploiting Untranscribed Broadcast Data for Improved Code-Switching Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-12|PAPER Thu-P-9-4-12 — Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170332.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-9|PAPER Tue-P-5-1-9 — Prosody Analysis of L2 English for Naturalness Evaluation Through Speech Modification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosody Analysis of L2 English for Naturalness Evaluation Through Speech Modification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170297.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-14|PAPER Tue-P-5-4-14 — Binaural Reverberant Speech Separation Based on Deep Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Binaural Reverberant Speech Separation Based on Deep Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171231.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-11|PAPER Wed-P-6-3-11 — Evaluating Automatic Topic Segmentation as a Segment Retrieval Task]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluating Automatic Topic Segmentation as a Segment Retrieval Task</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170270.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-2-3|PAPER Thu-O-9-2-3 — A Triplet Ranking-Based Neural Network for Speaker Diarization and Linking]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Triplet Ranking-Based Neural Network for Speaker Diarization and Linking</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170593.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-9|PAPER Tue-P-5-3-9 — Using Approximated Auditory Roughness as a Pre-Filtering Feature for Human Screaming and Affective Speech AED]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Approximated Auditory Roughness as a Pre-Filtering Feature for Human Screaming and Affective Speech AED</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171075.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-2|PAPER Wed-O-7-4-2 — ClockWork-RNN Based Architectures for Slot Filling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ClockWork-RNN Based Architectures for Slot Filling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170734.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-8|PAPER Tue-P-3-1-8 — Time-Varying Autoregressions for Speaker Verification in Reverberant Conditions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time-Varying Autoregressions for Speaker Verification in Reverberant Conditions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171307.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-12|PAPER Wed-P-6-2-12 — IITG-Indigo System for NIST 2016 SRE Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">IITG-Indigo System for NIST 2016 SRE Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170831.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-15|PAPER Wed-P-7-3-15 — Unsupervised Filterbank Learning Using Convolutional Restricted Boltzmann Machine for Environmental Sound Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Filterbank Learning Using Convolutional Restricted Boltzmann Machine for Environmental Sound Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170593.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-9|PAPER Tue-P-5-3-9 — Using Approximated Auditory Roughness as a Pre-Filtering Feature for Human Screaming and Affective Speech AED]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Approximated Auditory Roughness as a Pre-Filtering Feature for Human Screaming and Affective Speech AED</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-1|PAPER Wed-SS-6-2-1 — Team ELISA System for DARPA LORELEI Speech Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team ELISA System for DARPA LORELEI Speech Evaluation 2016</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171203.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-10-1|PAPER Wed-O-8-10-1 — Effectively Building Tera Scale MaxEnt Language Models Incorporating Non-Linguistic Signals]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effectively Building Tera Scale MaxEnt Language Models Incorporating Non-Linguistic Signals</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170493.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-10-4|PAPER Wed-O-8-10-4 — Sparse Non-Negative Matrix Language Modeling: Maximum Entropy Flexibility on the Cheap]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sparse Non-Negative Matrix Language Modeling: Maximum Entropy Flexibility on the Cheap</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170753.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-6-6|PAPER Wed-O-7-6-6 — Hyperarticulation of Corrections in Multilingual Dialogue Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hyperarticulation of Corrections in Multilingual Dialogue Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171568.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-8|PAPER Tue-P-4-3-8 — Entrainment in Multi-Party Spoken Dialogues at Multiple Linguistic Levels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Entrainment in Multi-Party Spoken Dialogues at Multiple Linguistic Levels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170285.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-1|PAPER Wed-O-6-1-1 — Aerodynamic Features of French Fricatives]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Aerodynamic Features of French Fricatives</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170159.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-10-1|PAPER Tue-O-5-10-1 — Inferring Stance from Prosody]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Inferring Stance from Prosody</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170530.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-1|PAPER Wed-P-6-2-1 — Calibration Approaches for Language Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Calibration Approaches for Language Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170605.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-2-6|PAPER Thu-O-10-2-6 — Improving Robustness of Speaker Recognition to New Conditions Using Unlabeled Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Robustness of Speaker Recognition to New Conditions Using Unlabeled Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170818.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-1-2|PAPER Mon-O-2-1-2 — A Batch Noise Contrastive Estimation Approach for Training Large Vocabulary Language Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Batch Noise Contrastive Estimation Approach for Training Large Vocabulary Language Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170729.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-10|PAPER Mon-P-1-4-10 — Estimation of Gap Between Current Language Models and Human Performance]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Estimation of Gap Between Current Language Models and Human Performance</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170738.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-4-11-6|PAPER Tue-SS-4-11-6 — Incremental Dialogue Act Recognition: Token- vs Chunk-Based Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incremental Dialogue Act Recognition: Token- vs Chunk-Based Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170147.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-10-3|PAPER Wed-O-8-10-3 — Approximated and Domain-Adapted LSTM Language Models for First-Pass Decoding in Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Approximated and Domain-Adapted LSTM Language Models for First-Pass Decoding in Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171555.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-4-4|PAPER Thu-O-10-4-4 — The Extended SPaRKy Restaurant Corpus: Designing a Corpus with Variable Information Density]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Extended SPaRKy Restaurant Corpus: Designing a Corpus with Variable Information Density</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170518.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-1|PAPER Wed-O-7-4-1 — Towards Zero-Shot Frame Semantic Parsing for Domain Scaling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Zero-Shot Frame Semantic Parsing for Domain Scaling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171525.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-8|PAPER Wed-P-8-3-8 — To Plan or not to Plan? Discourse Planning in Slot-Value Informed Sequence to Sequence Models for Language Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">To Plan or not to Plan? Discourse Planning in Slot-Value Informed Sequence to Sequence Models for Language Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-5|PAPER Mon-O-1-1-5 — English Conversational Telephone Speech Recognition by Humans and Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">English Conversational Telephone Speech Recognition by Humans and Machines</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171637.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-10-3|PAPER Tue-O-3-10-3 — Progressive Neural Networks for Transfer Learning in Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Progressive Neural Networks for Transfer Learning in Emotion Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170548.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-4|PAPER Tue-O-4-8-4 — Capturing Long-Term Temporal Dependencies with Convolutional Networks for Continuous Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Capturing Long-Term Temporal Dependencies with Convolutional Networks for Continuous Emotion Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170166.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-1|PAPER Wed-P-6-1-1 — Developing On-Line Speaker Diarization System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Developing On-Line Speaker Diarization System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170926.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-5|PAPER Tue-SS-3-11-5 — Crowd-Sourced Design of Artificial Attentive Listeners]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Crowd-Sourced Design of Artificial Attentive Listeners</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171672.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-13|PAPER Tue-P-5-4-13 — Speech Enhancement Using Bayesian Wavenet]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Enhancement Using Bayesian Wavenet</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171659.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-6-6|PAPER Wed-O-8-6-6 — Glottal Model Based Speech Beamforming for ad-hoc Microphone Arrays]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glottal Model Based Speech Beamforming for ad-hoc Microphone Arrays</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171373.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-6|PAPER Mon-SS-1-11-6 — Addressing Code-Switching in French/Algerian Arabic Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Addressing Code-Switching in French/Algerian Arabic Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170218.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-2|PAPER Wed-P-8-2-2 — Attention Networks for Modeling Behaviors in Addiction Counseling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention Networks for Modeling Behaviors in Addiction Counseling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171719.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-7|PAPER Mon-P-2-1-7 — Emotional Thin-Slicing: A Proposal for a Short- and Long-Term Division of Emotional Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotional Thin-Slicing: A Proposal for a Short- and Long-Term Division of Emotional Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171066.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-7|PAPER Thu-SS-9-10-7 — Infected Phonemes: How a Cold Impairs Speech on a Phonetic Level]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Infected Phonemes: How a Cold Impairs Speech on a Phonetic Level</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170765.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-2|PAPER Wed-P-7-2-2 — Temporal Dynamics of Lateral Channel Formation in /l/: 3D EMA Data from Australian English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Temporal Dynamics of Lateral Channel Formation in /l/: 3D EMA Data from Australian English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171274.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-10|PAPER Wed-P-6-1-10 — A Mostly Data-Driven Approach to Inverse Text Normalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Mostly Data-Driven Approach to Inverse Text Normalization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170456.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-3|PAPER Mon-SS-2-8-3 — A Study on Replay Attack and Anti-Spoofing for Automatic Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Study on Replay Attack and Anti-Spoofing for Automatic Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-2|PAPER Tue-P-3-2-2 — Deep Speaker Feature Learning for Text-Independent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Speaker Feature Learning for Text-Independent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170547.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-5|PAPER Mon-P-1-4-5 — Empirical Evaluation of Parallel Training Algorithms on Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Empirical Evaluation of Parallel Training Algorithms on Acoustic Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170305.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-3|PAPER Wed-O-7-2-3 — Recognizing Multi-Talker Speech with Permutation Invariant Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Recognizing Multi-Talker Speech with Permutation Invariant Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170078.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-4-4|PAPER Mon-O-1-4-4 — Speech Enhancement Based on Harmonic Estimation Combined with MMSE to Improve Speech Intelligibility for Cochlear Implant Recipients]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Enhancement Based on Harmonic Estimation Combined with MMSE to Improve Speech Intelligibility for Cochlear Implant Recipients</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170055.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-2|PAPER Tue-P-5-3-2 — Time Delay Histogram Based Speech Source Separation Using a Planar Array]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time Delay Histogram Based Speech Source Separation Using a Planar Array</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170466.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-3|PAPER Wed-P-8-2-3 — Computational Analysis of Acoustic Descriptors in Psychotic Patients]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Computational Analysis of Acoustic Descriptors in Psychotic Patients</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170458.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-6|PAPER Tue-O-5-2-6 — The 2016 NIST Speaker Recognition Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The 2016 NIST Speaker Recognition Evaluation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170438.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-5|PAPER Tue-P-3-2-5 — Improving the Effectiveness of Speaker Verification Domain Adaptation with Inadequate In-Domain Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving the Effectiveness of Speaker Verification Domain Adaptation with Inadequate In-Domain Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170537.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-2|PAPER Tue-O-5-2-2 — The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170626.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-2|PAPER Mon-O-2-2-2 — Automatic Paraphasia Detection from Aphasic Speech: A Preliminary Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Paraphasia Detection from Aphasic Speech: A Preliminary Study</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170094.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-10-5|PAPER Tue-O-3-10-5 — Discretized Continuous Speech Emotion Recognition with Multi-Task Deep Recurrent Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discretized Continuous Speech Emotion Recognition with Multi-Task Deep Recurrent Neural Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170873.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-3|PAPER Tue-P-4-1-3 — Unfolded Deep Recurrent Convolutional Neural Network with Jump Ahead Connections for Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unfolded Deep Recurrent Convolutional Neural Network with Jump Ahead Connections for Acoustic Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170793.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-5|PAPER Thu-P-9-1-5 — Uncertainty Decoding with Adaptive Sampling for Noise Robust DNN-Based Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Uncertainty Decoding with Adaptive Sampling for Noise Robust DNN-Based Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170802.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-7|PAPER Thu-P-9-4-7 — Evaluation of a Silent Speech Interface Based on Magnetic Sensing and Deep Learning for a Phonetically Rich Vocabulary]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluation of a Silent Speech Interface Based on Magnetic Sensing and Deep Learning for a Phonetically Rich Vocabulary</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171314.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-4|PAPER Wed-P-6-2-4 — Tied Hidden Factors in Neural Networks for End-to-End Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tied Hidden Factors in Neural Networks for End-to-End Speaker Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170084.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-6|PAPER Wed-P-6-2-6 — Domain Adaptation of PLDA Models in Broadcast Diarization by Means of Unsupervised Speaker Clustering]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain Adaptation of PLDA Models in Broadcast Diarization by Means of Unsupervised Speaker Clustering</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171178.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-6|PAPER Wed-P-8-3-6 — ASR Error Management for Improving Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ASR Error Management for Improving Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170360.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-1|PAPER Mon-SS-2-8-1 — Audio Replay Attack Detection with Deep Learning Frameworks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Replay Attack Detection with Deep Learning Frameworks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-5|PAPER Mon-O-2-10-5 — Acoustic Modeling for Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Modeling for Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171284.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-3|PAPER Tue-P-4-2-3 — End-to-End Training of Acoustic Models for Large Vocabulary Continuous Speech Recognition with TensorFlow]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Training of Acoustic Models for Large Vocabulary Continuous Speech Recognition with TensorFlow</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170928.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-12|PAPER Wed-SS-7-1-12 — Implementation of a Radiology Speech Recognition System for Estonian Using Open Source Software]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Implementation of a Radiology Speech Recognition System for Estonian Using Open Source Software</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171117.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-6|PAPER Wed-O-7-8-6 — Learning Similarity Functions for Pronunciation Variations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Similarity Functions for Pronunciation Variations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171390.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-2|PAPER Thu-P-9-3-2 — Polyglot and Speech Corpus Tools: A System for Representing, Integrating, and Querying Speech Corpora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Polyglot and Speech Corpus Tools: A System for Representing, Integrating, and Querying Speech Corpora</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170070.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-4-2|PAPER Mon-O-2-4-2 — An Investigation of Crowd Speech for Room Occupancy Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Investigation of Crowd Speech for Room Occupancy Estimation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170512.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-3|PAPER Tue-O-4-8-3 — An Investigation of Emotion Prediction Uncertainty Using Gaussian Mixture Regression]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Investigation of Emotion Prediction Uncertainty Using Gaussian Mixture Regression</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170266.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-6|PAPER Tue-P-3-1-6 — Incorporating Local Acoustic Variability Information into Short Duration Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incorporating Local Acoustic Variability Information into Short Duration Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170596.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-4|PAPER Wed-O-7-10-4 — Investigating Scalability in Hierarchical Language Identification System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Scalability in Hierarchical Language Identification System</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170836.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-3|PAPER Wed-O-8-1-3 — Independent Modelling of High and Low Energy Speech Frames for Spoofing Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Independent Modelling of High and Low Energy Speech Frames for Spoofing Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170286.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-2|PAPER Wed-P-6-2-2 — Bidirectional Modelling for Short Duration Language Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bidirectional Modelling for Short Duration Language Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171377.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-4|PAPER Mon-SS-2-8-4 — Replay Attack Detection Using DNN for Channel Discrimination]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Replay Attack Detection Using DNN for Channel Discrimination</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172051.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-A-4|PAPER Mon-S&T-2-A-4 — HomeBank: A Repository for Long-Form Real-World Audio Recordings of Children]]</div>|^<div class="cpauthorindexpersoncardpapertitle">HomeBank: A Repository for Long-Form Real-World Audio Recordings of Children</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171409.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-3|PAPER Wed-SS-6-11-3 — What do Babies Hear? Analyses of Child- and Adult-Directed Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">What do Babies Hear? Analyses of Child- and Adult-Directed Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171418.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-4|PAPER Wed-SS-6-11-4 — A New Workflow for Semi-Automatized Annotations: Tests with Long-Form Naturalistic Recordings of Children's Language Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A New Workflow for Semi-Automatized Annotations: Tests with Long-Form Naturalistic Recordings of Children's Language Environments</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-4|PAPER Thu-SS-9-10-4 — Description of the Homebank Child/Adult Addressee Corpus (HB-CHAAC)]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Description of the Homebank Child/Adult Addressee Corpus (HB-CHAAC)</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170937.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-11-2|PAPER Wed-SS-7-11-2 — Relating Unsupervised Word Segmentation to Reported Vocabulary Acquisition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Relating Unsupervised Word Segmentation to Reported Vocabulary Acquisition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171110.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-15|PAPER Tue-P-5-1-15 — MMN Responses in Adults After Exposure to Bimodal and Unimodal Frequency Distributions of Rotated Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MMN Responses in Adults After Exposure to Bimodal and Unimodal Frequency Distributions of Rotated Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171066.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-7|PAPER Thu-SS-9-10-7 — Infected Phonemes: How a Cold Impairs Speech on a Phonetic Level]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Infected Phonemes: How a Cold Impairs Speech on a Phonetic Level</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171274.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-10|PAPER Wed-P-6-1-10 — A Mostly Data-Driven Approach to Inverse Text Normalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Mostly Data-Driven Approach to Inverse Text Normalization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170159.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-10-1|PAPER Tue-O-5-10-1 — Inferring Stance from Prosody]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Inferring Stance from Prosody</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171585.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-7|PAPER Wed-P-8-1-7 — Canonical Correlation Analysis and Prediction of Perceived Rhythmic Prominences and Pitch Tones in Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Canonical Correlation Analysis and Prediction of Perceived Rhythmic Prominences and Pitch Tones in Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170466.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-3|PAPER Wed-P-8-2-3 — Computational Analysis of Acoustic Descriptors in Psychotic Patients]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Computational Analysis of Acoustic Descriptors in Psychotic Patients</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171110.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-15|PAPER Tue-P-5-1-15 — MMN Responses in Adults After Exposure to Bimodal and Unimodal Frequency Distributions of Rotated Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MMN Responses in Adults After Exposure to Bimodal and Unimodal Frequency Distributions of Rotated Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171287.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-2|PAPER Wed-SS-6-11-2 — The LENA System Applied to Swedish: Reliability of the Adult Word Count Estimate]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The LENA System Applied to Swedish: Reliability of the Adult Word Count Estimate</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171289.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-11-4|PAPER Wed-SS-7-11-4 — Computational Simulations of Temporal Vocalization Behavior in Adult-Child Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Computational Simulations of Temporal Vocalization Behavior in Adult-Child Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170216.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-3|PAPER Wed-P-7-4-3 — Cross-Database Models for the Classification of Dysarthria Presence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Database Models for the Classification of Dysarthria Presence</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170458.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-6|PAPER Tue-O-5-2-6 — The 2016 NIST Speaker Recognition Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The 2016 NIST Speaker Recognition Evaluation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170438.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-5|PAPER Tue-P-3-2-5 — Improving the Effectiveness of Speaker Verification Domain Adaptation with Inadequate In-Domain Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving the Effectiveness of Speaker Verification Domain Adaptation with Inadequate In-Domain Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170819.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-3|PAPER Mon-O-2-2-3 — Evaluation of the Neurological State of People with Parkinson’s Disease Using i-Vectors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluation of the Neurological State of People with Parkinson’s Disease Using i-Vectors</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171078.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-6|PAPER Mon-O-2-2-6 — Convolutional Neural Network to Model Articulation Impairments in Patients with Parkinson’s Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Convolutional Neural Network to Model Articulation Impairments in Patients with Parkinson’s Disease</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170416.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-7|PAPER Tue-P-5-2-7 — Apkinson — A Mobile Monitoring Solution for Parkinson’s Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Apkinson — A Mobile Monitoring Solution for Parkinson’s Disease</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171572.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-10|PAPER Wed-P-7-4-10 — An N-Gram Based Approach to the Automatic Diagnosis of Alzheimer’s Disease from Spoken Language]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An N-Gram Based Approach to the Automatic Diagnosis of Alzheimer’s Disease from Spoken Language</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170268.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-6|PAPER Wed-SS-7-1-6 — Machine Assisted Analysis of Vowel Length Contrasts in Wolof]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Machine Assisted Analysis of Vowel Length Contrasts in Wolof</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170797.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-3|PAPER Tue-O-5-2-3 — Nuance - Politecnico di Torino’s 2016 NIST Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nuance - Politecnico di Torino’s 2016 NIST Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171496.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-12|PAPER Wed-P-8-3-12 — Speaker Dependency Analysis, Audiovisual Fusion Cues and a Multimodal BLSTM for Conversational Engagement Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Dependency Analysis, Audiovisual Fusion Cues and a Multimodal BLSTM for Conversational Engagement Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171150.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-6-4|PAPER Mon-O-2-6-4 — L1 Perceptions of L2 Prosody: The Interplay Between Intonation, Rhythm, and Speech Rate and Their Contribution to Accentedness and Comprehensibility]]</div>|^<div class="cpauthorindexpersoncardpapertitle">L1 Perceptions of L2 Prosody: The Interplay Between Intonation, Rhythm, and Speech Rate and Their Contribution to Accentedness and Comprehensibility</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170104.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-1|PAPER Wed-P-8-2-1 — The Perception of Emotions in Noisified Nonsense Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Perception of Emotions in Noisified Nonsense Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170626.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-2|PAPER Mon-O-2-2-2 — Automatic Paraphasia Detection from Aphasic Speech: A Preliminary Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Paraphasia Detection from Aphasic Speech: A Preliminary Study</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171637.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-10-3|PAPER Tue-O-3-10-3 — Progressive Neural Networks for Transfer Learning in Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Progressive Neural Networks for Transfer Learning in Emotion Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170094.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-10-5|PAPER Tue-O-3-10-5 — Discretized Continuous Speech Emotion Recognition with Multi-Task Deep Recurrent Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discretized Continuous Speech Emotion Recognition with Multi-Task Deep Recurrent Neural Network</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170548.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-4|PAPER Tue-O-4-8-4 — Capturing Long-Term Temporal Dependencies with Convolutional Networks for Continuous Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Capturing Long-Term Temporal Dependencies with Convolutional Networks for Continuous Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170179.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-2|PAPER Mon-O-1-2-2 — Acoustic-Prosodic and Physiological Response to Stressful Interactions in Children with Autism Spectrum Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic-Prosodic and Physiological Response to Stressful Interactions in Children with Autism Spectrum Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170392.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-10|PAPER Wed-P-6-3-10 — Hierarchical Recurrent Neural Network for Story Segmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hierarchical Recurrent Neural Network for Story Segmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171735.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-8|PAPER Mon-P-2-1-8 — Predicting Epenthetic Vowel Quality from Acoustics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Epenthetic Vowel Quality from Acoustics</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170937.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-11-2|PAPER Wed-SS-7-11-2 — Relating Unsupervised Word Segmentation to Reported Vocabulary Acquisition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Relating Unsupervised Word Segmentation to Reported Vocabulary Acquisition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171689.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-11-6|PAPER Wed-SS-7-11-6 — Learning Weakly Supervised Multimodal Phoneme Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Weakly Supervised Multimodal Phoneme Embeddings</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171306.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-13|PAPER Wed-P-7-2-13 — A Quantitative Measure of the Impact of Coarticulation on Phone Discriminability]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Quantitative Measure of the Impact of Coarticulation on Phone Discriminability</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170301.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-1|PAPER Mon-SS-1-11-1 — Longitudinal Speaker Clustering and Verification Corpus with Code-Switching Frisian-Dutch Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Longitudinal Speaker Clustering and Verification Corpus with Code-Switching Frisian-Dutch Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170391.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-2|PAPER Mon-SS-1-11-2 — Exploiting Untranscribed Broadcast Data for Improved Code-Switching Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploiting Untranscribed Broadcast Data for Improved Code-Switching Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170303.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-2|PAPER Wed-O-8-8-2 — Multi-Stage DNN Training for Automatic Recognition of Dysarthric Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Stage DNN Training for Automatic Recognition of Dysarthric Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170199.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-5|PAPER Tue-P-5-3-5 — Weighted Spatial Covariance Matrix Estimation for MUSIC Based TDOA Estimation of Speech Source]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weighted Spatial Covariance Matrix Estimation for MUSIC Based TDOA Estimation of Speech Source</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-3|PAPER Mon-P-2-2-3 — Vocal Tract Airway Tissue Boundary Tracking for rtMRI Using Shape and Appearance Priors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Vocal Tract Airway Tissue Boundary Tracking for rtMRI Using Shape and Appearance Priors</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171395.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-3|PAPER Tue-SS-3-11-3 — Analysis of Engagement and User Experience with a Laughter Responsive Social Robot]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of Engagement and User Experience with a Laughter Responsive Social Robot</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171413.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-15|PAPER Tue-P-4-3-15 — Cross-Subject Continuous Emotion Recognition Using Speech and Body Motion in Dyadic Interactions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Subject Continuous Emotion Recognition Using Speech and Body Motion in Dyadic Interactions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170428.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-3|PAPER Thu-P-9-4-3 — Unit Selection with Hierarchical Cascaded Long Short Term Memory Bidirectional Recurrent Neural Nets]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unit Selection with Hierarchical Cascaded Long Short Term Memory Bidirectional Recurrent Neural Nets</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171042.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-4-11-5|PAPER Tue-SS-4-11-5 — A Computational Model for Phonetically Responsive Spoken Dialogue Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Computational Model for Phonetically Responsive Spoken Dialogue Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171433.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-6|PAPER Thu-O-10-8-6 — Shadowing Synthesized Speech — Segmental Analysis of Phonetic Convergence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Shadowing Synthesized Speech — Segmental Analysis of Phonetic Convergence</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170210.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-2|PAPER Mon-P-1-1-2 — Robust Source-Filter Separation of Speech Signal in the Phase Domain]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Source-Filter Separation of Speech Signal in the Phase Domain</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170211.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-5|PAPER Wed-O-7-2-5 — Channel Compensation in the Generalised Vector Taylor Series Approach to Robust ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Channel Compensation in the Generalised Vector Taylor Series Approach to Robust ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170196.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-2-2|PAPER Tue-O-4-2-2 — An Auditory Model of Speaker Size Perception for Voiced Speech Sounds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Auditory Model of Speaker Size Perception for Voiced Speech Sounds</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171600.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-8|PAPER Tue-P-5-1-8 — Directing Attention During Perceptual Training: A Preliminary Study of Phonetic Learning in Southern Min by Mandarin Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Directing Attention During Perceptual Training: A Preliminary Study of Phonetic Learning in Southern Min by Mandarin Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-4|PAPER Thu-P-9-4-4 — Utterance Selection for Optimizing Intelligibility of TTS Voices Trained on ASR Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Utterance Selection for Optimizing Intelligibility of TTS Voices Trained on ASR Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-5|PAPER Mon-O-2-10-5 — Acoustic Modeling for Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Modeling for Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171284.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-3|PAPER Tue-P-4-2-3 — End-to-End Training of Acoustic Models for Large Vocabulary Continuous Speech Recognition with TensorFlow]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Training of Acoustic Models for Large Vocabulary Continuous Speech Recognition with TensorFlow</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170838.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-5|PAPER Wed-P-7-2-5 — Mel-Cepstral Distortion of German Vowels in Different Information Density Contexts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mel-Cepstral Distortion of German Vowels in Different Information Density Contexts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171735.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-8|PAPER Mon-P-2-1-8 — Predicting Epenthetic Vowel Quality from Acoustics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Epenthetic Vowel Quality from Acoustics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-4|PAPER Tue-P-5-4-4 — Subjective Intelligibility of Deep Neural Network-Based Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Subjective Intelligibility of Deep Neural Network-Based Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171274.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-10|PAPER Wed-P-6-1-10 — A Mostly Data-Driven Approach to Inverse Text Normalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Mostly Data-Driven Approach to Inverse Text Normalization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171349.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-8-11-6|PAPER Wed-SS-8-11-6 — A Gender Bias in the Acoustic-Melodic Features of Charismatic Speech?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Gender Bias in the Acoustic-Melodic Features of Charismatic Speech?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171073.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-2|PAPER Tue-O-3-1-2 — CTC in the Context of Generalized Full-Sum HMM Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CTC in the Context of Generalized Full-Sum HMM Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170466.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-3|PAPER Wed-P-8-2-3 — Computational Analysis of Acoustic Descriptors in Psychotic Patients]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Computational Analysis of Acoustic Descriptors in Psychotic Patients</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171508.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-3|PAPER Thu-P-9-3-3 — Mapping Across Feature Spaces in Forensic Voice Comparison: The Contribution of Auditory-Based Voice Quality to (Semi-)Automatic System Testing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mapping Across Feature Spaces in Forensic Voice Comparison: The Contribution of Auditory-Based Voice Quality to (Semi-)Automatic System Testing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171320.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-4-5|PAPER Wed-O-8-4-5 — NMT-Based Segmentation and Punctuation Insertion for Real-Time Spoken Language Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">NMT-Based Segmentation and Punctuation Insertion for Real-Time Spoken Language Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171157.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-5|PAPER Tue-P-5-4-5 — Real-Time Modulation Enhancement of Temporal Envelopes for Increasing Speech Intelligibility]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Modulation Enhancement of Temporal Envelopes for Increasing Speech Intelligibility</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170738.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-4-11-6|PAPER Tue-SS-4-11-6 — Incremental Dialogue Act Recognition: Token- vs Chunk-Based Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incremental Dialogue Act Recognition: Token- vs Chunk-Based Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170366.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-6|PAPER Tue-O-5-8-6 — Automatic Assessment of Non-Native Prosody by Measuring Distances on Prosodic Label Sequences]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Assessment of Non-Native Prosody by Measuring Distances on Prosodic Label Sequences</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171134.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-6-6|PAPER Tue-O-5-6-6 — The Formant Dynamics of Long Close Vowels in Three Varieties of Swedish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Formant Dynamics of Long Close Vowels in Three Varieties of Swedish</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170139.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-10|PAPER Mon-P-2-2-10 — Event-Related Potentials Associated with Somatosensory Effect in Audio-Visual Speech Perception]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Event-Related Potentials Associated with Somatosensory Effect in Audio-Visual Speech Perception</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-4|PAPER Thu-SS-10-10-4 — Exploring Fusion Methods and Feature Space for the Classification of Paralinguistic Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Fusion Methods and Feature Space for the Classification of Paralinguistic Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171507.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-11|PAPER Mon-P-2-4-11 — Synthesising Uncertainty: The Interplay of Vocal Effort and Hesitation Disfluencies]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Synthesising Uncertainty: The Interplay of Vocal Effort and Hesitation Disfluencies</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171574.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-6-3|PAPER Wed-O-7-6-3 — Towards End-to-End Spoken Dialogue Systems with Turn Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards End-to-End Spoken Dialogue Systems with Turn Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170762.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-8|PAPER Tue-P-5-2-8 — Dysprosody Differentiate Between Parkinson’s Disease, Progressive Supranuclear Palsy, and Multiple System Atrophy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dysprosody Differentiate Between Parkinson’s Disease, Progressive Supranuclear Palsy, and Multiple System Atrophy</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170381.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-4|PAPER Wed-P-7-4-4 — Acoustic Evaluation of Nasality in Cerebellar Syndromes]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Evaluation of Nasality in Cerebellar Syndromes</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170018.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-6-2|PAPER Mon-O-2-6-2 — Dialect Perception by Older Children]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dialect Perception by Older Children</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171437.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-8|PAPER Mon-SS-1-11-8 — Synthesising isiZulu-English Code-Switch Bigrams Using Word Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Synthesising isiZulu-English Code-Switch Bigrams Using Word Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171689.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-11-6|PAPER Wed-SS-7-11-6 — Learning Weakly Supervised Multimodal Phoneme Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Weakly Supervised Multimodal Phoneme Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170101.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-2|PAPER Wed-P-7-3-2 — Audio Scene Classification with Deep Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Scene Classification with Deep Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-9|PAPER Wed-P-8-3-9 — Online Adaptation of an Attention-Based Neural Network for Natural Language Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online Adaptation of an Attention-Based Neural Network for Natural Language Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171310.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-1-1|PAPER Mon-O-2-1-1 — Approaches for Neural-Network Language Model Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Approaches for Neural-Network Language Model Adaptation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171203.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-10-1|PAPER Wed-O-8-10-1 — Effectively Building Tera Scale MaxEnt Language Models Incorporating Non-Linguistic Signals]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effectively Building Tera Scale MaxEnt Language Models Incorporating Non-Linguistic Signals</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170493.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-10-4|PAPER Wed-O-8-10-4 — Sparse Non-Negative Matrix Language Modeling: Maximum Entropy Flexibility on the Cheap]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sparse Non-Negative Matrix Language Modeling: Maximum Entropy Flexibility on the Cheap</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171633.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-8-6|PAPER Wed-O-6-8-6 — Visual, Laughter, Applause and Spoken Expression Features for Predicting Engagement Within TED Talks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visual, Laughter, Applause and Spoken Expression Features for Predicting Engagement Within TED Talks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170555.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-4|PAPER Tue-O-5-2-4 — UTD-CRSS Systems for 2016 NIST Speaker Recognition Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">UTD-CRSS Systems for 2016 NIST Speaker Recognition Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171379.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-6|PAPER Wed-P-8-2-6 — Bilingual Word Embeddings for Cross-Lingual Personality Recognition Using Convolutional Neural Nets]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bilingual Word Embeddings for Cross-Lingual Personality Recognition Using Convolutional Neural Nets</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172031.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-2|PAPER Wed-S&T-6-B-2 — A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-2|PAPER Mon-SS-2-8-2 — Ensemble Learning for Countermeasure of Audio Replay Spoofing Attack in ASVspoof2017]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ensemble Learning for Countermeasure of Audio Replay Spoofing Attack in ASVspoof2017</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171633.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-8-6|PAPER Wed-O-6-8-6 — Visual, Laughter, Applause and Spoken Expression Features for Predicting Engagement Within TED Talks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visual, Laughter, Applause and Spoken Expression Features for Predicting Engagement Within TED Talks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170002.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-1|PAPER Mon-P-2-1-1 — Factors Affecting the Intelligibility of Low-Pass Filtered Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Factors Affecting the Intelligibility of Low-Pass Filtered Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-2|PAPER Mon-P-2-1-2 — Phonetic Restoration of Temporally Reversed Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetic Restoration of Temporally Reversed Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171573.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-14|PAPER Tue-P-5-3-14 — Bimodal Recurrent Neural Network for Audiovisual Voice Activity Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bimodal Recurrent Neural Network for Audiovisual Voice Activity Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171282.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-3|PAPER Tue-P-5-1-3 — On the Role of Temporal Variability in the Acquisition of the German Vowel Length Contrast]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Role of Temporal Variability in the Acquisition of the German Vowel Length Contrast</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171285.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-10|PAPER Wed-P-7-2-10 — What do Finnish and Central Bavarian Have in Common? Towards an Acoustically Based Quantity Typology]]</div>|^<div class="cpauthorindexpersoncardpapertitle">What do Finnish and Central Bavarian Have in Common? Towards an Acoustically Based Quantity Typology</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171647.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-6|PAPER Tue-O-5-4-6 — Direct Modelling of Magnitude and Phase Spectra for Statistical Parametric Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Direct Modelling of Magnitude and Phase Spectra for Statistical Parametric Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171015.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-8|PAPER Wed-P-7-4-8 — Cross-Domain Classification of Drowsiness in Speech: The Case of Alcohol Intoxication and Sleep Deprivation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Domain Classification of Drowsiness in Speech: The Case of Alcohol Intoxication and Sleep Deprivation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-4|PAPER Tue-P-5-4-4 — Subjective Intelligibility of Deep Neural Network-Based Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Subjective Intelligibility of Deep Neural Network-Based Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170271.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-7|PAPER Tue-P-5-3-7 — A Mask Estimation Method Integrating Data Field Model for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Mask Estimation Method Integrating Data Field Model for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170229.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-6|PAPER Tue-P-5-3-6 — Speaker Direction-of-Arrival Estimation Based on Frequency-Independent Beampattern]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Direction-of-Arrival Estimation Based on Frequency-Independent Beampattern</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170853.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-4|PAPER Mon-O-2-10-4 — On Design of Robust Deep Models for CHiME-4 Multi-Channel Speech Recognition with Multiple Configurations of Array Microphones]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Design of Robust Deep Models for CHiME-4 Multi-Channel Speech Recognition with Multiple Configurations of Array Microphones</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170579.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-4|PAPER Thu-P-9-1-4 — Joint Training of Multi-Channel-Condition Dereverberation and Acoustic Modeling of Microphone Array Speech for Robust Distant Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Training of Multi-Channel-Condition Dereverberation and Acoustic Modeling of Microphone Array Speech for Robust Distant Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170746.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-4|PAPER Mon-P-1-2-4 — Frame-Wise Dynamic Threshold Based Polyphonic Acoustic Event Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Frame-Wise Dynamic Threshold Based Polyphonic Acoustic Event Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171732.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-8|PAPER Tue-SS-5-11-8 — A Semi-Supervised Learning Approach for Acoustic-Prosodic Personality Perception in Under-Resourced Domains]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Semi-Supervised Learning Approach for Acoustic-Prosodic Personality Perception in Under-Resourced Domains</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171522.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-5|PAPER Tue-O-5-8-5 — Detection of Mispronunciations and Disfluencies in Children Reading Aloud]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Mispronunciations and Disfluencies in Children Reading Aloud</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171541.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-3|PAPER Wed-P-6-1-3 — Automatic Evaluation of Children Reading Aloud on Sentences and Pseudowords]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Evaluation of Children Reading Aloud on Sentences and Pseudowords</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-5|PAPER Mon-O-2-2-5 — Earlier Identification of Children with Autism Spectrum Disorder: An Automatic Vocalisation-Based Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Earlier Identification of Children with Autism Spectrum Disorder: An Automatic Vocalisation-Based Approach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172036.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-B-3|PAPER Tue-S&T-3-B-3 — “Did you laugh enough today?” — Deep Neural Networks for Mobile and Wearable Laughter Trackers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">“Did you laugh enough today?” — Deep Neural Networks for Mobile and Wearable Laughter Trackers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171683.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-2|PAPER Mon-P-1-4-2 — Comparison of Decoding Strategies for CTC Acoustic Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Decoding Strategies for CTC Acoustic Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171469.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-13|PAPER Wed-P-7-3-13 — A Transfer Learning Based Feature Extractor for Polyphonic Sound Event Detection Using Connectionist Temporal Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Transfer Learning Based Feature Extractor for Polyphonic Sound Event Detection Using Connectionist Temporal Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171250.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-9|PAPER Thu-P-9-4-9 — Real-Time Reactive Speech Synthesis: Incorporating Interruptions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Reactive Speech Synthesis: Incorporating Interruptions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171306.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-13|PAPER Wed-P-7-2-13 — A Quantitative Measure of the Impact of Coarticulation on Phone Discriminability]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Quantitative Measure of the Impact of Coarticulation on Phone Discriminability</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171513.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-1|PAPER Mon-O-1-1-1 — Improved Single System Conversational Telephone Speech Recognition with VGG Bottleneck Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Single System Conversational Telephone Speech Recognition with VGG Bottleneck Features</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171058.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-4|PAPER Mon-O-1-1-4 — Improving Deliverable Speech-to-Text Systems with Multilingual Knowledge Transfer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Deliverable Speech-to-Text Systems with Multilingual Knowledge Transfer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172035.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-B-1|PAPER Mon-S&T-2-B-1 — An Apparatus to Investigate Western Opera Singing Skill Learning Using Performance and Result Biofeedback, and Measuring its Neural Correlates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Apparatus to Investigate Western Opera Singing Skill Learning Using Performance and Result Biofeedback, and Measuring its Neural Correlates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171705.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-1-1|PAPER Tue-O-5-1-1 — Recurrent Neural Aligner: An Encoder-Decoder Neural Network Model for Sequence to Sequence Mapping]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Recurrent Neural Aligner: An Encoder-Decoder Neural Network Model for Sequence to Sequence Mapping</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170047.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-5|PAPER Wed-O-7-8-5 — Pronunciation Learning with RNN-Transducers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Pronunciation Learning with RNN-Transducers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-3|PAPER Wed-S&T-6-B-3 — Towards an Autarkic Embedded Cognitive User Interface]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards an Autarkic Embedded Cognitive User Interface</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170122.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-7|PAPER Mon-P-2-2-7 — Audiovisual Recalibration of Vowel Categories]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audiovisual Recalibration of Vowel Categories</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171036.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-7|PAPER Tue-P-3-1-7 — DNN i-Vector Speaker Verification with Short, Text-Constrained Test Utterances]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DNN i-Vector Speaker Verification with Short, Text-Constrained Test Utterances</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170064.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-5|PAPER Tue-P-5-1-5 — Proficiency Assessment of ESL Learner’s Sentence Prosody with TTS Synthesized Voice as Reference]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Proficiency Assessment of ESL Learner’s Sentence Prosody with TTS Synthesized Voice as Reference</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-5|PAPER Wed-O-7-10-5 — Improving Sub-Phone Modeling for Better Native Language Identification with Non-Native English Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Sub-Phone Modeling for Better Native Language Identification with Non-Native English Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170862.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-6|PAPER Wed-P-6-3-6 — Order-Preserving Abstractive Summarization for Spoken Content Based on Connectionist Temporal Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Order-Preserving Abstractive Summarization for Spoken Content Based on Connectionist Temporal Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170367.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-8-11-4|PAPER Wed-SS-8-11-4 — Attractiveness of French Voices for German Listeners — Results from Native and Non-Native Read Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attractiveness of French Voices for German Listeners — Results from Native and Non-Native Read Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170838.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-5|PAPER Wed-P-7-2-5 — Mel-Cepstral Distortion of German Vowels in Different Information Density Contexts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mel-Cepstral Distortion of German Vowels in Different Information Density Contexts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-4|PAPER Mon-P-2-3-4 — 2016 BUT Babel System: Multilingual BLSTM Acoustic Model with i-Vector Based Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">2016 BUT Babel System: Multilingual BLSTM Acoustic Model with i-Vector Based Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171238.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-8|PAPER Mon-P-1-2-8 — Virtual Adversarial Training and Data Augmentation for Acoustic Event Detection with Gated Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Virtual Adversarial Training and Data Augmentation for Acoustic Event Detection with Gated Recurrent Neural Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171064.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-1-5|PAPER Tue-O-5-1-5 — Frame and Segment Level Recurrent Neural Networks for Phone Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Frame and Segment Level Recurrent Neural Networks for Phone Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171186.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-6-3|PAPER Wed-O-8-6-3 — Eigenvector-Based Speech Mask Estimation Using Logistic Regression]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Eigenvector-Based Speech Mask Estimation Using Logistic Regression</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170537.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-2|PAPER Tue-O-5-2-2 — The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170301.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-1|PAPER Mon-SS-1-11-1 — Longitudinal Speaker Clustering and Verification Corpus with Code-Switching Frisian-Dutch Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Longitudinal Speaker Clustering and Verification Corpus with Code-Switching Frisian-Dutch Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171055.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-5|PAPER Tue-SS-5-11-5 — Perceptual and Acoustic Correlates of Gender in the Prepubertal Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perceptual and Acoustic Correlates of Gender in the Prepubertal Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172029.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-1|PAPER Wed-S&T-6-B-1 — Integrating the Talkamatic Dialogue Manager with Alexa]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Integrating the Talkamatic Dialogue Manager with Alexa</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170728.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-2|PAPER Tue-O-5-8-2 — Automatic Scoring of Shadowing Speech Based on DNN Posteriors and Their DTW]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Scoring of Shadowing Speech Based on DNN Posteriors and Their DTW</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171334.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-1|PAPER Wed-O-7-10-1 — Spoken Language Identification Using LSTM-Based Angular Proximity]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spoken Language Identification Using LSTM-Based Angular Proximity</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-2-5|PAPER Thu-O-9-2-5 — Combining Speaker Turn Embedding and Incremental Structure Prediction for Low-Latency Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Combining Speaker Turn Embedding and Incremental Structure Prediction for Low-Latency Speaker Diarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171388.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-10|PAPER Mon-P-1-2-10 — A Robust Voiced/Unvoiced Phoneme Classification from Whispered Speech Using the ‘Color’ of Whispered Phonemes and Deep Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Robust Voiced/Unvoiced Phoneme Classification from Whispered Speech Using the ‘Color’ of Whispered Phonemes and Deep Neural Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170338.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-8|PAPER Tue-P-4-1-8 — Training Context-Dependent DNN Acoustic Models Using Probabilistic Sampling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Training Context-Dependent DNN Acoustic Models Using Probabilistic Sampling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170899.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-9|PAPER Tue-P-4-1-9 — A Comparative Evaluation of GMM-Free State Tying Methods for ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparative Evaluation of GMM-Free State Tying Methods for ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170932.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-8-5|PAPER Wed-O-6-8-5 — Optimized Time Series Filters for Detecting Laughter and Filler Events]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Optimized Time Series Filters for Detecting Laughter and Filler Events</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170905.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-5|PAPER Thu-SS-10-10-5 — DNN-Based Feature Extraction and Classifier Combination for Child-Directed Speech, Cold and Snoring Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DNN-Based Feature Extraction and Classifier Combination for Child-Directed Speech, Cold and Snoring Identification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170939.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-8-5|PAPER Thu-O-9-8-5 — DNN-Based Ultrasound-to-Speech Conversion for a Silent Speech Interface]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DNN-Based Ultrasound-to-Speech Conversion for a Silent Speech Interface</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170496.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-8|PAPER Tue-P-5-3-8 — Improved End-of-Query Detection for Streaming Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved End-of-Query Detection for Streaming Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170284.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-3|PAPER Thu-O-10-11-3 — Endpoint Detection Using Grid Long Short-Term Memory Networks for Streaming Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Endpoint Detection Using Grid Long Short-Term Memory Networks for Streaming Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170270.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-2-3|PAPER Thu-O-9-2-3 — A Triplet Ranking-Based Neural Network for Speaker Diarization and Linking]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Triplet Ranking-Based Neural Network for Speaker Diarization and Linking</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170836.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-3|PAPER Wed-O-8-1-3 — Independent Modelling of High and Low Energy Speech Frames for Spoofing Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Independent Modelling of High and Low Energy Speech Frames for Spoofing Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-5|PAPER Mon-O-1-1-5 — English Conversational Telephone Speech Recognition by Humans and Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">English Conversational Telephone Speech Recognition by Humans and Machines</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-1-5|PAPER Mon-O-2-1-5 — Empirical Exploration of Novel Architectures and Objectives for Language Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Empirical Exploration of Novel Architectures and Objectives for Language Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170852.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-3|PAPER Mon-O-2-10-3 — Factorial Modeling for Effective Suppression of Directional Noise]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Factorial Modeling for Effective Suppression of Directional Noise</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170920.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-7|PAPER Tue-P-4-1-7 — Ensembles of Multi-Scale VGG Acoustic Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ensembles of Multi-Scale VGG Acoustic Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170904.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-5|PAPER Thu-O-9-4-5 — Symbol Sequence Search from Telephone Conversation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Symbol Sequence Search from Telephone Conversation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170614.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-4|PAPER Thu-O-10-1-4 — Efficient Knowledge Distillation from an Ensemble of Teachers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Efficient Knowledge Distillation from an Ensemble of Teachers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170360.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-1|PAPER Mon-SS-2-8-1 — Audio Replay Attack Detection with Deep Learning Frameworks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Replay Attack Detection with Deep Learning Frameworks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170260.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-2-2|PAPER Tue-O-3-2-2 — Analysis of Acoustic-to-Articulatory Speech Inversion Across Different Accents and Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of Acoustic-to-Articulatory Speech Inversion Across Different Accents and Languages</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171421.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-2|PAPER Tue-O-4-8-2 — Adversarial Auto-Encoders for Speech Based Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Auto-Encoders for Speech Based Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170906.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-4|PAPER Mon-SS-1-8-4 — Countermeasures for Automatic Speaker Verification Replay Spoofing Attack: On Data Augmentation, Feature Representation, Classification and Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Countermeasures for Automatic Speaker Verification Replay Spoofing Attack: On Data Augmentation, Feature Representation, Classification and Fusion</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171445.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-6|PAPER Thu-SS-9-10-6 — End-to-End Deep Learning Framework for Speech Paralinguistics Detection Based on Perception Aware Spectrum]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Deep Learning Framework for Speech Paralinguistics Detection Based on Perception Aware Spectrum</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170997.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-11|PAPER Wed-P-6-2-11 — The Opensesame NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Opensesame NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170714.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-14|PAPER Tue-P-5-1-14 — The Relationship Between the Perception and Production of Non-Native Tones]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Relationship Between the Perception and Production of Non-Native Tones</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170658.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-2|PAPER Wed-O-7-1-2 — Context Regularity Indexed by Auditory N1 and P2 Event-Related Potentials]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Context Regularity Indexed by Auditory N1 and P2 Event-Related Potentials</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170129.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-1|PAPER Tue-P-4-1-1 — An Exploration of Dropout with LSTMs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Exploration of Dropout with LSTMs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171741.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-6|PAPER Wed-O-7-1-6 — A Neuro-Experimental Evidence for the Motor Theory of Speech Perception]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Neuro-Experimental Evidence for the Motor Theory of Speech Perception</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170157.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-10|PAPER Tue-P-3-1-10 — Using Voice Quality Features to Improve Short-Utterance, Text-Independent Speaker Verification Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Voice Quality Features to Improve Short-Utterance, Text-Independent Speaker Verification Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170999.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-4|PAPER Mon-O-1-2-4 — An Information Theoretic Analysis of the Temporal Synchrony Between Head Gestures and Prosodic Patterns in Spontaneous Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Information Theoretic Analysis of the Temporal Synchrony Between Head Gestures and Prosodic Patterns in Spontaneous Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171498.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-5|PAPER Tue-O-5-2-5 — Analysis and Description of ABC Submission to NIST SRE 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Description of ABC Submission to NIST SRE 2016</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171575.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-9|PAPER Tue-P-3-1-9 — Deep Speaker Embeddings for Short-Duration Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Speaker Embeddings for Short-Duration Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-2-5|PAPER Thu-O-10-2-5 — Speaker Verification Under Adverse Conditions Using i-Vector Adaptation and Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Verification Under Adverse Conditions Using i-Vector Adaptation and Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171673.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-15|PAPER Tue-P-5-3-15 — Domain-Specific Utterance End-Point Detection for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Specific Utterance End-Point Detection for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170624.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-5|PAPER Mon-P-1-1-5 — Non-Local Estimation of Speech Signal for Vowel Onset Point Detection in Varied Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Local Estimation of Speech Signal for Vowel Onset Point Detection in Varied Environments</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170135.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-3|PAPER Tue-P-5-3-3 — Excitation Source Features for Improving the Detection of Vowel Onset and Offset Points in a Speech Sequence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Excitation Source Features for Improving the Detection of Vowel Onset and Offset Points in a Speech Sequence</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170302.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-2|PAPER Wed-O-6-10-2 — Improving Children’s Speech Recognition Through Explicit Pitch Scaling Based on Iterative Spectrogram Inversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Children’s Speech Recognition Through Explicit Pitch Scaling Based on Iterative Spectrogram Inversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170797.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-3|PAPER Tue-O-5-2-3 — Nuance - Politecnico di Torino’s 2016 NIST Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nuance - Politecnico di Torino’s 2016 NIST Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-4|PAPER Wed-S&T-6-B-4 — Nora the Empathetic Psychologist]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nora the Empathetic Psychologist</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-1|PAPER Mon-P-1-1-1 — Low-Dimensional Representation of Spectral Envelope Without Deterioration for Full-Band Speech Analysis/Synthesis System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low-Dimensional Representation of Spectral Envelope Without Deterioration for Full-Band Speech Analysis/Synthesis System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-5|PAPER Tue-S&T-3-A-5 — Speech Recognition and Understanding on Hardware-Accelerated DSP]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Recognition and Understanding on Hardware-Accelerated DSP</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170971.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-12|PAPER Thu-P-9-3-12 — Automatic Labelling of Prosodic Prominence, Phrasing and Disfluencies in French Speech by Simulating the Perception of Naïve and Expert Listeners]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Labelling of Prosodic Prominence, Phrasing and Disfluencies in French Speech by Simulating the Perception of Naïve and Expert Listeners</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170460.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-3|PAPER Mon-O-1-1-3 — Embedding-Based Speaker Adaptive Training of Deep Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Embedding-Based Speaker Adaptive Training of Deep Neural Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-5|PAPER Mon-O-1-1-5 — English Conversational Telephone Speech Recognition by Humans and Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">English Conversational Telephone Speech Recognition by Humans and Machines</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-1-5|PAPER Mon-O-2-1-5 — Empirical Exploration of Novel Architectures and Objectives for Language Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Empirical Exploration of Novel Architectures and Objectives for Language Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170546.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-5|PAPER Tue-O-3-1-5 — Direct Acoustics-to-Word Models for English Conversational Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Direct Acoustics-to-Word Models for English Conversational Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171576.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-4|PAPER Wed-O-6-1-4 — Speaker-Specific Biomechanical Model-Based Investigation of a Simple Speech Task Based on Tagged-MRI]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker-Specific Biomechanical Model-Based Investigation of a Simple Speech Task Based on Tagged-MRI</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171029.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-5|PAPER Wed-P-8-3-5 — Quaternion Denoising Encoder-Decoder for Theme Identification of Telephone Conversations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Quaternion Denoising Encoder-Decoder for Theme Identification of Telephone Conversations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170085.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-8-1|PAPER Thu-O-9-8-1 — Combining Residual Networks with LSTMs for Lipreading]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Combining Residual Networks with LSTMs for Lipreading</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171231.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-11|PAPER Wed-P-6-3-11 — Evaluating Automatic Topic Segmentation as a Segment Retrieval Task]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluating Automatic Topic Segmentation as a Segment Retrieval Task</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172035.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-B-1|PAPER Mon-S&T-2-B-1 — An Apparatus to Investigate Western Opera Singing Skill Learning Using Performance and Result Biofeedback, and Measuring its Neural Correlates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Apparatus to Investigate Western Opera Singing Skill Learning Using Performance and Result Biofeedback, and Measuring its Neural Correlates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172036.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-B-3|PAPER Tue-S&T-3-B-3 — “Did you laugh enough today?” — Deep Neural Networks for Mobile and Wearable Laughter Trackers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">“Did you laugh enough today?” — Deep Neural Networks for Mobile and Wearable Laughter Trackers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-4-2|PAPER Mon-O-1-4-2 — Stepsize Control for Acoustic Feedback Cancellation Based on the Detection of Reverberant Signal Periods and the Estimated System Distance]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stepsize Control for Acoustic Feedback Cancellation Based on the Detection of Reverberant Signal Periods and the Estimated System Distance</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171254.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-4-5|PAPER Wed-O-6-4-5 — Low-Complexity Pitch Estimation Based on Phase Differences Between Low-Resolution Spectra]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low-Complexity Pitch Estimation Based on Phase Differences Between Low-Resolution Spectra</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170337.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-10|PAPER Tue-P-5-1-10 — Measuring Encoding Efficiency in Swedish and English Language Learner Speech Production]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Measuring Encoding Efficiency in Swedish and English Language Learner Speech Production</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170678.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-6|PAPER Mon-P-1-1-6 — Time-Domain Envelope Modulating the Noise Component of Excitation in a Continuous Residual-Based Vocoder for Statistical Parametric Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time-Domain Envelope Modulating the Noise Component of Excitation in a Continuous Residual-Based Vocoder for Statistical Parametric Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170335.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-2|PAPER Tue-P-5-2-2 — Cepstral and Entropy Analyses in Vowels Excerpted from Continuous Speech of Dysphonic and Control Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cepstral and Entropy Analyses in Vowels Excerpted from Continuous Speech of Dysphonic and Control Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-10-6|PAPER Tue-O-5-10-6 — Hybrid Acoustic-Lexical Deep Learning Approach for Deception Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hybrid Acoustic-Lexical Deep Learning Approach for Deception Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171502.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-13|PAPER Wed-P-8-1-13 — Exploring Multidimensionality: Acoustic and Articulatory Correlates of Swedish Word Accents]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Multidimensionality: Acoustic and Articulatory Correlates of Swedish Word Accents</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-7|PAPER Thu-P-9-3-7 — Developing an Embosi (Bantu C25) Speech Variant Dictionary to Model Vowel Elision and Morpheme Deletion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Developing an Embosi (Bantu C25) Speech Variant Dictionary to Model Vowel Elision and Morpheme Deletion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170621.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-6|PAPER Wed-P-7-4-6 — Phonological Markers of Oxytocin and MDMA Ingestion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonological Markers of Oxytocin and MDMA Ingestion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171706.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-10-2|PAPER Tue-O-5-10-2 — Exploring Dynamic Measures of Stance in Spoken Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Dynamic Measures of Stance in Spoken Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170337.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-10|PAPER Tue-P-5-1-10 — Measuring Encoding Efficiency in Swedish and English Language Learner Speech Production]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Measuring Encoding Efficiency in Swedish and English Language Learner Speech Production</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170954.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-12|PAPER Tue-P-5-3-12 — Subband Selection for Binaural Speech Source Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Subband Selection for Binaural Speech Source Localization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171574.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-6-3|PAPER Wed-O-7-6-3 — Towards End-to-End Spoken Dialogue Systems with Turn Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards End-to-End Spoken Dialogue Systems with Turn Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171553.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-6-4|PAPER Mon-O-1-6-4 — How are Four-Level Length Distinctions Produced? Evidence from Moroccan Arabic]]</div>|^<div class="cpauthorindexpersoncardpapertitle">How are Four-Level Length Distinctions Produced? Evidence from Moroccan Arabic</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170518.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-1|PAPER Wed-O-7-4-1 — Towards Zero-Shot Frame Semantic Parsing for Domain Scaling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Zero-Shot Frame Semantic Parsing for Domain Scaling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-5|PAPER Mon-O-2-10-5 — Acoustic Modeling for Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Modeling for Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170429.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-1-2|PAPER Tue-O-5-1-2 — Highway-LSTM and Recurrent Highway Networks for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Highway-LSTM and Recurrent Highway Networks for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171081.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-3|PAPER Wed-O-7-8-3 — Semi-Supervised Learning of a Pronunciation Dictionary from Disjoint Phonemic Transcripts and Text]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised Learning of a Pronunciation Dictionary from Disjoint Phonemic Transcripts and Text</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-12|PAPER Thu-P-9-4-12 — Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171586.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-4|PAPER Tue-P-3-2-4 — Extended Variability Modeling and Unsupervised Adaptation for PLDA Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extended Variability Modeling and Unsupervised Adaptation for PLDA Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170502.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-8-6|PAPER Thu-O-9-8-6 — Visually Grounded Learning of Keyword Prediction from Untranscribed Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visually Grounded Learning of Keyword Prediction from Untranscribed Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170922.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-4|PAPER Thu-O-10-8-4 — The Social Life of Setswana Ejectives]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Social Life of Setswana Ejectives</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171429.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-7|PAPER Mon-SS-1-11-7 — Metrics for Modeling Code-Switching Across Corpora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Metrics for Modeling Code-Switching Across Corpora</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-2|PAPER Tue-P-5-4-2 — Multi-Target Ensemble Learning for Monaural Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Target Ensemble Learning for Monaural Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-2-5|PAPER Thu-O-9-2-5 — Combining Speaker Turn Embedding and Incremental Structure Prediction for Low-Latency Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Combining Speaker Turn Embedding and Incremental Structure Prediction for Low-Latency Speaker Diarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170621.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-6|PAPER Wed-P-7-4-6 — Phonological Markers of Oxytocin and MDMA Ingestion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonological Markers of Oxytocin and MDMA Ingestion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170532.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-10|PAPER Mon-P-2-1-10 — Misperceptions of the Emotional Content of Natural and Vocoded Speech in a Car]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Misperceptions of the Emotional Content of Natural and Vocoded Speech in a Car</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170171.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-1|PAPER Thu-P-9-4-1 — Principles for Learning Controllable TTS from Annotated and Latent Variation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Principles for Learning Controllable TTS from Annotated and Latent Variation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171247.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-9|PAPER Mon-P-1-4-9 — Joint Learning of Correlated Sequence Labeling Tasks Using Bidirectional Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Learning of Correlated Sequence Labeling Tasks Using Bidirectional Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170736.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-10-6|PAPER Tue-O-3-10-6 — Towards Speech Emotion Recognition “in the Wild” Using Aggregated Corpora and Deep Multi-Task Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Speech Emotion Recognition “in the Wild” Using Aggregated Corpora and Deep Multi-Task Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170204.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-11|PAPER Mon-P-1-4-11 — A Phonological Phrase Sequence Modelling Approach for Resource Efficient and Robust Real-Time Punctuation Recovery]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Phonological Phrase Sequence Modelling Approach for Resource Efficient and Robust Real-Time Punctuation Recovery</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171446.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-5|PAPER Wed-O-6-10-5 — Semi-Supervised Learning with Semantic Knowledge Extraction for Improved Speech Recognition in Air Traffic Control]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised Learning with Semantic Knowledge Extraction for Improved Speech Recognition in Air Traffic Control</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171307.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-12|PAPER Wed-P-6-2-12 — IITG-Indigo System for NIST 2016 SRE Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">IITG-Indigo System for NIST 2016 SRE Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171740.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-11|PAPER Tue-P-5-2-11 — Prediction of Speech Delay from Acoustic Measurements]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prediction of Speech Delay from Acoustic Measurements</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172054.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-S&T-9-A-4|PAPER Thu-S&T-9-A-4 — The ModelTalker Project: A Web-Based Voice Banking Pipeline for ALS/MND Patients]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The ModelTalker Project: A Web-Based Voice Banking Pipeline for ALS/MND Patients</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170302.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-2|PAPER Wed-O-6-10-2 — Improving Children’s Speech Recognition Through Explicit Pitch Scaling Based on Iterative Spectrogram Inversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Children’s Speech Recognition Through Explicit Pitch Scaling Based on Iterative Spectrogram Inversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-12|PAPER Tue-P-3-1-12 — Joint Training of Expanded End-to-End DNN for Text-Dependent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Training of Expanded End-to-End DNN for Text-Dependent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170733.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-2|PAPER Mon-O-2-10-2 — Neural Network-Based Spectrum Estimation for Online WPE Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Network-Based Spectrum Estimation for Online WPE Dereverberation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171566.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-6|PAPER Thu-O-10-1-6 — Neural Speech Recognizer: Acoustic-to-Word LSTM Model for Large Vocabulary Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Speech Recognizer: Acoustic-to-Word LSTM Model for Large Vocabulary Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171323.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-1|PAPER Tue-P-4-2-1 — Backstitch: Counteracting Finite-Sample Bias via Negative Steps]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Backstitch: Counteracting Finite-Sample Bias via Negative Steps</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170601.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-2|PAPER Thu-O-9-4-2 — The Kaldi OpenKWS System: Improving Low Resource Keyword Search]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Kaldi OpenKWS System: Improving Low Resource Keyword Search</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS173001.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-K1-1|PAPER Mon-K1-1 — ISCA Medal for Scientific Achievement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ISCA Medal for Scientific Achievement</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171088.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-5|PAPER Mon-O-1-2-5 — Multimodal Prediction of Affective Dimensions via Fusing Multiple Regression Techniques]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multimodal Prediction of Affective Dimensions via Fusing Multiple Regression Techniques</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170108.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-11|PAPER Tue-P-3-1-11 — Gain Compensation for Fast i-Vector Extraction Over Short Duration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Gain Compensation for Fast i-Vector Extraction Over Short Duration</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170199.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-5|PAPER Tue-P-5-3-5 — Weighted Spatial Covariance Matrix Estimation for MUSIC Based TDOA Estimation of Speech Source]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weighted Spatial Covariance Matrix Estimation for MUSIC Based TDOA Estimation of Speech Source</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170596.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-4|PAPER Wed-O-7-10-4 — Investigating Scalability in Hierarchical Language Identification System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Scalability in Hierarchical Language Identification System</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170694.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-4|PAPER Wed-P-8-4-4 — Denoising Recurrent Neural Network for Deep Bidirectional LSTM Based Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Denoising Recurrent Neural Network for Deep Bidirectional LSTM Based Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171418.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-4|PAPER Wed-SS-6-11-4 — A New Workflow for Semi-Automatized Annotations: Tests with Long-Form Naturalistic Recordings of Childrens Language Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A New Workflow for Semi-Automatized Annotations: Tests with Long-Form Naturalistic Recordings of Children’s Language Environments</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171566.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-6|PAPER Thu-O-10-1-6 — Neural Speech Recognizer: Acoustic-to-Word LSTM Model for Large Vocabulary Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Speech Recognizer: Acoustic-to-Word LSTM Model for Large Vocabulary Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171107.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-1-6|PAPER Tue-O-4-1-6 — Google’s Next-Generation Real-Time Unit-Selection Synthesizer Using Sequence-to-Sequence LSTM-Based Autoencoders]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Google’s Next-Generation Real-Time Unit-Selection Synthesizer Using Sequence-to-Sequence LSTM-Based Autoencoders</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-5|PAPER Tue-S&T-3-A-5 — Speech Recognition and Understanding on Hardware-Accelerated DSP]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Recognition and Understanding on Hardware-Accelerated DSP</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171154.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-3|PAPER Wed-P-7-2-3 — Vowel and Consonant Sequences in three Bavarian Dialects of Austria]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Vowel and Consonant Sequences in three Bavarian Dialects of Austria</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-1|PAPER Wed-P-7-3-1 — Sinusoidal Partials Tracking for Singing Analysis Using the Heuristic of the Minimal Frequency and Magnitude Difference]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sinusoidal Partials Tracking for Singing Analysis Using the Heuristic of the Minimal Frequency and Magnitude Difference</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171517.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-6|PAPER Mon-P-2-1-6 — Whether Long-Term Tracking of Speech Rate Affects Perception Depends on Who is Talking]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Whether Long-Term Tracking of Speech Rate Affects Perception Depends on Who is Talking</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170142.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-8-11-2|PAPER Wed-SS-8-11-2 — The Role of Temporal Amplitude Modulations in the Political Arena: Hillary Clinton vs. Donald Trump]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Role of Temporal Amplitude Modulations in the Political Arena: Hillary Clinton vs. Donald Trump</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170073.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-1|PAPER Wed-O-7-1-1 — An Entrained Rhythm’s Frequency, Not Phase, Influences Temporal Sampling of Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Entrained Rhythm’s Frequency, Not Phase, Influences Temporal Sampling of Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170301.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-1|PAPER Mon-SS-1-11-1 — Longitudinal Speaker Clustering and Verification Corpus with Code-Switching Frisian-Dutch Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Longitudinal Speaker Clustering and Verification Corpus with Code-Switching Frisian-Dutch Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172055.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-S&T-9-A-5|PAPER Thu-S&T-9-A-5 — Visible Vowels: A Tool for the Visualization of Vowel Variation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visible Vowels: A Tool for the Visualization of Vowel Variation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-6|PAPER Tue-P-5-4-6 — On the Influence of Modifying Magnitude and Phase Spectrum to Enhance Noisy Speech Signals]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Influence of Modifying Magnitude and Phase Spectrum to Enhance Noisy Speech Signals</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170049.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-4-4|PAPER Tue-O-3-4-4 — Autoencoder Based Domain Adaptation for Speaker Recognition Under Insufficient Channel Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Autoencoder Based Domain Adaptation for Speaker Recognition Under Insufficient Channel Information</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170545.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-14|PAPER Wed-P-6-2-14 — Recursive Whitening Transformation for Speaker Recognition on Language Mismatched Condition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Recursive Whitening Transformation for Speaker Recognition on Language Mismatched Condition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170997.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-11|PAPER Wed-P-6-2-11 — The Opensesame NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Opensesame NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171118.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-1-1|PAPER Thu-O-9-1-1 — Multitask Learning with Low-Level Auxiliary Tasks for Encoder-Decoder Based Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multitask Learning with Low-Level Auxiliary Tasks for Encoder-Decoder Based Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171355.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-5|PAPER Tue-O-3-8-5 — Controlling Prominence Realisation in Parametric DNN-Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Controlling Prominence Realisation in Parametric DNN-Based Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171407.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-1|PAPER Wed-SS-7-1-1 — The ABAIR Initiative: Bringing Spoken Irish into the Digital Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The ABAIR Initiative: Bringing Spoken Irish into the Digital Space</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171393.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-2|PAPER Wed-O-8-1-2 — Unsupervised Representation Learning Using Convolutional Restricted Boltzmann Machine for Spoof Speech Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Representation Learning Using Convolutional Restricted Boltzmann Machine for Spoof Speech Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170831.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-15|PAPER Wed-P-7-3-15 — Unsupervised Filterbank Learning Using Convolutional Restricted Boltzmann Machine for Environmental Sound Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Filterbank Learning Using Convolutional Restricted Boltzmann Machine for Environmental Sound Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171342.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-4|PAPER Wed-O-6-10-4 — Robust Online i-Vectors for Unsupervised Adaptation of DNN Acoustic Models: A Study in the Context of Digital Voice Assistants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Online i-Vectors for Unsupervised Adaptation of DNN Acoustic Models: A Study in the Context of Digital Voice Assistants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170537.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-2|PAPER Tue-O-5-2-2 — The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170621.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-6|PAPER Wed-P-7-4-6 — Phonological Markers of Oxytocin and MDMA Ingestion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonological Markers of Oxytocin and MDMA Ingestion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170098.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-1|PAPER Mon-O-1-2-1 — Multimodal Markers of Persuasive Speech: Designing a Virtual Debate Coach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multimodal Markers of Persuasive Speech: Designing a Virtual Debate Coach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171446.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-5|PAPER Wed-O-6-10-5 — Semi-Supervised Learning with Semantic Knowledge Extraction for Improved Speech Recognition in Air Traffic Control]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised Learning with Semantic Knowledge Extraction for Improved Speech Recognition in Air Traffic Control</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-5|PAPER Mon-O-2-10-5 — Acoustic Modeling for Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Modeling for Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171705.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-1-1|PAPER Tue-O-5-1-1 — Recurrent Neural Aligner: An Encoder-Decoder Neural Network Model for Sequence to Sequence Mapping]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Recurrent Neural Aligner: An Encoder-Decoder Neural Network Model for Sequence to Sequence Mapping</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171566.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-6|PAPER Thu-O-10-1-6 — Neural Speech Recognizer: Acoustic-to-Word LSTM Model for Large Vocabulary Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Speech Recognizer: Acoustic-to-Word LSTM Model for Large Vocabulary Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172057.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-5|PAPER Wed-S&T-6-B-5 — Modifying Amazon’s Alexa ASR Grammar and Lexicon — A Case Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modifying Amazon’s Alexa ASR Grammar and Lexicon — A Case Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171584.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-9|PAPER Wed-P-8-2-9 — Speech Rate Comparison When Talking to a System and Talking to a Human: A Study from a Speech-to-Speech, Machine Translation Mediated Map Task]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Rate Comparison When Talking to a System and Talking to a Human: A Study from a Speech-to-Speech, Machine Translation Mediated Map Task</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172040.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-B-6|PAPER Mon-S&T-2-B-6 — Visual Learning 2: Pronunciation App Using Ultrasound, Video, and MRI]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visual Learning 2: Pronunciation App Using Ultrasound, Video, and MRI</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170839.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-6-4|PAPER Tue-O-4-6-4 — Mind the Peak: When Museum is Temporarily Understood as Musical in Australian English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mind the Peak: When Museum is Temporarily Understood as Musical in Australian English</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170544.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-6-1|PAPER Tue-O-5-6-1 — Similar Prosodic Structure Perceived Differently in German and English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Similar Prosodic Structure Perceived Differently in German and English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-1|PAPER Mon-SS-1-8-1 — The ASVspoof 2017 Challenge: Assessing the Limits of Replay Spoofing Attack Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The ASVspoof 2017 Challenge: Assessing the Limits of Replay Spoofing Attack Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-12|PAPER Tue-P-3-1-12 — Joint Training of Expanded End-to-End DNN for Text-Dependent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Training of Expanded End-to-End DNN for Text-Dependent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170840.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-2|PAPER Wed-P-8-1-2 — Intonation of Contrastive Topic in Estonian]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Intonation of Contrastive Topic in Estonian</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-7|PAPER Wed-P-7-4-7 — An Avatar-Based System for Identifying Individuals Likely to Develop Dementia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Avatar-Based System for Identifying Individuals Likely to Develop Dementia</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171520.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-8-11-7|PAPER Wed-SS-8-11-7 — Pitch Convergence as an Effect of Perceived Attractiveness and Likability]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Pitch Convergence as an Effect of Perceived Attractiveness and Likability</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-6|PAPER Wed-S&T-6-A-6 — SIAK — A Game for Foreign Language Pronunciation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SIAK — A Game for Foreign Language Pronunciation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170949.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-6|PAPER Mon-P-2-4-6 — Multi-Task Learning for Prosodic Structure Generation Using BLSTM RNN with Structured Output Layer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Task Learning for Prosodic Structure Generation Using BLSTM RNN with Structured Output Layer</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170619.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-1|PAPER Tue-O-4-8-1 — Speech Emotion Recognition with Emotion-Pair Based Framework Considering Emotion Distribution Information in Dimensional Emotion Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Emotion Recognition with Emotion-Pair Based Framework Considering Emotion Distribution Information in Dimensional Emotion Space</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171036.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-7|PAPER Tue-P-3-1-7 — DNN i-Vector Speaker Verification with Short, Text-Constrained Test Utterances]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DNN i-Vector Speaker Verification with Short, Text-Constrained Test Utterances</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171122.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-10|PAPER Wed-P-8-4-10 — Spectro-Temporal Modelling with Time-Frequency LSTM and Structured Output Layer for Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spectro-Temporal Modelling with Time-Frequency LSTM and Structured Output Layer for Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171732.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-8|PAPER Tue-SS-5-11-8 — A Semi-Supervised Learning Approach for Acoustic-Prosodic Personality Perception in Under-Resourced Domains]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Semi-Supervised Learning Approach for Acoustic-Prosodic Personality Perception in Under-Resourced Domains</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170303.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-2|PAPER Wed-O-8-8-2 — Multi-Stage DNN Training for Automatic Recognition of Dysarthric Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Stage DNN Training for Automatic Recognition of Dysarthric Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170842.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-3|PAPER Wed-O-7-1-3 — Discovering Language in Marmoset Vocalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discovering Language in Marmoset Vocalization</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172042.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-5|PAPER Wed-S&T-6-A-5 — TBT (Toolkit to Build TTS): A High Performance Framework to Build Multiple Language HTS Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TBT (Toolkit to Build TTS): A High Performance Framework to Build Multiple Language HTS Voice</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170666.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-4|PAPER Thu-O-10-11-4 — Deep Learning Techniques in Tandem with Signal Processing Cues for Phonetic Segmentation for Text to Speech Synthesis in Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Learning Techniques in Tandem with Signal Processing Cues for Phonetic Segmentation for Text to Speech Synthesis in Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171362.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-3|PAPER Mon-SS-1-8-3 — Novel Variable Length Teager Energy Separation Based Instantaneous Frequency Features for Replay Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Novel Variable Length Teager Energy Separation Based Instantaneous Frequency Features for Replay Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171393.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-2|PAPER Wed-O-8-1-2 — Unsupervised Representation Learning Using Convolutional Restricted Boltzmann Machine for Spoof Speech Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Representation Learning Using Convolutional Restricted Boltzmann Machine for Spoof Speech Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170831.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-15|PAPER Wed-P-7-3-15 — Unsupervised Filterbank Learning Using Convolutional Restricted Boltzmann Machine for Environmental Sound Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Filterbank Learning Using Convolutional Restricted Boltzmann Machine for Environmental Sound Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171422.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-16|PAPER Wed-P-7-3-16 — Novel Shifted Real Spectrum for Exact Signal Reconstruction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Novel Shifted Real Spectrum for Exact Signal Reconstruction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-1|PAPER Wed-SS-6-2-1 — Team ELISA System for DARPA LORELEI Speech Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team ELISA System for DARPA LORELEI Speech Evaluation 2016</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171315.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-7|PAPER Thu-P-9-1-7 — To Improve the Robustness of LSTM-RNN Acoustic Models Using Higher-Order Feedback from Multiple Histories]]</div>|^<div class="cpauthorindexpersoncardpapertitle">To Improve the Robustness of LSTM-RNN Acoustic Models Using Higher-Order Feedback from Multiple Histories</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170301.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-1|PAPER Mon-SS-1-11-1 — Longitudinal Speaker Clustering and Verification Corpus with Code-Switching Frisian-Dutch Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Longitudinal Speaker Clustering and Verification Corpus with Code-Switching Frisian-Dutch Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170391.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-2|PAPER Mon-SS-1-11-2 — Exploiting Untranscribed Broadcast Data for Improved Code-Switching Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploiting Untranscribed Broadcast Data for Improved Code-Switching Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171350.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-4|PAPER Tue-O-5-8-4 — Phonological Feature Based Mispronunciation Detection and Diagnosis Using Multi-Task DNNs and Active Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonological Feature Based Mispronunciation Detection and Diagnosis Using Multi-Task DNNs and Active Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171287.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-2|PAPER Wed-SS-6-11-2 — The LENA System Applied to Swedish: Reliability of the Adult Word Count Estimate]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The LENA System Applied to Swedish: Reliability of the Adult Word Count Estimate</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-12|PAPER Thu-P-9-4-12 — Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171060.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-6-2|PAPER Wed-O-7-6-2 — Deep Reinforcement Learning of Dialogue Policies with Less Weight Updates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Reinforcement Learning of Dialogue Policies with Less Weight Updates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171592.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-1|PAPER Wed-P-6-3-1 — Query-by-Example Search with Discriminative Neural Acoustic Word Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Query-by-Example Search with Discriminative Neural Acoustic Word Embeddings</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170502.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-8-6|PAPER Thu-O-9-8-6 — Visually Grounded Learning of Keyword Prediction from Untranscribed Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visually Grounded Learning of Keyword Prediction from Untranscribed Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171073.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-2|PAPER Tue-O-3-1-2 — CTC in the Context of Generalized Full-Sum HMM Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CTC in the Context of Generalized Full-Sum HMM Training</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171747.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-5|PAPER Tue-P-4-2-5 — Parallel Neural Network Features for Improved Tandem Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parallel Neural Network Features for Improved Tandem Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171242.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-3|PAPER Mon-P-2-3-3 — An Investigation of Deep Neural Networks for Multilingual Speech Recognition Training and Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Investigation of Deep Neural Networks for Multilingual Speech Recognition Training and Adaptation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171784.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-1-5|PAPER Thu-O-9-1-5 — Exploiting Eigenposteriors for Semi-Supervised Training of DNN Acoustic Models with Sequence Discrimination]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploiting Eigenposteriors for Semi-Supervised Training of DNN Acoustic Models with Sequence Discrimination</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-2-5|PAPER Thu-O-9-2-5 — Combining Speaker Turn Embedding and Incremental Structure Prediction for Low-Latency Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Combining Speaker Turn Embedding and Incremental Structure Prediction for Low-Latency Speaker Diarization</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170411.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-2-6|PAPER Thu-O-9-2-6 — ‘pyannote.metrics’: A Toolkit for Reproducible Evaluation, Diagnostic, and Error Analysis of Speaker Diarization Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">‘pyannote.metrics’: A Toolkit for Reproducible Evaluation, Diagnostic, and Error Analysis of Speaker Diarization Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170065.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-6|PAPER Thu-O-10-11-6 — Speaker Change Detection in Broadcast TV Using Bidirectional Long Short-Term Memory Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Change Detection in Broadcast TV Using Bidirectional Long Short-Term Memory Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172031.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-2|PAPER Wed-S&T-6-B-2 — A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170409.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-5|PAPER Wed-P-7-4-5 — Emotional Speech of Mentally and Physically Disabled Individuals: Introducing the EmotAsS Database and First Findings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotional Speech of Mentally and Physically Disabled Individuals: Introducing the EmotAsS Database and First Findings</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-5|PAPER Wed-P-8-2-5 — Implementing Gender-Dependent Vowel-Level Analysis for Boosting Speech-Based Depression Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Implementing Gender-Dependent Vowel-Level Analysis for Boosting Speech-Based Depression Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170653.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-6|PAPER Thu-SS-10-10-6 — Introducing Weighted Kernel Classifiers for Handling Imbalanced Paralinguistic Corpora: Snoring, Addressee and Cold]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Introducing Weighted Kernel Classifiers for Handling Imbalanced Paralinguistic Corpora: Snoring, Addressee and Cold</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170436.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-4|PAPER Mon-P-1-1-4 — A Modulation Property of Time-Frequency Derivatives of Filtered Phase and its Application to Aperiodicity and f,,o,, Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Modulation Property of Time-Frequency Derivatives of Filtered Phase and its Application to Aperiodicity and f,,o,, Estimation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170015.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-1|PAPER Tue-O-5-4-1 — A New Cosine Series Antialiasing Function and its Application to Aliasing-Free Glottal Source Models for Speech and Singing Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A New Cosine Series Antialiasing Function and its Application to Aliasing-Free Glottal Source Models for Speech and Singing Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170436.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-4|PAPER Mon-P-1-1-4 — A Modulation Property of Time-Frequency Derivatives of Filtered Phase and its Application to Aperiodicity and f,,o,, Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Modulation Property of Time-Frequency Derivatives of Filtered Phase and its Application to Aperiodicity and f,,o,, Estimation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170282.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-9|PAPER Mon-P-2-1-9 — The Effect of Spectral Tilt on Size Discrimination of Voiced Speech Sounds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Effect of Spectral Tilt on Size Discrimination of Voiced Speech Sounds</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170015.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-1|PAPER Tue-O-5-4-1 — A New Cosine Series Antialiasing Function and its Application to Aliasing-Free Glottal Source Models for Speech and Singing Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A New Cosine Series Antialiasing Function and its Application to Aliasing-Free Glottal Source Models for Speech and Singing Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171010.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-2-5|PAPER Tue-O-3-2-5 — Acoustic-to-Articulatory Mapping Based on Mixture of Probabilistic Canonical Correlation Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic-to-Articulatory Mapping Based on Mixture of Probabilistic Canonical Correlation Analysis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170961.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-10-3|PAPER Tue-O-4-10-3 — Parallel-Data-Free Many-to-Many Voice Conversion Based on DNN Integrated with Eigenspace Using a Non-Parallel Speech Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parallel-Data-Free Many-to-Many Voice Conversion Based on DNN Integrated with Eigenspace Using a Non-Parallel Speech Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171213.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-11|PAPER Tue-P-4-3-11 — Human and Automated Scoring of Fluency, Pronunciation and Intonation During Human–Machine Spoken Dialog Interactions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human and Automated Scoring of Fluency, Pronunciation and Intonation During Human–Machine Spoken Dialog Interactions</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-5|PAPER Wed-O-7-10-5 — Improving Sub-Phone Modeling for Better Native Language Identification with Non-Native English Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Sub-Phone Modeling for Better Native Language Identification with Non-Native English Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171074.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-4-5|PAPER Mon-O-2-4-5 — Estimation of Place of Articulation of Fricatives from Spectral Characteristics for Speech Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Estimation of Place of Articulation of Fricatives from Spectral Characteristics for Speech Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170457.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-7|PAPER Tue-P-4-3-7 — Social Signal Detection in Spontaneous Dialogue Using Bidirectional LSTM-CTC]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Social Signal Detection in Spontaneous Dialogue Using Bidirectional LSTM-CTC</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170688.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-3|PAPER Tue-O-3-8-3 — Physically Constrained Statistical F,,0,, Prediction for Electrolaryngeal Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Physically Constrained Statistical F,,0,, Prediction for Electrolaryngeal Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170719.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-4|PAPER Tue-O-3-8-4 — DNN-SPACE: DNN-HMM-Based Generative Model of Voice F,,0,, Contours for Statistical Phrase/Accent Command Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DNN-SPACE: DNN-HMM-Based Generative Model of Voice F,,0,, Contours for Statistical Phrase/Accent Command Estimation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170488.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-1-3|PAPER Tue-O-4-1-3 — Direct Modeling of Frequency Spectra and Waveform Generation Based on Phase Recovery for DNN-Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Direct Modeling of Frequency Spectra and Waveform Generation Based on Phase Recovery for DNN-Based Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170970.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-10-4|PAPER Tue-O-4-10-4 — Sequence-to-Sequence Voice Conversion with Similarity Metric Learned Using Generative Adversarial Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sequence-to-Sequence Voice Conversion with Similarity Metric Learned Using Generative Adversarial Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171492.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-10|PAPER Tue-P-5-4-10 — Speech Enhancement Using Non-Negative Spectrogram Models with Mel-Generalized Cepstral Regularization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Enhancement Using Non-Negative Spectrogram Models with Mel-Generalized Cepstral Regularization</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170962.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-6|PAPER Wed-P-8-4-6 — Generative Adversarial Network-Based Postfilter for STFT Spectrograms]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generative Adversarial Network-Based Postfilter for STFT Spectrograms</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170651.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-1|PAPER Tue-P-4-3-1 — Online End-of-Turn Detection from Speech Based on Stacked Time-Asynchronous Sequential Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online End-of-Turn Detection from Speech Based on Stacked Time-Asynchronous Sequential Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170994.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-7|PAPER Wed-P-8-2-7 — Emotion Category Mapping to Emotional Space by Cross-Corpus Emotion Labeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotion Category Mapping to Emotional Space by Cross-Corpus Emotion Labeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170139.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-10|PAPER Mon-P-2-2-10 — Event-Related Potentials Associated with Somatosensory Effect in Audio-Visual Speech Perception]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Event-Related Potentials Associated with Somatosensory Effect in Audio-Visual Speech Perception</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170854.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-4|PAPER Wed-O-7-1-4 — Subject-Independent Classification of Japanese Spoken Sentences by Multiple Frequency Bands Phase Pattern of EEG Response During Speech Perception]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Subject-Independent Classification of Japanese Spoken Sentences by Multiple Frequency Bands Phase Pattern of EEG Response During Speech Perception</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170854.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-4|PAPER Wed-O-7-1-4 — Subject-Independent Classification of Japanese Spoken Sentences by Multiple Frequency Bands Phase Pattern of EEG Response During Speech Perception]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Subject-Independent Classification of Japanese Spoken Sentences by Multiple Frequency Bands Phase Pattern of EEG Response During Speech Perception</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170269.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-2|PAPER Wed-P-8-3-2 — Parallel Hierarchical Attention Networks with Shared Memory Reader for Multi-Stream Conversational Document Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parallel Hierarchical Attention Networks with Shared Memory Reader for Multi-Stream Conversational Document Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170107.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-1|PAPER Thu-SS-9-11-1 — Acoustic Analysis of Detailed Three-Dimensional Shape of the Human Nasal Cavity and Paranasal Sinuses]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Analysis of Detailed Three-Dimensional Shape of the Human Nasal Cavity and Paranasal Sinuses</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170079.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-6-4|PAPER Tue-O-3-6-4 — Iterative Optimal Preemphasis for Improved Glottal-Flow Estimation by Iterative Adaptive Inverse Filtering]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Iterative Optimal Preemphasis for Improved Glottal-Flow Estimation by Iterative Adaptive Inverse Filtering</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170631.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-4-11-3|PAPER Tue-SS-4-11-3 — Motion Analysis in Vocalized Surprise Expressions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Motion Analysis in Vocalized Surprise Expressions</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-6|PAPER Tue-P-4-3-6 — Turn-Taking Estimation Model Based on Joint Embedding of Lexical and Prosodic Contents]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Turn-Taking Estimation Model Based on Joint Embedding of Lexical and Prosodic Contents</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170247.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-10-1|PAPER Tue-O-4-10-1 — Voice Conversion Using Sequence-to-Sequence Learning of Context Posterior Probabilities]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Conversion Using Sequence-to-Sequence Learning of Context Posterior Probabilities</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170362.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-2|PAPER Thu-P-9-4-2 — Sampling-Based Speech Parameter Generation Using Moment-Matching Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sampling-Based Speech Parameter Generation Using Moment-Matching Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170893.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-5|PAPER Wed-P-6-3-5 — Incorporating Acoustic Features for Spontaneous Speech Driven Content Retrieval]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incorporating Acoustic Features for Spontaneous Speech Driven Content Retrieval</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170247.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-10-1|PAPER Tue-O-4-10-1 — Voice Conversion Using Sequence-to-Sequence Learning of Context Posterior Probabilities]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Conversion Using Sequence-to-Sequence Learning of Context Posterior Probabilities</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170107.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-1|PAPER Thu-SS-9-11-1 — Acoustic Analysis of Detailed Three-Dimensional Shape of the Human Nasal Cavity and Paranasal Sinuses]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Analysis of Detailed Three-Dimensional Shape of the Human Nasal Cavity and Paranasal Sinuses</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170669.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-4|PAPER Mon-P-2-4-4 — Global Syllable Vectors for Building TTS Front-End with Deep Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Global Syllable Vectors for Building TTS Front-End with Deep Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170553.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-3|PAPER Wed-P-6-2-3 — Conditional Generative Adversarial Nets Classifier for Spoken Language Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Conditional Generative Adversarial Nets Classifier for Spoken Language Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170731.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-2-3|PAPER Thu-O-10-2-3 — i-Vector Transformation Using a Novel Discriminative Denoising Autoencoder for Noise-Robust Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">i-Vector Transformation Using a Novel Discriminative Denoising Autoencoder for Noise-Robust Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170783.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-6|PAPER Mon-P-2-3-6 — Deep Least Squares Regression for Speaker Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Least Squares Regression for Speaker Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171157.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-5|PAPER Tue-P-5-4-5 — Real-Time Modulation Enhancement of Temporal Envelopes for Increasing Speech Intelligibility]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Modulation Enhancement of Temporal Envelopes for Increasing Speech Intelligibility</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172037.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-B-4|PAPER Tue-S&T-3-B-4 — Low-Frequency Ultrasonic Communication for Speech Broadcasting in Public Transportation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low-Frequency Ultrasonic Communication for Speech Broadcasting in Public Transportation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170940.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-11|PAPER Tue-P-5-3-11 — Multiple Sound Source Counting and Localization Based on Spatial Principal Eigenvector]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multiple Sound Source Counting and Localization Based on Spatial Principal Eigenvector</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170883.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-4|PAPER Tue-P-3-1-4 — Adversarial Network Bottleneck Features for Noise Robust Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Network Bottleneck Features for Noise Robust Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170876.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-12|PAPER Wed-P-7-2-12 — A Preliminary Phonetic Investigation of Alphabetic Words in Mandarin Chinese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Preliminary Phonetic Investigation of Alphabetic Words in Mandarin Chinese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171210.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-6-6|PAPER Mon-O-2-6-6 — A Preliminary Study of Prosodic Disambiguation by Chinese EFL Learners]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Preliminary Study of Prosodic Disambiguation by Chinese EFL Learners</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170876.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-12|PAPER Wed-P-7-2-12 — A Preliminary Phonetic Investigation of Alphabetic Words in Mandarin Chinese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Preliminary Phonetic Investigation of Alphabetic Words in Mandarin Chinese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170188.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-4-5|PAPER Tue-O-4-4-5 — Detecting Overlapped Speech on Short Timeframes Using Deep Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting Overlapped Speech on Short Timeframes Using Deep Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170725.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-12|PAPER Tue-P-4-3-12 — Hierarchical LSTMs with Joint Learning for Estimating Customer Satisfaction from Contact Center Calls]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hierarchical LSTMs with Joint Learning for Estimating Customer Satisfaction from Contact Center Calls</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171680.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-3|PAPER Mon-P-1-4-3 — Phone Duration Modeling for LVCSR Using Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phone Duration Modeling for LVCSR Using Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171680.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-3|PAPER Mon-P-1-4-3 — Phone Duration Modeling for LVCSR Using Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phone Duration Modeling for LVCSR Using Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171514.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-6-3|PAPER Tue-O-5-6-3 — Acoustic Properties of Canonical and Non-Canonical Stress in French, Turkish, Armenian and Brazilian Portuguese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Properties of Canonical and Non-Canonical Stress in French, Turkish, Armenian and Brazilian Portuguese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170781.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-7|PAPER Mon-P-1-1-7 — Wavelet Speech Enhancement Based on Robust Principal Component Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Wavelet Speech Enhancement Based on Robust Principal Component Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170781.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-7|PAPER Mon-P-1-1-7 — Wavelet Speech Enhancement Based on Robust Principal Component Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Wavelet Speech Enhancement Based on Robust Principal Component Analysis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-1|PAPER Tue-P-5-4-1 — A Post-Filtering Approach Based on Locally Linear Embedding Difference Compensation for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Post-Filtering Approach Based on Locally Linear Embedding Difference Compensation for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170612.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-4|PAPER Wed-P-6-3-4 — Exploring the Use of Significant Words Language Modeling for Spoken Document Retrieval]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring the Use of Significant Words Language Modeling for Spoken Document Retrieval</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170063.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-1|PAPER Wed-P-8-4-1 — Voice Conversion from Unaligned Corpora Using Variational Autoencoding Wasserstein Generative Adversarial Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Conversion from Unaligned Corpora Using Variational Autoencoding Wasserstein Generative Adversarial Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170221.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-1-6|PAPER Thu-O-9-1-6 — Discriminative Autoencoders for Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discriminative Autoencoders for Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-1|PAPER Tue-P-5-4-1 — A Post-Filtering Approach Based on Locally Linear Embedding Difference Compensation for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Post-Filtering Approach Based on Locally Linear Embedding Difference Compensation for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170063.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-1|PAPER Wed-P-8-4-1 — Voice Conversion from Unaligned Corpora Using Variational Autoencoding Wasserstein Generative Adversarial Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Conversion from Unaligned Corpora Using Variational Autoencoding Wasserstein Generative Adversarial Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170562.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-4|PAPER Wed-P-8-2-4 — Modeling Perceivers Neural-Responses Using Lobe-Dependent Convolutional Neural Network to Improve Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Perceivers Neural-Responses Using Lobe-Dependent Convolutional Neural Network to Improve Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170866.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-11|PAPER Wed-P-7-3-11 — An Audio Based Piano Performance Evaluation Method Using Deep Neural Network Based Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Audio Based Piano Performance Evaluation Method Using Deep Neural Network Based Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170329.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-12|PAPER Tue-P-5-2-12 — The Frequency Range of “The Ling Six Sounds” in Standard Chinese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Frequency Range of “The Ling Six Sounds” in Standard Chinese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171088.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-5|PAPER Mon-O-1-2-5 — Multimodal Prediction of Affective Dimensions via Fusing Multiple Regression Techniques]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multimodal Prediction of Affective Dimensions via Fusing Multiple Regression Techniques</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171227.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-13|PAPER Tue-P-5-3-13 — Unmixing Convolutive Mixtures by Exploiting Amplitude Co-Modulation: Methods and Evaluation on Mandarin Speech Recordings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unmixing Convolutive Mixtures by Exploiting Amplitude Co-Modulation: Methods and Evaluation on Mandarin Speech Recordings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170880.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-2|PAPER Wed-SS-7-1-2 — Very Low Resource Radio Browsing for Agile Developmental and Humanitarian Monitoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Very Low Resource Radio Browsing for Agile Developmental and Humanitarian Monitoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170754.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-10|PAPER Tue-P-5-3-10 — Improving Source Separation via Multi-Speaker Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Source Separation via Multi-Speaker Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-2|PAPER Tue-P-5-4-2 — Multi-Target Ensemble Learning for Monaural Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Target Ensemble Learning for Monaural Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170221.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-1-6|PAPER Thu-O-9-1-6 — Discriminative Autoencoders for Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discriminative Autoencoders for Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170862.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-6|PAPER Wed-P-6-3-6 — Order-Preserving Abstractive Summarization for Spoken Content Based on Connectionist Temporal Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Order-Preserving Abstractive Summarization for Spoken Content Based on Connectionist Temporal Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170877.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-5|PAPER Thu-O-10-11-5 — Gate Activation Signal Analysis for Gated Recurrent Neural Networks and its Correlation with Phoneme Boundaries]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Gate Activation Signal Analysis for Gated Recurrent Neural Networks and its Correlation with Phoneme Boundaries</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171794.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-9|PAPER Thu-SS-9-10-9 — An Integrated Solution for Snoring Sound Classification Using Bhattacharyya Distance Based GMM Supervectors with SVM, Feature Selection with Random Forest and Spectrogram with CNN]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Integrated Solution for Snoring Sound Classification Using Bhattacharyya Distance Based GMM Supervectors with SVM, Feature Selection with Random Forest and Spectrogram with CNN</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170685.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-2|PAPER Thu-O-10-11-2 — Data Augmentation, Missing Feature Mask and Kernel Classification for Through-the-Wall Acoustic Surveillance]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Data Augmentation, Missing Feature Mask and Kernel Classification for Through-the-Wall Acoustic Surveillance</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170101.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-2|PAPER Wed-P-7-3-2 — Audio Scene Classification with Deep Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Scene Classification with Deep Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170923.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-5|PAPER Wed-P-6-2-5 — Speaker Clustering by Iteratively Finding Discriminative Feature Space and Cluster Labels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Clustering by Iteratively Finding Discriminative Feature Space and Cluster Labels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170783.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-6|PAPER Mon-P-2-3-6 — Deep Least Squares Regression for Speaker Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Least Squares Regression for Speaker Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-6|PAPER Wed-O-7-2-6 — Robust Speech Recognition via Anchor Word Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Speech Recognition via Anchor Word Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170894.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-8|PAPER Thu-P-9-4-8 — Predicting Head Pose from Speech with a Conditional Variational Autoencoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Head Pose from Speech with a Conditional Variational Autoencoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171583.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-7|PAPER Mon-P-1-4-7 — Hierarchical Constrained Bayesian Optimization for Feature, Acoustic Model and Decoder Parameter Optimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hierarchical Constrained Bayesian Optimization for Feature, Acoustic Model and Decoder Parameter Optimization</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171695.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-1-6|PAPER Tue-O-5-1-6 — Deep Learning-Based Telephony Speech Recognition in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Learning-Based Telephony Speech Recognition in the Wild</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171326.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-6-1|PAPER Wed-O-7-6-1 — An End-to-End Trainable Neural Network Model with Belief Tracking for Task-Oriented Dialog]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An End-to-End Trainable Neural Network Model with Belief Tracking for Task-Oriented Dialog</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171536.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-8|PAPER Thu-P-9-1-8 — End-to-End Speech Recognition with Auditory Attention for Multi-Microphone Distance Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Speech Recognition with Auditory Attention for Multi-Microphone Distance Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170044.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-2|PAPER Wed-O-7-10-2 — End-to-End Language Identification Using High-Order Utterance Representation with Bilinear Pooling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Language Identification Using High-Order Utterance Representation with Bilinear Pooling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171671.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-1|PAPER Mon-P-1-4-1 — Rescoring-Aware Beam Search for Reduced Search Errors in Contextual Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rescoring-Aware Beam Search for Reduced Search Errors in Contextual Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172040.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-B-6|PAPER Mon-S&T-2-B-6 — Visual Learning 2: Pronunciation App Using Ultrasound, Video, and MRI]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visual Learning 2: Pronunciation App Using Ultrasound, Video, and MRI</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170084.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-6|PAPER Wed-P-6-2-6 — Domain Adaptation of PLDA Models in Broadcast Diarization by Means of Unsupervised Speaker Clustering]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain Adaptation of PLDA Models in Broadcast Diarization by Means of Unsupervised Speaker Clustering</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171174.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-3|PAPER Tue-O-5-8-3 — Off-Topic Spoken Response Detection Using Siamese Convolutional Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Off-Topic Spoken Response Detection Using Siamese Convolutional Neural Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170388.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-4|PAPER Wed-P-6-1-4 — Off-Topic Spoken Response Detection with Word Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Off-Topic Spoken Response Detection with Word Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172038.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-S&T-9-A-3|PAPER Thu-S&T-9-A-3 — Remote Articulation Test System Based on WebRTC]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Articulation Test System Based on WebRTC</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-12|PAPER Tue-P-3-1-12 — Joint Training of Expanded End-to-End DNN for Text-Dependent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Training of Expanded End-to-End DNN for Text-Dependent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171363.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-6|PAPER Tue-P-5-2-6 — Automatic Prediction of Speech Evaluation Metrics for Dysarthric Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Prediction of Speech Evaluation Metrics for Dysarthric Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-6|PAPER Wed-O-6-10-6 — Dynamic Layer Normalization for Adaptive Neural Acoustic Modeling in Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Layer Normalization for Adaptive Neural Acoustic Modeling in Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}}
</p></div>
{{Author Index Link Row}}
|cpborderless|k
|cpaidxlinkrowtable|k
|<$link to="AUTHOR LIST — A"><div class="cpaidxlinkrowstyle">A</div></$link>|<$link to="AUTHOR LIST — B"><div class="cpaidxlinkrowstyle">B</div></$link>|<$link to="AUTHOR LIST — C"><div class="cpaidxlinkrowstyle">C</div></$link>|<$link to="AUTHOR LIST — D"><div class="cpaidxlinkrowstyle">D</div></$link>|<$link to="AUTHOR LIST — E"><div class="cpaidxlinkrowstyle">E</div></$link>|<$link to="AUTHOR LIST — F"><div class="cpaidxlinkrowstyle">F</div></$link>|<$link to="AUTHOR LIST — G"><div class="cpaidxlinkrowstyle">G</div></$link>|<$link to="AUTHOR LIST — H"><div class="cpaidxlinkrowstyle">H</div></$link>|<$link to="AUTHOR LIST — I"><div class="cpaidxlinkrowstyle">I</div></$link>|<$link to="AUTHOR LIST — J"><div class="cpaidxlinkrowstyle">J</div></$link>|<$link to="AUTHOR LIST — K"><div class="cpaidxlinkrowstyle">K</div></$link>|<$link to="AUTHOR LIST — L"><div class="cpaidxlinkrowstyle">L</div></$link>|<$link to="AUTHOR LIST — M"><div class="cpaidxlinkrowstyle">M</div></$link>|
|<$link to="AUTHOR LIST — N"><div class="cpaidxlinkrowstyle">N</div></$link>|<$link to="AUTHOR LIST — O"><div class="cpaidxlinkrowstyle">O</div></$link>|<$link to="AUTHOR LIST — P"><div class="cpaidxlinkrowstyle">P</div></$link>|<$link to="AUTHOR LIST — Q"><div class="cpaidxlinkrowstyle">Q</div></$link>|<$link to="AUTHOR LIST — R"><div class="cpaidxlinkrowstyle">R</div></$link>|<$link to="AUTHOR LIST — S"><div class="cpaidxlinkrowstyle">S</div></$link>|<$link to="AUTHOR LIST — T"><div class="cpaidxlinkrowstyle">T</div></$link>|<$link to="AUTHOR LIST — U"><div class="cpaidxlinkrowstyle">U</div></$link>|<$link to="AUTHOR LIST — V"><div class="cpaidxlinkrowstyle">V</div></$link>|<$link to="AUTHOR LIST — W"><div class="cpaidxlinkrowstyle">W</div></$link>|<$link to="AUTHOR LIST — X"><div class="cpaidxlinkrowstyle">X</div></$link>|<$link to="AUTHOR LIST — Y"><div class="cpaidxlinkrowstyle">Y</div></$link>|<$link to="AUTHOR LIST — Z"><div class="cpaidxlinkrowstyle">Z</div></$link>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172045.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-B-3|PAPER Mon-S&T-2-B-3 — System for Speech Transcription and Post-Editing in Microsoft Word]]</div>|^<div class="cpauthorindexpersoncardpapertitle">System for Speech Transcription and Post-Editing in Microsoft Word</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170057.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-6-1|PAPER Thu-O-9-6-1 — Deep Recurrent Neural Network Based Monaural Speech Separation Using Recurrent Temporal Restricted Boltzmann Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Recurrent Neural Network Based Monaural Speech Separation Using Recurrent Temporal Restricted Boltzmann Machines</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171304.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-6-6|PAPER Mon-O-1-6-6 — Nature of Contrast and Coarticulation: Evidence from Mizo Tones and Assamese Vowel Harmony]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nature of Contrast and Coarticulation: Evidence from Mizo Tones and Assamese Vowel Harmony</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170903.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-11|PAPER Wed-SS-7-1-11 — Building an ASR Corpus Using Althingi’s Parliamentary Speeches]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Building an ASR Corpus Using Althingi’s Parliamentary Speeches</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170936.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-3|PAPER Mon-O-1-10-3 — An HMM/DNN Comparison for Synchronized Text-to-Speech and Tongue Motion Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An HMM/DNN Comparison for Synchronized Text-to-Speech and Tongue Motion Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171042.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-4-11-5|PAPER Tue-SS-4-11-5 — A Computational Model for Phonetically Responsive Spoken Dialogue Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Computational Model for Phonetically Responsive Spoken Dialogue Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171433.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-6|PAPER Thu-O-10-8-6 — Shadowing Synthesized Speech — Segmental Analysis of Phonetic Convergence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Shadowing Synthesized Speech — Segmental Analysis of Phonetic Convergence</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171032.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-13|PAPER Tue-P-4-3-13 — Domain-Independent User Satisfaction Reward Estimation for Dialogue Policy Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Independent User Satisfaction Reward Estimation for Dialogue Policy Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170982.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-6|PAPER Mon-P-1-2-6 — Audio Classification Using Class-Specific Learned Descriptors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Classification Using Class-Specific Learned Descriptors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-4|PAPER Thu-SS-10-10-4 — Exploring Fusion Methods and Feature Space for the Classification of Paralinguistic Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Fusion Methods and Feature Space for the Classification of Paralinguistic Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170792.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-5|PAPER Mon-P-1-2-5 — Enhanced Feature Extraction for Speech Detection in Media Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enhanced Feature Extraction for Speech Detection in Media Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171433.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-6|PAPER Thu-O-10-8-6 — Shadowing Synthesized Speech — Segmental Analysis of Phonetic Convergence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Shadowing Synthesized Speech — Segmental Analysis of Phonetic Convergence</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170061.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-4|PAPER Wed-O-7-2-4 — Coupled Initialization of Multi-Channel Non-Negative Matrix Factorization Based on Spatial and Spectral Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coupled Initialization of Multi-Channel Non-Negative Matrix Factorization Based on Spatial and Spectral Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171535.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-6-2|PAPER Tue-O-3-6-2 — Cross-Speaker Variation in Voice Source Correlates of Focus and Deaccentuation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Speaker Variation in Voice Source Correlates of Focus and Deaccentuation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171181.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-5|PAPER Tue-O-4-8-5 — Voice-to-Affect Mapping: Inferences on Language Voice Baseline Settings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice-to-Affect Mapping: Inferences on Language Voice Baseline Settings</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171448.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-8|PAPER Thu-P-9-3-8 — R,,d,, as a Control Parameter to Explore Affective Correlates of the Tense-Lax Continuum]]</div>|^<div class="cpauthorindexpersoncardpapertitle">R,,d,, as a Control Parameter to Explore Affective Correlates of the Tense-Lax Continuum</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172031.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-2|PAPER Wed-S&T-6-B-2 — A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171514.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-6-3|PAPER Tue-O-5-6-3 — Acoustic Properties of Canonical and Non-Canonical Stress in French, Turkish, Armenian and Brazilian Portuguese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Properties of Canonical and Non-Canonical Stress in French, Turkish, Armenian and Brazilian Portuguese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171304.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-6-6|PAPER Mon-O-1-6-6 — Nature of Contrast and Coarticulation: Evidence from Mizo Tones and Assamese Vowel Harmony]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nature of Contrast and Coarticulation: Evidence from Mizo Tones and Assamese Vowel Harmony</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171287.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-2|PAPER Wed-SS-6-11-2 — The LENA System Applied to Swedish: Reliability of the Adult Word Count Estimate]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The LENA System Applied to Swedish: Reliability of the Adult Word Count Estimate</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171732.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-8|PAPER Tue-SS-5-11-8 — A Semi-Supervised Learning Approach for Acoustic-Prosodic Personality Perception in Under-Resourced Domains]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Semi-Supervised Learning Approach for Acoustic-Prosodic Personality Perception in Under-Resourced Domains</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171538.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-11|PAPER Wed-P-8-4-11 — Segment Level Voice Conversion with Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Segment Level Voice Conversion with Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170119.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-3|PAPER Wed-P-7-3-3 — Automatic Time-Frequency Analysis of Echolocation Signals Using the Matched Gaussian Multitaper Spectrogram]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Time-Frequency Analysis of Echolocation Signals Using the Matched Gaussian Multitaper Spectrogram</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170708.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-5|PAPER Mon-P-2-4-5 — Prosody Control of Utterance Sequence for Information Delivering]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosody Control of Utterance Sequence for Information Delivering</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170492.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-2-4|PAPER Thu-O-9-2-4 — Estimating Speaker Clustering Quality Using Logistic Regression]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Estimating Speaker Clustering Quality Using Logistic Regression</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170668.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-10|PAPER Tue-P-3-2-10 — Domain Mismatch Modeling of Out-Domain i-Vectors for PLDA Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain Mismatch Modeling of Out-Domain i-Vectors for PLDA Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171446.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-5|PAPER Wed-O-6-10-5 — Semi-Supervised Learning with Semantic Knowledge Extraction for Improved Speech Recognition in Air Traffic Control]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised Learning with Semantic Knowledge Extraction for Improved Speech Recognition in Air Traffic Control</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170753.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-6-6|PAPER Wed-O-7-6-6 — Hyperarticulation of Corrections in Multilingual Dialogue Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hyperarticulation of Corrections in Multilingual Dialogue Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171367.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-3|PAPER Wed-P-6-3-3 — Fast and Accurate OOV Decoder on High-Level Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fast and Accurate OOV Decoder on High-Level Features</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171212.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-3|PAPER Thu-O-9-4-3 — The STC Keyword Search System for OpenKWS 2016 Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The STC Keyword Search System for OpenKWS 2016 Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170928.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-12|PAPER Wed-SS-7-1-12 — Implementation of a Radiology Speech Recognition System for Estonian Using Open Source Software]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Implementation of a Radiology Speech Recognition System for Estonian Using Open Source Software</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-5|PAPER Mon-O-2-10-5 — Acoustic Modeling for Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Modeling for Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171791.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-10|PAPER Thu-P-9-1-10 — Adaptive Multichannel Dereverberation for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Multichannel Dereverberation for Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170763.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-6-5|PAPER Mon-O-2-6-5 — Effects of Pitch Fall and L1 on Vowel Length Identification in L2 Japanese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Pitch Fall and L1 on Vowel Length Identification in L2 Japanese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171712.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-6|PAPER Wed-O-8-8-6 — Speech Processing Approach for Diagnosing Dementia in an Early Stage]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Processing Approach for Diagnosing Dementia in an Early Stage</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170215.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-3|PAPER Wed-SS-6-2-3 — The Motivation and Development of MPAi, a Māori Pronunciation Aid]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Motivation and Development of MPAi, a Māori Pronunciation Aid</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171078.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-6|PAPER Mon-O-2-2-6 — Convolutional Neural Network to Model Articulation Impairments in Patients with Parkinson’s Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Convolutional Neural Network to Model Articulation Impairments in Patients with Parkinson’s Disease</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170416.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-7|PAPER Tue-P-5-2-7 — Apkinson — A Mobile Monitoring Solution for Parkinson’s Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Apkinson — A Mobile Monitoring Solution for Parkinson’s Disease</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171334.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-1|PAPER Wed-O-7-10-1 — Spoken Language Identification Using LSTM-Based Angular Proximity]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spoken Language Identification Using LSTM-Based Angular Proximity</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170216.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-3|PAPER Wed-P-7-4-3 — Cross-Database Models for the Classification of Dysarthria Presence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Database Models for the Classification of Dysarthria Presence</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171429.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-7|PAPER Mon-SS-1-11-7 — Metrics for Modeling Code-Switching Across Corpora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Metrics for Modeling Code-Switching Across Corpora</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170736.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-10-6|PAPER Tue-O-3-10-6 — Towards Speech Emotion Recognition “in the Wild” Using Aggregated Corpora and Deep Multi-Task Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Speech Emotion Recognition “in the Wild” Using Aggregated Corpora and Deep Multi-Task Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170477.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-2|PAPER Tue-P-4-1-2 — Residual LSTM: Design of a Deep Recurrent Architecture for Distant Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Residual LSTM: Design of a Deep Recurrent Architecture for Distant Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170922.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-4|PAPER Thu-O-10-8-4 — The Social Life of Setswana Ejectives]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Social Life of Setswana Ejectives</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171498.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-5|PAPER Tue-O-5-2-5 — Analysis and Description of ABC Submission to NIST SRE 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Description of ABC Submission to NIST SRE 2016</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171575.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-9|PAPER Tue-P-3-1-9 — Deep Speaker Embeddings for Short-Duration Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Speaker Embeddings for Short-Duration Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-2-5|PAPER Thu-O-10-2-5 — Speaker Verification Under Adverse Conditions Using i-Vector Adaptation and Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Verification Under Adverse Conditions Using i-Vector Adaptation and Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171160.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-7|PAPER Mon-P-1-2-7 — Hidden Markov Model Variational Autoencoder for Acoustic Unit Discovery]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hidden Markov Model Variational Autoencoder for Acoustic Unit Discovery</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170783.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-6|PAPER Mon-P-2-3-6 — Deep Least Squares Regression for Speaker Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Least Squares Regression for Speaker Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170458.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-6|PAPER Tue-O-5-2-6 — The 2016 NIST Speaker Recognition Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The 2016 NIST Speaker Recognition Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170532.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-10|PAPER Mon-P-2-1-10 — Misperceptions of the Emotional Content of Natural and Vocoded Speech in a Car]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Misperceptions of the Emotional Content of Natural and Vocoded Speech in a Car</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170171.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-1|PAPER Thu-P-9-4-1 — Principles for Learning Controllable TTS from Annotated and Latent Variation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Principles for Learning Controllable TTS from Annotated and Latent Variation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170776.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-6|PAPER Mon-SS-1-8-6 — Audio Replay Attack Detection Using High-Frequency Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Replay Attack Detection Using High-Frequency Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-5|PAPER Tue-S&T-3-A-5 — Speech Recognition and Understanding on Hardware-Accelerated DSP]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Recognition and Understanding on Hardware-Accelerated DSP</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171107.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-1-6|PAPER Tue-O-4-1-6 — Google’s Next-Generation Real-Time Unit-Selection Synthesizer Using Sequence-to-Sequence LSTM-Based Autoencoders]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Google’s Next-Generation Real-Time Unit-Selection Synthesizer Using Sequence-to-Sequence LSTM-Based Autoencoders</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172024.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-3|PAPER Wed-S&T-6-A-3 — WebSubDub — Experimental System for Creating High-Quality Alternative Audio Track for TV Broadcasting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WebSubDub — Experimental System for Creating High-Quality Alternative Audio Track for TV Broadcasting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS173002.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-K2-1|PAPER Tue-K2-1 — Dialogue as Collaborative Problem Solving]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dialogue as Collaborative Problem Solving</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170218.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-2|PAPER Wed-P-8-2-2 — Attention Networks for Modeling Behaviors in Addiction Counseling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention Networks for Modeling Behaviors in Addiction Counseling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170349.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-10-2|PAPER Tue-O-4-10-2 — Learning Latent Representations for Speech Generation and Transformation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Latent Representations for Speech Generation and Transformation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171391.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-6|PAPER Wed-O-7-10-6 — QMDIS: QCRI-MIT Advanced Dialect Identification System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">QMDIS: QCRI-MIT Advanced Dialect Identification System</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170485.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-9|PAPER Wed-P-7-3-9 — An Environmental Feature Representation for Robust Speech Recognition and for Environment Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Environmental Feature Representation for Robust Speech Recognition and for Environment Identification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170422.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-4|PAPER Wed-P-8-3-4 — Character-Based Embedding Models and Reranking Strategies for Understanding Natural Language Meal Descriptions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Character-Based Embedding Models and Reranking Strategies for Understanding Natural Language Meal Descriptions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170802.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-7|PAPER Thu-P-9-4-7 — Evaluation of a Silent Speech Interface Based on Magnetic Sensing and Deep Learning for a Phonetically Rich Vocabulary]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluation of a Silent Speech Interface Based on Magnetic Sensing and Deep Learning for a Phonetically Rich Vocabulary</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170122.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-7|PAPER Mon-P-2-2-7 — Audiovisual Recalibration of Vowel Categories]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audiovisual Recalibration of Vowel Categories</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171698.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-13|PAPER Tue-P-5-2-13 — Production of Sustained Vowels and Categorical Perception of Tones in Mandarin Among Cochlear-Implanted Children]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Production of Sustained Vowels and Categorical Perception of Tones in Mandarin Among Cochlear-Implanted Children</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171585.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-7|PAPER Wed-P-8-1-7 — Canonical Correlation Analysis and Prediction of Perceived Rhythmic Prominences and Pitch Tones in Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Canonical Correlation Analysis and Prediction of Perceived Rhythmic Prominences and Pitch Tones in Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-7|PAPER Thu-P-9-3-7 — Developing an Embosi (Bantu C25) Speech Variant Dictionary to Model Vowel Elision and Morpheme Deletion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Developing an Embosi (Bantu C25) Speech Variant Dictionary to Model Vowel Elision and Morpheme Deletion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-4|PAPER Mon-P-2-3-4 — 2016 BUT Babel System: Multilingual BLSTM Acoustic Model with i-Vector Based Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">2016 BUT Babel System: Multilingual BLSTM Acoustic Model with i-Vector Based Adaptation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170803.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-7|PAPER Tue-P-3-2-7 — Analysis of Score Normalization in Multilingual Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of Score Normalization in Multilingual Speaker Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-8|PAPER Tue-P-3-2-8 — Alternative Approaches to Neural Network Based Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alternative Approaches to Neural Network Based Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171385.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-2|PAPER Thu-O-10-1-2 — Semi-Supervised DNN Training with Word Selection for ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised DNN Training with Word Selection for ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170343.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-4|PAPER Mon-P-1-4-4 — Towards Better Decoding and Language Model Integration in Sequence to Sequence Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Better Decoding and Language Model Integration in Sequence to Sequence Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170503.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-4-1|PAPER Wed-O-8-4-1 — Sequence-to-Sequence Models Can Directly Translate Foreign Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sequence-to-Sequence Models Can Directly Translate Foreign Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171607.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-4|PAPER Tue-P-5-1-4 — A Data-Driven Approach for Perceptually Validated Acoustic Features for Children’s Sibilant Fricative Productions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Data-Driven Approach for Perceptually Validated Acoustic Features for Children’s Sibilant Fricative Productions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171084.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-4-3|PAPER Mon-O-1-4-3 — A Delay-Flexible Stereo Acoustic Echo Cancellation for DFT-Based In-Car Communication (ICC) Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Delay-Flexible Stereo Acoustic Echo Cancellation for DFT-Based In-Car Communication (ICC) Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170762.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-8|PAPER Tue-P-5-2-8 — Dysprosody Differentiate Between Parkinson’s Disease, Progressive Supranuclear Palsy, and Multiple System Atrophy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dysprosody Differentiate Between Parkinson’s Disease, Progressive Supranuclear Palsy, and Multiple System Atrophy</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-6|PAPER Wed-P-6-4-6 — On the Use of Band Importance Weighting in the Short-Time Objective Intelligibility Measure]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Use of Band Importance Weighting in the Short-Time Objective Intelligibility Measure</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171520.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-8-11-7|PAPER Wed-SS-8-11-7 — Pitch Convergence as an Effect of Perceived Attractiveness and Likability]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Pitch Convergence as an Effect of Perceived Attractiveness and Likability</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171683.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-2|PAPER Mon-P-1-4-2 — Comparison of Decoding Strategies for CTC Acoustic Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Decoding Strategies for CTC Acoustic Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171320.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-4-5|PAPER Wed-O-8-4-5 — NMT-Based Segmentation and Punctuation Insertion for Real-Time Spoken Language Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">NMT-Based Segmentation and Punctuation Insertion for Real-Time Spoken Language Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171158.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-2-4|PAPER Tue-O-4-2-4 — Humans do not Maximize the Probability of Correct Decision When Recognizing DANTALE Words in Noise]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Humans do not Maximize the Probability of Correct Decision When Recognizing DANTALE Words in Noise</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170762.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-8|PAPER Tue-P-5-2-8 — Dysprosody Differentiate Between Parkinson’s Disease, Progressive Supranuclear Palsy, and Multiple System Atrophy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dysprosody Differentiate Between Parkinson’s Disease, Progressive Supranuclear Palsy, and Multiple System Atrophy</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170381.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-4|PAPER Wed-P-7-4-4 — Acoustic Evaluation of Nasality in Cerebellar Syndromes]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Evaluation of Nasality in Cerebellar Syndromes</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171087.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-13|PAPER Wed-P-6-3-13 — A Relevance Score Estimation for Spoken Term Detection Based on RNN-Generated Pronunciation Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Relevance Score Estimation for Spoken Term Detection Based on RNN-Generated Pronunciation Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171093.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-6|PAPER Wed-O-7-4-6 — Topic Identification for Speech Without ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Topic Identification for Speech Without ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171087.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-13|PAPER Wed-P-6-3-13 — A Relevance Score Estimation for Spoken Term Detection Based on RNN-Generated Pronunciation Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Relevance Score Estimation for Spoken Term Detection Based on RNN-Generated Pronunciation Embeddings</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170601.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-2|PAPER Thu-O-9-4-2 — The Kaldi OpenKWS System: Improving Low Resource Keyword Search]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Kaldi OpenKWS System: Improving Low Resource Keyword Search</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171762.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-6|PAPER Mon-O-1-10-6 — Integrating Articulatory Information in Deep Learning-Based Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Integrating Articulatory Information in Deep Learning-Based Text-to-Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-6|PAPER Wed-P-7-2-6 — Effect of Formant and F0 Discontinuity on Perceived Vowel Duration: Impacts for Concatenative Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effect of Formant and F0 Discontinuity on Perceived Vowel Duration: Impacts for Concatenative Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171503.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-5|PAPER Thu-P-9-3-5 — Stability of Prosodic Characteristics Across Age and Gender Groups]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stability of Prosodic Characteristics Across Age and Gender Groups</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170122.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-7|PAPER Mon-P-2-2-7 — Audiovisual Recalibration of Vowel Categories]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audiovisual Recalibration of Vowel Categories</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171634.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-11-5|PAPER Wed-SS-7-11-5 — Approximating Phonotactic Input in Children’s Linguistic Environments from Orthographic Transcripts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Approximating Phonotactic Input in Children’s Linguistic Environments from Orthographic Transcripts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171528.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-6|PAPER Tue-O-3-8-6 — Increasing Recall of Lengthening Detection via Semi-Automatic Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Increasing Recall of Lengthening Detection via Semi-Automatic Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170375.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-11|PAPER Mon-P-2-1-11 — The Relative Cueing Power of F0 and Duration in German Prominence Perception]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Relative Cueing Power of F0 and Duration in German Prominence Perception</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171279.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-14|PAPER Wed-P-8-1-14 — The Perception of English Intonation Patterns by German L2 Speakers of English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Perception of English Intonation Patterns by German L2 Speakers of English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171160.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-7|PAPER Mon-P-1-2-7 — Hidden Markov Model Variational Autoencoder for Acoustic Unit Discovery]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hidden Markov Model Variational Autoencoder for Acoustic Unit Discovery</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170608.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-5|PAPER Mon-P-2-2-5 — Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170291.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-1|PAPER Wed-P-8-1-1 — Trisyllabic Tone 3 Sandhi Patterns in Mandarin Produced by Cantonese Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Trisyllabic Tone 3 Sandhi Patterns in Mandarin Produced by Cantonese Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170710.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-6|PAPER Wed-P-8-1-6 — Using Prosody to Classify Discourse Relations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Prosody to Classify Discourse Relations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-2|PAPER Thu-SS-9-10-2 — Description of the Upper Respiratory Tract Infection Corpus (URTIC)]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Description of the Upper Respiratory Tract Infection Corpus (URTIC)</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170875.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-4|PAPER Thu-SS-9-11-4 — Waveform Patterns in Pitch Glides Near a Vocal Tract Resonance]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Waveform Patterns in Pitch Glides Near a Vocal Tract Resonance</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171544.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-6|PAPER Mon-O-1-1-6 — Comparing Human and Machine Errors in Conversational Speech Transcription]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing Human and Machine Errors in Conversational Speech Transcription</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170765.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-2|PAPER Wed-P-7-2-2 — Temporal Dynamics of Lateral Channel Formation in /l/: 3D EMA Data from Australian English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Temporal Dynamics of Lateral Channel Formation in /l/: 3D EMA Data from Australian English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170159.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-10-1|PAPER Tue-O-5-10-1 — Inferring Stance from Prosody]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Inferring Stance from Prosody</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171740.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-11|PAPER Tue-P-5-2-11 — Prediction of Speech Delay from Acoustic Measurements]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prediction of Speech Delay from Acoustic Measurements</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172054.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-S&T-9-A-4|PAPER Thu-S&T-9-A-4 — The ModelTalker Project: A Web-Based Voice Banking Pipeline for ALS/MND Patients]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The ModelTalker Project: A Web-Based Voice Banking Pipeline for ALS/MND Patients</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170407.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-7|PAPER Wed-P-6-2-7 — LSTM Neural Network-Based Speaker Segmentation Using Acoustic and Language Modelling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LSTM Neural Network-Based Speaker Segmentation Using Acoustic and Language Modelling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172039.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-B-5|PAPER Tue-S&T-3-B-5 — Real-Time Speech Enhancement with GCC-NMF: Demonstration on the Raspberry Pi and NVIDIA Jetson]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Speech Enhancement with GCC-NMF: Demonstration on the Raspberry Pi and NVIDIA Jetson</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171458.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-6-4|PAPER Wed-O-8-6-4 — Real-Time Speech Enhancement with GCC-NMF]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Speech Enhancement with GCC-NMF</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171311.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-8|PAPER Wed-P-6-2-8 — Acoustic Pairing of Original and Dubbed Voices in the Context of Video Game Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Pairing of Original and Dubbed Voices in the Context of Video Game Localization</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170152.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-9|PAPER Wed-P-6-2-9 — Homogeneity Measure Impact on Target and Non-Target Trials in Forensic Voice Comparison]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Homogeneity Measure Impact on Target and Non-Target Trials in Forensic Voice Comparison</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-12|PAPER Tue-P-3-1-12 — Joint Training of Expanded End-to-End DNN for Text-Dependent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Training of Expanded End-to-End DNN for Text-Dependent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170666.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-4|PAPER Thu-O-10-11-4 — Deep Learning Techniques in Tandem with Signal Processing Cues for Phonetic Segmentation for Text to Speech Synthesis in Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Learning Techniques in Tandem with Signal Processing Cues for Phonetic Segmentation for Text to Speech Synthesis in Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170948.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-5|PAPER Mon-P-2-1-5 — The Effect of Spectral Profile on the Intelligibility of Emotional Speech in Noise]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Effect of Spectral Profile on the Intelligibility of Emotional Speech in Noise</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171513.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-1|PAPER Mon-O-1-1-1 — Improved Single System Conversational Telephone Speech Recognition with VGG Bottleneck Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Single System Conversational Telephone Speech Recognition with VGG Bottleneck Features</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171058.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-4|PAPER Mon-O-1-1-4 — Improving Deliverable Speech-to-Text Systems with Multilingual Knowledge Transfer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Deliverable Speech-to-Text Systems with Multilingual Knowledge Transfer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170781.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-7|PAPER Mon-P-1-1-7 — Wavelet Speech Enhancement Based on Robust Principal Component Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Wavelet Speech Enhancement Based on Robust Principal Component Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170301.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-1|PAPER Mon-SS-1-11-1 — Longitudinal Speaker Clustering and Verification Corpus with Code-Switching Frisian-Dutch Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Longitudinal Speaker Clustering and Verification Corpus with Code-Switching Frisian-Dutch Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170832.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-4-4|PAPER Tue-O-4-4-4 — Variational Recurrent Neural Networks for Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Recurrent Neural Networks for Speech Separation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170856.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-1-4|PAPER Tue-O-5-1-4 — Stochastic Recurrent Neural Network for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stochastic Recurrent Neural Network for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170892.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-1|PAPER Thu-O-10-1-1 — Deep Neural Factorization for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Neural Factorization for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170369.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-11|PAPER Tue-P-5-1-11 — Lexical Adaptation to a Novel Accent in German: A Comparison Between German, Swedish, and Finnish Listeners]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lexical Adaptation to a Novel Accent in German: A Comparison Between German, Swedish, and Finnish Listeners</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171634.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-11-5|PAPER Wed-SS-7-11-5 — Approximating Phonotactic Input in Children’s Linguistic Environments from Orthographic Transcripts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Approximating Phonotactic Input in Children’s Linguistic Environments from Orthographic Transcripts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170650.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-12|PAPER Wed-P-6-3-12 — Improving Speech Recognizers by Refining Broadcast Data with Inaccurate Subtitle Timestamps]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Speech Recognizers by Refining Broadcast Data with Inaccurate Subtitle Timestamps</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170792.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-5|PAPER Mon-P-1-2-5 — Enhanced Feature Extraction for Speech Detection in Media Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enhanced Feature Extraction for Speech Detection in Media Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170145.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-2|PAPER Mon-O-1-1-2 — Student-Teacher Training with Diverse Decision Tree Ensembles]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Student-Teacher Training with Diverse Decision Tree Ensembles</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170754.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-10|PAPER Tue-P-5-3-10 — Improving Source Separation via Multi-Speaker Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Source Separation via Multi-Speaker Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171158.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-2-4|PAPER Tue-O-4-2-4 — Humans do not Maximize the Probability of Correct Decision When Recognizing DANTALE Words in Noise]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Humans do not Maximize the Probability of Correct Decision When Recognizing DANTALE Words in Noise</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-6|PAPER Wed-P-6-4-6 — On the Use of Band Importance Weighting in the Short-Time Objective Intelligibility Measure]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Use of Band Importance Weighting in the Short-Time Objective Intelligibility Measure</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-1|PAPER Mon-O-2-2-1 — Dominant Distortion Classification for Pre-Processing of Vowels in Remote Biomedical Voice Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dominant Distortion Classification for Pre-Processing of Vowels in Remote Biomedical Voice Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170144.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-4-6|PAPER Tue-O-3-4-6 — DNN Bottleneck Features for Speaker Clustering]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DNN Bottleneck Features for Speaker Clustering</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171498.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-5|PAPER Tue-O-5-2-5 — Analysis and Description of ABC Submission to NIST SRE 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Description of ABC Submission to NIST SRE 2016</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171018.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-4-2|PAPER Tue-O-3-4-2 — Tied Variational Autoencoder Backends for i-Vector Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tied Variational Autoencoder Backends for i-Vector Speaker Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170084.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-6|PAPER Wed-P-6-2-6 — Domain Adaptation of PLDA Models in Broadcast Diarization by Means of Unsupervised Speaker Clustering]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain Adaptation of PLDA Models in Broadcast Diarization by Means of Unsupervised Speaker Clustering</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172047.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-B-4|PAPER Mon-S&T-2-B-4 — Emojive! Collecting Emotion Data from Speech and Facial Expression Using Mobile Game App]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emojive! Collecting Emotion Data from Speech and Facial Expression Using Mobile Game App</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171328.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-1|PAPER Thu-O-9-4-1 — A Rescoring Approach for Keyword Search Using Lattice Context Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Rescoring Approach for Keyword Search Using Lattice Context Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170614.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-4|PAPER Thu-O-10-1-4 — Efficient Knowledge Distillation from an Ensemble of Teachers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Efficient Knowledge Distillation from an Ensemble of Teachers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170074.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-1|PAPER Mon-P-1-2-1 — Multilingual i-Vector Based Statistical Modeling for Music Genre Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multilingual i-Vector Based Statistical Modeling for Music Genre Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170619.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-1|PAPER Tue-O-4-8-1 — Speech Emotion Recognition with Emotion-Pair Based Framework Considering Emotion Distribution Information in Dimensional Emotion Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Emotion Recognition with Emotion-Pair Based Framework Considering Emotion Distribution Information in Dimensional Emotion Space</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170765.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-2|PAPER Wed-P-7-2-2 — Temporal Dynamics of Lateral Channel Formation in /l/: 3D EMA Data from Australian English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Temporal Dynamics of Lateral Channel Formation in /l/: 3D EMA Data from Australian English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170229.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-6|PAPER Tue-P-5-3-6 — Speaker Direction-of-Arrival Estimation Based on Frequency-Independent Beampattern]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Direction-of-Arrival Estimation Based on Frequency-Independent Beampattern</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171152.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-9|PAPER Mon-P-2-4-9 — Comparison of Modeling Target in LSTM-RNN Duration Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Modeling Target in LSTM-RNN Duration Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170395.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-7|PAPER Wed-P-7-3-7 — Matrix of Polynomials Model Based Polynomial Dictionary Learning Method for Acoustic Impulse Response Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Matrix of Polynomials Model Based Polynomial Dictionary Learning Method for Acoustic Impulse Response Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170997.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-11|PAPER Wed-P-6-2-11 — The Opensesame NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Opensesame NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170266.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-6|PAPER Tue-P-3-1-6 — Incorporating Local Acoustic Variability Information into Short Duration Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incorporating Local Acoustic Variability Information into Short Duration Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-12|PAPER Thu-P-9-4-12 — Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171079.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-9|PAPER Wed-P-6-1-9 — Distilling Knowledge from an Ensemble of Models for Punctuation Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Distilling Knowledge from an Ensemble of Models for Punctuation Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-7|PAPER Mon-P-2-4-7 — Investigating Efficient Feature Representation Methods and Training Objective for BLSTM-Based Phone Duration Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Efficient Feature Representation Methods and Training Objective for BLSTM-Based Phone Duration Prediction</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171079.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-9|PAPER Wed-P-6-1-9 — Distilling Knowledge from an Ensemble of Models for Punctuation Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Distilling Knowledge from an Ensemble of Models for Punctuation Prediction</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170222.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-5|PAPER Wed-P-7-3-5 — A Domain Knowledge-Assisted Nonlinear Model for Head-Related Transfer Functions Based on Bottleneck Deep Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Domain Knowledge-Assisted Nonlinear Model for Head-Related Transfer Functions Based on Bottleneck Deep Neural Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171578.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-9|PAPER Wed-P-8-1-9 — Creaky Voice as a Function of Tonal Categories and Prosodic Boundaries]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Creaky Voice as a Function of Tonal Categories and Prosodic Boundaries</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171741.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-6|PAPER Wed-O-7-1-6 — A Neuro-Experimental Evidence for the Motor Theory of Speech Perception]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Neuro-Experimental Evidence for the Motor Theory of Speech Perception</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171698.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-13|PAPER Tue-P-5-2-13 — Production of Sustained Vowels and Categorical Perception of Tones in Mandarin Among Cochlear-Implanted Children]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Production of Sustained Vowels and Categorical Perception of Tones in Mandarin Among Cochlear-Implanted Children</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170694.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-4|PAPER Wed-P-8-4-4 — Denoising Recurrent Neural Network for Deep Bidirectional LSTM Based Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Denoising Recurrent Neural Network for Deep Bidirectional LSTM Based Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170055.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-2|PAPER Tue-P-5-3-2 — Time Delay Histogram Based Speech Source Separation Using a Planar Array]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time Delay Histogram Based Speech Source Separation Using a Planar Array</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170291.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-1|PAPER Wed-P-8-1-1 — Trisyllabic Tone 3 Sandhi Patterns in Mandarin Produced by Cantonese Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Trisyllabic Tone 3 Sandhi Patterns in Mandarin Produced by Cantonese Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170455.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-3|PAPER Wed-O-8-8-3 — Improving Child Speech Disorder Assessment by Incorporating Out-of-Domain Adult Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Child Speech Disorder Assessment by Incorporating Out-of-Domain Adult Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-9|PAPER Tue-P-5-4-9 — A Fully Convolutional Neural Network for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Fully Convolutional Neural Network for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170213.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-4|PAPER Wed-P-7-3-4 — Classification-Based Detection of Glottal Closure Instants from Speech Signals]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Classification-Based Detection of Glottal Closure Instants from Speech Signals</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172024.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-3|PAPER Wed-S&T-6-A-3 — WebSubDub — Experimental System for Creating High-Quality Alternative Audio Track for TV Broadcasting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WebSubDub — Experimental System for Creating High-Quality Alternative Audio Track for TV Broadcasting</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172026.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-4|PAPER Wed-S&T-6-A-4 — Voice Conservation and TTS System for People Facing Total Laryngectomy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Conservation and TTS System for People Facing Total Laryngectomy</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170669.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-4|PAPER Mon-P-2-4-4 — Global Syllable Vectors for Building TTS Front-End with Deep Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Global Syllable Vectors for Building TTS Front-End with Deep Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170395.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-7|PAPER Wed-P-7-3-7 — Matrix of Polynomials Model Based Polynomial Dictionary Learning Method for Acoustic Impulse Response Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Matrix of Polynomials Model Based Polynomial Dictionary Learning Method for Acoustic Impulse Response Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170866.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-11|PAPER Wed-P-7-3-11 — An Audio Based Piano Performance Evaluation Method Using Deep Neural Network Based Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Audio Based Piano Performance Evaluation Method Using Deep Neural Network Based Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170029.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-6-5|PAPER Tue-O-5-6-5 — On the Duration of Mandarin Tones]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Duration of Mandarin Tones</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171036.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-7|PAPER Tue-P-3-1-7 — DNN i-Vector Speaker Verification with Short, Text-Constrained Test Utterances]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DNN i-Vector Speaker Verification with Short, Text-Constrained Test Utterances</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170984.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-8|PAPER Wed-P-8-4-8 — Emotional Voice Conversion with Adaptive Scales F0 Based on Wavelet Transform Using Limited Amount of Emotional Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotional Voice Conversion with Adaptive Scales F0 Based on Wavelet Transform Using Limited Amount of Emotional Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171267.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-6|PAPER Mon-P-2-2-6 — The Influence on Realization and Perception of Lexical Tones from Affricate’s Aspiration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Influence on Realization and Perception of Lexical Tones from Affricate’s Aspiration</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171235.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-3|PAPER Wed-P-8-1-3 — Reanalyze Fundamental Frequency Peak Delay in Mandarin]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reanalyze Fundamental Frequency Peak Delay in Mandarin</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170440.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-3|PAPER Mon-P-1-2-3 — Attention Based CLDNNs for Short-Duration Acoustic Scene Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention Based CLDNNs for Short-Duration Acoustic Scene Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170430.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-2-1|PAPER Thu-O-10-2-1 — CNN-Based Joint Mapping of Short and Long Utterance i-Vectors for Speaker Verification Using Short Utterances]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CNN-Based Joint Mapping of Short and Long Utterance i-Vectors for Speaker Verification Using Short Utterances</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170519.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-1|PAPER Wed-O-6-10-1 — Large-Scale Domain Adaptation via Teacher-Student Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Large-Scale Domain Adaptation via Teacher-Student Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170515.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-6-3|PAPER Thu-O-9-6-3 — Improving Mask Learning Based Speech Enhancement System with Restoration Layers and Residual Connection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Mask Learning Based Speech Enhancement System with Restoration Layers and Residual Connection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170633.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-1|PAPER Tue-P-3-2-1 — Speaker Verification via Estimating Total Variability Space Using Probabilistic Partial Least Squares]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Verification via Estimating Total Variability Space Using Probabilistic Partial Least Squares</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170762.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-8|PAPER Tue-P-5-2-8 — Dysprosody Differentiate Between Parkinson’s Disease, Progressive Supranuclear Palsy, and Multiple System Atrophy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dysprosody Differentiate Between Parkinson’s Disease, Progressive Supranuclear Palsy, and Multiple System Atrophy</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170381.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-4|PAPER Wed-P-7-4-4 — Acoustic Evaluation of Nasality in Cerebellar Syndromes]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Evaluation of Nasality in Cerebellar Syndromes</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170726.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-4-3|PAPER Mon-O-2-4-3 — Time-Frequency Coherence for Periodic-Aperiodic Decomposition of Speech Signals]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time-Frequency Coherence for Periodic-Aperiodic Decomposition of Speech Signals</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171138.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-4-3|PAPER Wed-O-6-4-3 — A Spectro-Temporal Demodulation Technique for Pitch Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Spectro-Temporal Demodulation Technique for Pitch Estimation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170842.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-3|PAPER Wed-O-7-1-3 — Discovering Language in Marmoset Vocalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discovering Language in Marmoset Vocalization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171365.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-10|PAPER Mon-P-2-3-10 — Factorised Representations for Neural Network Adaptation to Diverse Acoustic Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Factorised Representations for Neural Network Adaptation to Diverse Acoustic Environments</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172052.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-A-5|PAPER Mon-S&T-2-A-5 — A System for Real Time Collaborative Transcription Correction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A System for Real Time Collaborative Transcription Correction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-5|PAPER Tue-S&T-3-A-5 — Speech Recognition and Understanding on Hardware-Accelerated DSP]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Recognition and Understanding on Hardware-Accelerated DSP</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171436.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-1|PAPER Wed-O-7-8-1 — Multitask Sequence-to-Sequence Models for Grapheme-to-Phoneme Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multitask Sequence-to-Sequence Models for Grapheme-to-Phoneme Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171507.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-11|PAPER Mon-P-2-4-11 — Synthesising Uncertainty: The Interplay of Vocal Effort and Hesitation Disfluencies]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Synthesising Uncertainty: The Interplay of Vocal Effort and Hesitation Disfluencies</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170926.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-5|PAPER Tue-SS-3-11-5 — Crowd-Sourced Design of Artificial Attentive Listeners]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Crowd-Sourced Design of Artificial Attentive Listeners</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171355.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-5|PAPER Tue-O-3-8-5 — Controlling Prominence Realisation in Parametric DNN-Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Controlling Prominence Realisation in Parametric DNN-Based Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171428.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-6-5|PAPER Thu-O-9-6-5 — SEGAN: Speech Enhancement Generative Adversarial Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SEGAN: Speech Enhancement Generative Adversarial Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171732.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-8|PAPER Tue-SS-5-11-8 — A Semi-Supervised Learning Approach for Acoustic-Prosodic Personality Perception in Under-Resourced Domains]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Semi-Supervised Learning Approach for Acoustic-Prosodic Personality Perception in Under-Resourced Domains</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170325.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-1|PAPER Mon-O-1-10-1 — The Influence of Synthetic Voice on the Evaluation of a Virtual Character]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Influence of Synthetic Voice on the Evaluation of a Virtual Character</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170112.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-1|PAPER Wed-P-7-4-1 — Manual and Automatic Transcriptions in Dementia Detection from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Manual and Automatic Transcriptions in Dementia Detection from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170157.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-10|PAPER Tue-P-3-1-10 — Using Voice Quality Features to Improve Short-Utterance, Text-Independent Speaker Verification Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Voice Quality Features to Improve Short-Utterance, Text-Independent Speaker Verification Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-5|PAPER Mon-O-2-10-5 — Acoustic Modeling for Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Modeling for Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171791.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-10|PAPER Thu-P-9-1-10 — Adaptive Multichannel Dereverberation for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Multichannel Dereverberation for Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171737.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-5|PAPER Tue-P-4-1-5 — Convolutional Recurrent Neural Networks for Small-Footprint Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Convolutional Recurrent Neural Networks for Small-Footprint Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171239.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-5|PAPER Thu-SS-9-11-5 — A Unified Numerical Simulation of Vowel Production That Comprises Phonation and the Emitted Sound]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Unified Numerical Simulation of Vowel Production That Comprises Phonation and the Emitted Sound</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171239.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-5|PAPER Thu-SS-9-11-5 — A Unified Numerical Simulation of Vowel Production That Comprises Phonation and the Emitted Sound]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Unified Numerical Simulation of Vowel Production That Comprises Phonation and the Emitted Sound</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171498.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-5|PAPER Tue-O-5-2-5 — Analysis and Description of ABC Submission to NIST SRE 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Description of ABC Submission to NIST SRE 2016</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171052.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-13|PAPER Tue-P-5-1-13 — Articulation Rate in Swedish Child-Directed Speech Increases as a Function of the Age of the Child Even When Surprisal is Controlled for]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulation Rate in Swedish Child-Directed Speech Increases as a Function of the Age of the Child Even When Surprisal is Controlled for</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171110.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-15|PAPER Tue-P-5-1-15 — MMN Responses in Adults After Exposure to Bimodal and Unimodal Frequency Distributions of Rotated Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MMN Responses in Adults After Exposure to Bimodal and Unimodal Frequency Distributions of Rotated Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172031.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-2|PAPER Wed-S&T-6-B-2 — A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171052.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-13|PAPER Tue-P-5-1-13 — Articulation Rate in Swedish Child-Directed Speech Increases as a Function of the Age of the Child Even When Surprisal is Controlled for]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulation Rate in Swedish Child-Directed Speech Increases as a Function of the Age of the Child Even When Surprisal is Controlled for</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171488.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-2-6|PAPER Tue-O-3-2-6 — Test-Retest Repeatability of Articulatory Strategies Using Real-Time Magnetic Resonance Imaging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Test-Retest Repeatability of Articulatory Strategies Using Real-Time Magnetic Resonance Imaging</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171066.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-7|PAPER Thu-SS-9-10-7 — Infected Phonemes: How a Cold Impairs Speech on a Phonetic Level]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Infected Phonemes: How a Cold Impairs Speech on a Phonetic Level</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170036.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-1|PAPER Wed-P-6-4-1 — Predicting Automatic Speech Recognition Performance Over Communication Channels from Instrumental Speech Quality and Intelligibility Scores]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Automatic Speech Recognition Performance Over Communication Channels from Instrumental Speech Quality and Intelligibility Scores</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171637.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-10-3|PAPER Tue-O-3-10-3 — Progressive Neural Networks for Transfer Learning in Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Progressive Neural Networks for Transfer Learning in Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170078.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-4-4|PAPER Mon-O-1-4-4 — Speech Enhancement Based on Harmonic Estimation Combined with MMSE to Improve Speech Intelligibility for Cochlear Implant Recipients]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Enhancement Based on Harmonic Estimation Combined with MMSE to Improve Speech Intelligibility for Cochlear Implant Recipients</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170398.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-6|PAPER Mon-O-2-10-6 — On Multi-Domain Training and Adaptation of End-to-End RNN Acoustic Models for Distant Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Multi-Domain Training and Adaptation of End-to-End RNN Acoustic Models for Distant Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171182.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-4-3|PAPER Tue-O-3-4-3 — Improved Gender Independent Speaker Recognition Using Convolutional Neural Network Based Bottleneck Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Gender Independent Speaker Recognition Using Convolutional Neural Network Based Bottleneck Features</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170555.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-4|PAPER Tue-O-5-2-4 — UTD-CRSS Systems for 2016 NIST Speaker Recognition Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">UTD-CRSS Systems for 2016 NIST Speaker Recognition Evaluation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171760.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-16|PAPER Tue-P-5-3-16 — Speech Detection and Enhancement Using Single Microphone for Distant Speech Applications in Reverberant Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Detection and Enhancement Using Single Microphone for Distant Speech Applications in Reverberant Environments</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170576.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-3|PAPER Wed-O-7-10-3 — Dialect Recognition Based on Unsupervised Bottleneck Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dialect Recognition Based on Unsupervised Bottleneck Features</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171778.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-13|PAPER Wed-P-6-1-13 — Multi-Channel Apollo Mission Speech Transcripts Calibration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Channel Apollo Mission Speech Transcripts Calibration</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-13|PAPER Wed-P-6-2-13 — Locally Weighted Linear Discriminant Analysis for Robust Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Locally Weighted Linear Discriminant Analysis for Robust Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170431.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-8|PAPER Wed-P-7-3-8 — Acoustic Scene Classification Using a CNN-SuperVector System Trained with Auditory and Spectrogram Image Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Scene Classification Using a CNN-SuperVector System Trained with Auditory and Spectrogram Image Features</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171199.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-2-2|PAPER Thu-O-10-2-2 — Curriculum Learning Based Probabilistic Linear Discriminant Analysis for Noise Robust Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Curriculum Learning Based Probabilistic Linear Discriminant Analysis for Noise Robust Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170880.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-2|PAPER Wed-SS-7-1-2 — Very Low Resource Radio Browsing for Agile Developmental and Humanitarian Monitoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Very Low Resource Radio Browsing for Agile Developmental and Humanitarian Monitoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170210.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-2|PAPER Mon-P-1-1-2 — Robust Source-Filter Separation of Speech Signal in the Phase Domain]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Source-Filter Separation of Speech Signal in the Phase Domain</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171257.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-8|PAPER Tue-P-5-4-8 — Binary Mask Estimation Strategies for Constrained Imputation-Based Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Binary Mask Estimation Strategies for Constrained Imputation-Based Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170211.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-5|PAPER Wed-O-7-2-5 — Channel Compensation in the Generalised Vector Taylor Series Approach to Robust ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Channel Compensation in the Generalised Vector Taylor Series Approach to Robust ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170138.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-4|PAPER Mon-O-2-2-4 — Objective Severity Assessment from Disordered Voice Using Estimated Glottal Airflow]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Objective Severity Assessment from Disordered Voice Using Estimated Glottal Airflow</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170903.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-11|PAPER Wed-SS-7-1-11 — Building an ASR Corpus Using Althingi’s Parliamentary Speeches]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Building an ASR Corpus Using Althingi’s Parliamentary Speeches</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171352.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-13|PAPER Wed-SS-7-1-13 — Building ASR Corpora Using Eyra]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Building ASR Corpora Using Eyra</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-4|PAPER Thu-SS-10-10-4 — Exploring Fusion Methods and Feature Space for the Classification of Paralinguistic Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Fusion Methods and Feature Space for the Classification of Paralinguistic Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170926.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-5|PAPER Tue-SS-3-11-5 — Crowd-Sourced Design of Artificial Attentive Listeners]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Crowd-Sourced Design of Artificial Attentive Listeners</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171355.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-5|PAPER Tue-O-3-8-5 — Controlling Prominence Realisation in Parametric DNN-Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Controlling Prominence Realisation in Parametric DNN-Based Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-1|PAPER Tue-P-5-2-1 — Float Like a Butterfly Sting Like a Bee: Changes in Speech Preceded Parkinsonism Diagnosis for Muhammad Ali]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Float Like a Butterfly Sting Like a Bee: Changes in Speech Preceded Parkinsonism Diagnosis for Muhammad Ali</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170061.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-4|PAPER Wed-O-7-2-4 — Coupled Initialization of Multi-Channel Non-Negative Matrix Factorization Based on Spatial and Spectral Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coupled Initialization of Multi-Channel Non-Negative Matrix Factorization Based on Spatial and Spectral Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-1|PAPER Wed-SS-6-2-1 — Team ELISA System for DARPA LORELEI Speech Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team ELISA System for DARPA LORELEI Speech Evaluation 2016</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171521.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-6|PAPER Wed-O-8-1-6 — Call My Net Corpus: A Multilingual Corpus for Evaluation of Speaker Recognition Technology]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Call My Net Corpus: A Multilingual Corpus for Evaluation of Speaker Recognition Technology</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171576.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-4|PAPER Wed-O-6-1-4 — Speaker-Specific Biomechanical Model-Based Investigation of a Simple Speech Task Based on Tagged-MRI]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker-Specific Biomechanical Model-Based Investigation of a Simple Speech Task Based on Tagged-MRI</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171742.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-13|PAPER Mon-P-2-1-13 — Sociophonetic Realizations Guide Subsequent Lexical Access]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sociophonetic Realizations Guide Subsequent Lexical Access</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170950.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-5|PAPER Wed-O-8-1-5 — VoxCeleb: A Large-Scale Speaker Identification Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VoxCeleb: A Large-Scale Speaker Identification Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170478.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-3|PAPER Tue-P-5-2-3 — Classification of Bulbar ALS from Kinematic Features of the Jaw and Lips: Towards Computer-Mediated Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Classification of Bulbar ALS from Kinematic Features of the Jaw and Lips: Towards Computer-Mediated Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171420.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-10|PAPER Thu-P-9-4-10 — A Neural Parametric Singing Synthesizer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Neural Parametric Singing Synthesizer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170424.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-6-5|PAPER Wed-O-6-6-5 — The Role of Linguistic and Prosodic Cues on the Prediction of Self-Reported Satisfaction in Contact Centre Phone Calls]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Role of Linguistic and Prosodic Cues on the Prediction of Self-Reported Satisfaction in Contact Centre Phone Calls</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170181.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-2-1|PAPER Tue-O-3-2-1 — Functional Principal Component Analysis of Vocal Tract Area Functions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Functional Principal Component Analysis of Vocal Tract Area Functions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171314.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-4|PAPER Wed-P-6-2-4 — Tied Hidden Factors in Neural Networks for End-to-End Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tied Hidden Factors in Neural Networks for End-to-End Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171522.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-5|PAPER Tue-O-5-8-5 — Detection of Mispronunciations and Disfluencies in Children Reading Aloud]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Mispronunciations and Disfluencies in Children Reading Aloud</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171541.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-3|PAPER Wed-P-6-1-3 — Automatic Evaluation of Children Reading Aloud on Sentences and Pseudowords]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Evaluation of Children Reading Aloud on Sentences and Pseudowords</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171308.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-2|PAPER Tue-SS-3-11-2 — Robustness Over Time-Varying Channels in DNN-HMM ASR Based Human-Robot Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robustness Over Time-Varying Channels in DNN-HMM ASR Based Human-Robot Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170802.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-7|PAPER Thu-P-9-4-7 — Evaluation of a Silent Speech Interface Based on Magnetic Sensing and Deep Learning for a Phonetically Rich Vocabulary]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluation of a Silent Speech Interface Based on Magnetic Sensing and Deep Learning for a Phonetically Rich Vocabulary</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170407.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-7|PAPER Wed-P-6-2-7 — LSTM Neural Network-Based Speaker Segmentation Using Acoustic and Language Modelling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LSTM Neural Network-Based Speaker Segmentation Using Acoustic and Language Modelling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171308.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-2|PAPER Tue-SS-3-11-2 — Robustness Over Time-Varying Channels in DNN-HMM ASR Based Human-Robot Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robustness Over Time-Varying Channels in DNN-HMM ASR Based Human-Robot Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-5|PAPER Tue-S&T-3-A-5 — Speech Recognition and Understanding on Hardware-Accelerated DSP]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Recognition and Understanding on Hardware-Accelerated DSP</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171087.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-13|PAPER Wed-P-6-3-13 — A Relevance Score Estimation for Spoken Term Detection Based on RNN-Generated Pronunciation Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Relevance Score Estimation for Spoken Term Detection Based on RNN-Generated Pronunciation Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170119.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-3|PAPER Wed-P-7-3-3 — Automatic Time-Frequency Analysis of Echolocation Signals Using the Matched Gaussian Multitaper Spectrogram]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Time-Frequency Analysis of Echolocation Signals Using the Matched Gaussian Multitaper Spectrogram</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170870.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-6-5|PAPER Tue-O-3-6-5 — Automatic Measurement of Pre-Aspiration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Measurement of Pre-Aspiration</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171117.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-6|PAPER Wed-O-7-8-6 — Learning Similarity Functions for Pronunciation Variations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Similarity Functions for Pronunciation Variations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171438.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-5|PAPER Mon-O-1-10-5 — Beyond the Listening Test: An Interactive Approach to TTS Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Beyond the Listening Test: An Interactive Approach to TTS Evaluation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171507.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-11|PAPER Mon-P-2-4-11 — Synthesising Uncertainty: The Interplay of Vocal Effort and Hesitation Disfluencies]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Synthesising Uncertainty: The Interplay of Vocal Effort and Hesitation Disfluencies</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170926.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-5|PAPER Tue-SS-3-11-5 — Crowd-Sourced Design of Artificial Attentive Listeners]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Crowd-Sourced Design of Artificial Attentive Listeners</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171398.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-16|PAPER Wed-SS-7-1-16 — Nativization of Foreign Names in TTS for Automatic Reading of World News in Swahili]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nativization of Foreign Names in TTS for Automatic Reading of World News in Swahili</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171429.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-7|PAPER Mon-SS-1-11-7 — Metrics for Modeling Code-Switching Across Corpora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Metrics for Modeling Code-Switching Across Corpora</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171308.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-2|PAPER Tue-SS-3-11-2 — Robustness Over Time-Varying Channels in DNN-HMM ASR Based Human-Robot Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robustness Over Time-Varying Channels in DNN-HMM ASR Based Human-Robot Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170301.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-1|PAPER Mon-SS-1-11-1 — Longitudinal Speaker Clustering and Verification Corpus with Code-Switching Frisian-Dutch Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Longitudinal Speaker Clustering and Verification Corpus with Code-Switching Frisian-Dutch Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170450.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-2|PAPER Mon-SS-1-8-2 — Experimental Analysis of Features for Replay Attack Detection — Results on the ASVspoof 2017 Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Experimental Analysis of Features for Replay Attack Detection — Results on the ASVspoof 2017 Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170124.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-6-6|PAPER Wed-O-6-6-6 — Cross-Linguistic Study of the Production of Turn-Taking Cues in American English and Argentine Spanish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Linguistic Study of the Production of Turn-Taking Cues in American English and Argentine Spanish</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170710.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-6|PAPER Wed-P-8-1-6 — Using Prosody to Classify Discourse Relations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Prosody to Classify Discourse Relations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171308.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-2|PAPER Tue-SS-3-11-2 — Robustness Over Time-Varying Channels in DNN-HMM ASR Based Human-Robot Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robustness Over Time-Varying Channels in DNN-HMM ASR Based Human-Robot Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170819.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-3|PAPER Mon-O-2-2-3 — Evaluation of the Neurological State of People with Parkinson’s Disease Using i-Vectors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluation of the Neurological State of People with Parkinson’s Disease Using i-Vectors</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171078.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-6|PAPER Mon-O-2-2-6 — Convolutional Neural Network to Model Articulation Impairments in Patients with Parkinson’s Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Convolutional Neural Network to Model Articulation Impairments in Patients with Parkinson’s Disease</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170416.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-7|PAPER Tue-P-5-2-7 — Apkinson — A Mobile Monitoring Solution for Parkinson’s Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Apkinson — A Mobile Monitoring Solution for Parkinson’s Disease</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171353.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-6-5|PAPER Tue-O-4-6-5 — Pashto Intonation Patterns]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Pashto Intonation Patterns</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170194.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-8|PAPER Mon-P-2-2-8 — The Effect of Gesture on Persuasive Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Effect of Gesture on Persuasive Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171663.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-9|PAPER Mon-SS-1-11-9 — Crowdsourcing Universal Part-of-Speech Tags for Code-Switching]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Crowdsourcing Universal Part-of-Speech Tags for Code-Switching</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-10-6|PAPER Tue-O-5-10-6 — Hybrid Acoustic-Lexical Deep Learning Approach for Deception Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hybrid Acoustic-Lexical Deep Learning Approach for Deception Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-4|PAPER Thu-P-9-4-4 — Utterance Selection for Optimizing Intelligibility of TTS Voices Trained on ASR Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Utterance Selection for Optimizing Intelligibility of TTS Voices Trained on ASR Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170179.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-2|PAPER Mon-O-1-2-2 — Acoustic-Prosodic and Physiological Response to Stressful Interactions in Children with Autism Spectrum Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic-Prosodic and Physiological Response to Stressful Interactions in Children with Autism Spectrum Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171593.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-4|PAPER Tue-P-4-3-4 — Towards Deep End-of-Turn Prediction for Situated Spoken Dialogue Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Deep End-of-Turn Prediction for Situated Spoken Dialogue Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-1|PAPER Tue-P-5-2-1 — Float Like a Butterfly Sting Like a Bee: Changes in Speech Preceded Parkinsonism Diagnosis for Muhammad Ali]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Float Like a Butterfly Sting Like a Bee: Changes in Speech Preceded Parkinsonism Diagnosis for Muhammad Ali</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171222.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-9|PAPER Tue-P-5-2-9 — Interpretable Objective Assessment of Dysarthric Speech Based on Deep Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Interpretable Objective Assessment of Dysarthric Speech Based on Deep Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170070.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-4-2|PAPER Mon-O-2-4-2 — An Investigation of Crowd Speech for Room Occupancy Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Investigation of Crowd Speech for Room Occupancy Estimation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171223.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-1|PAPER Tue-SS-3-11-1 — Elicitation Design for Acoustic Depression Classification: An Investigation of Articulation Effort, Linguistic Complexity, and Word Affect]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Elicitation Design for Acoustic Depression Classification: An Investigation of Articulation Effort, Linguistic Complexity, and Word Affect</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170512.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-3|PAPER Tue-O-4-8-3 — An Investigation of Emotion Prediction Uncertainty Using Gaussian Mixture Regression]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Investigation of Emotion Prediction Uncertainty Using Gaussian Mixture Regression</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170286.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-2|PAPER Wed-P-6-2-2 — Bidirectional Modelling for Short Duration Language Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bidirectional Modelling for Short Duration Language Identification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171707.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-12|PAPER Wed-P-8-2-12 — An Investigation of Emotion Dynamics and Kalman Filtering for Speech-Based Emotion Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Investigation of Emotion Dynamics and Kalman Filtering for Speech-Based Emotion Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171392.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-6|PAPER Thu-P-9-3-6 — Electrophysiological Correlates of Familiar Voice Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Electrophysiological Correlates of Familiar Voice Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170152.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-9|PAPER Wed-P-6-2-9 — Homogeneity Measure Impact on Target and Non-Target Trials in Forensic Voice Comparison]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Homogeneity Measure Impact on Target and Non-Target Trials in Forensic Voice Comparison</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170623.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-4|PAPER Tue-SS-5-11-4 — Prosodic Analysis of Attention-Drawing Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Analysis of Attention-Drawing Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171464.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-6-5|PAPER Wed-O-8-6-5 — Coherence-Based Dual-Channel Noise Reduction Algorithm in a Complex Noisy Environment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coherence-Based Dual-Channel Noise Reduction Algorithm in a Complex Noisy Environment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170853.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-4|PAPER Mon-O-2-10-4 — On Design of Robust Deep Models for CHiME-4 Multi-Channel Speech Recognition with Multiple Configurations of Array Microphones]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Design of Robust Deep Models for CHiME-4 Multi-Channel Speech Recognition with Multiple Configurations of Array Microphones</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170830.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-4-1|PAPER Tue-O-4-4-1 — A Maximum Likelihood Approach to Deep Neural Network Based Nonlinear Spectral Mapping for Single-Channel Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Maximum Likelihood Approach to Deep Neural Network Based Nonlinear Spectral Mapping for Single-Channel Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170883.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-4|PAPER Tue-P-3-1-4 — Adversarial Network Bottleneck Features for Noise Robust Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Network Bottleneck Features for Noise Robust Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171762.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-6|PAPER Mon-O-1-10-6 — Integrating Articulatory Information in Deep Learning-Based Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Integrating Articulatory Information in Deep Learning-Based Text-to-Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170952.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-7|PAPER Wed-P-6-1-7 — Multiview Representation Learning via Deep CCA for Silent Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multiview Representation Learning via Deep CCA for Silent Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170281.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-4|PAPER Wed-P-6-4-4 — Intelligibilities of Mandarin Chinese Sentences with Spectral “Holes”]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Intelligibilities of Mandarin Chinese Sentences with Spectral “Holes”</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170751.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-3|PAPER Thu-O-10-1-3 — Gaussian Prediction Based Attention for Online End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Gaussian Prediction Based Attention for Online End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170549.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-4-6|PAPER Tue-O-4-4-6 — Ideal Ratio Mask Estimation Using Deep Neural Networks for Monaural Speech Segregation in Noisy Reverberant Conditions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ideal Ratio Mask Estimation Using Deep Neural Networks for Monaural Speech Segregation in Noisy Reverberant Conditions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172037.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-B-4|PAPER Tue-S&T-3-B-4 — Low-Frequency Ultrasonic Communication for Speech Broadcasting in Public Transportation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low-Frequency Ultrasonic Communication for Speech Broadcasting in Public Transportation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170291.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-1|PAPER Wed-P-8-1-1 — Trisyllabic Tone 3 Sandhi Patterns in Mandarin Produced by Cantonese Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Trisyllabic Tone 3 Sandhi Patterns in Mandarin Produced by Cantonese Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171695.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-1-6|PAPER Tue-O-5-1-6 — Deep Learning-Based Telephony Speech Recognition in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Learning-Based Telephony Speech Recognition in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170477.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-2|PAPER Tue-P-4-1-2 — Residual LSTM: Design of a Deep Recurrent Architecture for Distant Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Residual LSTM: Design of a Deep Recurrent Architecture for Distant Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-1|PAPER Mon-SS-1-8-1 — The ASVspoof 2017 Challenge: Assessing the Limits of Replay Spoofing Attack Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The ASVspoof 2017 Challenge: Assessing the Limits of Replay Spoofing Attack Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170532.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-10|PAPER Mon-P-2-1-10 — Misperceptions of the Emotional Content of Natural and Vocoded Speech in a Car]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Misperceptions of the Emotional Content of Natural and Vocoded Speech in a Car</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171340.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-10|PAPER Mon-P-2-4-10 — Learning Word Vector Representations Based on Acoustic Counts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Word Vector Representations Based on Acoustic Counts</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-1|PAPER Tue-O-3-8-1 — An RNN-Based Quantized F0 Model with Multi-Tier Feedback Links for Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An RNN-Based Quantized F0 Model with Multi-Tier Feedback Links for Text-to-Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170488.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-1-3|PAPER Tue-O-4-1-3 — Direct Modeling of Frequency Spectra and Waveform Generation Based on Phase Recovery for DNN-Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Direct Modeling of Frequency Spectra and Waveform Generation Based on Phase Recovery for DNN-Based Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170848.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-3|PAPER Tue-O-5-4-3 — Reducing Mismatch in Training of DNN-Based Glottal Excitation Models in a Statistical Parametric Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reducing Mismatch in Training of DNN-Based Glottal Excitation Models in a Statistical Parametric Text-to-Speech System</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170105.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-2|PAPER Wed-P-6-4-2 — Speech Intelligibility in Cars: The Effect of Speaking Style, Noise and Listener Age]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Intelligibility in Cars: The Effect of Speaking Style, Noise and Listener Age</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170962.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-6|PAPER Wed-P-8-4-6 — Generative Adversarial Network-Based Postfilter for STFT Spectrograms]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generative Adversarial Network-Based Postfilter for STFT Spectrograms</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170171.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-1|PAPER Thu-P-9-4-1 — Principles for Learning Controllable TTS from Annotated and Latent Variation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Principles for Learning Controllable TTS from Annotated and Latent Variation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170584.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-14|PAPER Thu-P-9-4-14 — Complex-Valued Restricted Boltzmann Machine for Direct Learning of Frequency Spectra]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Complex-Valued Restricted Boltzmann Machine for Direct Learning of Frequency Spectra</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170728.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-2|PAPER Tue-O-5-8-2 — Automatic Scoring of Shadowing Speech Based on DNN Posteriors and Their DTW]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Scoring of Shadowing Speech Based on DNN Posteriors and Their DTW</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171155.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-6-1|PAPER Tue-O-3-6-1 — Creak as a Feature of Lexical Stress in Estonian]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Creak as a Feature of Lexical Stress in Estonian</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171044.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-6-2|PAPER Tue-O-4-6-2 — Comparing Languages Using Hierarchical Prosodic Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing Languages Using Hierarchical Prosodic Analysis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170722.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-9|PAPER Wed-P-7-2-9 — Kinematic Signatures of Prosody in Lombard Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Kinematic Signatures of Prosody in Lombard Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170421.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-8-3|PAPER Thu-O-9-8-3 — Improving Speaker-Independent Lipreading with Domain-Adversarial Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Speaker-Independent Lipreading with Domain-Adversarial Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170367.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-8-11-4|PAPER Wed-SS-8-11-4 — Attractiveness of French Voices for German Listeners — Results from Native and Non-Native Read Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attractiveness of French Voices for German Listeners — Results from Native and Non-Native Read Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172059.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-B-5|PAPER Mon-S&T-2-B-5 — Mylly — The Mill: A New Platform for Processing Speech and Text Corpora Easily and Efficiently]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mylly — The Mill: A New Platform for Processing Speech and Text Corpora Easily and Efficiently</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170466.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-3|PAPER Wed-P-8-2-3 — Computational Analysis of Acoustic Descriptors in Psychotic Patients]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Computational Analysis of Acoustic Descriptors in Psychotic Patients</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171604.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-9|PAPER Tue-P-4-3-9 — Measuring Synchrony in Task-Based Dialogues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Measuring Synchrony in Task-Based Dialogues</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170922.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-4|PAPER Thu-O-10-8-4 — The Social Life of Setswana Ejectives]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Social Life of Setswana Ejectives</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171305.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-4-1|PAPER Thu-O-10-4-1 — CALYOU: A Comparable Spoken Algerian Corpus Harvested from YouTube]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CALYOU: A Comparable Spoken Algerian Corpus Harvested from YouTube</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-8|PAPER Wed-P-6-1-8 — Use of Graphemic Lexicons for Spoken Language Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Use of Graphemic Lexicons for Spoken Language Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170381.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-4|PAPER Wed-P-7-4-4 — Acoustic Evaluation of Nasality in Cerebellar Syndromes]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Evaluation of Nasality in Cerebellar Syndromes</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171374.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-15|PAPER Mon-P-2-2-15 — Accurate Synchronization of Speech and EGG Signal Using Phase Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Accurate Synchronization of Speech and EGG Signal Using Phase Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171476.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-5|PAPER Wed-SS-7-1-5 — Unsupervised Speech Signal to Symbol Transformation for Zero Resource Speech Applications]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Speech Signal to Symbol Transformation for Zero Resource Speech Applications</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171307.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-12|PAPER Wed-P-6-2-12 — IITG-Indigo System for NIST 2016 SRE Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">IITG-Indigo System for NIST 2016 SRE Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170842.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-3|PAPER Wed-O-7-1-3 — Discovering Language in Marmoset Vocalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discovering Language in Marmoset Vocalization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-8|PAPER Wed-P-6-1-8 — Use of Graphemic Lexicons for Spoken Language Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Use of Graphemic Lexicons for Spoken Language Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-6|PAPER Mon-SS-2-8-6 — SFF Anti-Spoofer: IIIT-H Submission for Automatic Speaker Verification Spoofing and Countermeasures Challenge 2017]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SFF Anti-Spoofer: IIIT-H Submission for Automatic Speaker Verification Spoofing and Countermeasures Challenge 2017</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170256.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-1|PAPER Wed-O-8-1-1 — Detection of Replay Attacks Using Single Frequency Filtering Cepstral Coefficients]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Replay Attacks Using Single Frequency Filtering Cepstral Coefficients</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171074.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-4-5|PAPER Mon-O-2-4-5 — Estimation of Place of Articulation of Fricatives from Spectral Characteristics for Speech Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Estimation of Place of Articulation of Fricatives from Spectral Characteristics for Speech Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170309.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-2|PAPER Mon-P-1-2-2 — Indoor/Outdoor Audio Classification Using Foreground Speech Segmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Indoor/Outdoor Audio Classification Using Foreground Speech Segmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170836.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-3|PAPER Wed-O-8-1-3 — Independent Modelling of High and Low Energy Speech Frames for Spoofing Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Independent Modelling of High and Low Energy Speech Frames for Spoofing Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171343.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-6|PAPER Mon-P-1-4-6 — Binary Deep Neural Networks for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Binary Deep Neural Networks for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171144.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-8|PAPER Mon-P-2-4-8 — Discrete Duration Model for Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discrete Duration Model for Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171152.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-9|PAPER Mon-P-2-4-9 — Comparison of Modeling Target in LSTM-RNN Duration Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Modeling Target in LSTM-RNN Duration Model</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171125.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-5|PAPER Tue-P-3-1-5 — What Does the Speaker Embedding Encode?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">What Does the Speaker Embedding Encode?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171321.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-7|PAPER Wed-P-8-3-7 — Jointly Trained Sequential Labeling and Classification by Sparse Attention Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Jointly Trained Sequential Labeling and Classification by Sparse Attention Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-10-6|PAPER Tue-O-5-10-6 — Hybrid Acoustic-Lexical Deep Learning Approach for Deception Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hybrid Acoustic-Lexical Deep Learning Approach for Deception Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171377.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-4|PAPER Mon-SS-2-8-4 — Replay Attack Detection Using DNN for Channel Discrimination]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Replay Attack Detection Using DNN for Channel Discrimination</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170714.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-14|PAPER Tue-P-5-1-14 — The Relationship Between the Perception and Production of Non-Native Tones]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Relationship Between the Perception and Production of Non-Native Tones</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171672.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-13|PAPER Tue-P-5-4-13 — Speech Enhancement Using Bayesian Wavenet]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Enhancement Using Bayesian Wavenet</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-6|PAPER Wed-S&T-6-A-6 — SIAK — A Game for Foreign Language Pronunciation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SIAK — A Game for Foreign Language Pronunciation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171305.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-4-1|PAPER Thu-O-10-4-1 — CALYOU: A Comparable Spoken Algerian Corpus Harvested from YouTube]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CALYOU: A Comparable Spoken Algerian Corpus Harvested from YouTube</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170233.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-1|PAPER Tue-O-3-1-1 — A Comparison of Sequence-to-Sequence Models for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Sequence-to-Sequence Models for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171705.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-1-1|PAPER Tue-O-5-1-1 — Recurrent Neural Aligner: An Encoder-Decoder Neural Network Model for Sequence to Sequence Mapping]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Recurrent Neural Aligner: An Encoder-Decoder Neural Network Model for Sequence to Sequence Mapping</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170047.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-5|PAPER Wed-O-7-8-5 — Pronunciation Learning with RNN-Transducers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Pronunciation Learning with RNN-Transducers</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170232.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-5|PAPER Thu-O-10-1-5 — An Analysis of “Attention” in Sequence-to-Sequence Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Analysis of “Attention” in Sequence-to-Sequence Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-4-2|PAPER Wed-O-6-4-2 — Improving YANGsaf F0 Estimator with Adaptive Kalman Filter]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving YANGsaf F0 Estimator with Adaptive Kalman Filter</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170970.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-10-4|PAPER Tue-O-4-10-4 — Sequence-to-Sequence Voice Conversion with Similarity Metric Learned Using Generative Adversarial Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sequence-to-Sequence Voice Conversion with Similarity Metric Learned Using Generative Adversarial Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171442.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-1-6|PAPER Mon-O-2-1-6 — Residual Memory Networks in Language Modeling: Improving the Reputation of Feed-Forward Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Residual Memory Networks in Language Modeling: Improving the Reputation of Feed-Forward Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171599.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-11|PAPER Wed-P-7-4-11 — Exploiting Intra-Annotator Rating Consistency Through Copeland’s Method for Estimation of Ground Truth Labels in Couples’ Therapy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploiting Intra-Annotator Rating Consistency Through Copeland’s Method for Estimation of Ground Truth Labels in Couples’ Therapy</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-4|PAPER Mon-P-2-3-4 — 2016 BUT Babel System: Multilingual BLSTM Acoustic Model with i-Vector Based Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">2016 BUT Babel System: Multilingual BLSTM Acoustic Model with i-Vector Based Adaptation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170582.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-5|PAPER Wed-SS-6-2-5 — Deep Auto-Encoder Based Multi-Task Learning Using Probabilistic Transcriptions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Auto-Encoder Based Multi-Task Learning Using Probabilistic Transcriptions</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171385.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-2|PAPER Thu-O-10-1-2 — Semi-Supervised DNN Training with Word Selection for ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised DNN Training with Word Selection for ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171521.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-6|PAPER Wed-O-8-1-6 — Call My Net Corpus: A Multilingual Corpus for Evaluation of Speaker Recognition Technology]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Call My Net Corpus: A Multilingual Corpus for Evaluation of Speaker Recognition Technology</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-6|PAPER Tue-P-4-2-6 — Acoustic Feature Learning via Deep Variational Canonical Correlation Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Feature Learning via Deep Variational Canonical Correlation Analysis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171592.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-1|PAPER Wed-P-6-3-1 — Query-by-Example Search with Discriminative Neural Acoustic Word Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Query-by-Example Search with Discriminative Neural Acoustic Word Embeddings</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171118.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-1-1|PAPER Thu-O-9-1-1 — Multitask Learning with Low-Level Auxiliary Tasks for Encoder-Decoder Based Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multitask Learning with Low-Level Auxiliary Tasks for Encoder-Decoder Based Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170502.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-8-6|PAPER Thu-O-9-8-6 — Visually Grounded Learning of Keyword Prediction from Untranscribed Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visually Grounded Learning of Keyword Prediction from Untranscribed Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171553.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-6-4|PAPER Mon-O-1-6-4 — How are Four-Level Length Distinctions Produced? Evidence from Moroccan Arabic]]</div>|^<div class="cpauthorindexpersoncardpapertitle">How are Four-Level Length Distinctions Produced? Evidence from Moroccan Arabic</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171279.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-14|PAPER Wed-P-8-1-14 — The Perception of English Intonation Patterns by German L2 Speakers of English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Perception of English Intonation Patterns by German L2 Speakers of English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170842.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-3|PAPER Wed-O-7-1-3 — Discovering Language in Marmoset Vocalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discovering Language in Marmoset Vocalization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170726.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-4-3|PAPER Mon-O-2-4-3 — Time-Frequency Coherence for Periodic-Aperiodic Decomposition of Speech Signals]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time-Frequency Coherence for Periodic-Aperiodic Decomposition of Speech Signals</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-5|PAPER Mon-O-1-1-5 — English Conversational Telephone Speech Recognition by Humans and Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">English Conversational Telephone Speech Recognition by Humans and Machines</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170546.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-5|PAPER Tue-O-3-1-5 — Direct Acoustics-to-Word Models for English Conversational Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Direct Acoustics-to-Word Models for English Conversational Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170721.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-4-2|PAPER Tue-O-4-4-2 — Deep Clustering-Based Beamforming for Separation with Unknown Number of Sources]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Clustering-Based Beamforming for Separation with Unknown Number of Sources</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170667.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-6-2|PAPER Wed-O-8-6-2 — Speaker-Aware Neural Network Based Beamformer for Speaker Extraction in Speech Mixtures]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker-Aware Neural Network Based Beamformer for Speaker Extraction in Speech Mixtures</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170839.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-6-4|PAPER Tue-O-4-6-4 — Mind the Peak: When Museum is Temporarily Understood as Musical in Australian English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mind the Peak: When Museum is Temporarily Understood as Musical in Australian English</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170544.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-6-1|PAPER Tue-O-5-6-1 — Similar Prosodic Structure Perceived Differently in German and English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Similar Prosodic Structure Perceived Differently in German and English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171552.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-6-5|PAPER Mon-O-1-6-5 — Vowels in the Barunga Variety of North Australian Kriol]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Vowels in the Barunga Variety of North Australian Kriol</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170636.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-1|PAPER Wed-SS-6-11-1 — SLPAnnotator: Tools for Implementing Sign Language Phonetic Annotation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SLPAnnotator: Tools for Implementing Sign Language Phonetic Annotation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172054.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-S&T-9-A-4|PAPER Thu-S&T-9-A-4 — The ModelTalker Project: A Web-Based Voice Banking Pipeline for ALS/MND Patients]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The ModelTalker Project: A Web-Based Voice Banking Pipeline for ALS/MND Patients</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171080.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-2|PAPER Thu-O-10-8-2 — Voice Disguise vs. Impersonation: Acoustic and Perceptual Measurements of Vocal Flexibility in Non Experts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Disguise vs. Impersonation: Acoustic and Perceptual Measurements of Vocal Flexibility in Non Experts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-1|PAPER Wed-O-8-8-1 — Acoustic Assessment of Disordered Voice with Continuous Speech Based on Utterance-Level ASR Posterior Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Assessment of Disordered Voice with Continuous Speech Based on Utterance-Level ASR Posterior Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171742.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-13|PAPER Mon-P-2-1-13 — Sociophonetic Realizations Guide Subsequent Lexical Access]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sociophonetic Realizations Guide Subsequent Lexical Access</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-6|PAPER Wed-S&T-6-A-6 — SIAK — A Game for Foreign Language Pronunciation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SIAK — A Game for Foreign Language Pronunciation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170325.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-1|PAPER Mon-O-1-10-1 — The Influence of Synthetic Voice on the Evaluation of a Virtual Character]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Influence of Synthetic Voice on the Evaluation of a Virtual Character</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171155.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-6-1|PAPER Tue-O-3-6-1 — Creak as a Feature of Lexical Stress in Estonian]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Creak as a Feature of Lexical Stress in Estonian</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171176.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-2|PAPER Tue-P-4-3-2 — Improving Prediction of Speech Activity Using Multi-Participant Respiratory State]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Prediction of Speech Activity Using Multi-Participant Respiratory State</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171044.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-6-2|PAPER Tue-O-4-6-2 — Comparing Languages Using Hierarchical Prosodic Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing Languages Using Hierarchical Prosodic Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171248.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-6|PAPER Tue-SS-5-11-6 — To See or not to See: Interlocutor Visibility and Likeability Influence Convergence in Intonation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">To See or not to See: Interlocutor Visibility and Likeability Influence Convergence in Intonation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170170.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-3|PAPER Wed-P-6-4-3 — Predicting Speech Intelligibility Using a Gammachirp Envelope Distortion Index Based on the Signal-to-Distortion Ratio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Speech Intelligibility Using a Gammachirp Envelope Distortion Index Based on the Signal-to-Distortion Ratio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-14|PAPER Tue-P-4-3-14 — Analysis of the Relationship Between Prosodic Features of Fillers and its Forms or Occurrence Positions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of the Relationship Between Prosodic Features of Fillers and its Forms or Occurrence Positions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170728.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-2|PAPER Tue-O-5-8-2 — Automatic Scoring of Shadowing Speech Based on DNN Posteriors and Their DTW]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Scoring of Shadowing Speech Based on DNN Posteriors and Their DTW</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170708.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-5|PAPER Mon-P-2-4-5 — Prosody Control of Utterance Sequence for Information Delivering]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosody Control of Utterance Sequence for Information Delivering</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170314.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-1-1|PAPER Tue-O-4-1-1 — Speaker-Dependent WaveNet Vocoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker-Dependent WaveNet Vocoder</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170986.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-1-5|PAPER Tue-O-4-1-5 — Statistical Voice Conversion with WaveNet-Based Waveform Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Statistical Voice Conversion with WaveNet-Based Waveform Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170779.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-2|PAPER Tue-P-4-2-2 — Node Pruning Based on Entropy of Weights and Node Activity for Small-Footprint Acoustic Model Based on Deep Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Node Pruning Based on Entropy of Weights and Node Activity for Small-Footprint Acoustic Model Based on Deep Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171608.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-3|PAPER Tue-P-3-1-3 — End-to-End Text-Independent Speaker Verification with Triplet Loss on Short Utterances]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Text-Independent Speaker Verification with Triplet Loss on Short Utterances</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170642.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-2|PAPER Wed-O-7-2-2 — Combined Multi-Channel NMF-Based Robust Beamforming for Noisy Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Combined Multi-Channel NMF-Based Robust Beamforming for Noisy Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170750.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-6|PAPER Wed-P-6-1-6 — Automatic Explanation Spot Estimation Method Targeted at Text and Figures in Lecture Slides]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Explanation Spot Estimation Method Targeted at Text and Figures in Lecture Slides</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170634.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-2|PAPER Wed-P-6-3-2 — Constructing Acoustic Distances Between Subwords and States Obtained from a Deep Neural Network for Spoken Term Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Constructing Acoustic Distances Between Subwords and States Obtained from a Deep Neural Network for Spoken Term Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170779.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-2|PAPER Tue-P-4-2-2 — Node Pruning Based on Entropy of Weights and Node Activity for Small-Footprint Acoustic Model Based on Deep Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Node Pruning Based on Entropy of Weights and Node Activity for Small-Footprint Acoustic Model Based on Deep Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170314.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-1-1|PAPER Tue-O-4-1-1 — Speaker-Dependent WaveNet Vocoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker-Dependent WaveNet Vocoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170634.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-2|PAPER Wed-P-6-3-2 — Constructing Acoustic Distances Between Subwords and States Obtained from a Deep Neural Network for Spoken Term Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Constructing Acoustic Distances Between Subwords and States Obtained from a Deep Neural Network for Spoken Term Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170642.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-2|PAPER Wed-O-7-2-2 — Combined Multi-Channel NMF-Based Robust Beamforming for Noisy Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Combined Multi-Channel NMF-Based Robust Beamforming for Noisy Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171510.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-1|PAPER Mon-O-2-10-1 — Generation of Large-Scale Simulated Utterances in Virtual Rooms to Train Deep-Neural Networks for Far-Field Speech Recognition in Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generation of Large-Scale Simulated Utterances in Virtual Rooms to Train Deep-Neural Networks for Far-Field Speech Recognition in Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-5|PAPER Mon-O-2-10-5 — Acoustic Modeling for Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Modeling for Google Home</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170250.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-1|PAPER Tue-O-5-8-1 — Bidirectional LSTM-RNN for Improving Automated Assessment of Non-Native Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bidirectional LSTM-RNN for Improving Automated Assessment of Non-Native Children’s Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171174.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-3|PAPER Tue-O-5-8-3 — Off-Topic Spoken Response Detection Using Siamese Convolutional Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Off-Topic Spoken Response Detection Using Siamese Convolutional Neural Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171213.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-11|PAPER Tue-P-4-3-11 — Human and Automated Scoring of Fluency, Pronunciation and Intonation During Human–Machine Spoken Dialog Interactions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human and Automated Scoring of Fluency, Pronunciation and Intonation During Human–Machine Spoken Dialog Interactions</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-5|PAPER Wed-O-7-10-5 — Improving Sub-Phone Modeling for Better Native Language Identification with Non-Native English Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Sub-Phone Modeling for Better Native Language Identification with Non-Native English Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170388.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-4|PAPER Wed-P-6-1-4 — Off-Topic Spoken Response Detection with Word Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Off-Topic Spoken Response Detection with Word Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170589.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-4|PAPER Tue-P-5-2-4 — Zero Frequency Filter Based Analysis of Voice Disorders]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero Frequency Filter Based Analysis of Voice Disorders</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170579.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-4|PAPER Thu-P-9-1-4 — Joint Training of Multi-Channel-Condition Dereverberation and Acoustic Modeling of Microphone Array Speech for Robust Distant Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Training of Multi-Channel-Condition Dereverberation and Acoustic Modeling of Microphone Array Speech for Robust Distant Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170900.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-2|PAPER Mon-O-1-10-2 — Articulatory Text-to-Speech Synthesis Using the Digital Waveguide Mesh Driven by a Deep Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulatory Text-to-Speech Synthesis Using the Digital Waveguide Mesh Driven by a Deep Neural Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170841.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-5|PAPER Wed-P-8-4-5 — Speaker Dependent Approach for Enhancing a Glossectomy Patient’s Speech via GMM-Based Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Dependent Approach for Enhancing a Glossectomy Patient’s Speech via GMM-Based Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170207.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-6-3|PAPER Mon-O-2-6-3 — Perception of Non-Contrastive Variations in American English by Japanese Learners: Flaps are Less Favored Than Stops]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perception of Non-Contrastive Variations in American English by Japanese Learners: Flaps are Less Favored Than Stops</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170900.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-2|PAPER Mon-O-1-10-2 — Articulatory Text-to-Speech Synthesis Using the Digital Waveguide Mesh Driven by a Deep Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulatory Text-to-Speech Synthesis Using the Digital Waveguide Mesh Driven by a Deep Neural Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170733.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-2|PAPER Mon-O-2-10-2 — Neural Network-Based Spectrum Estimation for Online WPE Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Network-Based Spectrum Estimation for Online WPE Dereverberation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170721.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-4-2|PAPER Tue-O-4-4-2 — Deep Clustering-Based Beamforming for Separation with Unknown Number of Sources]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Clustering-Based Beamforming for Separation with Unknown Number of Sources</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170543.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-3|PAPER Tue-P-5-4-3 — Improved Example-Based Speech Enhancement by Using Deep Neural Network Acoustic Model for Noise Robust Example Search]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Example-Based Speech Enhancement by Using Deep Neural Network Acoustic Model for Noise Robust Example Search</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170667.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-6-2|PAPER Wed-O-8-6-2 — Speaker-Aware Neural Network Based Beamformer for Speaker Extraction in Speech Mixtures]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker-Aware Neural Network Based Beamformer for Speaker Extraction in Speech Mixtures</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170170.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-3|PAPER Wed-P-6-4-3 — Predicting Speech Intelligibility Using a Gammachirp Envelope Distortion Index Based on the Signal-to-Distortion Ratio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Speech Intelligibility Using a Gammachirp Envelope Distortion Index Based on the Signal-to-Distortion Ratio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171592.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-1|PAPER Wed-P-6-3-1 — Query-by-Example Search with Discriminative Neural Acoustic Word Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Query-by-Example Search with Discriminative Neural Acoustic Word Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170626.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-2|PAPER Mon-O-2-2-2 — Automatic Paraphasia Detection from Aphasic Speech: A Preliminary Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Paraphasia Detection from Aphasic Speech: A Preliminary Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170426.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-10-5|PAPER Wed-O-8-10-5 — Multi-Scale Context Adaptation for Improving Child Automatic Speech Recognition in Child-Adult Spoken Interactions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Scale Context Adaptation for Improving Child Automatic Speech Recognition in Child-Adult Spoken Interactions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170436.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-4|PAPER Mon-P-1-1-4 — A Modulation Property of Time-Frequency Derivatives of Filtered Phase and its Application to Aperiodicity and f,,o,, Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Modulation Property of Time-Frequency Derivatives of Filtered Phase and its Application to Aperiodicity and f,,o,, Estimation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170015.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-1|PAPER Tue-O-5-4-1 — A New Cosine Series Antialiasing Function and its Application to Aliasing-Free Glottal Source Models for Speech and Singing Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A New Cosine Series Antialiasing Function and its Application to Aliasing-Free Glottal Source Models for Speech and Singing Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170061.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-4|PAPER Wed-O-7-2-4 — Coupled Initialization of Multi-Channel Non-Negative Matrix Factorization Based on Spatial and Spectral Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coupled Initialization of Multi-Channel Non-Negative Matrix Factorization Based on Spatial and Spectral Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171061.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-4-4|PAPER Wed-O-6-4-4 — Robust Method for Estimating F,,0,, of Complex Tone Based on Pitch Perception of Amplitude Modulated Signal]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Method for Estimating F,,0,, of Complex Tone Based on Pitch Perception of Amplitude Modulated Signal</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-1|PAPER Mon-P-1-1-1 — Low-Dimensional Representation of Spectral Envelope Without Deterioration for Full-Band Speech Analysis/Synthesis System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low-Dimensional Representation of Spectral Envelope Without Deterioration for Full-Band Speech Analysis/Synthesis System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170904.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-5|PAPER Thu-O-9-4-5 — Symbol Sequence Search from Telephone Conversation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Symbol Sequence Search from Telephone Conversation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170797.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-3|PAPER Tue-O-5-2-3 — Nuance - Politecnico di Torino’s 2016 NIST Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nuance - Politecnico di Torino’s 2016 NIST Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-1|PAPER Wed-SS-6-2-1 — Team ELISA System for DARPA LORELEI Speech Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team ELISA System for DARPA LORELEI Speech Evaluation 2016</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-5|PAPER Mon-O-2-10-5 — Acoustic Modeling for Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Modeling for Google Home</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171521.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-6|PAPER Wed-O-8-1-6 — Call My Net Corpus: A Multilingual Corpus for Evaluation of Speaker Recognition Technology]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Call My Net Corpus: A Multilingual Corpus for Evaluation of Speaker Recognition Technology</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171576.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-4|PAPER Wed-O-6-1-4 — Speaker-Specific Biomechanical Model-Based Investigation of a Simple Speech Task Based on Tagged-MRI]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker-Specific Biomechanical Model-Based Investigation of a Simple Speech Task Based on Tagged-MRI</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-5|PAPER Mon-O-2-10-5 — Acoustic Modeling for Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Modeling for Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171136.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-9|PAPER Mon-P-2-3-9 — Learning Factorized Transforms for Unsupervised Adaptation of LSTM-RNN Acoustic Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Factorized Transforms for Unsupervised Adaptation of LSTM-RNN Acoustic Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171557.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-4|PAPER Tue-P-4-2-4 — An Efficient Phone N-Gram Forward-Backward Computation Using Dense Matrix Multiplication]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Efficient Phone N-Gram Forward-Backward Computation Using Dense Matrix Multiplication</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170736.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-10-6|PAPER Tue-O-3-10-6 — Towards Speech Emotion Recognition “in the Wild” Using Aggregated Corpora and Deep Multi-Task Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Speech Emotion Recognition “in the Wild” Using Aggregated Corpora and Deep Multi-Task Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-5|PAPER Mon-SS-1-11-5 — Speech Synthesis for Mixed-Language Navigation Instructions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Synthesis for Mixed-Language Navigation Instructions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171157.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-5|PAPER Tue-P-5-4-5 — Real-Time Modulation Enhancement of Temporal Envelopes for Increasing Speech Intelligibility]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Modulation Enhancement of Temporal Envelopes for Increasing Speech Intelligibility</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170175.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-6-6|PAPER Tue-O-4-6-6 — A New Model of Final Lowering in Spontaneous Monologue]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A New Model of Final Lowering in Spontaneous Monologue</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-1|PAPER Wed-P-7-3-1 — Sinusoidal Partials Tracking for Singing Analysis Using the Heuristic of the Minimal Frequency and Magnitude Difference]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sinusoidal Partials Tracking for Singing Analysis Using the Heuristic of the Minimal Frequency and Magnitude Difference</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171774.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-6-6|PAPER Tue-O-3-6-6 — Acoustic and Electroglottographic Study of Breathy and Modal Vowels as Produced by Heritage and Native Gujarati Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic and Electroglottographic Study of Breathy and Modal Vowels as Produced by Heritage and Native Gujarati Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-7|PAPER Wed-P-7-4-7 — An Avatar-Based System for Identifying Individuals Likely to Develop Dementia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Avatar-Based System for Identifying Individuals Likely to Develop Dementia</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-12|PAPER Thu-P-9-4-12 — Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170379.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-1|PAPER Tue-P-5-1-1 — An Automatically Aligned Corpus of Child-Directed Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Automatically Aligned Corpus of Child-Directed Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170207.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-6-3|PAPER Mon-O-2-6-3 — Perception of Non-Contrastive Variations in American English by Japanese Learners: Flaps are Less Favored Than Stops]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perception of Non-Contrastive Variations in American English by Japanese Learners: Flaps are Less Favored Than Stops</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-4-2|PAPER Mon-O-1-4-2 — Stepsize Control for Acoustic Feedback Cancellation Based on the Detection of Reverberant Signal Periods and the Estimated System Distance]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stepsize Control for Acoustic Feedback Cancellation Based on the Detection of Reverberant Signal Periods and the Estimated System Distance</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170282.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-9|PAPER Mon-P-2-1-9 — The Effect of Spectral Tilt on Size Discrimination of Voiced Speech Sounds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Effect of Spectral Tilt on Size Discrimination of Voiced Speech Sounds</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170457.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-7|PAPER Tue-P-4-3-7 — Social Signal Detection in Spontaneous Dialogue Using Bidirectional LSTM-CTC]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Social Signal Detection in Spontaneous Dialogue Using Bidirectional LSTM-CTC</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-1|PAPER Mon-SS-1-8-1 — The ASVspoof 2017 Challenge: Assessing the Limits of Replay Spoofing Attack Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The ASVspoof 2017 Challenge: Assessing the Limits of Replay Spoofing Attack Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170203.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-1|PAPER Tue-O-5-2-1 — The I4U Mega Fusion and Collaboration for NIST Speaker Recognition Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The I4U Mega Fusion and Collaboration for NIST Speaker Recognition Evaluation 2016</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170266.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-6|PAPER Tue-P-3-1-6 — Incorporating Local Acoustic Variability Information into Short Duration Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incorporating Local Acoustic Variability Information into Short Duration Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170108.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-11|PAPER Tue-P-3-1-11 — Gain Compensation for Fast i-Vector Extraction Over Short Duration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Gain Compensation for Fast i-Vector Extraction Over Short Duration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170776.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-6|PAPER Mon-SS-1-8-6 — Audio Replay Attack Detection Using High-Frequency Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Replay Attack Detection Using High-Frequency Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171176.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-2|PAPER Tue-P-4-3-2 — Improving Prediction of Speech Activity Using Multi-Participant Respiratory State]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Prediction of Speech Activity Using Multi-Participant Respiratory State</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170066.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-4-3|PAPER Tue-O-4-4-3 — Time-Frequency Masking for Blind Source Separation with Preserved Spatial Cues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time-Frequency Masking for Blind Source Separation with Preserved Spatial Cues</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171720.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-1|PAPER Wed-P-7-2-1 — Mental Representation of Japanese Mora; Focusing on its Intrinsic Duration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mental Representation of Japanese Mora; Focusing on its Intrinsic Duration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170107.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-1|PAPER Thu-SS-9-11-1 — Acoustic Analysis of Detailed Three-Dimensional Shape of the Human Nasal Cavity and Paranasal Sinuses]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Analysis of Detailed Three-Dimensional Shape of the Human Nasal Cavity and Paranasal Sinuses</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170688.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-3|PAPER Tue-O-3-8-3 — Physically Constrained Statistical F,,0,, Prediction for Electrolaryngeal Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Physically Constrained Statistical F,,0,, Prediction for Electrolaryngeal Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170567.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-2-1|PAPER Tue-O-4-2-1 — A Comparison of Sentence-Level Speech Intelligibility Metrics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Sentence-Level Speech Intelligibility Metrics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170608.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-5|PAPER Mon-P-2-2-5 — Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171580.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-2|PAPER Mon-P-2-2-2 — Semantic Edge Detection for Tracking Vocal Tract Air-Tissue Boundaries in Real-Time Magnetic Resonance Images]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Edge Detection for Tracking Vocal Tract Air-Tissue Boundaries in Real-Time Magnetic Resonance Images</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171143.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-11-3|PAPER Wed-SS-7-11-3 — Modelling the Informativeness of Non-Verbal Cues in Parent-Child Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modelling the Informativeness of Non-Verbal Cues in Parent-Child Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171634.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-11-5|PAPER Wed-SS-7-11-5 — Approximating Phonotactic Input in Children’s Linguistic Environments from Orthographic Transcripts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Approximating Phonotactic Input in Children’s Linguistic Environments from Orthographic Transcripts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171558.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-2|PAPER Wed-SS-6-2-2 — First Results in Developing a Medieval Latin Language Charter Dictation System for the East-Central Europe Region]]</div>|^<div class="cpauthorindexpersoncardpapertitle">First Results in Developing a Medieval Latin Language Charter Dictation System for the East-Central Europe Region</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-6|PAPER Wed-S&T-6-A-6 — SIAK — A Game for Foreign Language Pronunciation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SIAK — A Game for Foreign Language Pronunciation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170832.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-4-4|PAPER Tue-O-4-4-4 — Variational Recurrent Neural Networks for Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Recurrent Neural Networks for Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170612.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-4|PAPER Wed-P-6-3-4 — Exploring the Use of Significant Words Language Modeling for Spoken Document Retrieval]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring the Use of Significant Words Language Modeling for Spoken Document Retrieval</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170221.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-1-6|PAPER Thu-O-9-1-6 — Discriminative Autoencoders for Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discriminative Autoencoders for Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170638.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-1|PAPER Wed-P-8-3-1 — Zero-Shot Learning for Natural Language Understanding Using Domain-Independent Sequential Structure and Question Types]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Learning for Natural Language Understanding Using Domain-Independent Sequential Structure and Question Types</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170970.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-10-4|PAPER Tue-O-4-10-4 — Sequence-to-Sequence Voice Conversion with Similarity Metric Learned Using Generative Adversarial Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sequence-to-Sequence Voice Conversion with Similarity Metric Learned Using Generative Adversarial Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170106.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-8-2|PAPER Thu-O-9-8-2 — Improving Computer Lipreading via DNN Sequence Discriminative Training Techniques]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Computer Lipreading via DNN Sequence Discriminative Training Techniques</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172037.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-B-4|PAPER Tue-S&T-3-B-4 — Low-Frequency Ultrasonic Communication for Speech Broadcasting in Public Transportation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low-Frequency Ultrasonic Communication for Speech Broadcasting in Public Transportation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171673.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-15|PAPER Tue-P-5-3-15 — Domain-Specific Utterance End-Point Detection for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Specific Utterance End-Point Detection for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172040.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-B-6|PAPER Mon-S&T-2-B-6 — Visual Learning 2: Pronunciation App Using Ultrasound, Video, and MRI]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visual Learning 2: Pronunciation App Using Ultrasound, Video, and MRI</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171695.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-1-6|PAPER Tue-O-5-1-6 — Deep Learning-Based Telephony Speech Recognition in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Learning-Based Telephony Speech Recognition in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170618.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-4|PAPER Mon-P-2-1-4 — Lexically Guided Perceptual Learning in Mandarin Chinese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lexically Guided Perceptual Learning in Mandarin Chinese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171048.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-2-3|PAPER Tue-O-4-2-3 — The Recognition of Compounds: A Computational Account]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Recognition of Compounds: A Computational Account</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171048.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-2-3|PAPER Tue-O-4-2-3 — The Recognition of Compounds: A Computational Account]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Recognition of Compounds: A Computational Account</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170819.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-3|PAPER Mon-O-2-2-3 — Evaluation of the Neurological State of People with Parkinson’s Disease Using i-Vectors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluation of the Neurological State of People with Parkinson’s Disease Using i-Vectors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-12|PAPER Thu-P-9-4-12 — Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171136.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-9|PAPER Mon-P-2-3-9 — Learning Factorized Transforms for Unsupervised Adaptation of LSTM-RNN Acoustic Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Factorized Transforms for Unsupervised Adaptation of LSTM-RNN Acoustic Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171778.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-13|PAPER Wed-P-6-1-13 — Multi-Channel Apollo Mission Speech Transcripts Calibration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Channel Apollo Mission Speech Transcripts Calibration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170802.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-7|PAPER Thu-P-9-4-7 — Evaluation of a Silent Speech Interface Based on Magnetic Sensing and Deep Learning for a Phonetically Rich Vocabulary]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluation of a Silent Speech Interface Based on Magnetic Sensing and Deep Learning for a Phonetically Rich Vocabulary</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170368.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-3|PAPER Wed-O-6-10-3 — RNN-LDA Clustering for Feature Based DNN Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">RNN-LDA Clustering for Feature Based DNN Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170456.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-3|PAPER Mon-SS-2-8-3 — A Study on Replay Attack and Anti-Spoofing for Automatic Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Study on Replay Attack and Anti-Spoofing for Automatic Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-2|PAPER Tue-P-3-2-2 — Deep Speaker Feature Learning for Text-Independent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Speaker Feature Learning for Text-Independent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170518.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-1|PAPER Wed-O-7-4-1 — Towards Zero-Shot Frame Semantic Parsing for Domain Scaling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Zero-Shot Frame Semantic Parsing for Domain Scaling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171525.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-8|PAPER Wed-P-8-3-8 — To Plan or not to Plan? Discourse Planning in Slot-Value Informed Sequence to Sequence Models for Language Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">To Plan or not to Plan? Discourse Planning in Slot-Value Informed Sequence to Sequence Models for Language Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170338.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-8|PAPER Tue-P-4-1-8 — Training Context-Dependent DNN Acoustic Models Using Probabilistic Sampling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Training Context-Dependent DNN Acoustic Models Using Probabilistic Sampling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170899.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-9|PAPER Tue-P-4-1-9 — A Comparative Evaluation of GMM-Free State Tying Methods for ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparative Evaluation of GMM-Free State Tying Methods for ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170905.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-5|PAPER Thu-SS-10-10-5 — DNN-Based Feature Extraction and Classifier Combination for Child-Directed Speech, Cold and Snoring Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DNN-Based Feature Extraction and Classifier Combination for Child-Directed Speech, Cold and Snoring Identification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170939.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-8-5|PAPER Thu-O-9-8-5 — DNN-Based Ultrasound-to-Speech Conversion for a Silent Speech Interface]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DNN-Based Ultrasound-to-Speech Conversion for a Silent Speech Interface</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171183.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-8|PAPER Wed-P-6-3-8 — Compensating Gender Variability in Query-by-Example Search on Speech Using Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Compensating Gender Variability in Query-by-Example Search on Speech Using Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171201.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-9|PAPER Wed-P-7-4-9 — Depression Detection Using Automatic Transcriptions of De-Identified Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Depression Detection Using Automatic Transcriptions of De-Identified Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170328.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-3|PAPER Tue-SS-5-11-3 — Towards Speaker Characterization: Identifying and Predicting Dimensions of Person Attribution]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Speaker Characterization: Identifying and Predicting Dimensions of Person Attribution</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170326.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-8-11-3|PAPER Wed-SS-8-11-3 — Perceptual Ratings of Voice Likability Collected Through In-Lab Listening Tests vs. Mobile-Based Crowdsourcing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perceptual Ratings of Voice Likability Collected Through In-Lab Listening Tests vs. Mobile-Based Crowdsourcing</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170036.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-1|PAPER Wed-P-6-4-1 — Predicting Automatic Speech Recognition Performance Over Communication Channels from Instrumental Speech Quality and Intelligibility Scores]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Automatic Speech Recognition Performance Over Communication Channels from Instrumental Speech Quality and Intelligibility Scores</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170048.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-13|PAPER Mon-P-2-2-13 — Wireless Neck-Surface Accelerometer and Microphone on Flex Circuit with Application to Noise-Robust Monitoring of Lombard Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Wireless Neck-Surface Accelerometer and Microphone on Flex Circuit with Application to Noise-Robust Monitoring of Lombard Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170455.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-3|PAPER Wed-O-8-8-3 — Improving Child Speech Disorder Assessment by Incorporating Out-of-Domain Adult Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Child Speech Disorder Assessment by Incorporating Out-of-Domain Adult Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170500.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-5|PAPER Wed-P-6-4-5 — The Effect of Situation-Specific Non-Speech Acoustic Cues on the Intelligibility of Speech in Noise]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Effect of Situation-Specific Non-Speech Acoustic Cues on the Intelligibility of Speech in Noise</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170268.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-6|PAPER Wed-SS-7-1-6 — Machine Assisted Analysis of Vowel Length Contrasts in Wolof]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Machine Assisted Analysis of Vowel Length Contrasts in Wolof</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171126.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-2|PAPER Wed-O-6-1-2 — Inter-Speaker Variability: Speaker Normalisation and Quantitative Estimation of Articulatory Invariants in Speech Production for French]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Inter-Speaker Variability: Speaker Normalisation and Quantitative Estimation of Articulatory Invariants in Speech Production for French</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171431.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-6|PAPER Tue-SS-3-11-6 — Studying the Link Between Inter-Speaker Coordination and Speech Imitation Through Human-Machine Interactions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Studying the Link Between Inter-Speaker Coordination and Speech Imitation Through Human-Machine Interactions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170400.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-2|PAPER Tue-O-5-4-2 — Speaking Style Conversion from Normal to Lombard Speech Using a Glottal Vocoder and Bayesian GMMs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaking Style Conversion from Normal to Lombard Speech Using a Glottal Vocoder and Bayesian GMMs</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170848.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-3|PAPER Tue-O-5-4-3 — Reducing Mismatch in Training of DNN-Based Glottal Excitation Models in a Statistical Parametric Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reducing Mismatch in Training of DNN-Based Glottal Excitation Models in a Statistical Parametric Text-to-Speech System</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171288.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-7|PAPER Wed-P-8-4-7 — Generative Adversarial Network-Based Glottal Waveform Model for Statistical Parametric Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generative Adversarial Network-Based Glottal Waveform Model for Statistical Parametric Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-5|PAPER Thu-O-10-8-5 — How Long is Too Long? How Pause Features After Requests Affect the Perceived Willingness of Affirmative Answers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">How Long is Too Long? How Pause Features After Requests Affect the Perceived Willingness of Affirmative Answers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170987.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-6-4|PAPER Tue-O-5-6-4 — Phonological Complexity, Segment Rate and Speech Tempo Perception]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonological Complexity, Segment Rate and Speech Tempo Perception</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170853.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-4|PAPER Mon-O-2-10-4 — On Design of Robust Deep Models for CHiME-4 Multi-Channel Speech Recognition with Multiple Configurations of Array Microphones]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Design of Robust Deep Models for CHiME-4 Multi-Channel Speech Recognition with Multiple Configurations of Array Microphones</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170002.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-1|PAPER Mon-P-2-1-1 — Factors Affecting the Intelligibility of Low-Pass Filtered Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Factors Affecting the Intelligibility of Low-Pass Filtered Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170547.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-5|PAPER Mon-P-1-4-5 — Empirical Evaluation of Parallel Training Algorithms on Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Empirical Evaluation of Parallel Training Algorithms on Acoustic Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170694.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-4|PAPER Wed-P-8-4-4 — Denoising Recurrent Neural Network for Deep Bidirectional LSTM Based Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Denoising Recurrent Neural Network for Deep Bidirectional LSTM Based Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170233.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-1|PAPER Tue-O-3-1-1 — A Comparison of Sequence-to-Sequence Models for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Sequence-to-Sequence Models for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170047.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-5|PAPER Wed-O-7-8-5 — Pronunciation Learning with RNN-Transducers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Pronunciation Learning with RNN-Transducers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170353.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-11|PAPER Mon-P-2-2-11 — When a Dog is a Cat and How it Changes Your Pupil Size: Pupil Dilation in Response to Information Mismatch]]</div>|^<div class="cpauthorindexpersoncardpapertitle">When a Dog is a Cat and How it Changes Your Pupil Size: Pupil Dilation in Response to Information Mismatch</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172034.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-A-1|PAPER Mon-S&T-2-A-1 — Prosograph: A Tool for Prosody Visualisation of Large Speech Corpora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosograph: A Tool for Prosody Visualisation of Large Speech Corpora</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170710.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-6|PAPER Wed-P-8-1-6 — Using Prosody to Classify Discourse Relations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Prosody to Classify Discourse Relations</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172023.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-2|PAPER Wed-S&T-6-A-2 — A Thematicity-Based Prosody Enrichment Tool for CTS]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Thematicity-Based Prosody Enrichment Tool for CTS</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170804.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-2-4|PAPER Tue-O-3-2-4 — A Speaker Adaptive DNN Training Approach for Speaker-Independent Acoustic Inversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Speaker Adaptive DNN Training Approach for Speaker-Independent Acoustic Inversion</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170795.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-6-4|PAPER Wed-O-6-6-4 — The Relationship Between F0 Synchrony and Speech Convergence in Dyadic Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Relationship Between F0 Synchrony and Speech Convergence in Dyadic Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171431.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-6|PAPER Tue-SS-3-11-6 — Studying the Link Between Inter-Speaker Coordination and Speech Imitation Through Human-Machine Interactions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Studying the Link Between Inter-Speaker Coordination and Speech Imitation Through Human-Machine Interactions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171691.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-8-11-8|PAPER Wed-SS-8-11-8 — Does Posh English Sound Attractive?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Does Posh English Sound Attractive?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171492.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-10|PAPER Tue-P-5-4-10 — Speech Enhancement Using Non-Negative Spectrogram Models with Mel-Generalized Cepstral Regularization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Enhancement Using Non-Negative Spectrogram Models with Mel-Generalized Cepstral Regularization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170029.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-6-5|PAPER Tue-O-5-6-5 — On the Duration of Mandarin Tones]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Duration of Mandarin Tones</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170440.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-3|PAPER Mon-P-1-2-3 — Attention Based CLDNNs for Short-Duration Acoustic Scene Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention Based CLDNNs for Short-Duration Acoustic Scene Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170830.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-4-1|PAPER Tue-O-4-4-1 — A Maximum Likelihood Approach to Deep Neural Network Based Nonlinear Spectral Mapping for Single-Channel Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Maximum Likelihood Approach to Deep Neural Network Based Nonlinear Spectral Mapping for Single-Channel Speech Separation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170044.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-2|PAPER Wed-O-7-10-2 — End-to-End Language Identification Using High-Order Utterance Representation with Bilinear Pooling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Language Identification Using High-Order Utterance Representation with Bilinear Pooling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170751.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-3|PAPER Thu-O-10-1-3 — Gaussian Prediction Based Attention for Online End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Gaussian Prediction Based Attention for Online End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170562.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-4|PAPER Wed-P-8-2-4 — Modeling Perceivers Neural-Responses Using Lobe-Dependent Convolutional Neural Network to Improve Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Perceivers Neural-Responses Using Lobe-Dependent Convolutional Neural Network to Improve Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171321.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-7|PAPER Wed-P-8-3-7 — Jointly Trained Sequential Labeling and Classification by Sparse Attention Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Jointly Trained Sequential Labeling and Classification by Sparse Attention Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170071.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-4|PAPER Tue-O-3-1-4 — Multitask Learning with CTC and Segmental CRF for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multitask Learning with CTC and Segmental CRF for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171118.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-1-1|PAPER Thu-O-9-1-1 — Multitask Learning with Low-Level Auxiliary Tasks for Encoder-Decoder Based Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multitask Learning with Low-Level Auxiliary Tasks for Encoder-Decoder Based Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170949.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-6|PAPER Mon-P-2-4-6 — Multi-Task Learning for Prosodic Structure Generation Using BLSTM RNN with Structured Output Layer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Task Learning for Prosodic Structure Generation Using BLSTM RNN with Structured Output Layer</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170619.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-1|PAPER Tue-O-4-8-1 — Speech Emotion Recognition with Emotion-Pair Based Framework Considering Emotion Distribution Information in Dimensional Emotion Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Emotion Recognition with Emotion-Pair Based Framework Considering Emotion Distribution Information in Dimensional Emotion Space</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171122.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-10|PAPER Wed-P-8-4-10 — Spectro-Temporal Modelling with Time-Frequency LSTM and Structured Output Layer for Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spectro-Temporal Modelling with Time-Frequency LSTM and Structured Output Layer for Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171150.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-6-4|PAPER Mon-O-2-6-4 — L1 Perceptions of L2 Prosody: The Interplay Between Intonation, Rhythm, and Speech Rate and Their Contribution to Accentedness and Comprehensibility]]</div>|^<div class="cpauthorindexpersoncardpapertitle">L1 Perceptions of L2 Prosody: The Interplay Between Intonation, Rhythm, and Speech Rate and Their Contribution to Accentedness and Comprehensibility</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171122.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-10|PAPER Wed-P-8-4-10 — Spectro-Temporal Modelling with Time-Frequency LSTM and Structured Output Layer for Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spectro-Temporal Modelling with Time-Frequency LSTM and Structured Output Layer for Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171247.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-9|PAPER Mon-P-1-4-9 — Joint Learning of Correlated Sequence Labeling Tasks Using Bidirectional Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Learning of Correlated Sequence Labeling Tasks Using Bidirectional Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171558.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-2|PAPER Wed-SS-6-2-2 — First Results in Developing a Medieval Latin Language Charter Dictation System for the East-Central Europe Region]]</div>|^<div class="cpauthorindexpersoncardpapertitle">First Results in Developing a Medieval Latin Language Charter Dictation System for the East-Central Europe Region</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170862.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-6|PAPER Wed-P-6-3-6 — Order-Preserving Abstractive Summarization for Spoken Content Based on Connectionist Temporal Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Order-Preserving Abstractive Summarization for Spoken Content Based on Connectionist Temporal Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171032.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-13|PAPER Tue-P-4-3-13 — Domain-Independent User Satisfaction Reward Estimation for Dialogue Policy Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Independent User Satisfaction Reward Estimation for Dialogue Policy Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170071.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-4|PAPER Tue-O-3-1-4 — Multitask Learning with CTC and Segmental CRF for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multitask Learning with CTC and Segmental CRF for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171139.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-14|PAPER Wed-SS-7-1-14 — Rapid Development of TTS Corpora for Four South African Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rapid Development of TTS Corpora for Four South African Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171179.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-4-1|PAPER Mon-O-2-4-1 — Phone Classification Using a Non-Linear Manifold with Broad Phone Class Dependent DNNs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phone Classification Using a Non-Linear Manifold with Broad Phone Class Dependent DNNs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171289.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-11-4|PAPER Wed-SS-7-11-4 — Computational Simulations of Temporal Vocalization Behavior in Adult-Child Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Computational Simulations of Temporal Vocalization Behavior in Adult-Child Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170458.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-6|PAPER Tue-O-5-2-6 — The 2016 NIST Speaker Recognition Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The 2016 NIST Speaker Recognition Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171080.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-2|PAPER Thu-O-10-8-2 — Voice Disguise vs. Impersonation: Acoustic and Perceptual Measurements of Vocal Flexibility in Non Experts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Disguise vs. Impersonation: Acoustic and Perceptual Measurements of Vocal Flexibility in Non Experts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Aare, Kätlin|AUTHOR Kätlin Aare]]|
|[[Abad, Alberto|AUTHOR Alberto Abad]]|
|[[AbdAlmageed, Wael|AUTHOR Wael AbdAlmageed]]|
|[[Abdo, Omnia|AUTHOR Omnia Abdo]]|
|[[Abdou, Sherif|AUTHOR Sherif Abdou]]|
|[[Abe, Masanobu|AUTHOR Masanobu Abe]]|
|[[Abidi, K.|AUTHOR K. Abidi]]|
|[[Abraham, Basil|AUTHOR Basil Abraham]]|
|[[Achanta, Sivanand|AUTHOR Sivanand Achanta]]|
|[[Acheson, Daniel J.|AUTHOR Daniel J. Acheson]]|
|[[Adda, Gilles|AUTHOR Gilles Adda]]|
|[[Adda-Decker, Martine|AUTHOR Martine Adda-Decker]]|
|[[Adi, Yossi|AUTHOR Yossi Adi]]|
|[[Adiga, Nagaraj|AUTHOR Nagaraj Adiga]]|
|[[Agiomyrgiannakis, Yannis|AUTHOR Yannis Agiomyrgiannakis]]|
|[[Agrawal, Dharmesh M.|AUTHOR Dharmesh M. Agrawal]]|
|[[Agrawal, Purvi|AUTHOR Purvi Agrawal]]|
|[[Aguilar, Lourdes|AUTHOR Lourdes Aguilar]]|
|[[Agurto, Carla|AUTHOR Carla Agurto]]|
|[[Ahmad, W.|AUTHOR W. Ahmad]]|
|[[Ahmed, Farhia|AUTHOR Farhia Ahmed]]|
|[[Ahn, ChungHyun|AUTHOR ChungHyun Ahn]]|
|[[Aihara, Ryo|AUTHOR Ryo Aihara]]|
|[[Airaksinen, Manu|AUTHOR Manu Airaksinen]]|
|[[Ajili, Moez|AUTHOR Moez Ajili]]|
|[[Akhtiamov, Oleg|AUTHOR Oleg Akhtiamov]]|
|[[Akiba, Tomoyosi|AUTHOR Tomoyosi Akiba]]|
|[[Akira, Hayakawa|AUTHOR Hayakawa Akira]]|
|[[Alam, Hassan|AUTHOR Hassan Alam]]|
|[[Alam, Jahangir|AUTHOR Jahangir Alam]]|
|[[Alcorn, Alyssa M.|AUTHOR Alyssa M. Alcorn]]|
|[[Aldeneh, Zakaria|AUTHOR Zakaria Aldeneh]]|
|[[Aleksic, Petar|AUTHOR Petar Aleksic]]|
|[[Alexander, Rachel|AUTHOR Rachel Alexander]]|
|[[Algra, Jouke|AUTHOR Jouke Algra]]|
|[[Al Hanai, Tuka|AUTHOR Tuka Al Hanai]]|
|[[Ali, Ahmed|AUTHOR Ahmed Ali]]|
|[[Alku, Paavo|AUTHOR Paavo Alku]]|
|[[Allen, James|AUTHOR James Allen]]|
|[[Alluri, K.N.R.K. Raju|AUTHOR K.N.R.K. Raju Alluri]]|
|[[Almeida, Andre|AUTHOR Andre Almeida]]|
|[[Alonso, Agustin|AUTHOR Agustin Alonso]]|
|[[Al-Radhi, Mohammed Salah|AUTHOR Mohammed Salah Al-Radhi]]|
|[[Alumäe, Tanel|AUTHOR Tanel Alumäe]]|
|[[Alwan, Abeer|AUTHOR Abeer Alwan]]|
|[[Amatuni, Andrei|AUTHOR Andrei Amatuni]]|
|[[Amazouz, Djegdjiga|AUTHOR Djegdjiga Amazouz]]|
|[[Ambati, Bharat Ram|AUTHOR Bharat Ram Ambati]]|
|[[Ambikairajah, Eliathamby|AUTHOR Eliathamby Ambikairajah]]|
|[[Ambrazaitis, Gilbert|AUTHOR Gilbert Ambrazaitis]]|
|[[Amiriparian, Shahin|AUTHOR Shahin Amiriparian]]|
|[[Amman, Scott|AUTHOR Scott Amman]]|
|[[An, Maobo|AUTHOR Maobo An]]|
|[[Ananthapadmanabha, T.V.|AUTHOR T.V. Ananthapadmanabha]]|
|[[Andersen, Asger Heidemann|AUTHOR Asger Heidemann Andersen]]|
|[[Anderson, Hans|AUTHOR Hans Anderson]]|
|[[Anderson, Peter|AUTHOR Peter Anderson]]|
|[[Ando, Atsushi|AUTHOR Atsushi Ando]]|
|[[Ando, Hiroshi|AUTHOR Hiroshi Ando]]|
|[[André, Elisabeth|AUTHOR Elisabeth André]]|
|[[Andreeva, Bistra|AUTHOR Bistra Andreeva]]|
|[[Andrei, Valentin|AUTHOR Valentin Andrei]]|
|[[Anjos, André|AUTHOR André Anjos]]|
|[[Antoniou, Mark|AUTHOR Mark Antoniou]]|
|[[Aono, Yushi|AUTHOR Yushi Aono]]|
|[[Arai, Jun|AUTHOR Jun Arai]]|
|[[Arai, Takayuki|AUTHOR Takayuki Arai]]|
|[[Araki, Shoko|AUTHOR Shoko Araki]]|
|[[Arantes, Pablo|AUTHOR Pablo Arantes]]|
|[[Ardaillon, Luc|AUTHOR Luc Ardaillon]]|
|[[Arias-Vergara, Tomás|AUTHOR Tomás Arias-Vergara]]|
|[[Arık, Sercan Ö.|AUTHOR Sercan Ö. Arık]]|
|[[Ariki, Yasuo|AUTHOR Yasuo Ariki]]|
|[[Arimoto, Yoshiko|AUTHOR Yoshiko Arimoto]]|
|[[Arnela, Marc|AUTHOR Marc Arnela]]|
|[[Arora, Raman|AUTHOR Raman Arora]]|
|[[Arora, Vipul|AUTHOR Vipul Arora]]|
|[[Arsikere, Harish|AUTHOR Harish Arsikere]]|
|[[Asadiabadi, Sasan|AUTHOR Sasan Asadiabadi]]|
|[[Asaei, Afsaneh|AUTHOR Afsaneh Asaei]]|
|[[Asami, Taichi|AUTHOR Taichi Asami]]|
|[[Askjær-Jørgensen, Trine|AUTHOR Trine Askjær-Jørgensen]]|
|[[Astésano, Corine|AUTHOR Corine Astésano]]|
|[[Astolfi, Arianna|AUTHOR Arianna Astolfi]]|
|[[Astudillo, Ramon Fernandez|AUTHOR Ramon Fernandez Astudillo]]|
|[[Asu, Eva Liina|AUTHOR Eva Liina Asu]]|
|[[Athanasopoulou, Angeliki|AUTHOR Angeliki Athanasopoulou]]|
|[[Atkins, David C.|AUTHOR David C. Atkins]]|
|[[Atsushi, Ando|AUTHOR Ando Atsushi]]|
|[[Audhkhasi, Kartik|AUTHOR Kartik Audhkhasi]]|
|[[Audibert, Nicolas|AUTHOR Nicolas Audibert]]|
|[[Avanzi, Mathieu|AUTHOR Mathieu Avanzi]]|
|[[Aylett, Matthew P.|AUTHOR Matthew P. Aylett]]|
|[[Ayllón, David|AUTHOR David Ayllón]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Baby, Arun|AUTHOR Arun Baby]]|
|[[Bacchiani, Michiel|AUTHOR Michiel Bacchiani]]|
|[[Bach, Francis|AUTHOR Francis Bach]]|
|[[Bäckström, Tom|AUTHOR Tom Bäckström]]|
|[[Badin, Pierre|AUTHOR Pierre Badin]]|
|[[Badino, Leonardo|AUTHOR Leonardo Badino]]|
|[[Bagby, Tom|AUTHOR Tom Bagby]]|
|[[Baggott, Matthew J.|AUTHOR Matthew J. Baggott]]|
|[[Bahmaninezhad, Fahimeh|AUTHOR Fahimeh Bahmaninezhad]]|
|[[Bai, Linxue|AUTHOR Linxue Bai]]|
|[[Baird, Alice|AUTHOR Alice Baird]]|
|[[Baker, Justin T.|AUTHOR Justin T. Baker]]|
|[[Balog, András|AUTHOR András Balog]]|
|[[Baltrušaitis, Tadas|AUTHOR Tadas Baltrušaitis]]|
|[[Bandini, Andrea|AUTHOR Andrea Bandini]]|
|[[Bando, Yoshiaki|AUTHOR Yoshiaki Bando]]|
|[[Bang, Jeong-Uk|AUTHOR Jeong-Uk Bang]]|
|[[Banno, Hideki|AUTHOR Hideki Banno]]|
|[[Bao, Changchun|AUTHOR Changchun Bao]]|
|[[Bao, Feng|AUTHOR Feng Bao]]|
|[[Bapna, Ankur|AUTHOR Ankur Bapna]]|
|[[Barbosa, Plínio A.|AUTHOR Plínio A. Barbosa]]|
|[[Barker, Jon|AUTHOR Jon Barker]]|
|[[Barlaz, Marissa|AUTHOR Marissa Barlaz]]|
|[[Barra-Chicote, Roberto|AUTHOR Roberto Barra-Chicote]]|
|[[Barras, Claude|AUTHOR Claude Barras]]|
|[[Barriere, Valentin|AUTHOR Valentin Barriere]]|
|[[Baskar, Murali Karthick|AUTHOR Murali Karthick Baskar]]|
|[[Batista, Fernando|AUTHOR Fernando Batista]]|
|[[Batliner, Anton|AUTHOR Anton Batliner]]|
|[[Batzu, Pier|AUTHOR Pier Batzu]]|
|[[Baucom, Brian|AUTHOR Brian Baucom]]|
|[[Bauer, Josef|AUTHOR Josef Bauer]]|
|[[Baumann, Timo|AUTHOR Timo Baumann]]|
|[[Bayer, Ali Orkan|AUTHOR Ali Orkan Bayer]]|
|[[Beare, Richard|AUTHOR Richard Beare]]|
|[[Beaufays, Françoise|AUTHOR Françoise Beaufays]]|
|[[Beck, Eugen|AUTHOR Eugen Beck]]|
|[[Beckman, Mary E.|AUTHOR Mary E. Beckman]]|
|[[Bedi, Gillinder|AUTHOR Gillinder Bedi]]|
|[[Beerends, John|AUTHOR John Beerends]]|
|[[Beke, András|AUTHOR András Beke]]|
|[[Belinkov, Yonatan|AUTHOR Yonatan Belinkov]]|
|[[Bell, Peter|AUTHOR Peter Bell]]|
|[[Beneš, Karel|AUTHOR Karel Beneš]]|
|[[Bengio, Samy|AUTHOR Samy Bengio]]|
|[[Bengio, Yoshua|AUTHOR Yoshua Bengio]]|
|[[Ben Jannet, Mohamed Ameur|AUTHOR Mohamed Ameur Ben Jannet]]|
|[[Ben Kheder, Waad|AUTHOR Waad Ben Kheder]]|
|[[Beňuš, Štefan|AUTHOR Štefan Beňuš]]|
|[[Bergelson, Elika|AUTHOR Elika Bergelson]]|
|[[Bergmann, Christina|AUTHOR Christina Bergmann]]|
|[[Berisha, Visar|AUTHOR Visar Berisha]]|
|[[Berman, Alex|AUTHOR Alex Berman]]|
|[[Bertero, Dario|AUTHOR Dario Bertero]]|
|[[Berthelsen, Harald|AUTHOR Harald Berthelsen]]|
|[[Bertoldi, Nicola|AUTHOR Nicola Bertoldi]]|
|[[Besacier, Laurent|AUTHOR Laurent Besacier]]|
|[[Beskow, Jonas|AUTHOR Jonas Beskow]]|
|[[Best, Catherine T.|AUTHOR Catherine T. Best]]|
|[[Betz, Simon|AUTHOR Simon Betz]]|
|[[Bhat, Chitralekha|AUTHOR Chitralekha Bhat]]|
|[[Bhati, Saurabhchand|AUTHOR Saurabhchand Bhati]]|
|[[Bhattacharya, Gautam|AUTHOR Gautam Bhattacharya]]|
|[[Biadsy, Fadi|AUTHOR Fadi Biadsy]]|
|[[Bian, Tianling|AUTHOR Tianling Bian]]|
|[[Bin Siddique, Farhad|AUTHOR Farhad Bin Siddique]]|
|[[Birkholz, Peter|AUTHOR Peter Birkholz]]|
|[[Bishop, Judith|AUTHOR Judith Bishop]]|
|[[Bjerva, Johannes|AUTHOR Johannes Bjerva]]|
|[[Björkenstam, Kristina N.|AUTHOR Kristina N. Björkenstam]]|
|[[B.K., Dhanush|AUTHOR Dhanush B.K.]]|
|[[Blaauw, Merlijn|AUTHOR Merlijn Blaauw]]|
|[[Black, Alan W.|AUTHOR Alan W. Black]]|
|[[Blackburn, Daniel|AUTHOR Daniel Blackburn]]|
|[[Blaylock, Reed|AUTHOR Reed Blaylock]]|
|[[Bocklet, Tobias|AUTHOR Tobias Bocklet]]|
|[[Boë, Louis-Jean|AUTHOR Louis-Jean Boë]]|
|[[Boenninghoff, Benedikt|AUTHOR Benedikt Boenninghoff]]|
|[[Bohn, Ocke-Schwen|AUTHOR Ocke-Schwen Bohn]]|
|[[Bollepalli, Bajibabu|AUTHOR Bajibabu Bollepalli]]|
|[[Bölte, Sven|AUTHOR Sven Bölte]]|
|[[Bonada, Jordi|AUTHOR Jordi Bonada]]|
|[[Bonafonte, Antonio|AUTHOR Antonio Bonafonte]]|
|[[Bonastre, Jean-François|AUTHOR Jean-François Bonastre]]|
|[[Bone, Daniel|AUTHOR Daniel Bone]]|
|[[Borgström, Bengt J.|AUTHOR Bengt J. Borgström]]|
|[[Bořil, Tomáš|AUTHOR Tomáš Bořil]]|
|[[Borský, Michal|AUTHOR Michal Borský]]|
|[[Bosker, Hans Rutger|AUTHOR Hans Rutger Bosker]]|
|[[Botros, Noor|AUTHOR Noor Botros]]|
|[[Bouchekif, Abdessalam|AUTHOR Abdessalam Bouchekif]]|
|[[Boucher, Victor J.|AUTHOR Victor J. Boucher]]|
|[[Bouillon, Pierrette|AUTHOR Pierrette Bouillon]]|
|[[Boula de Mareüil, Philippe|AUTHOR Philippe Boula de Mareüil]]|
|[[Bourlard, Hervé|AUTHOR Hervé Bourlard]]|
|[[Bousquet, Pierre-Michel|AUTHOR Pierre-Michel Bousquet]]|
|[[Boves, L.|AUTHOR L. Boves]]|
|[[Braginsky, Mika|AUTHOR Mika Braginsky]]|
|[[Brakel, Philemon|AUTHOR Philemon Brakel]]|
|[[Brandt, Erika|AUTHOR Erika Brandt]]|
|[[Brattain, Laura J.|AUTHOR Laura J. Brattain]]|
|[[Braude, David A.|AUTHOR David A. Braude]]|
|[[Braun, Bettina|AUTHOR Bettina Braun]]|
|[[Braunschweiler, Norbert|AUTHOR Norbert Braunschweiler]]|
|[[Bredin, Hervé|AUTHOR Hervé Bredin]]|
|[[Brooks, Elizabeth|AUTHOR Elizabeth Brooks]]|
|[[Brueckner, Raymond|AUTHOR Raymond Brueckner]]|
|[[Bruguier, Antoine|AUTHOR Antoine Bruguier]]|
|[[Brümmer, Niko|AUTHOR Niko Brümmer]]|
|[[Bruni, Jagoda|AUTHOR Jagoda Bruni]]|
|[[Brusco, Pablo|AUTHOR Pablo Brusco]]|
|[[Brutti, Alessio|AUTHOR Alessio Brutti]]|
|[[Bryan, Craig J.|AUTHOR Craig J. Bryan]]|
|[[Bryhadyr, Nataliya|AUTHOR Nataliya Bryhadyr]]|
|[[Buçinca, Zana|AUTHOR Zana Buçinca]]|
|[[Buck, Markus|AUTHOR Markus Buck]]|
|[[Budzianowski, Paweł|AUTHOR Paweł Budzianowski]]|
|[[Buera, Luis|AUTHOR Luis Buera]]|
|[[Bulling, Philipp|AUTHOR Philipp Bulling]]|
|[[Bullock, Barbara E.|AUTHOR Barbara E. Bullock]]|
|[[Bunnell, H. Timothy|AUTHOR H. Timothy Bunnell]]|
|[[Bunt, Harry|AUTHOR Harry Bunt]]|
|[[Burchfield, L. Ann|AUTHOR L. Ann Burchfield]]|
|[[Burget, Lukáš|AUTHOR Lukáš Burget]]|
|[[Burileanu, Corneliu|AUTHOR Corneliu Burileanu]]|
|[[Burmania, Alec|AUTHOR Alec Burmania]]|
|[[Busa-Fekete, Róbert|AUTHOR Róbert Busa-Fekete]]|
|[[Busso, Carlos|AUTHOR Carlos Busso]]|
|[[Byrd, Dani|AUTHOR Dani Byrd]]|
|[[Byun, Jun|AUTHOR Jun Byun]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Cabarrão, Vera|AUTHOR Vera Cabarrão]]|
|[[Cabral, João Paulo|AUTHOR João Paulo Cabral]]|
|[[Cai, Danwei|AUTHOR Danwei Cai]]|
|[[Cai, Lianhong|AUTHOR Lianhong Cai]]|
|[[Cai, Weicheng|AUTHOR Weicheng Cai]]|
|[[Camelin, Nathalie|AUTHOR Nathalie Camelin]]|
|[[Cameron, Hugh|AUTHOR Hugh Cameron]]|
|[[Campbell, Nick|AUTHOR Nick Campbell]]|
|[[Campbell, William|AUTHOR William Campbell]]|
|[[Campos, Joana|AUTHOR Joana Campos]]|
|[[Can, Doğan|AUTHOR Doğan Can]]|
|[[Candeias, Sara|AUTHOR Sara Candeias]]|
|[[Cano, María José|AUTHOR María José Cano]]|
|[[Cao, Beiming|AUTHOR Beiming Cao]]|
|[[Cao, Chong|AUTHOR Chong Cao]]|
|[[Cao, Yuhang|AUTHOR Yuhang Cao]]|
|[[Cao, Zhanzhong|AUTHOR Zhanzhong Cao]]|
|[[Capes, Tim|AUTHOR Tim Capes]]|
|[[Carignan, Christopher|AUTHOR Christopher Carignan]]|
|[[Carlson, Jason C.|AUTHOR Jason C. Carlson]]|
|[[Caroselli, Joe|AUTHOR Joe Caroselli]]|
|[[Carullo, Alessio|AUTHOR Alessio Carullo]]|
|[[Casanueva, Iñigo|AUTHOR Iñigo Casanueva]]|
|[[Caseiro, Diamantino|AUTHOR Diamantino Caseiro]]|
|[[Casillas, Marisa|AUTHOR Marisa Casillas]]|
|[[Castan, Diego|AUTHOR Diego Castan]]|
|[[Castellana, Antonella|AUTHOR Antonella Castellana]]|
|[[Cau, Cecile|AUTHOR Cecile Cau]]|
|[[Caucheteux, Lise|AUTHOR Lise Caucheteux]]|
|[[Cecchi, Guillermo A.|AUTHOR Guillermo A. Cecchi]]|
|[[Cernak, Milos|AUTHOR Milos Cernak]]|
|[[Černocký, Jan|AUTHOR Jan Černocký]]|
|[[Cha, Jih-Ho|AUTHOR Jih-Ho Cha]]|
|[[Chaabouni, Rahma|AUTHOR Rahma Chaabouni]]|
|[[Chakrabarti, Indrajit|AUTHOR Indrajit Chakrabarti]]|
|[[Chambers, Craig G.|AUTHOR Craig G. Chambers]]|
|[[Chaminade, Thierry|AUTHOR Thierry Chaminade]]|
|[[Champagne-Lavau, Maud|AUTHOR Maud Champagne-Lavau]]|
|[[Chan, William|AUTHOR William Chan]]|
|[[Chandrashekaran, Akshay|AUTHOR Akshay Chandrashekaran]]|
|[[Chandu, Khyathi Raghavi|AUTHOR Khyathi Raghavi Chandu]]|
|[[Chang, Alison|AUTHOR Alison Chang]]|
|[[Chang, Shiyu|AUTHOR Shiyu Chang]]|
|[[Chang, Shuangyu|AUTHOR Shuangyu Chang]]|
|[[Chang, Shuo-Yiin|AUTHOR Shuo-Yiin Chang]]|
|[[Chang, Xuankai|AUTHOR Xuankai Chang]]|
|[[Charlet, Delphine|AUTHOR Delphine Charlet]]|
|[[Charonyktakis, Paulos|AUTHOR Paulos Charonyktakis]]|
|[[Chasaide, Ailbhe Ní|AUTHOR Ailbhe Ní Chasaide]]|
|[[Cheah, Lam A.|AUTHOR Lam A. Cheah]]|
|[[Chelba, Ciprian|AUTHOR Ciprian Chelba]]|
|[[Chen, Aoju|AUTHOR Aoju Chen]]|
|[[Chen, Berlin|AUTHOR Berlin Chen]]|
|[[Chen, Bo|AUTHOR Bo Chen]]|
|[[Chen, Bo-Rui|AUTHOR Bo-Rui Chen]]|
|[[Chen, Chen|AUTHOR Chen Chen]]|
|[[Chen, Chin-Po|AUTHOR Chin-Po Chen]]|
|[[Chen, Deming|AUTHOR Deming Chen]]|
|[[Chen, Fei|AUTHOR Fei Chen]]|
|[[Chen, Hsuan-Yu|AUTHOR Hsuan-Yu Chen]]|
|[[Chen, I-Fan|AUTHOR I-Fan Chen]]|
|[[Chen, Jinhui|AUTHOR Jinhui Chen]]|
|[[Chen, Kuan-Yu|AUTHOR Kuan-Yu Chen]]|
|[[Chen, Nancy F.|AUTHOR Nancy F. Chen]]|
|[[Chen, Si|AUTHOR Si Chen]]|
|[[Chen, Siyuan|AUTHOR Siyuan Chen]]|
|[[Chen, Wenda|AUTHOR Wenda Chen]]|
|[[Chen, X.|AUTHOR X. Chen]]|
|[[Chen, Yafan|AUTHOR Yafan Chen]]|
|[[Chen, Ying|AUTHOR Ying Chen]]|
|[[Chen, Ying-Wen|AUTHOR Ying-Wen Chen]]|
|[[Chen, Yixiang|AUTHOR Yixiang Chen]]|
|[[Chen, Yun-Nung|AUTHOR Yun-Nung Chen]]|
|[[Chen, Zhifeng|AUTHOR Zhifeng Chen]]|
|[[Chen, Zhipeng|AUTHOR Zhipeng Chen]]|
|[[Chen, Zhuo|AUTHOR Zhuo Chen]]|
|[[Chen, Zhuxin|AUTHOR Zhuxin Chen]]|
|[[Cheng, Gaofeng|AUTHOR Gaofeng Cheng]]|
|[[Cheng, Zuofu|AUTHOR Zuofu Cheng]]|
|[[Chennupati, Nivedita|AUTHOR Nivedita Chennupati]]|
|[[Chi, Tai-Shih|AUTHOR Tai-Shih Chi]]|
|[[Chien, Jen-Tzung|AUTHOR Jen-Tzung Chien]]|
|[[Chien, Yu-Ren|AUTHOR Yu-Ren Chien]]|
|[[Chikhi, Samy|AUTHOR Samy Chikhi]]|
|[[Child, Rewon|AUTHOR Rewon Child]]|
|[[Chin, Kean|AUTHOR Kean Chin]]|
|[[Ching, P.C.|AUTHOR P.C. Ching]]|
|[[Chng, Eng Siong|AUTHOR Eng Siong Chng]]|
|[[Cho, Eunah|AUTHOR Eunah Cho]]|
|[[Choi, Ikkyu|AUTHOR Ikkyu Choi]]|
|[[Choi, Inkyu|AUTHOR Inkyu Choi]]|
|[[Choi, Mu-Yeol|AUTHOR Mu-Yeol Choi]]|
|[[Chong, Chee Seng|AUTHOR Chee Seng Chong]]|
|[[Choo, Kihyun|AUTHOR Kihyun Choo]]|
|[[Chorowski, Jan|AUTHOR Jan Chorowski]]|
|[[Chowdhury, Shreyan|AUTHOR Shreyan Chowdhury]]|
|[[Christensen, Heidi|AUTHOR Heidi Christensen]]|
|[[Christensen, Mads Græsbøll|AUTHOR Mads Græsbøll Christensen]]|
|[[Christodoulides, George|AUTHOR George Christodoulides]]|
|[[Chung, Cheng-Tao|AUTHOR Cheng-Tao Chung]]|
|[[Chung, Joon Son|AUTHOR Joon Son Chung]]|
|[[Church, Kenneth W.|AUTHOR Kenneth W. Church]]|
|[[Chwalek, Patrick C.|AUTHOR Patrick C. Chwalek]]|
|[[Clark, Rob|AUTHOR Rob Clark]]|
|[[Clavel, Chloé|AUTHOR Chloé Clavel]]|
|[[C.M., Vikram|AUTHOR Vikram C.M.]]|
|[[Čmejla, Roman|AUTHOR Roman Čmejla]]|
|[[Coates, Adam|AUTHOR Adam Coates]]|
|[[Cohen, Yishai|AUTHOR Yishai Cohen]]|
|[[Colbath, Sean|AUTHOR Sean Colbath]]|
|[[Coles, Paul|AUTHOR Paul Coles]]|
|[[Colett, Hannah R.|AUTHOR Hannah R. Colett]]|
|[[Colibro, Daniele|AUTHOR Daniele Colibro]]|
|[[Collins, Zachary|AUTHOR Zachary Collins]]|
|[[Conkie, Alistair|AUTHOR Alistair Conkie]]|
|[[Conlan, Owen|AUTHOR Owen Conlan]]|
|[[Cooke, Martin|AUTHOR Martin Cooke]]|
|[[Cooper, Erica|AUTHOR Erica Cooper]]|
|[[Cooper-Leavitt, Jamison|AUTHOR Jamison Cooper-Leavitt]]|
|[[Corris, Miriam|AUTHOR Miriam Corris]]|
|[[Cortes, Elísabet Eir|AUTHOR Elísabet Eir Cortes]]|
|[[Cowan, Benjamin R.|AUTHOR Benjamin R. Cowan]]|
|[[Cristia, Alejandrina|AUTHOR Alejandrina Cristia]]|
|[[Crook, Paul|AUTHOR Paul Crook]]|
|[[Csapó, Tamás Gábor|AUTHOR Tamás Gábor Csapó]]|
|[[Cuayáhuitl, Heriberto|AUTHOR Heriberto Cuayáhuitl]]|
|[[Cucchiarini, Catia|AUTHOR Catia Cucchiarini]]|
|[[Cucu, Horia|AUTHOR Horia Cucu]]|
|[[Cui, Jia|AUTHOR Jia Cui]]|
|[[Cui, Xiaodong|AUTHOR Xiaodong Cui]]|
|[[Cumani, Sandro|AUTHOR Sandro Cumani]]|
|[[Cummins, Nicholas|AUTHOR Nicholas Cummins]]|
|[[Cutler, Anne|AUTHOR Anne Cutler]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Dabbaghchian, Saeed|AUTHOR Saeed Dabbaghchian]]|
|[[Dai, Jia|AUTHOR Jia Dai]]|
|[[Dai, Li-Rong|AUTHOR Li-Rong Dai]]|
|[[d’Alessandro, Christophe|AUTHOR Christophe d’Alessandro]]|
|[[Dalmasso, Emanuele|AUTHOR Emanuele Dalmasso]]|
|[[Damnati, Géraldine|AUTHOR Géraldine Damnati]]|
|[[Dandapat, Samarendra|AUTHOR Samarendra Dandapat]]|
|[[Dang, Jianwu|AUTHOR Jianwu Dang]]|
|[[Dang, Ting|AUTHOR Ting Dang]]|
|[[Daniel, Adrien|AUTHOR Adrien Daniel]]|
|[[Das, Amit|AUTHOR Amit Das]]|
|[[Das, Biswajit|AUTHOR Biswajit Das]]|
|[[Das, Rohan Kumar|AUTHOR Rohan Kumar Das]]|
|[[Dasgupta, Hirak|AUTHOR Hirak Dasgupta]]|
|[[D’Ausilio, Alessandro|AUTHOR Alessandro D’Ausilio]]|
|[[Davel, Marelie|AUTHOR Marelie Davel]]|
|[[Davies, William J.|AUTHOR William J. Davies]]|
|[[Davis, Chris|AUTHOR Chris Davis]]|
|[[Dawalatabad, Nauman|AUTHOR Nauman Dawalatabad]]|
|[[Dean, David|AUTHOR David Dean]]|
|[[Deena, Salil|AUTHOR Salil Deena]]|
|[[Deepak, K.T.|AUTHOR K.T. Deepak]]|
|[[Degirmenci, Niyazi Cem|AUTHOR Niyazi Cem Degirmenci]]|
|[[de Haan, Jan Mark|AUTHOR Jan Mark de Haan]]|
|[[Dehak, Najim|AUTHOR Najim Dehak]]|
|[[Dehak, Reda|AUTHOR Reda Dehak]]|
|[[Deisher, Michael|AUTHOR Michael Deisher]]|
|[[Delalez, Samuel|AUTHOR Samuel Delalez]]|
|[[Delcroix, Marc|AUTHOR Marc Delcroix]]|
|[[Delgado, Héctor|AUTHOR Héctor Delgado]]|
|[[Del Giudice, Max|AUTHOR Max Del Giudice]]|
|[[Delhay, Arnaud|AUTHOR Arnaud Delhay]]|
|[[Delvaux, Véronique|AUTHOR Véronique Delvaux]]|
|[[Demberg, Vera|AUTHOR Vera Demberg]]|
|[[Demolin, Didier|AUTHOR Didier Demolin]]|
|[[De Mori, Renato|AUTHOR Renato De Mori]]|
|[[Demuth, Katherine|AUTHOR Katherine Demuth]]|
|[[Derrick, Donald|AUTHOR Donald Derrick]]|
|[[Destefano, Chelle|AUTHOR Chelle Destefano]]|
|[[de Wit, Harriet|AUTHOR Harriet de Wit]]|
|[[Dey, Anik|AUTHOR Anik Dey]]|
|[[Dey, Subhadeep|AUTHOR Subhadeep Dey]]|
|[[D’Haro, L.F.|AUTHOR L.F. D’Haro]]|
|[[Dhiman, Jitendra Kumar|AUTHOR Jitendra Kumar Dhiman]]|
|[[Dhinakaran, Krupakar|AUTHOR Krupakar Dhinakaran]]|
|[[Diakoloukas, Vassilios|AUTHOR Vassilios Diakoloukas]]|
|[[Diez Sánchez, Mireia|AUTHOR Mireia Diez Sánchez]]|
|[[Digalakis, Vassilios|AUTHOR Vassilios Digalakis]]|
|[[Di Gangi, Mattia Antonino|AUTHOR Mattia Antonino Di Gangi]]|
|[[Dighe, Pranay|AUTHOR Pranay Dighe]]|
|[[Dijkstra, Jelske|AUTHOR Jelske Dijkstra]]|
|[[Dimitriadis, Dimitrios|AUTHOR Dimitrios Dimitriadis]]|
|[[Dinarelli, Marco|AUTHOR Marco Dinarelli]]|
|[[Ding, Hongwei|AUTHOR Hongwei Ding]]|
|[[Ding, Wan|AUTHOR Wan Ding]]|
|[[Do, Cong-Thanh|AUTHOR Cong-Thanh Do]]|
|[[Do, Quoc Truong|AUTHOR Quoc Truong Do]]|
|[[Do, Van Hai|AUTHOR Van Hai Do]]|
|[[Docio-Fernandez, Laura|AUTHOR Laura Docio-Fernandez]]|
|[[Doddipatla, Rama|AUTHOR Rama Doddipatla]]|
|[[Dogil, Grzegorz|AUTHOR Grzegorz Dogil]]|
|[[Dohen, Marion|AUTHOR Marion Dohen]]|
|[[Dolatian, Hossep|AUTHOR Hossep Dolatian]]|
|[[Domínguez, Mónica|AUTHOR Mónica Domínguez]]|
|[[Dong, Jing|AUTHOR Jing Dong]]|
|[[Dong, Minghui|AUTHOR Minghui Dong]]|
|[[Donini, Michele|AUTHOR Michele Donini]]|
|[[Downing, Sylvia J.|AUTHOR Sylvia J. Downing]]|
|[[Drager, Katie|AUTHOR Katie Drager]]|
|[[Drake, Mark|AUTHOR Mark Drake]]|
|[[Draxler, Christoph|AUTHOR Christoph Draxler]]|
|[[Dreyer, Markus|AUTHOR Markus Dreyer]]|
|[[Dreyfus, Gérard|AUTHOR Gérard Dreyfus]]|
|[[Droppo, Jasha|AUTHOR Jasha Droppo]]|
|[[Drude, Lukas|AUTHOR Lukas Drude]]|
|[[Drugman, Thomas|AUTHOR Thomas Drugman]]|
|[[Du, Jun|AUTHOR Jun Du]]|
|[[Duckhorn, Frank|AUTHOR Frank Duckhorn]]|
|[[Duenser, Andreas|AUTHOR Andreas Duenser]]|
|[[Duerichen, Robert|AUTHOR Robert Duerichen]]|
|[[Dufour, Richard|AUTHOR Richard Dufour]]|
|[[Dunbar, Ewan|AUTHOR Ewan Dunbar]]|
|[[Dupoux, Emmanuel|AUTHOR Emmanuel Dupoux]]|
|[[Duran, Daniel|AUTHOR Daniel Duran]]|
|[[Dutta, Indranil|AUTHOR Indranil Dutta]]|
|[[Dyer, Chris|AUTHOR Chris Dyer]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Ebbers, Janek|AUTHOR Janek Ebbers]]|
|[[Ebhotemhen, Eustace|AUTHOR Eustace Ebhotemhen]]|
|[[Edlund, Jens|AUTHOR Jens Edlund]]|
|[[Edwards, Jan|AUTHOR Jan Edwards]]|
|[[Egorow, Olga|AUTHOR Olga Egorow]]|
|[[Eig, Jonathan|AUTHOR Jonathan Eig]]|
|[[Einspieler, Christa|AUTHOR Christa Einspieler]]|
|[[Eisner, Frank|AUTHOR Frank Eisner]]|
|[[Ekström, Jenny|AUTHOR Jenny Ekström]]|
|[[El Fakhri, Georges|AUTHOR Georges El Fakhri]]|
|[[Elie, Benjamin|AUTHOR Benjamin Elie]]|
|[[Elizalde, Benjamin|AUTHOR Benjamin Elizalde]]|
|[[El-Khamy, Mostafa|AUTHOR Mostafa El-Khamy]]|
|[[Ell, Stephen R.|AUTHOR Stephen R. Ell]]|
|[[Elsner, Micha|AUTHOR Micha Elsner]]|
|[[El Yagoubi, Radouane|AUTHOR Radouane El Yagoubi]]|
|[[Enarvi, Seppo|AUTHOR Seppo Enarvi]]|
|[[Engelbart, Mathis|AUTHOR Mathis Engelbart]]|
|[[Englebienne, Gwenn|AUTHOR Gwenn Englebienne]]|
|[[Engwall, Olov|AUTHOR Olov Engwall]]|
|[[Enomoto, Mika|AUTHOR Mika Enomoto]]|
|[[Epps, Julien|AUTHOR Julien Epps]]|
|[[Eriksson, Anders|AUTHOR Anders Eriksson]]|
|[[Ernestus, M.|AUTHOR M. Ernestus]]|
|[[Erzin, Engin|AUTHOR Engin Erzin]]|
|[[Escudero, Juan Pablo|AUTHOR Juan Pablo Escudero]]|
|[[Escudero-Mancebo, David|AUTHOR David Escudero-Mancebo]]|
|[[Espic, Felipe|AUTHOR Felipe Espic]]|
|[[Espín, Juan M.|AUTHOR Juan M. Espín]]|
|[[Espy-Wilson, Carol|AUTHOR Carol Espy-Wilson]]|
|[[Essid, Slim|AUTHOR Slim Essid]]|
|[[Estebas-Vilaplana, Eva|AUTHOR Eva Estebas-Vilaplana]]|
|[[Estève, Yannick|AUTHOR Yannick Estève]]|
|[[Evanini, Keelan|AUTHOR Keelan Evanini]]|
|[[Evans, Nicholas|AUTHOR Nicholas Evans]]|
|[[Evers, Vanessa|AUTHOR Vanessa Evers]]|
|[[Evert, Stefan|AUTHOR Stefan Evert]]|
|[[Ewald, Otto|AUTHOR Otto Ewald]]|
|[[Eyben, Florian|AUTHOR Florian Eyben]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Fadiga, Luciano|AUTHOR Luciano Fadiga]]|
|[[Fainberg, Joachim|AUTHOR Joachim Fainberg]]|
|[[Falavigna, Daniele|AUTHOR Daniele Falavigna]]|
|[[Falck-Ytter, Terje|AUTHOR Terje Falck-Ytter]]|
|[[Falik, Ohad|AUTHOR Ohad Falik]]|
|[[Fan, Ping|AUTHOR Ping Fan]]|
|[[Farrell, Kevin|AUTHOR Kevin Farrell]]|
|[[Farrús, Mireia|AUTHOR Mireia Farrús]]|
|[[Fashal, Mervat|AUTHOR Mervat Fashal]]|
|[[Fatima, Syeda Narjis|AUTHOR Syeda Narjis Fatima]]|
|[[Fauth, Camille|AUTHOR Camille Fauth]]|
|[[Fayet, Cedric|AUTHOR Cedric Fayet]]|
|[[Federico, Marcello|AUTHOR Marcello Federico]]|
|[[Fels, Sidney|AUTHOR Sidney Fels]]|
|[[Feng, Pengming|AUTHOR Pengming Feng]]|
|[[Feng, Siyuan|AUTHOR Siyuan Feng]]|
|[[Feng, Xue|AUTHOR Xue Feng]]|
|[[Feng, Zhe|AUTHOR Zhe Feng]]|
|[[Fernandez, Raul|AUTHOR Raul Fernandez]]|
|[[Fernández Gallardo, Laura|AUTHOR Laura Fernández Gallardo]]|
|[[Fernando, Sarith|AUTHOR Sarith Fernando]]|
|[[Ferras, Marc|AUTHOR Marc Ferras]]|
|[[Ferreira Netto, Waldemar|AUTHOR Waldemar Ferreira Netto]]|
|[[Ferrer, Luciana|AUTHOR Luciana Ferrer]]|
|[[Fingscheidt, Tim|AUTHOR Tim Fingscheidt]]|
|[[Florêncio, Dinei|AUTHOR Dinei Florêncio]]|
|[[Fonollosa, José A.R.|AUTHOR José A.R. Fonollosa]]|
|[[Fonseca, Nuno|AUTHOR Nuno Fonseca]]|
|[[Font, Roberto|AUTHOR Roberto Font]]|
|[[Fotedar, Gaurav|AUTHOR Gaurav Fotedar]]|
|[[Fougeron, Cécile|AUTHOR Cécile Fougeron]]|
|[[Fougner, Chris|AUTHOR Chris Fougner]]|
|[[Foulkes, Paul|AUTHOR Paul Foulkes]]|
|[[Fousek, Petr|AUTHOR Petr Fousek]]|
|[[Fox, Robert A.|AUTHOR Robert A. Fox]]|
|[[Fraga-Silva, Thiago|AUTHOR Thiago Fraga-Silva]]|
|[[Franceschi, Luca|AUTHOR Luca Franceschi]]|
|[[Francois, Holly|AUTHOR Holly Francois]]|
|[[Frank, Michael C.|AUTHOR Michael C. Frank]]|
|[[Franken, Matthias K.|AUTHOR Matthias K. Franken]]|
|[[Franzen, Jan|AUTHOR Jan Franzen]]|
|[[Fredes, Josué|AUTHOR Josué Fredes]]|
|[[Fredouille, Corinne|AUTHOR Corinne Fredouille]]|
|[[Freitag, Michael|AUTHOR Michael Freitag]]|
|[[Frej, Mohamed Yassine|AUTHOR Mohamed Yassine Frej]]|
|[[French, Peter|AUTHOR Peter French]]|
|[[Freyne, Jill|AUTHOR Jill Freyne]]|
|[[Fridolin, Ivo|AUTHOR Ivo Fridolin]]|
|[[Fry, Michael|AUTHOR Michael Fry]]|
|[[Fuchs, Robert|AUTHOR Robert Fuchs]]|
|[[Fuentes, Olac|AUTHOR Olac Fuentes]]|
|[[Fujimoto, Masakiyo|AUTHOR Masakiyo Fujimoto]]|
|[[Fukuda, Takashi|AUTHOR Takashi Fukuda]]|
|[[Fukuoka, Ishin|AUTHOR Ishin Fukuoka]]|
|[[Fung, Pascale|AUTHOR Pascale Fung]]|
|[[Funk, Riccarda|AUTHOR Riccarda Funk]]|
|[[Furuya, Ken’ichi|AUTHOR Ken’ichi Furuya]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Gale, William|AUTHOR William Gale]]|
|[[Gales, Mark J.F.|AUTHOR Mark J.F. Gales]]|
|[[Galibert, Olivier|AUTHOR Olivier Galibert]]|
|[[Galindo, Luis Angel|AUTHOR Luis Angel Galindo]]|
|[[Gałka, Jakub|AUTHOR Jakub Gałka]]|
|[[Gálvez, Ramiro H.|AUTHOR Ramiro H. Gálvez]]|
|[[Ganapathy, Sriram|AUTHOR Sriram Ganapathy]]|
|[[Gangamohan, P.|AUTHOR P. Gangamohan]]|
|[[Gangashetty, Suryakanth V.|AUTHOR Suryakanth V. Gangashetty]]|
|[[Ganzeboom, Mario|AUTHOR Mario Ganzeboom]]|
|[[Gao, Guanglai|AUTHOR Guanglai Gao]]|
|[[Gao, Shengxiang|AUTHOR Shengxiang Gao]]|
|[[Gao, Wei|AUTHOR Wei Gao]]|
|[[Gao, Yixin|AUTHOR Yixin Gao]]|
|[[Garcia, N.|AUTHOR N. Garcia]]|
|[[García, Paola|AUTHOR Paola García]]|
|[[Garcia-Mateo, Carmen|AUTHOR Carmen Garcia-Mateo]]|
|[[Garcia-Romero, Daniel|AUTHOR Daniel Garcia-Romero]]|
|[[Garimella, Sri|AUTHOR Sri Garimella]]|
|[[Garland, Matt|AUTHOR Matt Garland]]|
|[[Garner, Philip N.|AUTHOR Philip N. Garner]]|
|[[Gašić, Milica|AUTHOR Milica Gašić]]|
|[[Gau, Susan Shur-Fen|AUTHOR Susan Shur-Fen Gau]]|
|[[Gauthier, Elodie|AUTHOR Elodie Gauthier]]|
|[[Gauvain, J.L.|AUTHOR J.L. Gauvain]]|
|[[Ge, Fengpei|AUTHOR Fengpei Ge]]|
|[[Gelderblom, Femke B.|AUTHOR Femke B. Gelderblom]]|
|[[Gelly, G.|AUTHOR G. Gelly]]|
|[[Gendrot, Cedric|AUTHOR Cedric Gendrot]]|
|[[Georges, Munir|AUTHOR Munir Georges]]|
|[[Georgiadou, Despoina|AUTHOR Despoina Georgiadou]]|
|[[Georgiou, Panayiotis|AUTHOR Panayiotis Georgiou]]|
|[[Gerczuk, Maurice|AUTHOR Maurice Gerczuk]]|
|[[Gerholm, Tove|AUTHOR Tove Gerholm]]|
|[[Gerkmann, Timo|AUTHOR Timo Gerkmann]]|
|[[Gerlach, Johanna|AUTHOR Johanna Gerlach]]|
|[[Gessinger, Iona|AUTHOR Iona Gessinger]]|
|[[Ghaffarzadegan, Shabnam|AUTHOR Shabnam Ghaffarzadegan]]|
|[[Ghahremani, Pegah|AUTHOR Pegah Ghahremani]]|
|[[Ghannay, Sahar|AUTHOR Sahar Ghannay]]|
|[[Ghio, Alain|AUTHOR Alain Ghio]]|
|[[Ghodsi, Mohammadreza|AUTHOR Mohammadreza Ghodsi]]|
|[[Ghone, Atish Shankar|AUTHOR Atish Shankar Ghone]]|
|[[Ghosh, Prasanta Kumar|AUTHOR Prasanta Kumar Ghosh]]|
|[[Ghosh, Soumya K.|AUTHOR Soumya K. Ghosh]]|
|[[Ghosh, Sucheta|AUTHOR Sucheta Ghosh]]|
|[[Ghoshal, Arnab|AUTHOR Arnab Ghoshal]]|
|[[Gibiansky, Andrew|AUTHOR Andrew Gibiansky]]|
|[[Gibson, James|AUTHOR James Gibson]]|
|[[Gideon, John|AUTHOR John Gideon]]|
|[[Gilbert, James M.|AUTHOR James M. Gilbert]]|
|[[Gillespie, Stephanie|AUTHOR Stephanie Gillespie]]|
|[[Gilmartin, Emer|AUTHOR Emer Gilmartin]]|
|[[Gil-Pita, Roberto|AUTHOR Roberto Gil-Pita]]|
|[[Glarner, Thomas|AUTHOR Thomas Glarner]]|
|[[Glass, James|AUTHOR James Glass]]|
|[[Glavitsch, Ulrike|AUTHOR Ulrike Glavitsch]]|
|[[Glembek, Ondřej|AUTHOR Ondřej Glembek]]|
|[[Gnanapragasam, Danushen|AUTHOR Danushen Gnanapragasam]]|
|[[Gobl, Christer|AUTHOR Christer Gobl]]|
|[[Godoy, Elizabeth|AUTHOR Elizabeth Godoy]]|
|[[Goecke, Roland|AUTHOR Roland Goecke]]|
|[[Goehner, Kyle|AUTHOR Kyle Goehner]]|
|[[Goel, Vaibhava|AUTHOR Vaibhava Goel]]|
|[[Gogoi, Pamir|AUTHOR Pamir Gogoi]]|
|[[Goldstein, Louis|AUTHOR Louis Goldstein]]|
|[[Golipour, Ladan|AUTHOR Ladan Golipour]]|
|[[Gong, Yifan|AUTHOR Yifan Gong]]|
|[[Gonzalez, Jose A.|AUTHOR Jose A. Gonzalez]]|
|[[González-Ferreras, César|AUTHOR César González-Ferreras]]|
|[[Goo, Jahyun|AUTHOR Jahyun Goo]]|
|[[Gosztolya, Gábor|AUTHOR Gábor Gosztolya]]|
|[[Götze, Jana|AUTHOR Jana Götze]]|
|[[Gowda, Dhananjaya|AUTHOR Dhananjaya Gowda]]|
|[[Gracco, Vincent L.|AUTHOR Vincent L. Gracco]]|
|[[Graf, Simon|AUTHOR Simon Graf]]|
|[[Graff, David|AUTHOR David Graff]]|
|[[Gravano, Agustín|AUTHOR Agustín Gravano]]|
|[[Green, Jordan R.|AUTHOR Jordan R. Green]]|
|[[Green, Phil D.|AUTHOR Phil D. Green]]|
|[[Greenberg, Clayton|AUTHOR Clayton Greenberg]]|
|[[Greenberg, Craig|AUTHOR Craig Greenberg]]|
|[[Greenwood, David|AUTHOR David Greenwood]]|
|[[Greer, Timothy|AUTHOR Timothy Greer]]|
|[[Gref, Michael|AUTHOR Michael Gref]]|
|[[Gresse, Adrien|AUTHOR Adrien Gresse]]|
|[[Grézl, František|AUTHOR František Grézl]]|
|[[Grigonytė, Gintarė|AUTHOR Gintarė Grigonytė]]|
|[[Grohe, Ann-Kathrin|AUTHOR Ann-Kathrin Grohe]]|
|[[Grossman, Ruth|AUTHOR Ruth Grossman]]|
|[[Grósz, Tamás|AUTHOR Tamás Grósz]]|
|[[Group, SRE’16 I4U|AUTHOR SRE’16 I4U Group]]|
|[[Grůber, Martin|AUTHOR Martin Grůber]]|
|[[Gu, Wentao|AUTHOR Wentao Gu]]|
|[[Gu, Yu|AUTHOR Yu Gu]]|
|[[Guan, Jian|AUTHOR Jian Guan]]|
|[[Guasch, Oriol|AUTHOR Oriol Guasch]]|
|[[Guðnason, Jón|AUTHOR Jón Guðnason]]|
|[[Guevara-Rukoz, Adriana|AUTHOR Adriana Guevara-Rukoz]]|
|[[Guha, Tanaya|AUTHOR Tanaya Guha]]|
|[[Gully, Amelia J.|AUTHOR Amelia J. Gully]]|
|[[Gundogdu, Batuhan|AUTHOR Batuhan Gundogdu]]|
|[[Guo, Feng|AUTHOR Feng Guo]]|
|[[Guo, Jinxi|AUTHOR Jinxi Guo]]|
|[[Guo, Jun|AUTHOR Jun Guo]]|
|[[Guo, Wu|AUTHOR Wu Guo]]|
|[[Gupta, Rahul|AUTHOR Rahul Gupta]]|
|[[Gustafson, Joakim|AUTHOR Joakim Gustafson]]|
|[[Gustavsson, Lisa|AUTHOR Lisa Gustavsson]]|
|[[Gutkin, Alexander|AUTHOR Alexander Gutkin]]|
|[[Gutzeit, Suska|AUTHOR Suska Gutzeit]]|
|[[Guzewich, Peter|AUTHOR Peter Guzewich]]|
|[[Guzmán, Gualberto|AUTHOR Gualberto Guzmán]]|
|[[Gwon, Youngjune|AUTHOR Youngjune Gwon]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Ha, Linne|AUTHOR Linne Ha]]|
|[[Hadian, Hossein|AUTHOR Hossein Hadian]]|
|[[Hadjitarkhani, Abie|AUTHOR Abie Hadjitarkhani]]|
|[[Haeb-Umbach, Reinhold|AUTHOR Reinhold Haeb-Umbach]]|
|[[Hagerer, Gerhard|AUTHOR Gerhard Hagerer]]|
|[[Hagita, Norihiro|AUTHOR Norihiro Hagita]]|
|[[Hagoort, Peter|AUTHOR Peter Hagoort]]|
|[[Hahm, Seongjun|AUTHOR Seongjun Hahm]]|
|[[Haider, Fasih|AUTHOR Fasih Haider]]|
|[[Hain, Thomas|AUTHOR Thomas Hain]]|
|[[Hakkani-Tür, Dilek|AUTHOR Dilek Hakkani-Tür]]|
|[[Halimi, Sonia|AUTHOR Sonia Halimi]]|
|[[Hall, Andreia|AUTHOR Andreia Hall]]|
|[[Hall, Kathleen Currie|AUTHOR Kathleen Currie Hall]]|
|[[Hall, Phil|AUTHOR Phil Hall]]|
|[[Hämäläinen, Perttu|AUTHOR Perttu Hämäläinen]]|
|[[Han, Jiqing|AUTHOR Jiqing Han]]|
|[[Han, Kyu J.|AUTHOR Kyu J. Han]]|
|[[Hansen, John H.L.|AUTHOR John H.L. Hansen]]|
|[[Hantke, Simone|AUTHOR Simone Hantke]]|
|[[Hanulíková, Adriana|AUTHOR Adriana Hanulíková]]|
|[[Hanzlíček, Zdeněk|AUTHOR Zdeněk Hanzlíček]]|
|[[Hao, Lixia|AUTHOR Lixia Hao]]|
|[[Hara, Sunao|AUTHOR Sunao Hara]]|
|[[Harandi, Negar M.|AUTHOR Negar M. Harandi]]|
|[[Harkness, Kirsty|AUTHOR Kirsty Harkness]]|
|[[Harlow, R.|AUTHOR R. Harlow]]|
|[[Harman, Craig|AUTHOR Craig Harman]]|
|[[Harmegnies, Bernard|AUTHOR Bernard Harmegnies]]|
|[[Harrison, Philip|AUTHOR Philip Harrison]]|
|[[Hartmann, William|AUTHOR William Hartmann]]|
|[[Hartono, Rachmat|AUTHOR Rachmat Hartono]]|
|[[Harvey, Richard|AUTHOR Richard Harvey]]|
|[[Hasan, Taufiq|AUTHOR Taufiq Hasan]]|
|[[Hasegawa-Johnson, Mark|AUTHOR Mark Hasegawa-Johnson]]|
|[[Hashimoto, Kei|AUTHOR Kei Hashimoto]]|
|[[Hashimoto, Tetsuya|AUTHOR Tetsuya Hashimoto]]|
|[[Hassid, Sergio|AUTHOR Sergio Hassid]]|
|[[Hayashi, Tomoki|AUTHOR Tomoki Hayashi]]|
|[[He, Di|AUTHOR Di He]]|
|[[He, Yunjuan|AUTHOR Yunjuan He]]|
|[[Heck, Larry|AUTHOR Larry Heck]]|
|[[Heck, Michael|AUTHOR Michael Heck]]|
|[[Heeman, Peter A.|AUTHOR Peter A. Heeman]]|
|[[Heeringa, Wilbert|AUTHOR Wilbert Heeringa]]|
|[[Hegde, Rajesh M.|AUTHOR Rajesh M. Hegde]]|
|[[Heiser, Clemens|AUTHOR Clemens Heiser]]|
|[[Hejná, Míša|AUTHOR Míša Hejná]]|
|[[Heldner, Mattias|AUTHOR Mattias Heldner]]|
|[[Helgadóttir, Inga Rún|AUTHOR Inga Rún Helgadóttir]]|
|[[Helmke, Hartmut|AUTHOR Hartmut Helmke]]|
|[[Henter, Gustav Eje|AUTHOR Gustav Eje Henter]]|
|[[Hentschel, Michael|AUTHOR Michael Hentschel]]|
|[[Heo, Hee-soo|AUTHOR Hee-soo Heo]]|
|[[Herbig, Tobias|AUTHOR Tobias Herbig]]|
|[[Hermes, Zainab|AUTHOR Zainab Hermes]]|
|[[Hermjakob, Ulf|AUTHOR Ulf Hermjakob]]|
|[[Hernáez, Inma|AUTHOR Inma Hernáez]]|
|[[Hernandez-Cordero, Jaime|AUTHOR Jaime Hernandez-Cordero]]|
|[[Hernando, Javier|AUTHOR Javier Hernando]]|
|[[Herzog, Michael|AUTHOR Michael Herzog]]|
|[[Hestness, Joel|AUTHOR Joel Hestness]]|
|[[Hewer, Alexander|AUTHOR Alexander Hewer]]|
|[[Heymann, Jahn|AUTHOR Jahn Heymann]]|
|[[Hidalgo, Guillermo|AUTHOR Guillermo Hidalgo]]|
|[[Higashinaka, Ryuichiro|AUTHOR Ryuichiro Higashinaka]]|
|[[Higuchi, Takuya|AUTHOR Takuya Higuchi]]|
|[[Himawan, Ivan|AUTHOR Ivan Himawan]]|
|[[Hiovain, Katri|AUTHOR Katri Hiovain]]|
|[[Hiramatsu, Kaoru|AUTHOR Kaoru Hiramatsu]]|
|[[Hirose, Yuki|AUTHOR Yuki Hirose]]|
|[[Hirsch, Hans-Günter|AUTHOR Hans-Günter Hirsch]]|
|[[Hirschberg, Julia|AUTHOR Julia Hirschberg]]|
|[[Hirschfeld, Diane|AUTHOR Diane Hirschfeld]]|
|[[Hlavnička, Jan|AUTHOR Jan Hlavnička]]|
|[[Hoetjes, Marieke|AUTHOR Marieke Hoetjes]]|
|[[Hofer, Joachim|AUTHOR Joachim Hofer]]|
|[[Hoffman, Johan|AUTHOR Johan Hoffman]]|
|[[Hoffmeister, Björn|AUTHOR Björn Hoffmeister]]|
|[[Hohenhorst, Winfried|AUTHOR Winfried Hohenhorst]]|
|[[Hojo, Nobukatsu|AUTHOR Nobukatsu Hojo]]|
|[[Holdsworth, Ed|AUTHOR Ed Holdsworth]]|
|[[Homayounpour, Mohammad Mehdi|AUTHOR Mohammad Mehdi Homayounpour]]|
|[[Homma, Yukinori|AUTHOR Yukinori Homma]]|
|[[Hooper, Angela|AUTHOR Angela Hooper]]|
|[[Hoory, Ron|AUTHOR Ron Hoory]]|
|[[Horáková, D.|AUTHOR D. Horáková]]|
|[[Hörberg, Thomas|AUTHOR Thomas Hörberg]]|
|[[Hori, Takaaki|AUTHOR Takaaki Hori]]|
|[[Horo, Luke|AUTHOR Luke Horo]]|
|[[Hou, Junfeng|AUTHOR Junfeng Hou]]|
|[[Hou, Luying|AUTHOR Luying Hou]]|
|[[Hough, Julian|AUTHOR Julian Hough]]|
|[[Houghton, Steve|AUTHOR Steve Houghton]]|
|[[Howcroft, David M.|AUTHOR David M. Howcroft]]|
|[[Hrúz, Marek|AUTHOR Marek Hrúz]]|
|[[Hsiao, Roger|AUTHOR Roger Hsiao]]|
|[[Hsu, Chin-Cheng|AUTHOR Chin-Cheng Hsu]]|
|[[Hsu, Cristiane|AUTHOR Cristiane Hsu]]|
|[[Hsu, Hsiang-Ping|AUTHOR Hsiang-Ping Hsu]]|
|[[Hsu, Wei-Ning|AUTHOR Wei-Ning Hsu]]|
|[[Hsu, Yu-Yin|AUTHOR Yu-Yin Hsu]]|
|[[Hu, Qiong|AUTHOR Qiong Hu]]|
|[[Hu, Wenping|AUTHOR Wenping Hu]]|
|[[Hua, Kanru|AUTHOR Kanru Hua]]|
|[[Huang, Chu-Ren|AUTHOR Chu-Ren Huang]]|
|[[Huang, David|AUTHOR David Huang]]|
|[[Huang, D.-Y.|AUTHOR D.-Y. Huang]]|
|[[Huang, Hengguan|AUTHOR Hengguan Huang]]|
|[[Huang, Liang|AUTHOR Liang Huang]]|
|[[Huang, Qiang|AUTHOR Qiang Huang]]|
|[[Huang, Qizheng|AUTHOR Qizheng Huang]]|
|[[Huang, Yan|AUTHOR Yan Huang]]|
|[[Huang, Yinghui|AUTHOR Yinghui Huang]]|
|[[Huang, Yuchen|AUTHOR Yuchen Huang]]|
|[[Huang, Yuyun|AUTHOR Yuyun Huang]]|
|[[Huang, Zhaocheng|AUTHOR Zhaocheng Huang]]|
|[[Huang, Zhaoqiong|AUTHOR Zhaoqiong Huang]]|
|[[Huber, Markus|AUTHOR Markus Huber]]|
|[[Huber, Rainer|AUTHOR Rainer Huber]]|
|[[Huckvale, Mark|AUTHOR Mark Huckvale]]|
|[[Huddleston, Nancy|AUTHOR Nancy Huddleston]]|
|[[Huet, Kathy|AUTHOR Kathy Huet]]|
|[[Huet, Stéphane|AUTHOR Stéphane Huet]]|
|[[Hughes, Thad|AUTHOR Thad Hughes]]|
|[[Hughes, Vincent|AUTHOR Vincent Hughes]]|
|[[Hung, Jeih-Weih|AUTHOR Jeih-Weih Hung]]|
|[[Hunt, Melvyn|AUTHOR Melvyn Hunt]]|
|[[Hussen Abdelaziz, Ahmed|AUTHOR Ahmed Hussen Abdelaziz]]|
|[[Huston, Timothy|AUTHOR Timothy Huston]]|
|[[Hwang, Hsin-Te|AUTHOR Hsin-Te Hwang]]|
|[[Hyder, Rakib|AUTHOR Rakib Hyder]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Ichikawa, Osamu|AUTHOR Osamu Ichikawa]]|
|[[Ijima, Yusuke|AUTHOR Yusuke Ijima]]|
|[[Ikauniece, Indra|AUTHOR Indra Ikauniece]]|
|[[Inaguma, Hirofumi|AUTHOR Hirofumi Inaguma]]|
|[[India, Miquel|AUTHOR Miquel India]]|
|[[Inoue, Koji|AUTHOR Koji Inoue]]|
|[[Ip, Martin Ho Kwan|AUTHOR Martin Ho Kwan Ip]]|
|[[Irhimeh, Sufian|AUTHOR Sufian Irhimeh]]|
|[[Irino, Toshio|AUTHOR Toshio Irino]]|
|[[Irtza, Saad|AUTHOR Saad Irtza]]|
|[[Ishi, Carlos|AUTHOR Carlos Ishi]]|
|[[Ishida, Mako|AUTHOR Mako Ishida]]|
|[[Ishiguro, Hiroshi|AUTHOR Hiroshi Ishiguro]]|
|[[Ishii, Ryo|AUTHOR Ryo Ishii]]|
|[[Ishimoto, Yuichi|AUTHOR Yuichi Ishimoto]]|
|[[Issa, Amel|AUTHOR Amel Issa]]|
|[[Ito, Kayoko|AUTHOR Kayoko Ito]]|
|[[Ito, Kiwako|AUTHOR Kiwako Ito]]|
|[[Ito, Takayuki|AUTHOR Takayuki Ito]]|
|[[Itoh, Yoshiaki|AUTHOR Yoshiaki Itoh]]|
|[[Iwata, Kazuhiko|AUTHOR Kazuhiko Iwata]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Jabaian, Bassam|AUTHOR Bassam Jabaian]]|
|[[Jacewicz, Ewa|AUTHOR Ewa Jacewicz]]|
|[[Jahromi, Mohsen Zareian|AUTHOR Mohsen Zareian Jahromi]]|
|[[Jaitly, Navdeep|AUTHOR Navdeep Jaitly]]|
|[[Jančovič, Peter|AUTHOR Peter Jančovič]]|
|[[Jang, Hye Jin|AUTHOR Hye Jin Jang]]|
|[[Jang, Inseon|AUTHOR Inseon Jang]]|
|[[Jang, Younseon|AUTHOR Younseon Jang]]|
|[[Janott, Christoph|AUTHOR Christoph Janott]]|
|[[Jansche, Martin|AUTHOR Martin Jansche]]|
|[[Jansson, Johan|AUTHOR Johan Jansson]]|
|[[Janu, Thomas|AUTHOR Thomas Janu]]|
|[[Jati, Arindam|AUTHOR Arindam Jati]]|
|[[Jaumard-Hakoun, Aurore|AUTHOR Aurore Jaumard-Hakoun]]|
|[[Jelil, Sarfaraz|AUTHOR Sarfaraz Jelil]]|
|[[Jemel, Boutheina|AUTHOR Boutheina Jemel]]|
|[[Jensen, Jesper|AUTHOR Jesper Jensen]]|
|[[Jensen, Jesper Rindom|AUTHOR Jesper Rindom Jensen]]|
|[[Jeon, Kwang Myung|AUTHOR Kwang Myung Jeon]]|
|[[Jessen, Michael|AUTHOR Michael Jessen]]|
|[[Jesus, Luis M.T.|AUTHOR Luis M.T. Jesus]]|
|[[Ji, Heng|AUTHOR Heng Ji]]|
|[[Ji, Youna|AUTHOR Youna Ji]]|
|[[Ji, Zhe|AUTHOR Zhe Ji]]|
|[[Jia, Jia|AUTHOR Jia Jia]]|
|[[Jiao, Li|AUTHOR Li Jiao]]|
|[[Jiao, Yishan|AUTHOR Yishan Jiao]]|
|[[Jin, Ma|AUTHOR Ma Jin]]|
|[[Jin, Rong|AUTHOR Rong Jin]]|
|[[Jochim, Markus|AUTHOR Markus Jochim]]|
|[[Johnson, Leif|AUTHOR Leif Johnson]]|
|[[Jokisch, Oliver|AUTHOR Oliver Jokisch]]|
|[[Jonell, Patrik|AUTHOR Patrik Jonell]]|
|[[Jones, Caroline|AUTHOR Caroline Jones]]|
|[[Jones, Karen|AUTHOR Karen Jones]]|
|[[Jorrín, Jesús|AUTHOR Jesús Jorrín]]|
|[[Jorrín-Prieto, Jesús|AUTHOR Jesús Jorrín-Prieto]]|
|[[Joseph, Shaun|AUTHOR Shaun Joseph]]|
|[[Josse, Yvan|AUTHOR Yvan Josse]]|
|[[Joy, Neethu Mariam|AUTHOR Neethu Mariam Joy]]|
|[[Juang, Biing-Hwang|AUTHOR Biing-Hwang Juang]]|
|[[Jung, Jee-weon|AUTHOR Jee-weon Jung]]|
|[[Junttila, Katja|AUTHOR Katja Junttila]]|
|[[Juvela, Lauri|AUTHOR Lauri Juvela]]|
|[[Jůzová, Markéta|AUTHOR Markéta Jůzová]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[K., Nikitha|AUTHOR Nikitha K.]]|
|[[Kaburagi, Tokihiko|AUTHOR Tokihiko Kaburagi]]|
|[[Kachkovskaia, Tatiana|AUTHOR Tatiana Kachkovskaia]]|
|[[Kacprzak, Stanisław|AUTHOR Stanisław Kacprzak]]|
|[[Kadiri, Sudarsana Reddy|AUTHOR Sudarsana Reddy Kadiri]]|
|[[Kager, René|AUTHOR René Kager]]|
|[[Kahn, Juliette|AUTHOR Juliette Kahn]]|
|[[Kain, Alexander|AUTHOR Alexander Kain]]|
|[[Kakouros, Sofoklis|AUTHOR Sofoklis Kakouros]]|
|[[Kalita, Sishir|AUTHOR Sishir Kalita]]|
|[[Kalkunte Suresh, Akshay|AUTHOR Akshay Kalkunte Suresh]]|
|[[Kallio, Heini|AUTHOR Heini Kallio]]|
|[[Kamble, Madhu R.|AUTHOR Madhu R. Kamble]]|
|[[Kameoka, Hirokazu|AUTHOR Hirokazu Kameoka]]|
|[[Kamiyama, Hosana|AUTHOR Hosana Kamiyama]]|
|[[Kamper, Herman|AUTHOR Herman Kamper]]|
|[[Kampman, Onno|AUTHOR Onno Kampman]]|
|[[Kampstra, Frederik|AUTHOR Frederik Kampstra]]|
|[[Kaneko, Daisuke|AUTHOR Daisuke Kaneko]]|
|[[Kaneko, Takuhiro|AUTHOR Takuhiro Kaneko]]|
|[[Kano, Takatomo|AUTHOR Takatomo Kano]]|
|[[Kant, Anjali|AUTHOR Anjali Kant]]|
|[[Karafiát, Martin|AUTHOR Martin Karafiát]]|
|[[Karhila, Reima|AUTHOR Reima Karhila]]|
|[[Karita, Shigeki|AUTHOR Shigeki Karita]]|
|[[Karpov, Alexey A.|AUTHOR Alexey A. Karpov]]|
|[[Karthik, Girija Ramesan|AUTHOR Girija Ramesan Karthik]]|
|[[Karvitsky, Gennady|AUTHOR Gennady Karvitsky]]|
|[[Kashino, Kunio|AUTHOR Kunio Kashino]]|
|[[Kashyap, H.|AUTHOR H. Kashyap]]|
|[[Kasten, Conner|AUTHOR Conner Kasten]]|
|[[Kathania, H.K.|AUTHOR H.K. Kathania]]|
|[[Katzberg, Fabrice|AUTHOR Fabrice Katzberg]]|
|[[Kaushik, Lakshmish|AUTHOR Lakshmish Kaushik]]|
|[[Kavanagh, Colleen|AUTHOR Colleen Kavanagh]]|
|[[Kawahara, Hideki|AUTHOR Hideki Kawahara]]|
|[[Kawahara, Tatsuya|AUTHOR Tatsuya Kawahara]]|
|[[Kawai, Hisashi|AUTHOR Hisashi Kawai]]|
|[[Kaya, Heysem|AUTHOR Heysem Kaya]]|
|[[Keating, Patricia A.|AUTHOR Patricia A. Keating]]|
|[[Keegan, P.J.|AUTHOR P.J. Keegan]]|
|[[Keidel Fernández, Alejandra|AUTHOR Alejandra Keidel Fernández]]|
|[[Keith, Francis|AUTHOR Francis Keith]]|
|[[Kember, Heather|AUTHOR Heather Kember]]|
|[[Kenny, Patrick|AUTHOR Patrick Kenny]]|
|[[Keshet, Joseph|AUTHOR Joseph Keshet]]|
|[[Kheyrkhah, Timothée|AUTHOR Timothée Kheyrkhah]]|
|[[Khokhlov, Yuri|AUTHOR Yuri Khokhlov]]|
|[[Khonglah, Banriskhem K.|AUTHOR Banriskhem K. Khonglah]]|
|[[Khorram, Soheil|AUTHOR Soheil Khorram]]|
|[[Khosravani, Abbas|AUTHOR Abbas Khosravani]]|
|[[Khoury, Elie|AUTHOR Elie Khoury]]|
|[[Khudanpur, Sanjeev|AUTHOR Sanjeev Khudanpur]]|
|[[Khurana, Sameer|AUTHOR Sameer Khurana]]|
|[[Kibira, William|AUTHOR William Kibira]]|
|[[Kim, Byung-Hak|AUTHOR Byung-Hak Kim]]|
|[[Kim, Chanwoo|AUTHOR Chanwoo Kim]]|
|[[Kim, Hoirin|AUTHOR Hoirin Kim]]|
|[[Kim, Hong Kook|AUTHOR Hong Kook Kim]]|
|[[Kim, Jaebok|AUTHOR Jaebok Kim]]|
|[[Kim, Jaeyoung|AUTHOR Jaeyoung Kim]]|
|[[Kim, Jangwon|AUTHOR Jangwon Kim]]|
|[[Kim, Jeesun|AUTHOR Jeesun Kim]]|
|[[Kim, Jonny|AUTHOR Jonny Kim]]|
|[[Kim, Jungsuk|AUTHOR Jungsuk Kim]]|
|[[Kim, Myungjong|AUTHOR Myungjong Kim]]|
|[[Kim, Nam Kyun|AUTHOR Nam Kyun Kim]]|
|[[Kim, Nam Soo|AUTHOR Nam Soo Kim]]|
|[[Kim, Sang-Hun|AUTHOR Sang-Hun Kim]]|
|[[Kim, Suyoun|AUTHOR Suyoun Kim]]|
|[[Kim, Taesu|AUTHOR Taesu Kim]]|
|[[Kim, Taesup|AUTHOR Taesup Kim]]|
|[[Kim, Wooil|AUTHOR Wooil Kim]]|
|[[Kim, Yoon-Chul|AUTHOR Yoon-Chul Kim]]|
|[[Kim, Younggwan|AUTHOR Younggwan Kim]]|
|[[Kimball, Owen|AUTHOR Owen Kimball]]|
|[[King, Brian|AUTHOR Brian King]]|
|[[King, J.|AUTHOR J. King]]|
|[[King, Simon|AUTHOR Simon King]]|
|[[Kinnunen, Tomi|AUTHOR Tomi Kinnunen]]|
|[[Kinoshita, Keisuke|AUTHOR Keisuke Kinoshita]]|
|[[Kirkpatrick, Matthew G.|AUTHOR Matthew G. Kirkpatrick]]|
|[[Kitahara, Mafuyu|AUTHOR Mafuyu Kitahara]]|
|[[Kitamura, Tatsuya|AUTHOR Tatsuya Kitamura]]|
|[[Kjaran, Róbert|AUTHOR Róbert Kjaran]]|
|[[Kjartansson, Oddur|AUTHOR Oddur Kjartansson]]|
|[[Klakow, Dietrich|AUTHOR Dietrich Klakow]]|
|[[Kleber, Felicitas|AUTHOR Felicitas Kleber]]|
|[[Kleinhans, Janine|AUTHOR Janine Kleinhans]]|
|[[Klempíř, Jiří|AUTHOR Jiří Klempíř]]|
|[[Kleynhans, Neil|AUTHOR Neil Kleynhans]]|
|[[Kliegl, Markus|AUTHOR Markus Kliegl]]|
|[[Klimkov, Viacheslav|AUTHOR Viacheslav Klimkov]]|
|[[Klingler, Nicola|AUTHOR Nicola Klingler]]|
|[[Klumpp, Philipp|AUTHOR Philipp Klumpp]]|
|[[Klüpfel, Simon|AUTHOR Simon Klüpfel]]|
|[[K.M., Srinivasa Raghavan|AUTHOR Srinivasa Raghavan K.M.]]|
|[[Knight, Kevin|AUTHOR Kevin Knight]]|
|[[Knill, K.M.|AUTHOR K.M. Knill]]|
|[[Ko, Hanseok|AUTHOR Hanseok Ko]]|
|[[Kobashikawa, Satoshi|AUTHOR Satoshi Kobashikawa]]|
|[[Kobayashi, Kazuhiro|AUTHOR Kazuhiro Kobayashi]]|
|[[Kobayashi, Tetsunori|AUTHOR Tetsunori Kobayashi]]|
|[[Koch, Philipp|AUTHOR Philipp Koch]]|
|[[Kocharov, Daniil|AUTHOR Daniil Kocharov]]|
|[[Kockmann, Marcel|AUTHOR Marcel Kockmann]]|
|[[Köhler, Joachim|AUTHOR Joachim Köhler]]|
|[[Kohtz, Lea S.|AUTHOR Lea S. Kohtz]]|
|[[Koishida, Kazuhito|AUTHOR Kazuhito Koishida]]|
|[[Kojima, Kazunori|AUTHOR Kazunori Kojima]]|
|[[Kokkinakis, Kostas|AUTHOR Kostas Kokkinakis]]|
|[[Komatani, Kazunori|AUTHOR Kazunori Komatani]]|
|[[Komaty, Alain|AUTHOR Alain Komaty]]|
|[[Kong, Lingpeng|AUTHOR Lingpeng Kong]]|
|[[Kong, Qiuqiang|AUTHOR Qiuqiang Kong]]|
|[[Konno, Ryota|AUTHOR Ryota Konno]]|
|[[Kons, Zvi|AUTHOR Zvi Kons]]|
|[[Kontogiorgos, Dimosthenis|AUTHOR Dimosthenis Kontogiorgos]]|
|[[Kopparapu, Sunil Kumar|AUTHOR Sunil Kumar Kopparapu]]|
|[[Korenevsky, Maxim|AUTHOR Maxim Korenevsky]]|
|[[Koriyama, Tomoki|AUTHOR Tomoki Koriyama]]|
|[[Korpusik, Mandy|AUTHOR Mandy Korpusik]]|
|[[Kösem, Anne|AUTHOR Anne Kösem]]|
|[[Koshinaka, Takafumi|AUTHOR Takafumi Koshinaka]]|
|[[Kothapally, Vinay|AUTHOR Vinay Kothapally]]|
|[[Kothinti, Sandeep Reddy|AUTHOR Sandeep Reddy Kothinti]]|
|[[Kotlerman, Lili|AUTHOR Lili Kotlerman]]|
|[[Kouklia, Charlotte|AUTHOR Charlotte Kouklia]]|
|[[Koutsogiannaki, Maria|AUTHOR Maria Koutsogiannaki]]|
|[[Kowalczyk, Konrad|AUTHOR Konrad Kowalczyk]]|
|[[Kozlov, Alexander|AUTHOR Alexander Kozlov]]|
|[[Krahmer, Emiel|AUTHOR Emiel Krahmer]]|
|[[Krajewski, Jarek|AUTHOR Jarek Krajewski]]|
|[[Kraljevski, Ivan|AUTHOR Ivan Kraljevski]]|
|[[Kreiman, Jody|AUTHOR Jody Kreiman]]|
|[[Krona, Andreas|AUTHOR Andreas Krona]]|
|[[Kronlid, Fredrik|AUTHOR Fredrik Kronlid]]|
|[[Kroos, Christian|AUTHOR Christian Kroos]]|
|[[Kuang, Jianjing|AUTHOR Jianjing Kuang]]|
|[[Kudashev, Oleg|AUTHOR Oleg Kudashev]]|
|[[Kumar, Aman|AUTHOR Aman Kumar]]|
|[[Kumar, Anish|AUTHOR Anish Kumar]]|
|[[Kumar, Anjishnu|AUTHOR Anjishnu Kumar]]|
|[[Kumar, Anurag|AUTHOR Anurag Kumar]]|
|[[Kumar, Avinash|AUTHOR Avinash Kumar]]|
|[[Kumar, Manoj|AUTHOR Manoj Kumar]]|
|[[Kumar, Nagendra|AUTHOR Nagendra Kumar]]|
|[[Kumar, Pranaw|AUTHOR Pranaw Kumar]]|
|[[Kumar, Shankar|AUTHOR Shankar Kumar]]|
|[[Kuo, Kuan-Ting|AUTHOR Kuan-Ting Kuo]]|
|[[Kuo, Li-Wei|AUTHOR Li-Wei Kuo]]|
|[[Kurata, Gakuto|AUTHOR Gakuto Kurata]]|
|[[Kurimo, Mikko|AUTHOR Mikko Kurimo]]|
|[[Kwak, Chan Woong|AUTHOR Chan Woong Kwak]]|
|[[Kwon, Haeyong|AUTHOR Haeyong Kwon]]|
|[[Kwon, Oh-Wook|AUTHOR Oh-Wook Kwon]]|
|[[Kyaw, Win Thuzar|AUTHOR Win Thuzar Kyaw]]|
|[[Kyriakopoulos, K.|AUTHOR K. Kyriakopoulos]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Laaridh, Imed|AUTHOR Imed Laaridh]]|
|[[Labatut, Vincent|AUTHOR Vincent Labatut]]|
|[[Laface, Pietro|AUTHOR Pietro Laface]]|
|[[Laha, Anirban|AUTHOR Anirban Laha]]|
|[[Lahiri, Aditi|AUTHOR Aditi Lahiri]]|
|[[Lai, Catherine|AUTHOR Catherine Lai]]|
|[[Lai, Jiahao|AUTHOR Jiahao Lai]]|
|[[Lai, Wei|AUTHOR Wei Lai]]|
|[[Lai, Ying-Hui|AUTHOR Ying-Hui Lai]]|
|[[Laine, Unto K.|AUTHOR Unto K. Laine]]|
|[[Laksana, Eugene|AUTHOR Eugene Laksana]]|
|[[Lalhminghlui, Wendy|AUTHOR Wendy Lalhminghlui]]|
|[[Lamalle, Laurent|AUTHOR Laurent Lamalle]]|
|[[Lamel, Lori|AUTHOR Lori Lamel]]|
|[[Lammert, Adam|AUTHOR Adam Lammert]]|
|[[Lancia, Leonardo|AUTHOR Leonardo Lancia]]|
|[[Landman, Rogier|AUTHOR Rogier Landman]]|
|[[Lane, Ian|AUTHOR Ian Lane]]|
|[[Lange, Patrick L.|AUTHOR Patrick L. Lange]]|
|[[Lapidot, Itshak|AUTHOR Itshak Lapidot]]|
|[[Laprie, Yves|AUTHOR Yves Laprie]]|
|[[Larcher, Anthony|AUTHOR Anthony Larcher]]|
|[[Larsen, Elin|AUTHOR Elin Larsen]]|
|[[Larsson, Staffan|AUTHOR Staffan Larsson]]|
|[[Laskowski, Kornel|AUTHOR Kornel Laskowski]]|
|[[Laures-Gore, Jacqueline|AUTHOR Jacqueline Laures-Gore]]|
|[[Lavrentyeva, Galina|AUTHOR Galina Lavrentyeva]]|
|[[Law, Thomas K.T.|AUTHOR Thomas K.T. Law]]|
|[[Lawson, Aaron|AUTHOR Aaron Lawson]]|
|[[Laycock, Stephen|AUTHOR Stephen Laycock]]|
|[[Le, Duc|AUTHOR Duc Le]]|
|[[Le, Phu Ngoc|AUTHOR Phu Ngoc Le]]|
|[[Le, Quoc|AUTHOR Quoc Le]]|
|[[Le Bruyn, Bert|AUTHOR Bert Le Bruyn]]|
|[[Lee, Chia-Fone|AUTHOR Chia-Fone Lee]]|
|[[Lee, Chi-Chun|AUTHOR Chi-Chun Lee]]|
|[[Lee, Chin-Hui|AUTHOR Chin-Hui Lee]]|
|[[Lee, Chong Min|AUTHOR Chong Min Lee]]|
|[[Lee, Huang-Yi|AUTHOR Huang-Yi Lee]]|
|[[Lee, Hung-Shin|AUTHOR Hung-Shin Lee]]|
|[[Lee, Hung-Yi|AUTHOR Hung-Yi Lee]]|
|[[Lee, Jin Won|AUTHOR Jin Won Lee]]|
|[[Lee, Jungwon|AUTHOR Jungwon Lee]]|
|[[Lee, Kai-Zhan|AUTHOR Kai-Zhan Lee]]|
|[[Lee, Kathy Y.S.|AUTHOR Kathy Y.S. Lee]]|
|[[Lee, Kong Aik|AUTHOR Kong Aik Lee]]|
|[[Lee, Lin-Shan|AUTHOR Lin-Shan Lee]]|
|[[Lee, Nayeon|AUTHOR Nayeon Lee]]|
|[[Lee, Shi-wook|AUTHOR Shi-wook Lee]]|
|[[Lee, Sungbok|AUTHOR Sungbok Lee]]|
|[[Lee, Tan|AUTHOR Tan Lee]]|
|[[Lefèvre, Fabrice|AUTHOR Fabrice Lefèvre]]|
|[[Lehtinen, Mona|AUTHOR Mona Lehtinen]]|
|[[Le Lan, Gaël|AUTHOR Gaël Le Lan]]|
|[[Le Maguer, Sébastien|AUTHOR Sébastien Le Maguer]]|
|[[Lenarczyk, Michał|AUTHOR Michał Lenarczyk]]|
|[[Leng, Yi Ren|AUTHOR Yi Ren Leng]]|
|[[Lennes, Mietta|AUTHOR Mietta Lennes]]|
|[[Leong, Chee Wee|AUTHOR Chee Wee Leong]]|
|[[Le Roux, Jonathan|AUTHOR Jonathan Le Roux]]|
|[[Lev, Guy|AUTHOR Guy Lev]]|
|[[Levin, Keith|AUTHOR Keith Levin]]|
|[[Levit, Michael|AUTHOR Michael Levit]]|
|[[Levitan, Rivka|AUTHOR Rivka Levitan]]|
|[[Levitan, Sarah Ita|AUTHOR Sarah Ita Levitan]]|
|[[Levitan, Yocheved|AUTHOR Yocheved Levitan]]|
|[[Levow, Gina-Anne|AUTHOR Gina-Anne Levow]]|
|[[Lewandowski, Natalie|AUTHOR Natalie Lewandowski]]|
|[[Lewis, Molly|AUTHOR Molly Lewis]]|
|[[Li, Aijun|AUTHOR Aijun Li]]|
|[[Li, Baoqing|AUTHOR Baoqing Li]]|
|[[Li, Bei|AUTHOR Bei Li]]|
|[[Li, Bo|AUTHOR Bo Li]]|
|[[Li, Gang|AUTHOR Gang Li]]|
|[[Li, Haizhou|AUTHOR Haizhou Li]]|
|[[Li, Hao|AUTHOR Hao Li]]|
|[[Li, Jiangchuan|AUTHOR Jiangchuan Li]]|
|[[Li, Jinyu|AUTHOR Jinyu Li]]|
|[[Li, Junfeng|AUTHOR Junfeng Li]]|
|[[Li, Kehuang|AUTHOR Kehuang Li]]|
|[[Li, Lantian|AUTHOR Lantian Li]]|
|[[Li, Li|AUTHOR Li Li]]|
|[[Li, Li-Jia|AUTHOR Li-Jia Li]]|
|[[Li, Ming|AUTHOR Ming Li]]|
|[[Li, Peng|AUTHOR Peng Li]]|
|[[Li, Ruizhi|AUTHOR Ruizhi Li]]|
|[[Li, Runnan|AUTHOR Runnan Li]]|
|[[Li, Sheng|AUTHOR Sheng Li]]|
|[[Li, Wei|AUTHOR Wei Li]]|
|[[Li, Weicong|AUTHOR Weicong Li]]|
|[[Li, Wenpeng|AUTHOR Wenpeng Li]]|
|[[Li, Xin|AUTHOR Xin Li]]|
|[[Li, Xu|AUTHOR Xu Li]]|
|[[Li, Ya|AUTHOR Ya Li]]|
|[[Li, Zhi-Yi|AUTHOR Zhi-Yi Li]]|
|[[Liang, Jiaen|AUTHOR Jiaen Liang]]|
|[[Liang, Zhi-Pei|AUTHOR Zhi-Pei Liang]]|
|[[Liao, Hank|AUTHOR Hank Liao]]|
|[[Liao, Yu-Hsien|AUTHOR Yu-Hsien Liao]]|
|[[Licata, Keli|AUTHOR Keli Licata]]|
|[[Liebson, Elizabeth S.|AUTHOR Elizabeth S. Liebson]]|
|[[Lilley, Jason|AUTHOR Jason Lilley]]|
|[[Lim, Boon Pang|AUTHOR Boon Pang Lim]]|
|[[Lim, Hyungjun|AUTHOR Hyungjun Lim]]|
|[[Lim, Lynn-Li|AUTHOR Lynn-Li Lim]]|
|[[Lin, Kin Wah Edward|AUTHOR Kin Wah Edward Lin]]|
|[[Lin, Ying|AUTHOR Ying Lin]]|
|[[Lin, Yun-Shao|AUTHOR Yun-Shao Lin]]|
|[[Linarès, Georges|AUTHOR Georges Linarès]]|
|[[Lindblom, Björn|AUTHOR Björn Lindblom]]|
|[[Ling, Zhen-Hua|AUTHOR Zhen-Hua Ling]]|
|[[Linhard, Klaus|AUTHOR Klaus Linhard]]|
|[[Lippus, Pärtel|AUTHOR Pärtel Lippus]]|
|[[Liss, Julie|AUTHOR Julie Liss]]|
|[[Litman, Diane|AUTHOR Diane Litman]]|
|[[Little, Max A.|AUTHOR Max A. Little]]|
|[[Liu, Bin|AUTHOR Bin Liu]]|
|[[Liu, Bing|AUTHOR Bing Liu]]|
|[[Liu, Chaoran|AUTHOR Chaoran Liu]]|
|[[Liu, Chunxi|AUTHOR Chunxi Liu]]|
|[[Liu, Daben|AUTHOR Daben Liu]]|
|[[Liu, Gang|AUTHOR Gang Liu]]|
|[[Liu, Hong|AUTHOR Hong Liu]]|
|[[Liu, Hongchao|AUTHOR Hongchao Liu]]|
|[[Liu, Shih-Hung|AUTHOR Shih-Hung Liu]]|
|[[Liu, Wenbo|AUTHOR Wenbo Liu]]|
|[[Liu, Wenju|AUTHOR Wenju Liu]]|
|[[Liu, X.|AUTHOR X. Liu]]|
|[[Liu, Xiaolin|AUTHOR Xiaolin Liu]]|
|[[Liu, Xunying|AUTHOR Xunying Liu]]|
|[[Liu, Yi-Wen|AUTHOR Yi-Wen Liu]]|
|[[Liu, Yuanyuan|AUTHOR Yuanyuan Liu]]|
|[[Liu, Yuzong|AUTHOR Yuzong Liu]]|
|[[Liu, Zheng|AUTHOR Zheng Liu]]|
|[[Livescu, Karen|AUTHOR Karen Livescu]]|
|[[Lleida, Eduardo|AUTHOR Eduardo Lleida]]|
|[[Llombart, Jorge|AUTHOR Jorge Llombart]]|
|[[Logan, Yash-Yee|AUTHOR Yash-Yee Logan]]|
|[[Lolive, Damien|AUTHOR Damien Lolive]]|
|[[Lopes, Carla|AUTHOR Carla Lopes]]|
|[[Lopez-Otero, Paula|AUTHOR Paula Lopez-Otero]]|
|[[Lord, Alekzandra|AUTHOR Alekzandra Lord]]|
|[[Lorenzo-Trueba, Jaime|AUTHOR Jaime Lorenzo-Trueba]]|
|[[Loweimi, Erfan|AUTHOR Erfan Loweimi]]|
|[[Lozano-Diez, Alicia|AUTHOR Alicia Lozano-Diez]]|
|[[Lu, Bo-Ru|AUTHOR Bo-Ru Lu]]|
|[[Lu, Di|AUTHOR Di Lu]]|
|[[Lu, Liang|AUTHOR Liang Lu]]|
|[[Lu, Xugang|AUTHOR Xugang Lu]]|
|[[Lu, Yu-Ding|AUTHOR Yu-Ding Lu]]|
|[[Lucero, Jorge C.|AUTHOR Jorge C. Lucero]]|
|[[Lui, Simon|AUTHOR Simon Lui]]|
|[[Luk, San-hei Kenny|AUTHOR San-hei Kenny Luk]]|
|[[Lunsford, Rebecca|AUTHOR Rebecca Lunsford]]|
|[[Luo, Dean|AUTHOR Dean Luo]]|
|[[Luo, Qinyi|AUTHOR Qinyi Luo]]|
|[[Luo, Ruxin|AUTHOR Ruxin Luo]]|
|[[Luo, Zhaojie|AUTHOR Zhaojie Luo]]|
|[[Luque, Jordi|AUTHOR Jordi Luque]]|
|[[Luz, Saturnino|AUTHOR Saturnino Luz]]|
|[[Lyon, Thomas D.|AUTHOR Thomas D. Lyon]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[M., Sasikumar|AUTHOR Sasikumar M.]]|
|[[Ma, Bin|AUTHOR Bin Ma]]|
|[[Ma, Feng|AUTHOR Feng Ma]]|
|[[Ma, Jeff|AUTHOR Jeff Ma]]|
|[[Ma, Jianbo|AUTHOR Jianbo Ma]]|
|[[Ma, Min|AUTHOR Min Ma]]|
|[[Ma, Mingbo|AUTHOR Mingbo Ma]]|
|[[Ma, Xi|AUTHOR Xi Ma]]|
|[[Ma, Zhanyu|AUTHOR Zhanyu Ma]]|
|[[Maas, Roland|AUTHOR Roland Maas]]|
|[[Maass, Marco|AUTHOR Marco Maass]]|
|[[Mackie, Scott|AUTHOR Scott Mackie]]|
|[[Maclagan, M.A.|AUTHOR M.A. Maclagan]]|
|[[Madhyastha, Pranava|AUTHOR Pranava Madhyastha]]|
|[[Madikeri, Srikanth|AUTHOR Srikanth Madikeri]]|
|[[Madureira, Sandra|AUTHOR Sandra Madureira]]|
|[[Maekawa, Kikuo|AUTHOR Kikuo Maekawa]]|
|[[Mahshie, James|AUTHOR James Mahshie]]|
|[[Mahto, Shivangi|AUTHOR Shivangi Mahto]]|
|[[Mahu, Rodrigo|AUTHOR Rodrigo Mahu]]|
|[[Maia, Ranniery|AUTHOR Ranniery Maia]]|
|[[Maier, Angelika|AUTHOR Angelika Maier]]|
|[[Maiti, Soumi|AUTHOR Soumi Maiti]]|
|[[Mak, Brian|AUTHOR Brian Mak]]|
|[[Mak, Man-Wai|AUTHOR Man-Wai Mak]]|
|[[Maki, Kotaro|AUTHOR Kotaro Maki]]|
|[[Makinae, Hisanori|AUTHOR Hisanori Makinae]]|
|[[Makino, Shoji|AUTHOR Shoji Makino]]|
|[[Malandrakis, Nikolaos|AUTHOR Nikolaos Malandrakis]]|
|[[Malinen, Jarmo|AUTHOR Jarmo Malinen]]|
|[[Malisz, Zofia|AUTHOR Zofia Malisz]]|
|[[Mallidi, Harish|AUTHOR Harish Mallidi]]|
|[[Malykh, Egor|AUTHOR Egor Malykh]]|
|[[Mandal, Tanumay|AUTHOR Tanumay Mandal]]|
|[[Mandel, Michael I.|AUTHOR Michael I. Mandel]]|
|[[Manohar, Vimal|AUTHOR Vimal Manohar]]|
|[[Manríquez, Rodrigo|AUTHOR Rodrigo Manríquez]]|
|[[Mansikkaniemi, André|AUTHOR André Mansikkaniemi]]|
|[[Marcel, Sébastien|AUTHOR Sébastien Marcel]]|
|[[Marcusson, Amelie|AUTHOR Amelie Marcusson]]|
|[[Marin, Alex|AUTHOR Alex Marin]]|
|[[Marklund, Ellen|AUTHOR Ellen Marklund]]|
|[[Markó, Alexandra|AUTHOR Alexandra Markó]]|
|[[Marques, Luciana|AUTHOR Luciana Marques]]|
|[[Marschik, Peter B.|AUTHOR Peter B. Marschik]]|
|[[Marteau, Pierre-François|AUTHOR Pierre-François Marteau]]|
|[[Martínez-Hinarejos, Carlos-D.|AUTHOR Carlos-D. Martínez-Hinarejos]]|
|[[Marxer, Ricard|AUTHOR Ricard Marxer]]|
|[[Masataki, Hirokazu|AUTHOR Hirokazu Masataki]]|
|[[Maslowski, Merel|AUTHOR Merel Maslowski]]|
|[[Mason, Lisa|AUTHOR Lisa Mason]]|
|[[Masuda-Katsuse, Ikuyo|AUTHOR Ikuyo Masuda-Katsuse]]|
|[[Masumura, Ryo|AUTHOR Ryo Masumura]]|
|[[Matassoni, Marco|AUTHOR Marco Matassoni]]|
|[[Matějka, Pavel|AUTHOR Pavel Matějka]]|
|[[Matoušek, Jindřich|AUTHOR Jindřich Matoušek]]|
|[[Matsoukas, Spyros|AUTHOR Spyros Matsoukas]]|
|[[Matsui, Toshie|AUTHOR Toshie Matsui]]|
|[[Matsuo, Yoshihiro|AUTHOR Yoshihiro Matsuo]]|
|[[Matthews, Iain|AUTHOR Iain Matthews]]|
|[[Matthiesen, Martin|AUTHOR Martin Matthiesen]]|
|[[Mau, Ted|AUTHOR Ted Mau]]|
|[[Mauranen, Anna|AUTHOR Anna Mauranen]]|
|[[May, Jonathan|AUTHOR Jonathan May]]|
|[[Mazur, Radoslaw|AUTHOR Radoslaw Mazur]]|
|[[McAllaster, Donald|AUTHOR Donald McAllaster]]|
|[[McAuliffe, Michael|AUTHOR Michael McAuliffe]]|
|[[McCree, Alan|AUTHOR Alan McCree]]|
|[[McDermott, Erik|AUTHOR Erik McDermott]]|
|[[McDonnell, Rachel|AUTHOR Rachel McDonnell]]|
|[[McGrath, Kathleen|AUTHOR Kathleen McGrath]]|
|[[McInnis, Melvin|AUTHOR Melvin McInnis]]|
|[[McLaren, Mitchell|AUTHOR Mitchell McLaren]]|
|[[McLoughlin, Ian|AUTHOR Ian McLoughlin]]|
|[[McQueen, James M.|AUTHOR James M. McQueen]]|
|[[McWilliams, Kelly|AUTHOR Kelly McWilliams]]|
|[[Medani, Takfarinas|AUTHOR Takfarinas Medani]]|
|[[Medennikov, Ivan|AUTHOR Ivan Medennikov]]|
|[[Meenakshi, G. Nisha|AUTHOR G. Nisha Meenakshi]]|
|[[Meermeier, Ralf|AUTHOR Ralf Meermeier]]|
|[[Mehta, Daryush D.|AUTHOR Daryush D. Mehta]]|
|[[Meignier, Sylvain|AUTHOR Sylvain Meignier]]|
|[[Meireles, Alexsandro R.|AUTHOR Alexsandro R. Meireles]]|
|[[Meister, Einar|AUTHOR Einar Meister]]|
|[[Menacer, M.A.|AUTHOR M.A. Menacer]]|
|[[Mendelev, Valentin|AUTHOR Valentin Mendelev]]|
|[[Mendels, Gideon|AUTHOR Gideon Mendels]]|
|[[Mendelson, Joseph|AUTHOR Joseph Mendelson]]|
|[[Meng, Helen|AUTHOR Helen Meng]]|
|[[Meng, Zhong|AUTHOR Zhong Meng]]|
|[[Menon, Anjali|AUTHOR Anjali Menon]]|
|[[Menon, Raghav|AUTHOR Raghav Menon]]|
|[[Merritt, Thomas|AUTHOR Thomas Merritt]]|
|[[Mertens, Julia|AUTHOR Julia Mertens]]|
|[[Mertins, Alfred|AUTHOR Alfred Mertins]]|
|[[Metze, Florian|AUTHOR Florian Metze]]|
|[[Meunier, Christine|AUTHOR Christine Meunier]]|
|[[Meyer, Antje S.|AUTHOR Antje S. Meyer]]|
|[[Meyer, Bernd T.|AUTHOR Bernd T. Meyer]]|
|[[Meyer, Werner|AUTHOR Werner Meyer]]|
|[[Michalsky, Jan|AUTHOR Jan Michalsky]]|
|[[Michel, Wilfried|AUTHOR Wilfried Michel]]|
|[[Michelas, Amandine|AUTHOR Amandine Michelas]]|
|[[Michelsanti, Daniel|AUTHOR Daniel Michelsanti]]|
|[[Miguel, Antonio|AUTHOR Antonio Miguel]]|
|[[Mihajlik, Péter|AUTHOR Péter Mihajlik]]|
|[[Mihkla, Meelis|AUTHOR Meelis Mihkla]]|
|[[Mihuc, Sarah|AUTHOR Sarah Mihuc]]|
|[[Mikušová, Nina|AUTHOR Nina Mikušová]]|
|[[Milde, Benjamin|AUTHOR Benjamin Milde]]|
|[[Milner, Ben|AUTHOR Ben Milner]]|
|[[Milošević, Milana|AUTHOR Milana Milošević]]|
|[[Mimura, Masato|AUTHOR Masato Mimura]]|
|[[Minagi, Shogo|AUTHOR Shogo Minagi]]|
|[[Minamiguchi, Ryo|AUTHOR Ryo Minamiguchi]]|
|[[Minato, Takashi|AUTHOR Takashi Minato]]|
|[[Minematsu, Nobuaki|AUTHOR Nobuaki Minematsu]]|
|[[Ming, Huaiping|AUTHOR Huaiping Ming]]|
|[[Minker, Wolfgang|AUTHOR Wolfgang Minker]]|
|[[Mirheidari, Bahman|AUTHOR Bahman Mirheidari]]|
|[[Mirkin, Shachar|AUTHOR Shachar Mirkin]]|
|[[Mirsamadi, Seyedmahdad|AUTHOR Seyedmahdad Mirsamadi]]|
|[[Misra, Abhinav|AUTHOR Abhinav Misra]]|
|[[Misra, Ananya|AUTHOR Ananya Misra]]|
|[[Mitchinson, Ben|AUTHOR Ben Mitchinson]]|
|[[Miura, Iori|AUTHOR Iori Miura]]|
|[[Miwa, Kenichiro|AUTHOR Kenichiro Miwa]]|
|[[Miyashita, Genta|AUTHOR Genta Miyashita]]|
|[[Miyoshi, Hiroyuki|AUTHOR Hiroyuki Miyoshi]]|
|[[Möbius, Bernd|AUTHOR Bernd Möbius]]|
|[[Mochihashi, Daichi|AUTHOR Daichi Mochihashi]]|
|[[Mohammadi, Amir|AUTHOR Amir Mohammadi]]|
|[[Mohammadi, Seyed Hamidreza|AUTHOR Seyed Hamidreza Mohammadi]]|
|[[Moinet, Alexis|AUTHOR Alexis Moinet]]|
|[[Mok, Peggy|AUTHOR Peggy Mok]]|
|[[Mokhtari, Parham|AUTHOR Parham Mokhtari]]|
|[[Möller, Sebastian|AUTHOR Sebastian Möller]]|
|[[Molloy, Hillary R.|AUTHOR Hillary R. Molloy]]|
|[[Moniz, Helena|AUTHOR Helena Moniz]]|
|[[Monta, Natsuki|AUTHOR Natsuki Monta]]|
|[[Montas, Eva|AUTHOR Eva Montas]]|
|[[Moon, Jung Min|AUTHOR Jung Min Moon]]|
|[[Moore, Elliot|AUTHOR Elliot Moore]]|
|[[Moore, Roger K.|AUTHOR Roger K. Moore]]|
|[[Moosmüller, Sylvia|AUTHOR Sylvia Moosmüller]]|
|[[Morales, Michelle Renee|AUTHOR Michelle Renee Morales]]|
|[[Morchid, Mohamed|AUTHOR Mohamed Morchid]]|
|[[Morency, Louis-Philippe|AUTHOR Louis-Philippe Morency]]|
|[[Morgan, Angela|AUTHOR Angela Morgan]]|
|[[Mori, Hiroki|AUTHOR Hiroki Mori]]|
|[[Mori, Takuma|AUTHOR Takuma Mori]]|
|[[Morise, Masanori|AUTHOR Masanori Morise]]|
|[[Moró, Anna|AUTHOR Anna Moró]]|
|[[Mostafa, Naziba|AUTHOR Naziba Mostafa]]|
|[[Motlicek, Petr|AUTHOR Petr Motlicek]]|
|[[Mower Provost, Emily|AUTHOR Emily Mower Provost]]|
|[[Mrkšić, Nikola|AUTHOR Nikola Mrkšić]]|
|[[Muddireddy, Pavankumar Reddy|AUTHOR Pavankumar Reddy Muddireddy]]|
|[[Mukherjee, Sankar|AUTHOR Sankar Mukherjee]]|
|[[Mulholland, Matthew|AUTHOR Matthew Mulholland]]|
|[[Müller, Luděk|AUTHOR Luděk Müller]]|
|[[Müller, Markus|AUTHOR Markus Müller]]|
|[[Mun, Seongkyu|AUTHOR Seongkyu Mun]]|
|[[Mundnich, Karel|AUTHOR Karel Mundnich]]|
|[[Munson, Benjamin|AUTHOR Benjamin Munson]]|
|[[Murphy, Andy|AUTHOR Andy Murphy]]|
|[[Murphy, Damian T.|AUTHOR Damian T. Murphy]]|
|[[Murthy, B.H.V.S. Narayana|AUTHOR B.H.V.S. Narayana Murthy]]|
|[[Murthy, Hema A.|AUTHOR Hema A. Murthy]]|
|[[Murtola, Tiina|AUTHOR Tiina Murtola]]|
|[[Murty, K. Sri Rama|AUTHOR K. Sri Rama Murty]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Naaman, Einat|AUTHOR Einat Naaman]]|
|[[Nadolski, Adam|AUTHOR Adam Nadolski]]|
|[[Nagaraja, Varun|AUTHOR Varun Nagaraja]]|
|[[Nagarsheth, Parav|AUTHOR Parav Nagarsheth]]|
|[[Nagesha, Venki|AUTHOR Venki Nagesha]]|
|[[Nagrani, Arsha|AUTHOR Arsha Nagrani]]|
|[[Nahamoo, David|AUTHOR David Nahamoo]]|
|[[Nair, Angelika|AUTHOR Angelika Nair]]|
|[[Najafian, Maryam|AUTHOR Maryam Najafian]]|
|[[Nakadai, Kazuhiro|AUTHOR Kazuhiro Nakadai]]|
|[[Nakagawa, Seiichi|AUTHOR Seiichi Nakagawa]]|
|[[Nakamura, Satoshi|AUTHOR Satoshi Nakamura]]|
|[[Nakamura, Shizuka|AUTHOR Shizuka Nakamura]]|
|[[Nakanishi, Ryosuke|AUTHOR Ryosuke Nakanishi]]|
|[[Nakashika, Toru|AUTHOR Toru Nakashika]]|
|[[Nakatani, Tomohiro|AUTHOR Tomohiro Nakatani]]|
|[[Namasivayam, Aravind|AUTHOR Aravind Namasivayam]]|
|[[Nankaku, Yoshihiko|AUTHOR Yoshihiko Nankaku]]|
|[[Nara, Kiranpreet|AUTHOR Kiranpreet Nara]]|
|[[Narayanan, Arun|AUTHOR Arun Narayanan]]|
|[[Narayanan, Shrikanth S.|AUTHOR Shrikanth S. Narayanan]]|
|[[Narendra, N.P.|AUTHOR N.P. Narendra]]|
|[[Narita, Tomohiro|AUTHOR Tomohiro Narita]]|
|[[Narwekar, Abhishek|AUTHOR Abhishek Narwekar]]|
|[[Nasir, Md.|AUTHOR Md. Nasir]]|
|[[Nataraj, K.S.|AUTHOR K.S. Nataraj]]|
|[[Navas, Eva|AUTHOR Eva Navas]]|
|[[Nayak, Krishna|AUTHOR Krishna Nayak]]|
|[[Nayak, Neha|AUTHOR Neha Nayak]]|
|[[Nayak, Shekhar|AUTHOR Shekhar Nayak]]|
|[[Neeracher, Matthias|AUTHOR Matthias Neeracher]]|
|[[Nellore, Bhanu Teja|AUTHOR Bhanu Teja Nellore]]|
|[[Németh, Géza|AUTHOR Géza Németh]]|
|[[Nercessian, Shahan|AUTHOR Shahan Nercessian]]|
|[[Nerpagar, Rachana|AUTHOR Rachana Nerpagar]]|
|[[Neubig, Graham|AUTHOR Graham Neubig]]|
|[[Neufeld, Chris|AUTHOR Chris Neufeld]]|
|[[Neumann, Michael|AUTHOR Michael Neumann]]|
|[[Neuschaefer-Rube, Christiane|AUTHOR Christiane Neuschaefer-Rube]]|
|[[Ney, Hermann|AUTHOR Hermann Ney]]|
|[[Ng, Raymond W.M.|AUTHOR Raymond W.M. Ng]]|
|[[Ng, Tim|AUTHOR Tim Ng]]|
|[[Ng, Wen Zheng Terence|AUTHOR Wen Zheng Terence Ng]]|
|[[Nguyen, Noël|AUTHOR Noël Nguyen]]|
|[[Ni, Jinfu|AUTHOR Jinfu Ni]]|
|[[Ni, Zhidong|AUTHOR Zhidong Ni]]|
|[[Ní Chasaide, Ailbhe|AUTHOR Ailbhe Ní Chasaide]]|
|[[Ní Chiaráin, Neasa|AUTHOR Neasa Ní Chiaráin]]|
|[[Nidadavolu, Phani Sankar|AUTHOR Phani Sankar Nidadavolu]]|
|[[Niebuhr, Oliver|AUTHOR Oliver Niebuhr]]|
|[[Niehues, Jan|AUTHOR Jan Niehues]]|
|[[Niesler, Thomas|AUTHOR Thomas Niesler]]|
|[[Nikulásdóttir, Anna Björk|AUTHOR Anna Björk Nikulásdóttir]]|
|[[Nikulin, Aleksander|AUTHOR Aleksander Nikulin]]|
|[[Nilsson Björkenstam, Kristina|AUTHOR Kristina Nilsson Björkenstam]]|
|[[Ning, Yishuang|AUTHOR Yishuang Ning]]|
|[[Nirschl, Michael|AUTHOR Michael Nirschl]]|
|[[Nishizaki, Hiromitsu|AUTHOR Hiromitsu Nishizaki]]|
|[[Nookala, Usha Amrutha|AUTHOR Usha Amrutha Nookala]]|
|[[Norel, Raquel|AUTHOR Raquel Norel]]|
|[[Nöth, Elmar|AUTHOR Elmar Nöth]]|
|[[Novák-Tót, Eszter|AUTHOR Eszter Novák-Tót]]|
|[[Novoa, José|AUTHOR José Novoa]]|
|[[Novoselov, Sergey|AUTHOR Sergey Novoselov]]|
|[[Novotný, M.|AUTHOR M. Novotný]]|
|[[Novotný, Ondřej|AUTHOR Ondřej Novotný]]|
|[[Nowicki, Jakub|AUTHOR Jakub Nowicki]]|
|[[Nwe, Tin Lay|AUTHOR Tin Lay Nwe]]|
|[[Nyström, Pär|AUTHOR Pär Nyström]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Obuchi, Yasunari|AUTHOR Yasunari Obuchi]]|
|[[Oertel, Catharine|AUTHOR Catharine Oertel]]|
|[[Ogawa, Atsunori|AUTHOR Atsunori Ogawa]]|
|[[Oh, Eunmi|AUTHOR Eunmi Oh]]|
|[[Ohashi, Hiroki|AUTHOR Hiroki Ohashi]]|
|[[Ohsugi, Yasuhito|AUTHOR Yasuhito Ohsugi]]|
|[[Öktem, Alp|AUTHOR Alp Öktem]]|
|[[Omologo, Maurizio|AUTHOR Maurizio Omologo]]|
|[[Öngür, Dost|AUTHOR Dost Öngür]]|
|[[Oplustil, Pilar|AUTHOR Pilar Oplustil]]|
|[[Orio, Patricio|AUTHOR Patricio Orio]]|
|[[Orozco-Arroyave, Juan Rafael|AUTHOR Juan Rafael Orozco-Arroyave]]|
|[[Ortega, Alfonso|AUTHOR Alfonso Ortega]]|
|[[Østergaard, Jan|AUTHOR Jan Østergaard]]|
|[[Östling, Robert|AUTHOR Robert Östling]]|
|[[Ostrand, Rachel|AUTHOR Rachel Ostrand]]|
|[[Ottl, Sandra|AUTHOR Sandra Ottl]]|
|[[Oualil, Youssef|AUTHOR Youssef Oualil]]|
|[[Ozawa, Kenji|AUTHOR Kenji Ozawa]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Paats, Andrus|AUTHOR Andrus Paats]]|
|[[Pagmar, David|AUTHOR David Pagmar]]|
|[[Pahuja, Vardaan|AUTHOR Vardaan Pahuja]]|
|[[Paiva, Ana|AUTHOR Ana Paiva]]|
|[[Paletz, Susannah|AUTHOR Susannah Paletz]]|
|[[Pałka, Szymon|AUTHOR Szymon Pałka]]|
|[[Palmer, Frederik|AUTHOR Frederik Palmer]]|
|[[Palomäki, Kalle|AUTHOR Kalle Palomäki]]|
|[[Pampouchidou, Anastasia|AUTHOR Anastasia Pampouchidou]]|
|[[Pan, Jielin|AUTHOR Jielin Pan]]|
|[[Pan, Jing|AUTHOR Jing Pan]]|
|[[Pan, Xiaoman|AUTHOR Xiaoman Pan]]|
|[[Pan, Yilin|AUTHOR Yilin Pan]]|
|[[Panchapagesan, Sankaran|AUTHOR Sankaran Panchapagesan]]|
|[[Pandey, Prem C.|AUTHOR Prem C. Pandey]]|
|[[Pandia, Karthik|AUTHOR Karthik Pandia]]|
|[[Pang, Cheng|AUTHOR Cheng Pang]]|
|[[Pantic, Maja|AUTHOR Maja Pantic]]|
|[[Papadopoulos, Pavlos|AUTHOR Pavlos Papadopoulos]]|
|[[Parada, Carolina|AUTHOR Carolina Parada]]|
|[[Parada-Cabaleiro, Emilia|AUTHOR Emilia Parada-Cabaleiro]]|
|[[Parcheta, Zuzanna|AUTHOR Zuzanna Parcheta]]|
|[[Parcollet, Titouan|AUTHOR Titouan Parcollet]]|
|[[Park, Ji Ho|AUTHOR Ji Ho Park]]|
|[[Park, Se Rim|AUTHOR Se Rim Park]]|
|[[Park, Soo Jin|AUTHOR Soo Jin Park]]|
|[[Park, Young-cheol|AUTHOR Young-cheol Park]]|
|[[Parlato-Oliveira, Erika|AUTHOR Erika Parlato-Oliveira]]|
|[[Parthasarathi, Sree Hari Krishnan|AUTHOR Sree Hari Krishnan Parthasarathi]]|
|[[Parthasarathy, Sarangarajan|AUTHOR Sarangarajan Parthasarathy]]|
|[[Parthasarathy, Srinivas|AUTHOR Srinivas Parthasarathy]]|
|[[Pascual, Santiago|AUTHOR Santiago Pascual]]|
|[[Patel, Rupal|AUTHOR Rupal Patel]]|
|[[Patel, Tanvina B.|AUTHOR Tanvina B. Patel]]|
|[[Patil, Hemant A.|AUTHOR Hemant A. Patil]]|
|[[Patil, Kailash|AUTHOR Kailash Patil]]|
|[[Patil, Nimisha|AUTHOR Nimisha Patil]]|
|[[Patterson, Roy D.|AUTHOR Roy D. Patterson]]|
|[[Paulik, Matthias|AUTHOR Matthias Paulik]]|
|[[Peddinti, Vijayaditya|AUTHOR Vijayaditya Peddinti]]|
|[[Pederson, Eric|AUTHOR Eric Pederson]]|
|[[Pędzimąż, Tomasz|AUTHOR Tomasz Pędzimąż]]|
|[[Pelachaud, Catherine|AUTHOR Catherine Pelachaud]]|
|[[Peng, Gang|AUTHOR Gang Peng]]|
|[[Pennant, Luciana|AUTHOR Luciana Pennant]]|
|[[Peperkamp, Sharon|AUTHOR Sharon Peperkamp]]|
|[[Perdigão, Fernando|AUTHOR Fernando Perdigão]]|
|[[Peres, Daniel Oliveira|AUTHOR Daniel Oliveira Peres]]|
|[[Pérez, Juan Manuel|AUTHOR Juan Manuel Pérez]]|
|[[Pernkopf, Franz|AUTHOR Franz Pernkopf]]|
|[[Peters, Judith|AUTHOR Judith Peters]]|
|[[Peterson, Sean D.|AUTHOR Sean D. Peterson]]|
|[[Pettorino, Massimo|AUTHOR Massimo Pettorino]]|
|[[Petukhova, Volha|AUTHOR Volha Petukhova]]|
|[[Pétursson, Matthías|AUTHOR Matthías Pétursson]]|
|[[Pfeifenberger, Lukas|AUTHOR Lukas Pfeifenberger]]|
|[[Phan, Huy|AUTHOR Huy Phan]]|
|[[Piccaluga, Myriam|AUTHOR Myriam Piccaluga]]|
|[[Piccinini, Page|AUTHOR Page Piccinini]]|
|[[Picheny, Michael|AUTHOR Michael Picheny]]|
|[[Piitulainen, Jussi|AUTHOR Jussi Piitulainen]]|
|[[Pirhosseinloo, Shadi|AUTHOR Shadi Pirhosseinloo]]|
|[[Plante-Hébert, Julien|AUTHOR Julien Plante-Hébert]]|
|[[Platek, Ondrej|AUTHOR Ondrej Platek]]|
|[[Plchot, Oldřich|AUTHOR Oldřich Plchot]]|
|[[Plug, Leendert|AUTHOR Leendert Plug]]|
|[[Plumbley, Mark D.|AUTHOR Mark D. Plumbley]]|
|[[Pokorny, Florian B.|AUTHOR Florian B. Pokorny]]|
|[[Pollet, Vincent|AUTHOR Vincent Pollet]]|
|[[Półrola, Paweł|AUTHOR Paweł Półrola]]|
|[[Pompili, Anna|AUTHOR Anna Pompili]]|
|[[Pontil, Massimiliano|AUTHOR Massimiliano Pontil]]|
|[[Poorjam, Amir Hossein|AUTHOR Amir Hossein Poorjam]]|
|[[Potard, Blaise|AUTHOR Blaise Potard]]|
|[[Pourdamghani, Nima|AUTHOR Nima Pourdamghani]]|
|[[Povey, Daniel|AUTHOR Daniel Povey]]|
|[[Prabhavalkar, Rohit|AUTHOR Rohit Prabhavalkar]]|
|[[Pradhan, Gayadhar|AUTHOR Gayadhar Pradhan]]|
|[[Prado, Pavel|AUTHOR Pavel Prado]]|
|[[Prahallad, Kishore|AUTHOR Kishore Prahallad]]|
|[[Prakash, Jeena J.|AUTHOR Jeena J. Prakash]]|
|[[Prasad, RaviShankar|AUTHOR RaviShankar Prasad]]|
|[[Prasanna, S.R. Mahadeva|AUTHOR S.R. Mahadeva Prasanna]]|
|[[Prateek, K.L.|AUTHOR K.L. Prateek]]|
|[[Prenger, Ryan|AUTHOR Ryan Prenger]]|
|[[Prévot, Laurent|AUTHOR Laurent Prévot]]|
|[[Proctor, Michael|AUTHOR Michael Proctor]]|
|[[Proença, Jorge|AUTHOR Jorge Proença]]|
|[[Prudnikov, Alexey|AUTHOR Alexey Prudnikov]]|
|[[Psutka, Josef V.|AUTHOR Josef V. Psutka]]|
|[[Puga, Karin|AUTHOR Karin Puga]]|
|[[Pugachevskiy, Sergey|AUTHOR Sergey Pugachevskiy]]|
|[[Pugh, Robert A.|AUTHOR Robert A. Pugh]]|
|[[Pullela, Keerthi|AUTHOR Keerthi Pullela]]|
|[[Pundak, Golan|AUTHOR Golan Pundak]]|
|[[Pusateri, Ernest|AUTHOR Ernest Pusateri]]|
|[[Pushpavathi, M.|AUTHOR M. Pushpavathi]]|
|[[Pust, Michael|AUTHOR Michael Pust]]|
|[[Putrycz, Bartosz|AUTHOR Bartosz Putrycz]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Qi, Xiaoke|AUTHOR Xiaoke Qi]]|
|[[Qian, Kaizhi|AUTHOR Kaizhi Qian]]|
|[[Qian, Kun|AUTHOR Kun Qian]]|
|[[Qian, Qi|AUTHOR Qi Qian]]|
|[[Qian, Yanmin|AUTHOR Yanmin Qian]]|
|[[Qian, Yao|AUTHOR Yao Qian]]|
|[[Quatieri, Thomas F.|AUTHOR Thomas F. Quatieri]]|
|[[Quinn, John|AUTHOR John Quinn]]|
|[[Quiroz, Sergio I.|AUTHOR Sergio I. Quiroz]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Rábai, Krisztina|AUTHOR Krisztina Rábai]]|
|[[Ragni, A.|AUTHOR A. Ragni]]|
|[[Rahimi, Zahra|AUTHOR Zahra Rahimi]]|
|[[Rahman, Md. Hafizur|AUTHOR Md. Hafizur Rahman]]|
|[[Raitio, Tuomo|AUTHOR Tuomo Raitio]]|
|[[Raj, Bhiksha|AUTHOR Bhiksha Raj]]|
|[[Raju, Manoj|AUTHOR Manoj Raju]]|
|[[Rallabandi, SaiKrishna|AUTHOR SaiKrishna Rallabandi]]|
|[[Ramabhadran, Bhuvana|AUTHOR Bhuvana Ramabhadran]]|
|[[Ramakrishnan, A.G.|AUTHOR A.G. Ramakrishnan]]|
|[[Ramanarayanan, Vikram|AUTHOR Vikram Ramanarayanan]]|
|[[Ramírez López, Ana|AUTHOR Ana Ramírez López]]|
|[[Ramos, Miguel Varela|AUTHOR Miguel Varela Ramos]]|
|[[Ranjan, Shivesh|AUTHOR Shivesh Ranjan]]|
|[[Rantula, Olli|AUTHOR Olli Rantula]]|
|[[Rao, Kanishka|AUTHOR Kanishka Rao]]|
|[[Rao, K. Sreenivasa|AUTHOR K. Sreenivasa Rao]]|
|[[Rao, Wei|AUTHOR Wei Rao]]|
|[[Rao M.V., Achuth|AUTHOR Achuth Rao M.V.]]|
|[[Raposo de Medeiros, Beatriz|AUTHOR Beatriz Raposo de Medeiros]]|
|[[Räsänen, Okko|AUTHOR Okko Räsänen]]|
|[[Rasipuram, Ramya|AUTHOR Ramya Rasipuram]]|
|[[Rastrow, Ariya|AUTHOR Ariya Rastrow]]|
|[[Ratajczak, Martin|AUTHOR Martin Ratajczak]]|
|[[Ratnagiri, Madhavi|AUTHOR Madhavi Ratnagiri]]|
|[[Rautara, Sarita|AUTHOR Sarita Rautara]]|
|[[Ravanelli, Mirco|AUTHOR Mirco Ravanelli]]|
|[[Raveh, Eran|AUTHOR Eran Raveh]]|
|[[Raykar, Vikas|AUTHOR Vikas Raykar]]|
|[[Raymond, Christian|AUTHOR Christian Raymond]]|
|[[Rayner, Manny|AUTHOR Manny Rayner]]|
|[[Reetz, Henning|AUTHOR Henning Reetz]]|
|[[Rehr, Robert|AUTHOR Robert Rehr]]|
|[[Reidy, Patrick F.|AUTHOR Patrick F. Reidy]]|
|[[Reinhold, Isabella|AUTHOR Isabella Reinhold]]|
|[[Reiss, Attila|AUTHOR Attila Reiss]]|
|[[Remes, Ulpu|AUTHOR Ulpu Remes]]|
|[[Renals, Steve|AUTHOR Steve Renals]]|
|[[Rendel, Asaf|AUTHOR Asaf Rendel]]|
|[[Renner, Lena F.|AUTHOR Lena F. Renner]]|
|[[Rennie, Steven J.|AUTHOR Steven J. Rennie]]|
|[[Reuber, Markus|AUTHOR Markus Reuber]]|
|[[Reverdy, Justine|AUTHOR Justine Reverdy]]|
|[[Reynolds, Douglas|AUTHOR Douglas Reynolds]]|
|[[Rialland, Annie|AUTHOR Annie Rialland]]|
|[[Ribeiro, Antonio Celso|AUTHOR Antonio Celso Ribeiro]]|
|[[Ribeiro, M. Sam|AUTHOR M. Sam Ribeiro]]|
|[[Ricard, Joseph|AUTHOR Joseph Ricard]]|
|[[Riccardi, Giuseppe|AUTHOR Giuseppe Riccardi]]|
|[[Richardson, Brigitte|AUTHOR Brigitte Richardson]]|
|[[Richardson, Fred|AUTHOR Fred Richardson]]|
|[[Ridouane, Rachid|AUTHOR Rachid Ridouane]]|
|[[Riou, Matthieu|AUTHOR Matthieu Riou]]|
|[[Rocha, Bruno|AUTHOR Bruno Rocha]]|
|[[Rodehorst, Mike|AUTHOR Mike Rodehorst]]|
|[[Roebel, Axel|AUTHOR Axel Roebel]]|
|[[Rognoni, Luca|AUTHOR Luca Rognoni]]|
|[[Rohdin, Johan|AUTHOR Johan Rohdin]]|
|[[Rojas-Barahona, Lina|AUTHOR Lina Rojas-Barahona]]|
|[[Romanenko, Aleksei|AUTHOR Aleksei Romanenko]]|
|[[Romøren, Anna Sara H.|AUTHOR Anna Sara H. Romøren]]|
|[[Ronanki, Srikanth|AUTHOR Srikanth Ronanki]]|
|[[Roomi, Bergul|AUTHOR Bergul Roomi]]|
|[[Rosa-Zurera, Manuel|AUTHOR Manuel Rosa-Zurera]]|
|[[Rose, Richard|AUTHOR Richard Rose]]|
|[[Rosenberg, Andrew|AUTHOR Andrew Rosenberg]]|
|[[Rossato, Solange|AUTHOR Solange Rossato]]|
|[[Rosset, Sophie|AUTHOR Sophie Rosset]]|
|[[Rosti, Antti-Veikko|AUTHOR Antti-Veikko Rosti]]|
|[[Rouat, Jean|AUTHOR Jean Rouat]]|
|[[Rouhe, Aku|AUTHOR Aku Rouhe]]|
|[[Roustan, Benjamin|AUTHOR Benjamin Roustan]]|
|[[Rouvier, Mickael|AUTHOR Mickael Rouvier]]|
|[[Roux, Justus|AUTHOR Justus Roux]]|
|[[Rozen, Piotr|AUTHOR Piotr Rozen]]|
|[[Rozenberg, Shai|AUTHOR Shai Rozenberg]]|
|[[Ruede, Robin|AUTHOR Robin Ruede]]|
|[[Ruhs, Mirko|AUTHOR Mirko Ruhs]]|
|[[Ruiz, Nicholas|AUTHOR Nicholas Ruiz]]|
|[[Russell, Martin|AUTHOR Martin Russell]]|
|[[Russell, Scott|AUTHOR Scott Russell]]|
|[[Rusz, Jan|AUTHOR Jan Rusz]]|
|[[Růžička, Evžen|AUTHOR Evžen Růžička]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[S., Irfan|AUTHOR Irfan S.]]|
|[[Sadamitsu, Kugatsu|AUTHOR Kugatsu Sadamitsu]]|
|[[Sadeghian, Roozbeh|AUTHOR Roozbeh Sadeghian]]|
|[[Sadjadi, Seyed Omid|AUTHOR Seyed Omid Sadjadi]]|
|[[Saeb, Armin|AUTHOR Armin Saeb]]|
|[[Sagha, Hesam|AUTHOR Hesam Sagha]]|
|[[Sagisaka, Yoshinori|AUTHOR Yoshinori Sagisaka]]|
|[[Sahidullah, Md.|AUTHOR Md. Sahidullah]]|
|[[Sahkai, Heete|AUTHOR Heete Sahkai]]|
|[[Sahu, Saurabh|AUTHOR Saurabh Sahu]]|
|[[Sailor, Hardik B.|AUTHOR Hardik B. Sailor]]|
|[[Sainath, Tara N.|AUTHOR Tara N. Sainath]]|
|[[Saito, Daisuke|AUTHOR Daisuke Saito]]|
|[[Saito, Yuki|AUTHOR Yuki Saito]]|
|[[Sak, Haşim|AUTHOR Haşim Sak]]|
|[[Sakai, Shinsuke|AUTHOR Shinsuke Sakai]]|
|[[Sakakibara, Ken-Ichi|AUTHOR Ken-Ichi Sakakibara]]|
|[[Sakti, Sakriani|AUTHOR Sakriani Sakti]]|
|[[Salim, Fahim A.|AUTHOR Fahim A. Salim]]|
|[[Salimbajevs, Askars|AUTHOR Askars Salimbajevs]]|
|[[Salvi, Giampiero|AUTHOR Giampiero Salvi]]|
|[[Samaddar, A.B.|AUTHOR A.B. Samaddar]]|
|[[Samarakoon, Lahiru|AUTHOR Lahiru Samarakoon]]|
|[[Sameti, Hossein|AUTHOR Hossein Sameti]]|
|[[Samui, Suman|AUTHOR Suman Samui]]|
|[[Sanabria, Ramon|AUTHOR Ramon Sanabria]]|
|[[Sánchez, Ariadna|AUTHOR Ariadna Sánchez]]|
|[[Sanchez, Jon|AUTHOR Jon Sanchez]]|
|[[Sánchez-Martín, Patricia|AUTHOR Patricia Sánchez-Martín]]|
|[[Sandhan, Tushar|AUTHOR Tushar Sandhan]]|
|[[Sandsten, Maria|AUTHOR Maria Sandsten]]|
|[[Sangwan, Abhijeet|AUTHOR Abhijeet Sangwan]]|
|[[San Segundo, Eugenia|AUTHOR Eugenia San Segundo]]|
|[[Saon, George|AUTHOR George Saon]]|
|[[Saraclar, Murat|AUTHOR Murat Saraclar]]|
|[[Sarasola, Xabier|AUTHOR Xabier Sarasola]]|
|[[Sarkar, Achintya Kr.|AUTHOR Achintya Kr. Sarkar]]|
|[[Sarmah, Priyankoo|AUTHOR Priyankoo Sarmah]]|
|[[Saruwatari, Hiroshi|AUTHOR Hiroshi Saruwatari]]|
|[[Saryazdi, Raheleh|AUTHOR Raheleh Saryazdi]]|
|[[Sato, Masaaki|AUTHOR Masaaki Sato]]|
|[[Satt, Aharon|AUTHOR Aharon Satt]]|
|[[Saurous, Rif A.|AUTHOR Rif A. Saurous]]|
|[[Sawada, Naoki|AUTHOR Naoki Sawada]]|
|[[Saz Torralba, Oscar|AUTHOR Oscar Saz Torralba]]|
|[[S.B., Sunil Kumar|AUTHOR Sunil Kumar S.B.]]|
|[[Scarborough, Rebecca|AUTHOR Rebecca Scarborough]]|
|[[Schaffer, J. David|AUTHOR J. David Schaffer]]|
|[[Schatz, Thomas|AUTHOR Thomas Schatz]]|
|[[Scherer, Stefan|AUTHOR Stefan Scherer]]|
|[[Scheutz, Hannes|AUTHOR Hannes Scheutz]]|
|[[Schieder, Sebastian|AUTHOR Sebastian Schieder]]|
|[[Schiller, Dominik|AUTHOR Dominik Schiller]]|
|[[Schlangen, David|AUTHOR David Schlangen]]|
|[[Schlüter, Ralf|AUTHOR Ralf Schlüter]]|
|[[Schmidhuber, Jürgen|AUTHOR Jürgen Schmidhuber]]|
|[[Schmidt, Christoph|AUTHOR Christoph Schmidt]]|
|[[Schmidt, Gerhard|AUTHOR Gerhard Schmidt]]|
|[[Schmitt, Maximilian|AUTHOR Maximilian Schmitt]]|
|[[Schneider, Gerold|AUTHOR Gerold Schneider]]|
|[[Schnieder, Sebastian|AUTHOR Sebastian Schnieder]]|
|[[Schoffelen, Jan-Mathijs|AUTHOR Jan-Mathijs Schoffelen]]|
|[[Schoormann, Heike|AUTHOR Heike Schoormann]]|
|[[Schötz, Susanne|AUTHOR Susanne Schötz]]|
|[[Schröder, Anne|AUTHOR Anne Schröder]]|
|[[Schuller, Björn|AUTHOR Björn Schuller]]|
|[[Schultz, Tanja|AUTHOR Tanja Schultz]]|
|[[Schwarz, Iris-Corinna|AUTHOR Iris-Corinna Schwarz]]|
|[[Schweitzer, Antje|AUTHOR Antje Schweitzer]]|
|[[Schweitzer, Katrin|AUTHOR Katrin Schweitzer]]|
|[[Seelamantula, Chandra Sekhar|AUTHOR Chandra Sekhar Seelamantula]]|
|[[Seeram, Tejaswi|AUTHOR Tejaswi Seeram]]|
|[[Segura, Carlos|AUTHOR Carlos Segura]]|
|[[Seiderer, Andreas|AUTHOR Andreas Seiderer]]|
|[[Seidl, Amanda|AUTHOR Amanda Seidl]]|
|[[Selamtzis, Andreas|AUTHOR Andreas Selamtzis]]|
|[[Sell, Gregory|AUTHOR Gregory Sell]]|
|[[Seltzer, Michael L.|AUTHOR Michael L. Seltzer]]|
|[[Seo, Jeongil|AUTHOR Jeongil Seo]]|
|[[Sercu, Tom|AUTHOR Tom Sercu]]|
|[[Serigos, Jacqueline|AUTHOR Jacqueline Serigos]]|
|[[Serrà, Joan|AUTHOR Joan Serrà]]|
|[[Serrano, Luis|AUTHOR Luis Serrano]]|
|[[Serrurier, Antoine|AUTHOR Antoine Serrurier]]|
|[[Seshadri, Shreyas|AUTHOR Shreyas Seshadri]]|
|[[Sethu, Vidhyasaharan|AUTHOR Vidhyasaharan Sethu]]|
|[[Sethy, Abhinav|AUTHOR Abhinav Sethy]]|
|[[Setter, Jane|AUTHOR Jane Setter]]|
|[[Settle, Shane|AUTHOR Shane Settle]]|
|[[Sezgin, Metin|AUTHOR Metin Sezgin]]|
|[[Shafran, Izhak|AUTHOR Izhak Shafran]]|
|[[Shahnawazuddin, S.|AUTHOR S. Shahnawazuddin]]|
|[[Shakhnarovich, Gregory|AUTHOR Gregory Shakhnarovich]]|
|[[Shanmugam, Aswin|AUTHOR Aswin Shanmugam]]|
|[[Shannon, Matt|AUTHOR Matt Shannon]]|
|[[Sharma, Bidisha|AUTHOR Bidisha Sharma]]|
|[[Sharma, Jitendra|AUTHOR Jitendra Sharma]]|
|[[Sharma, Shubham|AUTHOR Shubham Sharma]]|
|[[Shaw, Francesca|AUTHOR Francesca Shaw]]|
|[[Shaw, Jason A.|AUTHOR Jason A. Shaw]]|
|[[Shchemelinin, Vadim|AUTHOR Vadim Shchemelinin]]|
|[[Shechtman, Slava|AUTHOR Slava Shechtman]]|
|[[Sheena, Yaniv|AUTHOR Yaniv Sheena]]|
|[[Shen, Chen|AUTHOR Chen Shen]]|
|[[Shen, Peng|AUTHOR Peng Shen]]|
|[[Shen, Xiaoyu|AUTHOR Xiaoyu Shen]]|
|[[Shi, Ying|AUTHOR Ying Shi]]|
|[[Shiga, Yoshinori|AUTHOR Yoshinori Shiga]]|
|[[Shih, Chin-Hong|AUTHOR Chin-Hong Shih]]|
|[[Shimada, Kazuki|AUTHOR Kazuki Shimada]]|
|[[Shinozaki, Takahiro|AUTHOR Takahiro Shinozaki]]|
|[[Shiozawa, Fumiya|AUTHOR Fumiya Shiozawa]]|
|[[Shirley, Ben|AUTHOR Ben Shirley]]|
|[[Shokouhi, Navid|AUTHOR Navid Shokouhi]]|
|[[Shon, Suwon|AUTHOR Suwon Shon]]|
|[[Shosted, Ryan|AUTHOR Ryan Shosted]]|
|[[Shoul, Karim|AUTHOR Karim Shoul]]|
|[[Shriberg, Elizabeth E.|AUTHOR Elizabeth E. Shriberg]]|
|[[Shyu, Frank|AUTHOR Frank Shyu]]|
|[[Sidorov, Maxim|AUTHOR Maxim Sidorov]]|
|[[Signorello, Rosario|AUTHOR Rosario Signorello]]|
|[[Silen, Hanna|AUTHOR Hanna Silen]]|
|[[Silnova, Anna|AUTHOR Anna Silnova]]|
|[[Silva, Samuel|AUTHOR Samuel Silva]]|
|[[Silvera-Tawil, David|AUTHOR David Silvera-Tawil]]|
|[[Sim, Khe Chai|AUTHOR Khe Chai Sim]]|
|[[Simantiraki, Olympia|AUTHOR Olympia Simantiraki]]|
|[[Simko, Gabor|AUTHOR Gabor Simko]]|
|[[Šimko, Juraj|AUTHOR Juraj Šimko]]|
|[[Simões, Antônio R.M.|AUTHOR Antônio R.M. Simões]]|
|[[Simon, Anne Catherine|AUTHOR Anne Catherine Simon]]|
|[[Simonnet, Edwin|AUTHOR Edwin Simonnet]]|
|[[Simpson, Adrian P.|AUTHOR Adrian P. Simpson]]|
|[[Sinclair, Mark|AUTHOR Mark Sinclair]]|
|[[Singer, Elliot|AUTHOR Elliot Singer]]|
|[[Singh, Mittul|AUTHOR Mittul Singh]]|
|[[Sinha, Ashok Kumar|AUTHOR Ashok Kumar Sinha]]|
|[[Sinha, Rohit|AUTHOR Rohit Sinha]]|
|[[Sini, Aghilas|AUTHOR Aghilas Sini]]|
|[[Siniscalchi, Sabato Marco|AUTHOR Sabato Marco Siniscalchi]]|
|[[Siohan, Olivier|AUTHOR Olivier Siohan]]|
|[[Sitaram, Sunayana|AUTHOR Sunayana Sitaram]]|
|[[Siu, Man-Hung|AUTHOR Man-Hung Siu]]|
|[[Sivaraman, Ganesh|AUTHOR Ganesh Sivaraman]]|
|[[Sjons, Johan|AUTHOR Johan Sjons]]|
|[[Skarnitzl, Radek|AUTHOR Radek Skarnitzl]]|
|[[Skerry-Ryan, R.J.|AUTHOR R.J. Skerry-Ryan]]|
|[[Skordilis, Zisis|AUTHOR Zisis Skordilis]]|
|[[Skrelin, Pavel|AUTHOR Pavel Skrelin]]|
|[[Sloetjes, Han|AUTHOR Han Sloetjes]]|
|[[Smaïli, Kamel|AUTHOR Kamel Smaïli]]|
|[[Šmídl, Luboš|AUTHOR Luboš Šmídl]]|
|[[Smit, Peter|AUTHOR Peter Smit]]|
|[[Smith, Daniel|AUTHOR Daniel Smith]]|
|[[Smith, Noah A.|AUTHOR Noah A. Smith]]|
|[[Smith, Rachel|AUTHOR Rachel Smith]]|
|[[Smolander, Anna-Riikka|AUTHOR Anna-Riikka Smolander]]|
|[[Sneddon, Alex|AUTHOR Alex Sneddon]]|
|[[Snyder, David|AUTHOR David Snyder]]|
|[[So, Clifford|AUTHOR Clifford So]]|
|[[Socolof, Michaela|AUTHOR Michaela Socolof]]|
|[[Soderstrom, Melanie|AUTHOR Melanie Soderstrom]]|
|[[Sohel, Ferdous|AUTHOR Ferdous Sohel]]|
|[[Solera-Ureña, Rubén|AUTHOR Rubén Solera-Ureña]]|
|[[Solewicz, Yosef A.|AUTHOR Yosef A. Solewicz]]|
|[[Soltau, Hagen|AUTHOR Hagen Soltau]]|
|[[Somandepalli, Krishna|AUTHOR Krishna Somandepalli]]|
|[[Sonderegger, Morgan|AUTHOR Morgan Sonderegger]]|
|[[Song, Inchul|AUTHOR Inchul Song]]|
|[[Song, Yan|AUTHOR Yan Song]]|
|[[Song, Zhanmei|AUTHOR Zhanmei Song]]|
|[[Soni, Meet H.|AUTHOR Meet H. Soni]]|
|[[Sonowal, Sukanya|AUTHOR Sukanya Sonowal]]|
|[[Soong, Frank K.|AUTHOR Frank K. Soong]]|
|[[Sorensen, Tanner|AUTHOR Tanner Sorensen]]|
|[[Sorin, Alexander|AUTHOR Alexander Sorin]]|
|[[Soto, Victor|AUTHOR Victor Soto]]|
|[[Spálenka, K.|AUTHOR K. Spálenka]]|
|[[Spechbach, Hervé|AUTHOR Hervé Spechbach]]|
|[[Specia, Lucia|AUTHOR Lucia Specia]]|
|[[Sperber, Matthias|AUTHOR Matthias Sperber]]|
|[[Spille, Constantin|AUTHOR Constantin Spille]]|
|[[Sproat, Richard|AUTHOR Richard Sproat]]|
|[[Sreeram, Victor|AUTHOR Victor Sreeram]]|
|[[Sridharan, Sridha|AUTHOR Sridha Sridharan]]|
|[[Srinivasamurthy, Ajay|AUTHOR Ajay Srinivasamurthy]]|
|[[Sriskandaraja, Kaavya|AUTHOR Kaavya Sriskandaraja]]|
|[[Stafylakis, Themos|AUTHOR Themos Stafylakis]]|
|[[Stanton, Daisy|AUTHOR Daisy Stanton]]|
|[[Starkhammar, Josefin|AUTHOR Josefin Starkhammar]]|
|[[Stasak, Brian|AUTHOR Brian Stasak]]|
|[[Stehwien, Sabrina|AUTHOR Sabrina Stehwien]]|
|[[Steidl, Stefan|AUTHOR Stefan Steidl]]|
|[[Steiner, Ingmar|AUTHOR Ingmar Steiner]]|
|[[Steiner, Peter|AUTHOR Peter Steiner]]|
|[[Stemmer, Georg|AUTHOR Georg Stemmer]]|
|[[Stengel-Eskin, Elias|AUTHOR Elias Stengel-Eskin]]|
|[[Stepanov, Evgeny A.|AUTHOR Evgeny A. Stepanov]]|
|[[Stern, Richard M.|AUTHOR Richard M. Stern]]|
|[[Stolcke, Andreas|AUTHOR Andreas Stolcke]]|
|[[Stone, Maureen|AUTHOR Maureen Stone]]|
|[[Stone, Simon|AUTHOR Simon Stone]]|
|[[Strasly, Irene|AUTHOR Irene Strasly]]|
|[[Strassel, Stephanie|AUTHOR Stephanie Strassel]]|
|[[Strik, Helmer|AUTHOR Helmer Strik]]|
|[[Strom, Nikko|AUTHOR Nikko Strom]]|
|[[Strömbergsson, Sofia|AUTHOR Sofia Strömbergsson]]|
|[[Stüker, Sebastian|AUTHOR Sebastian Stüker]]|
|[[Sturim, Douglas|AUTHOR Douglas Sturim]]|
|[[Šturm, Pavel|AUTHOR Pavel Šturm]]|
|[[Stylianou, Yannis|AUTHOR Yannis Stylianou]]|
|[[Su, Pei-Hao|AUTHOR Pei-Hao Su]]|
|[[Suendermann-Oeft, David|AUTHOR David Suendermann-Oeft]]|
|[[Sugai, Kosuke|AUTHOR Kosuke Sugai]]|
|[[Sun, Lei|AUTHOR Lei Sun]]|
|[[Sun, Lifa|AUTHOR Lifa Sun]]|
|[[Sun, Ming|AUTHOR Ming Sun]]|
|[[Sun, Sining|AUTHOR Sining Sun]]|
|[[Sun, Wen|AUTHOR Wen Sun]]|
|[[Suni, Antti|AUTHOR Antti Suni]]|
|[[Sur, Mriganka|AUTHOR Mriganka Sur]]|
|[[Suthokumar, Gajan|AUTHOR Gajan Suthokumar]]|
|[[Sutton, Brad|AUTHOR Brad Sutton]]|
|[[Suzuki, Kyori|AUTHOR Kyori Suzuki]]|
|[[Suzuki, Masayuki|AUTHOR Masayuki Suzuki]]|
|[[Švec, Jan|AUTHOR Jan Švec]]|
|[[Svensson Lundmark, Malin|AUTHOR Malin Svensson Lundmark]]|
|[[Swart, Albert|AUTHOR Albert Swart]]|
|[[Swerts, Marc|AUTHOR Marc Swerts]]|
|[[Szabó, Lili|AUTHOR Lili Szabó]]|
|[[Szaszák, György|AUTHOR György Szaszák]]|
|[[Székely, Éva|AUTHOR Éva Székely]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Tabain, Marija|AUTHOR Marija Tabain]]|
|[[Tachioka, Yuuki|AUTHOR Yuuki Tachioka]]|
|[[Tajima, Keiichi|AUTHOR Keiichi Tajima]]|
|[[Tak, Rishabh|AUTHOR Rishabh Tak]]|
|[[Takaki, Shinji|AUTHOR Shinji Takaki]]|
|[[Takamichi, Shinnosuke|AUTHOR Shinnosuke Takamichi]]|
|[[Takanashi, Katsuya|AUTHOR Katsuya Takanashi]]|
|[[Takeda, Kazuya|AUTHOR Kazuya Takeda]]|
|[[Takeda, Ryu|AUTHOR Ryu Takeda]]|
|[[Takemoto, Hironori|AUTHOR Hironori Takemoto]]|
|[[Takiguchi, Izumi|AUTHOR Izumi Takiguchi]]|
|[[Takiguchi, Tetsuya|AUTHOR Tetsuya Takiguchi]]|
|[[Takimoto, Eri|AUTHOR Eri Takimoto]]|
|[[Tamamori, Akira|AUTHOR Akira Tamamori]]|
|[[Tan, Zheng-Hua|AUTHOR Zheng-Hua Tan]]|
|[[Tan, Zhili|AUTHOR Zhili Tan]]|
|[[Tanaka, Hiroki|AUTHOR Hiroki Tanaka]]|
|[[Tanaka, Kazuyo|AUTHOR Kazuyo Tanaka]]|
|[[Tanaka, Kei|AUTHOR Kei Tanaka]]|
|[[Tanaka, Kou|AUTHOR Kou Tanaka]]|
|[[Tang, Hao|AUTHOR Hao Tang]]|
|[[Tang, Keyi|AUTHOR Keyi Tang]]|
|[[Tang, Qingming|AUTHOR Qingming Tang]]|
|[[Tang, Yan|AUTHOR Yan Tang]]|
|[[Tang, Zhiyuan|AUTHOR Zhiyuan Tang]]|
|[[Tao, Fei|AUTHOR Fei Tao]]|
|[[Tao, Jianhua|AUTHOR Jianhua Tao]]|
|[[Tarján, Balázs|AUTHOR Balázs Tarján]]|
|[[Tasaki, Hiroto|AUTHOR Hiroto Tasaki]]|
|[[Tatman, Rachael|AUTHOR Rachael Tatman]]|
|[[Tavarez, David|AUTHOR David Tavarez]]|
|[[Teixeira, António|AUTHOR António Teixeira]]|
|[[ten Bosch, L.|AUTHOR L. ten Bosch]]|
|[[Teraoka, Takehiro|AUTHOR Takehiro Teraoka]]|
|[[te Rietmolen, Noémie|AUTHOR Noémie te Rietmolen]]|
|[[Ternström, Sten|AUTHOR Sten Ternström]]|
|[[Thangthai, Kwanchiva|AUTHOR Kwanchiva Thangthai]]|
|[[Thomas, Samuel|AUTHOR Samuel Thomas]]|
|[[Tidelius, Henrik|AUTHOR Henrik Tidelius]]|
|[[Tihelka, Daniel|AUTHOR Daniel Tihelka]]|
|[[Tiwari, Gautam|AUTHOR Gautam Tiwari]]|
|[[Tjaden, Kris|AUTHOR Kris Tjaden]]|
|[[Tjalve, Michael|AUTHOR Michael Tjalve]]|
|[[Tkachman, Oksana|AUTHOR Oksana Tkachman]]|
|[[Toda, Tomoki|AUTHOR Tomoki Toda]]|
|[[Todisco, Massimiliano|AUTHOR Massimiliano Todisco]]|
|[[Töger, Johannes|AUTHOR Johannes Töger]]|
|[[Togneri, Roberto|AUTHOR Roberto Togneri]]|
|[[Tokuda, Keiichi|AUTHOR Keiichi Tokuda]]|
|[[Tomashenko, Natalia|AUTHOR Natalia Tomashenko]]|
|[[Tong, Audrey|AUTHOR Audrey Tong]]|
|[[Tong, Rong|AUTHOR Rong Tong]]|
|[[Tong, Sibo|AUTHOR Sibo Tong]]|
|[[Toribio, Almeida Jacqueline|AUTHOR Almeida Jacqueline Toribio]]|
|[[Torres-Carrasquillo, Pedro A.|AUTHOR Pedro A. Torres-Carrasquillo]]|
|[[Toshniwal, Shubham|AUTHOR Shubham Toshniwal]]|
|[[Tóth, László|AUTHOR László Tóth]]|
|[[Toutios, Asterios|AUTHOR Asterios Toutios]]|
|[[Townsend, Greg|AUTHOR Greg Townsend]]|
|[[Toyama, Shohei|AUTHOR Shohei Toyama]]|
|[[Tran, Dung T.|AUTHOR Dung T. Tran]]|
|[[Tran, Huy Dat|AUTHOR Huy Dat Tran]]|
|[[Trancoso, Isabel|AUTHOR Isabel Trancoso]]|
|[[Travadi, Ruchir|AUTHOR Ruchir Travadi]]|
|[[Trigeorgis, George|AUTHOR George Trigeorgis]]|
|[[Trmal, Jan|AUTHOR Jan Trmal]]|
|[[Trnka, Marian|AUTHOR Marian Trnka]]|
|[[Tronstad, Tron V.|AUTHOR Tron V. Tronstad]]|
|[[Trouvain, Jürgen|AUTHOR Jürgen Trouvain]]|
|[[Truong, Khiet P.|AUTHOR Khiet P. Truong]]|
|[[Tsao, Yu|AUTHOR Yu Tsao]]|
|[[Tschiatschek, Sebastian|AUTHOR Sebastian Tschiatschek]]|
|[[Tschöpe, Constanze|AUTHOR Constanze Tschöpe]]|
|[[Tseng, Shao-Yen|AUTHOR Shao-Yen Tseng]]|
|[[Tseng, Xian-Hong|AUTHOR Xian-Hong Tseng]]|
|[[Tsiaras, Vassilios|AUTHOR Vassilios Tsiaras]]|
|[[Tsiartas, Andreas|AUTHOR Andreas Tsiartas]]|
|[[Tsiknakis, Manolis|AUTHOR Manolis Tsiknakis]]|
|[[Tsourakis, Nikos|AUTHOR Nikos Tsourakis]]|
|[[Tsuchiya, Masatoshi|AUTHOR Masatoshi Tsuchiya]]|
|[[Tsuji, Sho|AUTHOR Sho Tsuji]]|
|[[Tsujimura, Shoko|AUTHOR Shoko Tsujimura]]|
|[[Tsunoo, Emiru|AUTHOR Emiru Tsunoo]]|
|[[Tu, Jung-Yueh|AUTHOR Jung-Yueh Tu]]|
|[[Tu, Ming|AUTHOR Ming Tu]]|
|[[Tu, Yan-Hui|AUTHOR Yan-Hui Tu]]|
|[[Tür, Gokhan|AUTHOR Gokhan Tür]]|
|[[Turco, Giuseppina|AUTHOR Giuseppina Turco]]|
|[[Türker, Bekir Berker|AUTHOR Bekir Berker Türker]]|
|[[Turnbull, Rory|AUTHOR Rory Turnbull]]|
|[[Tüske, Zoltán|AUTHOR Zoltán Tüske]]|
|[[Tykalová, Tereza|AUTHOR Tereza Tykalová]]|
|[[Tzimiropoulos, Georgios|AUTHOR Georgios Tzimiropoulos]]|
|[[Tzirakis, Panagiotis|AUTHOR Panagiotis Tzirakis]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Uchida, Hidetsugu|AUTHOR Hidetsugu Uchida]]|
|[[Uenohara, Shingo|AUTHOR Shingo Uenohara]]|
|[[Uezu, Yasufumi|AUTHOR Yasufumi Uezu]]|
|[[Ultes, Stefan|AUTHOR Stefan Ultes]]|
|[[Umbert, Martí|AUTHOR Martí Umbert]]|
|[[Umesh, S.|AUTHOR S. Umesh]]|
|[[Unoki, Masashi|AUTHOR Masashi Unoki]]|
|[[Uramoto, Takanobu|AUTHOR Takanobu Uramoto]]|
|[[Uther, Maria|AUTHOR Maria Uther]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Vachhani, Bhavik|AUTHOR Bhavik Vachhani]]|
|[[Vainio, Martti|AUTHOR Martti Vainio]]|
|[[Vair, Claudio|AUTHOR Claudio Vair]]|
|[[Vaizman, Yonatan|AUTHOR Yonatan Vaizman]]|
|[[Valentini Botinhao, Cassia|AUTHOR Cassia Valentini Botinhao]]|
|[[VanDam, Mark|AUTHOR Mark VanDam]]|
|[[van den Heuvel, Henk|AUTHOR Henk van den Heuvel]]|
|[[van der Vloed, David|AUTHOR David van der Vloed]]|
|[[van der Westhuizen, Ewald|AUTHOR Ewald van der Westhuizen]]|
|[[Van de Velde, Hans|AUTHOR Hans Van de Velde]]|
|[[van Esch, Daan|AUTHOR Daan van Esch]]|
|[[Van hamme, Hugo|AUTHOR Hugo Van hamme]]|
|[[van Heerden, Charl|AUTHOR Charl van Heerden]]|
|[[Van Leeuwen, David|AUTHOR David Van Leeuwen]]|
|[[van Maastricht, Lieke|AUTHOR Lieke van Maastricht]]|
|[[van Niekerk, Daniel|AUTHOR Daniel van Niekerk]]|
|[[van Santen, Jan|AUTHOR Jan van Santen]]|
|[[Variani, Ehsan|AUTHOR Ehsan Variani]]|
|[[Vásquez-Correa, J.C.|AUTHOR J.C. Vásquez-Correa]]|
|[[Vasudevan, Arvind|AUTHOR Arvind Vasudevan]]|
|[[Vattam, Swaroop|AUTHOR Swaroop Vattam]]|
|[[Vaz, Colin|AUTHOR Colin Vaz]]|
|[[Venneri, Annalena|AUTHOR Annalena Venneri]]|
|[[Verma, Sakshi|AUTHOR Sakshi Verma]]|
|[[Veselý, Karel|AUTHOR Karel Veselý]]|
|[[Vestman, Ville|AUTHOR Ville Vestman]]|
|[[Vetchinnikova, Svetlana|AUTHOR Svetlana Vetchinnikova]]|
|[[Vialatte, François-Benoît|AUTHOR François-Beno^ıt Vialatte]]|
|[[Viggen, Erlend Magnus|AUTHOR Erlend Magnus Viggen]]|
|[[Vignesh, Rupak|AUTHOR Rupak Vignesh]]|
|[[Viitanen, Vertti|AUTHOR Vertti Viitanen]]|
|[[Vijayan, Karthika|AUTHOR Karthika Vijayan]]|
|[[Vikram, C.M.|AUTHOR C.M. Vikram]]|
|[[Villalba, Jesús|AUTHOR Jesús Villalba]]|
|[[Viñals, Ignacio|AUTHOR Ignacio Viñals]]|
|[[Virpioja, Sami|AUTHOR Sami Virpioja]]|
|[[Vít, Jakub|AUTHOR Jakub Vít]]|
|[[Vitaladevuni, Shiv|AUTHOR Shiv Vitaladevuni]]|
|[[Vlasenko, Bogdan|AUTHOR Bogdan Vlasenko]]|
|[[Vogel, Carl|AUTHOR Carl Vogel]]|
|[[Vogel, Irene|AUTHOR Irene Vogel]]|
|[[Voisin, Sylvie|AUTHOR Sylvie Voisin]]|
|[[Volín, Jan|AUTHOR Jan Volín]]|
|[[Voße, Jana|AUTHOR Jana Voße]]|
|[[Vu, Ngoc Thang|AUTHOR Ngoc Thang Vu]]|
|[[Vukotic, Vedran|AUTHOR Vedran Vukotic]]|
|[[Vuppala, Anil Kumar|AUTHOR Anil Kumar Vuppala]]|
|[[Vyas, Manan|AUTHOR Manan Vyas]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Wagner, Johannes|AUTHOR Johannes Wagner]]|
|[[Wagner, Michael|AUTHOR Michael Wagner]]|
|[[Wagner, Petra|AUTHOR Petra Wagner]]|
|[[Waibel, Alex|AUTHOR Alex Waibel]]|
|[[Walker, Kevin|AUTHOR Kevin Walker]]|
|[[Walker, Marilyn|AUTHOR Marilyn Walker]]|
|[[Walker, Traci|AUTHOR Traci Walker]]|
|[[Walsh, Michael|AUTHOR Michael Walsh]]|
|[[Walter, Oliver|AUTHOR Oliver Walter]]|
|[[Wan, Vincent|AUTHOR Vincent Wan]]|
|[[Wand, Michael|AUTHOR Michael Wand]]|
|[[Wang, Chengxia|AUTHOR Chengxia Wang]]|
|[[Wang, DeLiang|AUTHOR DeLiang Wang]]|
|[[Wang, Dong|AUTHOR Dong Wang]]|
|[[Wang, Dongmei|AUTHOR Dongmei Wang]]|
|[[Wang, Hsin-Min|AUTHOR Hsin-Min Wang]]|
|[[Wang, Jun|AUTHOR Jun Wang]]|
|[[Wang, Lan|AUTHOR Lan Wang]]|
|[[Wang, Lei|AUTHOR Lei Wang]]|
|[[Wang, Lixin|AUTHOR Lixin Wang]]|
|[[Wang, Qiongqiong|AUTHOR Qiongqiong Wang]]|
|[[Wang, Shi-yu|AUTHOR Shi-yu Wang]]|
|[[Wang, Shuai|AUTHOR Shuai Wang]]|
|[[Wang, Syu-Siang|AUTHOR Syu-Siang Wang]]|
|[[Wang, Tianzhou|AUTHOR Tianzhou Wang]]|
|[[Wang, Weiran|AUTHOR Weiran Wang]]|
|[[Wang, Wenwu|AUTHOR Wenwu Wang]]|
|[[Wang, Xi|AUTHOR Xi Wang]]|
|[[Wang, Xianliang|AUTHOR Xianliang Wang]]|
|[[Wang, Xianyun|AUTHOR Xianyun Wang]]|
|[[Wang, Xiao|AUTHOR Xiao Wang]]|
|[[Wang, Xihao|AUTHOR Xihao Wang]]|
|[[Wang, Xin|AUTHOR Xin Wang]]|
|[[Wang, Xinhao|AUTHOR Xinhao Wang]]|
|[[Wang, Xinyue|AUTHOR Xinyue Wang]]|
|[[Wang, Xuan|AUTHOR Xuan Wang]]|
|[[Wang, Y.|AUTHOR Y. Wang]]|
|[[Wang, Yannan|AUTHOR Yannan Wang]]|
|[[Wang, Yiming|AUTHOR Yiming Wang]]|
|[[Wang, Yu-Hsuan|AUTHOR Yu-Hsuan Wang]]|
|[[Wang, Yun|AUTHOR Yun Wang]]|
|[[Wang, Yuxuan|AUTHOR Yuxuan Wang]]|
|[[Wang, Zhibin|AUTHOR Zhibin Wang]]|
|[[Wankerl, Sebastian|AUTHOR Sebastian Wankerl]]|
|[[Wanner, Leo|AUTHOR Leo Wanner]]|
|[[Ward, Lauren|AUTHOR Lauren Ward]]|
|[[Ward, Nigel G.|AUTHOR Nigel G. Ward]]|
|[[Wardle, Margaret|AUTHOR Margaret Wardle]]|
|[[Warlaumont, Anne S.|AUTHOR Anne S. Warlaumont]]|
|[[Watanabe, Hayato|AUTHOR Hayato Watanabe]]|
|[[Watanabe, Hiroki|AUTHOR Hiroki Watanabe]]|
|[[Watanabe, Shinji|AUTHOR Shinji Watanabe]]|
|[[Watson, C.I.|AUTHOR C.I. Watson]]|
|[[Watt, Dominic|AUTHOR Dominic Watt]]|
|[[Watts, Oliver|AUTHOR Oliver Watts]]|
|[[Weber, Andrea|AUTHOR Andrea Weber]]|
|[[Weber, Philip|AUTHOR Philip Weber]]|
|[[Websdale, Danny|AUTHOR Danny Websdale]]|
|[[Weiner, Jochen|AUTHOR Jochen Weiner]]|
|[[Weintraub, Mitchel|AUTHOR Mitchel Weintraub]]|
|[[Weirich, Melanie|AUTHOR Melanie Weirich]]|
|[[Weiss, Benjamin|AUTHOR Benjamin Weiss]]|
|[[Weiss, Ron J.|AUTHOR Ron J. Weiss]]|
|[[Wen, Tsung-Hsien|AUTHOR Tsung-Hsien Wen]]|
|[[Wen, Zhengqi|AUTHOR Zhengqi Wen]]|
|[[Wendemuth, Andreas|AUTHOR Andreas Wendemuth]]|
|[[Wendler, Christoph|AUTHOR Christoph Wendler]]|
|[[Weninger, Felix|AUTHOR Felix Weninger]]|
|[[Werner, Tina|AUTHOR Tina Werner]]|
|[[Wester, Mirjam|AUTHOR Mirjam Wester]]|
|[[Wieling, Martijn|AUTHOR Martijn Wieling]]|
|[[Wiener, Seth|AUTHOR Seth Wiener]]|
|[[Wiesner, Matthew|AUTHOR Matthew Wiesner]]|
|[[Wijenayake, Chamith|AUTHOR Chamith Wijenayake]]|
|[[Williams, Ian|AUTHOR Ian Williams]]|
|[[Williams, Shanna|AUTHOR Shanna Williams]]|
|[[Williamson, Becci|AUTHOR Becci Williamson]]|
|[[Williamson, James R.|AUTHOR James R. Williamson]]|
|[[Wilson, Ian|AUTHOR Ian Wilson]]|
|[[Wilson, Kevin W.|AUTHOR Kevin W. Wilson]]|
|[[Winarsky, David|AUTHOR David Winarsky]]|
|[[Winata, Genta Indra|AUTHOR Genta Indra Winata]]|
|[[Winkler, Jana|AUTHOR Jana Winkler]]|
|[[Wirén, Mats|AUTHOR Mats Wirén]]|
|[[Wisler, Alan|AUTHOR Alan Wisler]]|
|[[Wisniewksi, Guillaume|AUTHOR Guillaume Wisniewksi]]|
|[[Witkowski, Marcin|AUTHOR Marcin Witkowski]]|
|[[Włodarczak, Marcin|AUTHOR Marcin Włodarczak]]|
|[[Wolf, Arthur|AUTHOR Arthur Wolf]]|
|[[Wolff, Matthias|AUTHOR Matthias Wolff]]|
|[[Wong, Janice Wing-Sze|AUTHOR Janice Wing-Sze Wong]]|
|[[Wong, Jeremy H.M.|AUTHOR Jeremy H.M. Wong]]|
|[[Woo, Jonghye|AUTHOR Jonghye Woo]]|
|[[Wood, Sean U.N.|AUTHOR Sean U.N. Wood]]|
|[[Wörtwein, Torsten|AUTHOR Torsten Wörtwein]]|
|[[Wright, Jonathan|AUTHOR Jonathan Wright]]|
|[[Wright, Richard A.|AUTHOR Richard A. Wright]]|
|[[Wu, Bo|AUTHOR Bo Wu]]|
|[[Wu, Chia-Lung|AUTHOR Chia-Lung Wu]]|
|[[Wu, Chunyang|AUTHOR Chunyang Wu]]|
|[[Wu, Dan|AUTHOR Dan Wu]]|
|[[Wu, Ji|AUTHOR Ji Wu]]|
|[[Wu, Jie|AUTHOR Jie Wu]]|
|[[Wu, Tsung-Chen|AUTHOR Tsung-Chen Wu]]|
|[[Wu, Yaru|AUTHOR Yaru Wu]]|
|[[Wu, Ya-Tse|AUTHOR Ya-Tse Wu]]|
|[[Wu, Yi-Chiao|AUTHOR Yi-Chiao Wu]]|
|[[Wu, Yonghui|AUTHOR Yonghui Wu]]|
|[[Wu, Zhiyong|AUTHOR Zhiyong Wu]]|
|[[Wu, Zhizheng|AUTHOR Zhizheng Wu]]|
|[[Wuth, Jorge|AUTHOR Jorge Wuth]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Xia, Xianjun|AUTHOR Xianjun Xia]]|
|[[Xiang, Bing|AUTHOR Bing Xiang]]|
|[[Xiang, Xu|AUTHOR Xu Xiang]]|
|[[Xiao, Xiong|AUTHOR Xiong Xiao]]|
|[[Xiao, Yanhong|AUTHOR Yanhong Xiao]]|
|[[Xiao, Ying|AUTHOR Ying Xiao]]|
|[[Xiao, Yujia|AUTHOR Yujia Xiao]]|
|[[Xie, Lei|AUTHOR Lei Xie]]|
|[[Xie, Xurong|AUTHOR Xurong Xie]]|
|[[Xie, Yanlu|AUTHOR Yanlu Xie]]|
|[[Xie, Zhifeng|AUTHOR Zhifeng Xie]]|
|[[Xu, Anqi|AUTHOR Anqi Xu]]|
|[[Xu, Bo|AUTHOR Bo Xu]]|
|[[Xu, Chenglin|AUTHOR Chenglin Xu]]|
|[[Xu, Hainan|AUTHOR Hainan Xu]]|
|[[Xu, Li|AUTHOR Li Xu]]|
|[[Xu, Mingxing|AUTHOR Mingxing Xu]]|
|[[Xu, Mingyu|AUTHOR Mingyu Xu]]|
|[[Xu, Ning|AUTHOR Ning Xu]]|
|[[Xu, Shuang|AUTHOR Shuang Xu]]|
|[[Xu, Xiangmin|AUTHOR Xiangmin Xu]]|
|[[Xu, Yi|AUTHOR Yi Xu]]|
|[[Xu, Yong|AUTHOR Yong Xu]]|
|[[Xu, Yong|AUTHOR Yong Xu]]|
|[[Xue, Jian|AUTHOR Jian Xue]]|
|[[Xue, Wei|AUTHOR Wei Xue]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Yadav, Shivani|AUTHOR Shivani Yadav]]|
|[[Yamagishi, Junichi|AUTHOR Junichi Yamagishi]]|
|[[Yamaguchi, Tetsutaro|AUTHOR Tetsutaro Yamaguchi]]|
|[[Yamamoto, Hitoshi|AUTHOR Hitoshi Yamamoto]]|
|[[Yamamoto, Katsuhiko|AUTHOR Katsuhiko Yamamoto]]|
|[[Yamamoto, Kazumasa|AUTHOR Kazumasa Yamamoto]]|
|[[Yamamoto, Kodai|AUTHOR Kodai Yamamoto]]|
|[[Yamauchi, Yutaka|AUTHOR Yutaka Yamauchi]]|
|[[Yan, Bi-Cheng|AUTHOR Bi-Cheng Yan]]|
|[[Yan, Yonghong|AUTHOR Yonghong Yan]]|
|[[Yang, Bing|AUTHOR Bing Yang]]|
|[[Yang, IL-ho|AUTHOR IL-ho Yang]]|
|[[Yang, Jing|AUTHOR Jing Yang]]|
|[[Yang, Jun|AUTHOR Jun Yang]]|
|[[Yang, Ming-Han|AUTHOR Ming-Han Yang]]|
|[[Yang, Xuesong|AUTHOR Xuesong Yang]]|
|[[Yang, Yang|AUTHOR Yang Yang]]|
|[[Yang, Yike|AUTHOR Yike Yang]]|
|[[Yang, Zongheng|AUTHOR Zongheng Yang]]|
|[[Yanushevskaya, Irena|AUTHOR Irena Yanushevskaya]]|
|[[Yegnanarayana, B.|AUTHOR B. Yegnanarayana]]|
|[[Yemez, Yücel|AUTHOR Yücel Yemez]]|
|[[Yeung, Gary|AUTHOR Gary Yeung]]|
|[[Yi, Hua|AUTHOR Hua Yi]]|
|[[Yi, Jiangyan|AUTHOR Jiangyan Yi]]|
|[[Yılmaz, Emre|AUTHOR Emre Yılmaz]]|
|[[Yin, Jiao|AUTHOR Jiao Yin]]|
|[[Yin, Ruiqing|AUTHOR Ruiqing Yin]]|
|[[Ying, Dongwen|AUTHOR Dongwen Ying]]|
|[[Ying, Jia|AUTHOR Jia Ying]]|
|[[Ylinen, Sari|AUTHOR Sari Ylinen]]|
|[[Yoma, Nestor Becerra|AUTHOR Nestor Becerra Yoma]]|
|[[Yoneyama, Kiyoko|AUTHOR Kiyoko Yoneyama]]|
|[[Yoon, Sung-hyun|AUTHOR Sung-hyun Yoon]]|
|[[Yoon, Su-Youn|AUTHOR Su-Youn Yoon]]|
|[[Yoshii, Kazuyoshi|AUTHOR Kazuyoshi Yoshii]]|
|[[Yoshimura, Takenori|AUTHOR Takenori Yoshimura]]|
|[[Young, Steve|AUTHOR Steve Young]]|
|[[Yu, Chengzhu|AUTHOR Chengzhu Yu]]|
|[[Yu, Dong|AUTHOR Dong Yu]]|
|[[Yu, Ha-jin|AUTHOR Ha-jin Yu]]|
|[[Yu, Hong|AUTHOR Hong Yu]]|
|[[Yu, Kai|AUTHOR Kai Yu]]|
|[[Yu, Mingzhi|AUTHOR Mingzhi Yu]]|
|[[Yu, Seunghak|AUTHOR Seunghak Yu]]|
|[[Yu, Shi|AUTHOR Shi Yu]]|
|[[Yu, Xinguo|AUTHOR Xinguo Yu]]|
|[[Yuan, Xiaobing|AUTHOR Xiaobing Yuan]]|
|[[Yue, Junwei|AUTHOR Junwei Yue]]|
|[[Yuen, Chun Wah|AUTHOR Chun Wah Yuen]]|
|[[Yun, Sungrack|AUTHOR Sungrack Yun]]|
|[[Yunusova, Yana|AUTHOR Yana Yunusova]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Zafeiriou, Stefanos|AUTHOR Stefanos Zafeiriou]]|
|[[Zahner, Katharina|AUTHOR Katharina Zahner]]|
|[[Zahorian, Stephen A.|AUTHOR Stephen A. Zahorian]]|
|[[Zajíc, Zbyněk|AUTHOR Zbyněk Zajíc]]|
|[[Zañartu, Matías|AUTHOR Matías Zañartu]]|
|[[Zane, Emily|AUTHOR Emily Zane]]|
|[[Zappi, Victor|AUTHOR Victor Zappi]]|
|[[Zarrieß, Sina|AUTHOR Sina Zarrieß]]|
|[[Zatvornitsky, Alexander|AUTHOR Alexander Zatvornitsky]]|
|[[Zee, Tim|AUTHOR Tim Zee]]|
|[[Zegers, Jeroen|AUTHOR Jeroen Zegers]]|
|[[Zeghidour, Neil|AUTHOR Neil Zeghidour]]|
|[[Żelasko, Piotr|AUTHOR Piotr Żelasko]]|
|[[Zellers, Margaret|AUTHOR Margaret Zellers]]|
|[[Zenkel, Thomas|AUTHOR Thomas Zenkel]]|
|[[Zequeira Jiménez, Rafael|AUTHOR Rafael Zequeira Jiménez]]|
|[[Zeyer, Albert|AUTHOR Albert Zeyer]]|
|[[Zhang, Binbin|AUTHOR Binbin Zhang]]|
|[[Zhang, Boliang|AUTHOR Boliang Zhang]]|
|[[Zhang, Chunlei|AUTHOR Chunlei Zhang]]|
|[[Zhang, Gaoyan|AUTHOR Gaoyan Zhang]]|
|[[Zhang, Hepeng|AUTHOR Hepeng Zhang]]|
|[[Zhang, Hua|AUTHOR Hua Zhang]]|
|[[Zhang, Hui|AUTHOR Hui Zhang]]|
|[[Zhang, Jinsong|AUTHOR Jinsong Zhang]]|
|[[Zhang, Kaile|AUTHOR Kaile Zhang]]|
|[[Zhang, Pengyuan|AUTHOR Pengyuan Zhang]]|
|[[Zhang, Qi|AUTHOR Qi Zhang]]|
|[[Zhang, Qian|AUTHOR Qian Zhang]]|
|[[Zhang, Ruo|AUTHOR Ruo Zhang]]|
|[[Zhang, Shiliang|AUTHOR Shiliang Zhang]]|
|[[Zhang, Wei|AUTHOR Wei Zhang]]|
|[[Zhang, Weibin|AUTHOR Weibin Zhang]]|
|[[Zhang, Xiaohui|AUTHOR Xiaohui Zhang]]|
|[[Zhang, Xueliang|AUTHOR Xueliang Zhang]]|
|[[Zhang, Yang|AUTHOR Yang Zhang]]|
|[[Zhang, Yanhui|AUTHOR Yanhui Zhang]]|
|[[Zhang, Yuanyuan|AUTHOR Yuanyuan Zhang]]|
|[[Zhang, Yue|AUTHOR Yue Zhang]]|
|[[Zhang, Yu|AUTHOR Yu Zhang]]|
|[[Zhang, Yu|AUTHOR Yu Zhang]]|
|[[Zhang, Yu|AUTHOR Yu Zhang]]|
|[[Zhang, Zixing|AUTHOR Zixing Zhang]]|
|[[Zhao, Bin|AUTHOR Bin Zhao]]|
|[[Zhao, Faru|AUTHOR Faru Zhao]]|
|[[Zhao, Kai|AUTHOR Kai Zhao]]|
|[[Zhao, Qingen|AUTHOR Qingen Zhao]]|
|[[Zhao, Rui|AUTHOR Rui Zhao]]|
|[[Zhao, Tuo|AUTHOR Tuo Zhao]]|
|[[Zhao, Yuanyuan|AUTHOR Yuanyuan Zhao]]|
|[[Zheng, Thomas Fang|AUTHOR Thomas Fang Zheng]]|
|[[Zheng, Yibin|AUTHOR Yibin Zheng]]|
|[[Zhong, Jinghua|AUTHOR Jinghua Zhong]]|
|[[Zhou, Bowen|AUTHOR Bowen Zhou]]|
|[[Zhou, Shiyu|AUTHOR Shiyu Zhou]]|
|[[Zhu, Manman|AUTHOR Manman Zhu]]|
|[[Zhu, Shenghuo|AUTHOR Shenghuo Zhu]]|
|[[Zhu, Weiwu|AUTHOR Weiwu Zhu]]|
|[[Zhu, Xuan|AUTHOR Xuan Zhu]]|
|[[Zhu, Yinghua|AUTHOR Yinghua Zhu]]|
|[[Zhuang, Xiaodan|AUTHOR Xiaodan Zhuang]]|
|[[Zibrek, Katja|AUTHOR Katja Zibrek]]|
|[[Zihlmann, Urban|AUTHOR Urban Zihlmann]]|
|[[Zimmerer, Frank|AUTHOR Frank Zimmerer]]|
|[[Zinman, Lorne|AUTHOR Lorne Zinman]]|
|[[Ziółko, Bartosz|AUTHOR Bartosz Ziółko]]|
|[[Zisserman, Andrew|AUTHOR Andrew Zisserman]]|
|[[Žmolíková, Kateřina|AUTHOR Kateřina Žmolíková]]|
|[[Zöhrer, Matthias|AUTHOR Matthias Zöhrer]]|
|[[Zorilă, Tudor-Cătălin|AUTHOR Tudor-Cătălin Zorilă]]|
|[[Zovato, Enrico|AUTHOR Enrico Zovato]]|
|[[Żygis, Marzena|AUTHOR Marzena Żygis]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171235.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-3|PAPER Wed-P-8-1-3 — Reanalyze Fundamental Frequency Peak Delay in Mandarin]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reanalyze Fundamental Frequency Peak Delay in Mandarin</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170332.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-9|PAPER Tue-P-5-1-9 — Prosody Analysis of L2 English for Naturalness Evaluation Through Speech Modification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosody Analysis of L2 English for Naturalness Evaluation Through Speech Modification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171373.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-6|PAPER Mon-SS-1-11-6 — Addressing Code-Switching in French/Algerian Arabic Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Addressing Code-Switching in French/Algerian Arabic Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170470.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-3|PAPER Thu-O-10-8-3 — Schwa Realization in French: Using Automatic Speech Processing to Study Phonological and Socio-Linguistic Factors in Large Corpora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Schwa Realization in French: Using Automatic Speech Processing to Study Phonological and Socio-Linguistic Factors in Large Corpora</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-7|PAPER Thu-P-9-3-7 — Developing an Embosi (Bantu C25) Speech Variant Dictionary to Model Vowel Elision and Morpheme Deletion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Developing an Embosi (Bantu C25) Speech Variant Dictionary to Model Vowel Elision and Morpheme Deletion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170478.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-3|PAPER Tue-P-5-2-3 — Classification of Bulbar ALS from Kinematic Features of the Jaw and Lips: Towards Computer-Mediated Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Classification of Bulbar ALS from Kinematic Features of the Jaw and Lips: Towards Computer-Mediated Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170608.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-5|PAPER Mon-P-2-2-5 — Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171488.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-2-6|PAPER Tue-O-3-2-6 — Test-Retest Repeatability of Articulatory Strategies Using Real-Time Magnetic Resonance Imaging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Test-Retest Repeatability of Articulatory Strategies Using Real-Time Magnetic Resonance Imaging</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171126.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-2|PAPER Wed-O-6-1-2 — Inter-Speaker Variability: Speaker Normalisation and Quantitative Estimation of Articulatory Invariants in Speech Production for French]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Inter-Speaker Variability: Speaker Normalisation and Quantitative Estimation of Articulatory Invariants in Speech Production for French</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170466.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-3|PAPER Wed-P-8-2-3 — Computational Analysis of Acoustic Descriptors in Psychotic Patients]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Computational Analysis of Acoustic Descriptors in Psychotic Patients</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170366.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-6|PAPER Tue-O-5-8-6 — Automatic Assessment of Non-Native Prosody by Measuring Distances on Prosodic Label Sequences]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Assessment of Non-Native Prosody by Measuring Distances on Prosodic Label Sequences</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171087.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-13|PAPER Wed-P-6-3-13 — A Relevance Score Estimation for Spoken Term Detection Based on RNN-Generated Pronunciation Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Relevance Score Estimation for Spoken Term Detection Based on RNN-Generated Pronunciation Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171453.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-10-5|PAPER Tue-O-4-10-5 — A Mouth Opening Effect Based on Pole Modification for Expressive Singing Voice Transformation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Mouth Opening Effect Based on Pole Modification for Expressive Singing Voice Transformation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170804.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-2-4|PAPER Tue-O-3-2-4 — A Speaker Adaptive DNN Training Approach for Speaker-Independent Acoustic Inversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Speaker Adaptive DNN Training Approach for Speaker-Independent Acoustic Inversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171353.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-6-5|PAPER Tue-O-4-6-5 — Pashto Intonation Patterns]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Pashto Intonation Patterns</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171598.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-10-2|PAPER Wed-O-8-10-2 — Semi-Supervised Adaptation of RNNLMs by Fine-Tuning with Domain-Specific Auxiliary Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised Adaptation of RNNLMs by Fine-Tuning with Domain-Specific Auxiliary Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170530.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-1|PAPER Wed-P-6-2-1 — Calibration Approaches for Language Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Calibration Approaches for Language Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170605.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-2-6|PAPER Thu-O-10-2-6 — Improving Robustness of Speaker Recognition to New Conditions Using Unlabeled Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Robustness of Speaker Recognition to New Conditions Using Unlabeled Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-12|PAPER Mon-P-2-1-12 — Perception and Acoustics of Vowel Nasality in Brazilian Portuguese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perception and Acoustics of Vowel Nasality in Brazilian Portuguese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170466.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-3|PAPER Wed-P-8-2-3 — Computational Analysis of Acoustic Descriptors in Psychotic Patients]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Computational Analysis of Acoustic Descriptors in Psychotic Patients</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170795.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-6-4|PAPER Wed-O-6-6-4 — The Relationship Between F0 Synchrony and Speech Convergence in Dyadic Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Relationship Between F0 Synchrony and Speech Convergence in Dyadic Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170051.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-2-1|PAPER Thu-O-9-2-1 — Speaker Diarization Using Convolutional Neural Network for Statistics Accumulation Refinement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Diarization Using Convolutional Neural Network for Statistics Accumulation Refinement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170424.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-6-5|PAPER Wed-O-6-6-5 — The Role of Linguistic and Prosodic Cues on the Prediction of Self-Reported Satisfaction in Contact Centre Phone Calls]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Role of Linguistic and Prosodic Cues on the Prediction of Self-Reported Satisfaction in Contact Centre Phone Calls</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170144.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-4-6|PAPER Tue-O-3-4-6 — DNN Bottleneck Features for Speaker Clustering]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DNN Bottleneck Features for Speaker Clustering</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171498.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-5|PAPER Tue-O-5-2-5 — Analysis and Description of ABC Submission to NIST SRE 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Description of ABC Submission to NIST SRE 2016</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170315.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-6|PAPER Wed-P-7-3-6 — Laryngeal Articulation During Trumpet Performance: An Exploratory Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Laryngeal Articulation During Trumpet Performance: An Exploratory Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-4|PAPER Thu-SS-10-10-4 — Exploring Fusion Methods and Feature Space for the Classification of Paralinguistic Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Fusion Methods and Feature Space for the Classification of Paralinguistic Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171442.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-1-6|PAPER Mon-O-2-1-6 — Residual Memory Networks in Language Modeling: Improving the Reputation of Feed-Forward Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Residual Memory Networks in Language Modeling: Improving the Reputation of Feed-Forward Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-4|PAPER Mon-P-2-3-4 — 2016 BUT Babel System: Multilingual BLSTM Acoustic Model with i-Vector Based Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">2016 BUT Babel System: Multilingual BLSTM Acoustic Model with i-Vector Based Adaptation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170803.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-7|PAPER Tue-P-3-2-7 — Analysis of Score Normalization in Multilingual Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of Score Normalization in Multilingual Speaker Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-8|PAPER Tue-P-3-2-8 — Alternative Approaches to Neural Network Based Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alternative Approaches to Neural Network Based Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-1|PAPER Wed-SS-6-2-1 — Team ELISA System for DARPA LORELEI Speech Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team ELISA System for DARPA LORELEI Speech Evaluation 2016</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171385.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-2|PAPER Thu-O-10-1-2 — Semi-Supervised DNN Training with Word Selection for ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised DNN Training with Word Selection for ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171160.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-7|PAPER Mon-P-1-2-7 — Hidden Markov Model Variational Autoencoder for Acoustic Unit Discovery]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hidden Markov Model Variational Autoencoder for Acoustic Unit Discovery</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170187.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-6-1|PAPER Wed-O-8-6-1 — Tight Integration of Spatial and Spectral Features for BSS with Deep Clustering Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tight Integration of Spatial and Spectral Features for BSS with Deep Clustering Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171186.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-6-3|PAPER Wed-O-8-6-3 — Eigenvector-Based Speech Mask Estimation Using Logistic Regression]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Eigenvector-Based Speech Mask Estimation Using Logistic Regression</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170604.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-6-3|PAPER Tue-O-3-6-3 — Acoustic Characterization of Word-Final Glottal Stops in Mizo and Assam Sora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Characterization of Word-Final Glottal Stops in Mizo and Assam Sora</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171214.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-6-2|PAPER Tue-O-5-6-2 — Disambiguate or not? — The Role of Prosody in Unambiguous and Potentially Ambiguous Anaphora Production in Strictly Mandarin Parallel Structures]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Disambiguate or not? — The Role of Prosody in Unambiguous and Potentially Ambiguous Anaphora Production in Strictly Mandarin Parallel Structures</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-5|PAPER Mon-O-1-1-5 — English Conversational Telephone Speech Recognition by Humans and Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">English Conversational Telephone Speech Recognition by Humans and Machines</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171048.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-2-3|PAPER Tue-O-4-2-3 — The Recognition of Compounds: A Computational Account]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Recognition of Compounds: A Computational Account</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170381.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-4|PAPER Wed-P-7-4-4 — Acoustic Evaluation of Nasality in Cerebellar Syndromes]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Evaluation of Nasality in Cerebellar Syndromes</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-5|PAPER Tue-P-5-2-5 — Hypernasality Severity Analysis in Cleft Lip and Palate Speech Using Vowel Space Area]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hypernasality Severity Analysis in Cleft Lip and Palate Speech Using Vowel Space Area</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171340.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-10|PAPER Mon-P-2-4-10 — Learning Word Vector Representations Based on Acoustic Counts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Word Vector Representations Based on Acoustic Counts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170215.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-3|PAPER Wed-SS-6-2-3 — The Motivation and Development of MPAi, a Māori Pronunciation Aid]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Motivation and Development of MPAi, a Māori Pronunciation Aid</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171305.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-4-1|PAPER Thu-O-10-4-1 — CALYOU: A Comparable Spoken Algerian Corpus Harvested from YouTube]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CALYOU: A Comparable Spoken Algerian Corpus Harvested from YouTube</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170044.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-2|PAPER Wed-O-7-10-2 — End-to-End Language Identification Using High-Order Utterance Representation with Bilinear Pooling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Language Identification Using High-Order Utterance Representation with Bilinear Pooling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171740.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-11|PAPER Tue-P-5-2-11 — Prediction of Speech Delay from Acoustic Measurements]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prediction of Speech Delay from Acoustic Measurements</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171362.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-3|PAPER Mon-SS-1-8-3 — Novel Variable Length Teager Energy Separation Based Instantaneous Frequency Features for Replay Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Novel Variable Length Teager Energy Separation Based Instantaneous Frequency Features for Replay Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171393.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-2|PAPER Wed-O-8-1-2 — Unsupervised Representation Learning Using Convolutional Restricted Boltzmann Machine for Spoof Speech Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Representation Learning Using Convolutional Restricted Boltzmann Machine for Spoof Speech Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-1|PAPER Mon-O-2-2-1 — Dominant Distortion Classification for Pre-Processing of Vowels in Remote Biomedical Voice Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dominant Distortion Classification for Pre-Processing of Vowels in Remote Biomedical Voice Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170207.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-6-3|PAPER Mon-O-2-6-3 — Perception of Non-Contrastive Variations in American English by Japanese Learners: Flaps are Less Favored Than Stops]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perception of Non-Contrastive Variations in American English by Japanese Learners: Flaps are Less Favored Than Stops</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170635.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-8-4|PAPER Wed-O-6-8-4 — Spotting Social Signals in Conversational Speech over IP: A Deep Learning Perspective]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spotting Social Signals in Conversational Speech over IP: A Deep Learning Perspective</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170083.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-3|PAPER Mon-P-2-1-3 — Simultaneous Articulatory and Acoustic Distortion in L1 and L2 Listening: Locally Time-Reversed “Fast” Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Simultaneous Articulatory and Acoustic Distortion in L1 and L2 Listening: Locally Time-Reversed “Fast” Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171502.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-13|PAPER Wed-P-8-1-13 — Exploring Multidimensionality: Acoustic and Articulatory Correlates of Swedish Word Accents]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Multidimensionality: Acoustic and Articulatory Correlates of Swedish Word Accents</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171513.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-1|PAPER Mon-O-1-1-1 — Improved Single System Conversational Telephone Speech Recognition with VGG Bottleneck Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Single System Conversational Telephone Speech Recognition with VGG Bottleneck Features</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171058.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-4|PAPER Mon-O-1-1-4 — Improving Deliverable Speech-to-Text Systems with Multilingual Knowledge Transfer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Deliverable Speech-to-Text Systems with Multilingual Knowledge Transfer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170656.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-6|PAPER Tue-P-3-2-6 — i-Vector DNN Scoring and Calibration for Noise Robust Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">i-Vector DNN Scoring and Calibration for Noise Robust Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172057.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-5|PAPER Wed-S&T-6-B-5 — Modifying Amazon’s Alexa ASR Grammar and Lexicon — A Case Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modifying Amazon’s Alexa ASR Grammar and Lexicon — A Case Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170422.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-4|PAPER Wed-P-8-3-4 — Character-Based Embedding Models and Reranking Strategies for Understanding Natural Language Meal Descriptions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Character-Based Embedding Models and Reranking Strategies for Understanding Natural Language Meal Descriptions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170866.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-11|PAPER Wed-P-7-3-11 — An Audio Based Piano Performance Evaluation Method Using Deep Neural Network Based Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Audio Based Piano Performance Evaluation Method Using Deep Neural Network Based Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172031.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-2|PAPER Wed-S&T-6-B-2 — A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170426.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-10-5|PAPER Wed-O-8-10-5 — Multi-Scale Context Adaptation for Improving Child Automatic Speech Recognition in Child-Adult Spoken Interactions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Scale Context Adaptation for Improving Child Automatic Speech Recognition in Child-Adult Spoken Interactions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170098.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-1|PAPER Mon-O-1-2-1 — Multimodal Markers of Persuasive Speech: Designing a Virtual Debate Coach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multimodal Markers of Persuasive Speech: Designing a Virtual Debate Coach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171251.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-5|PAPER Wed-O-8-8-5 — Glottal Source Features for Automatic Speech-Based Depression Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glottal Source Features for Automatic Speech-Based Depression Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170882.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-11|PAPER Thu-P-9-3-11 — Glottal Source Estimation from Coded Telephone Speech Using a Deep Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glottal Source Estimation from Coded Telephone Speech Using a Deep Neural Network</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170363.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-14|PAPER Thu-P-9-3-14 — Effects of Training Data Variety in Generating Glottal Pulses from Acoustic Features with DNNs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Training Data Variety in Generating Glottal Pulses from Acoustic Features with DNNs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-4-5|PAPER Mon-O-1-4-5 — Improving Speech Intelligibility in Binaural Hearing Aids by Estimating a Time-Frequency Mask with a Weighted Least Squares Classifier]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Speech Intelligibility in Binaural Hearing Aids by Estimating a Time-Frequency Mask with a Weighted Least Squares Classifier</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-2|PAPER Mon-SS-2-8-2 — Ensemble Learning for Countermeasure of Audio Replay Spoofing Attack in ASVspoof2017]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ensemble Learning for Countermeasure of Audio Replay Spoofing Attack in ASVspoof2017</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170448.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-2|PAPER Thu-SS-9-11-2 — A Semi-Polar Grid Strategy for the Three-Dimensional Finite Element Simulation of Vowel-Vowel Sequences]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Semi-Polar Grid Strategy for the Three-Dimensional Finite Element Simulation of Vowel-Vowel Sequences</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171239.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-5|PAPER Thu-SS-9-11-5 — A Unified Numerical Simulation of Vowel Production That Comprises Phonation and the Emitted Sound]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Unified Numerical Simulation of Vowel Production That Comprises Phonation and the Emitted Sound</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171614.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-6|PAPER Thu-SS-9-11-6 — Synthesis of VV Utterances from Muscle Activation to Sound with a 3D Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Synthesis of VV Utterances from Muscle Activation to Sound with a 3D Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170733.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-2|PAPER Mon-O-2-10-2 — Neural Network-Based Spectrum Estimation for Online WPE Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Network-Based Spectrum Estimation for Online WPE Dereverberation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170721.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-4-2|PAPER Tue-O-4-4-2 — Deep Clustering-Based Beamforming for Separation with Unknown Number of Sources]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Clustering-Based Beamforming for Separation with Unknown Number of Sources</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170873.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-3|PAPER Tue-P-4-1-3 — Unfolded Deep Recurrent Convolutional Neural Network with Jump Ahead Connections for Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unfolded Deep Recurrent Convolutional Neural Network with Jump Ahead Connections for Acoustic Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170554.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-4|PAPER Tue-P-4-1-4 — Forward-Backward Convolutional LSTM for Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Forward-Backward Convolutional LSTM for Acoustic Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170543.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-3|PAPER Tue-P-5-4-3 — Improved Example-Based Speech Enhancement by Using Deep Neural Network Acoustic Model for Noise Robust Example Search]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Example-Based Speech Enhancement by Using Deep Neural Network Acoustic Model for Noise Robust Example Search</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170667.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-6-2|PAPER Wed-O-8-6-2 — Speaker-Aware Neural Network Based Beamformer for Speaker Extraction in Speech Mixtures]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker-Aware Neural Network Based Beamformer for Speaker Extraction in Speech Mixtures</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170793.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-5|PAPER Thu-P-9-1-5 — Uncertainty Decoding with Adaptive Sampling for Noise Robust DNN-Based Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Uncertainty Decoding with Adaptive Sampling for Noise Robust DNN-Based Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171419.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-2|PAPER Tue-P-3-1-2 — Content Normalization for Text-Dependent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Content Normalization for Text-Dependent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171150.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-6-4|PAPER Mon-O-2-6-4 — L1 Perceptions of L2 Prosody: The Interplay Between Intonation, Rhythm, and Speech Rate and Their Contribution to Accentedness and Comprehensibility]]</div>|^<div class="cpauthorindexpersoncardpapertitle">L1 Perceptions of L2 Prosody: The Interplay Between Intonation, Rhythm, and Speech Rate and Their Contribution to Accentedness and Comprehensibility</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-2-5|PAPER Thu-O-10-2-5 — Speaker Verification Under Adverse Conditions Using i-Vector Adaptation and Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Verification Under Adverse Conditions Using i-Vector Adaptation and Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-4-3|PAPER Wed-O-8-4-3 — Assessing the Tolerance of Neural Machine Translation Systems Against Speech Recognition Errors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Assessing the Tolerance of Neural Machine Translation Systems Against Speech Recognition Errors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170776.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-6|PAPER Mon-SS-1-8-6 — Audio Replay Attack Detection Using High-Frequency Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Replay Attack Detection Using High-Frequency Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170353.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-11|PAPER Mon-P-2-2-11 — When a Dog is a Cat and How it Changes Your Pupil Size: Pupil Dilation in Response to Information Mismatch]]</div>|^<div class="cpauthorindexpersoncardpapertitle">When a Dog is a Cat and How it Changes Your Pupil Size: Pupil Dilation in Response to Information Mismatch</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171176.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-2|PAPER Tue-P-4-3-2 — Improving Prediction of Speech Activity Using Multi-Participant Respiratory State]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Prediction of Speech Activity Using Multi-Participant Respiratory State</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171480.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-4|PAPER Wed-O-7-4-4 — Label-Dependency Coding in Simple Recurrent Networks for Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Label-Dependency Coding in Simple Recurrent Networks for Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170101.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-2|PAPER Wed-P-7-3-2 — Audio Scene Classification with Deep Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Scene Classification with Deep Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170755.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-5|PAPER Mon-P-2-3-5 — Optimizing DNN Adaptation for Recognition of Enhanced Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Optimizing DNN Adaptation for Recognition of Enhanced Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170051.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-2-1|PAPER Thu-O-9-2-1 — Speaker Diarization Using Convolutional Neural Network for Statistics Accumulation Refinement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Diarization Using Convolutional Neural Network for Statistics Accumulation Refinement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171139.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-14|PAPER Wed-SS-7-1-14 — Rapid Development of TTS Corpora for Four South African Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rapid Development of TTS Corpora for Four South African Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170621.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-6|PAPER Wed-P-7-4-6 — Phonological Markers of Oxytocin and MDMA Ingestion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonological Markers of Oxytocin and MDMA Ingestion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170811.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-6-3|PAPER Wed-O-6-6-3 — An Investigation of Pitch Matching Across Adjacent Turns in a Corpus of Spontaneous German]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Investigation of Pitch Matching Across Adjacent Turns in a Corpus of Spontaneous German</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170450.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-2|PAPER Mon-SS-1-8-2 — Experimental Analysis of Features for Replay Attack Detection — Results on the ASVspoof 2017 Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Experimental Analysis of Features for Replay Attack Detection — Results on the ASVspoof 2017 Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171157.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-5|PAPER Tue-P-5-4-5 — Real-Time Modulation Enhancement of Temporal Envelopes for Increasing Speech Intelligibility]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Modulation Enhancement of Temporal Envelopes for Increasing Speech Intelligibility</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170119.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-3|PAPER Wed-P-7-3-3 — Automatic Time-Frequency Analysis of Echolocation Signals Using the Matched Gaussian Multitaper Spectrogram]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Time-Frequency Analysis of Echolocation Signals Using the Matched Gaussian Multitaper Spectrogram</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-6|PAPER Wed-S&T-6-A-6 — SIAK — A Game for Foreign Language Pronunciation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SIAK — A Game for Foreign Language Pronunciation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170453.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-6-2|PAPER Wed-O-6-6-2 — Prosodic Facilitation and Interference While Judging on the Veracity of Synthesized Statements]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Facilitation and Interference While Judging on the Veracity of Synthesized Statements</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170194.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-8|PAPER Mon-P-2-2-8 — The Effect of Gesture on Persuasive Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Effect of Gesture on Persuasive Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170578.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-7|PAPER Wed-P-7-2-7 — An Ultrasound Study of Alveolar and Retroflex Consonants in Arrernte: Stressed and Unstressed Syllables]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Ultrasound Study of Alveolar and Retroflex Consonants in Arrernte: Stressed and Unstressed Syllables</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171525.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-8|PAPER Wed-P-8-3-8 — To Plan or not to Plan? Discourse Planning in Slot-Value Informed Sequence to Sequence Models for Language Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">To Plan or not to Plan? Discourse Planning in Slot-Value Informed Sequence to Sequence Models for Language Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170303.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-2|PAPER Wed-O-8-8-2 — Multi-Stage DNN Training for Automatic Recognition of Dysarthric Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Stage DNN Training for Automatic Recognition of Dysarthric Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171329.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-6|PAPER Mon-O-1-2-6 — Co-Production of Speech and Pointing Gestures in Clear and Perturbed Interactive Tasks: Multimodal Designation Strategies]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Co-Production of Speech and Pointing Gestures in Clear and Perturbed Interactive Tasks: Multimodal Designation Strategies</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171409.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-3|PAPER Wed-SS-6-11-3 — What do Babies Hear? Analyses of Child- and Adult-Directed Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">What do Babies Hear? Analyses of Child- and Adult-Directed Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171418.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-4|PAPER Wed-SS-6-11-4 — A New Workflow for Semi-Automatized Annotations: Tests with Long-Form Naturalistic Recordings of Childrens Language Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A New Workflow for Semi-Automatized Annotations: Tests with Long-Form Naturalistic Recordings of Children's Language Environments</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-4|PAPER Thu-SS-9-10-4 — Description of the Homebank Child/Adult Addressee Corpus (HB-CHAAC)]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Description of the Homebank Child/Adult Addressee Corpus (HB-CHAAC)</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171601.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-6-1|PAPER Mon-O-1-6-1 — Phonetic Correlates of Pharyngeal and Pharyngealized Consonants in Saudi, Lebanese, and Jordanian Arabic: An rt-MRI Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetic Correlates of Pharyngeal and Pharyngealized Consonants in Saudi, Lebanese, and Jordanian Arabic: An rt-MRI Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170618.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-4|PAPER Mon-P-2-1-4 — Lexically Guided Perceptual Learning in Mandarin Chinese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lexically Guided Perceptual Learning in Mandarin Chinese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171681.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-10|PAPER Mon-P-1-1-10 — Learning the Mapping Function from Voltage Amplitudes to Sensor Positions in 3D-EMA Using Deep Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning the Mapping Function from Voltage Amplitudes to Sensor Positions in 3D-EMA Using Deep Neural Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170486.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-10|PAPER Wed-P-7-3-10 — Attention and Localization Based on a Deep Convolutional Recurrent Model for Weakly Supervised Audio Tagging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention and Localization Based on a Deep Convolutional Recurrent Model for Weakly Supervised Audio Tagging</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170904.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-5|PAPER Thu-O-9-4-5 — Symbol Sequence Search from Telephone Conversation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Symbol Sequence Search from Telephone Conversation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170788.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-7|PAPER Mon-P-2-3-7 — Multi-Task Learning Using Mismatched Transcription for Under-Resourced Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Task Learning Using Mismatched Transcription for Under-Resourced Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170593.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-9|PAPER Tue-P-5-3-9 — Using Approximated Auditory Roughness as a Pre-Filtering Feature for Human Screaming and Affective Speech AED]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Approximated Auditory Roughness as a Pre-Filtering Feature for Human Screaming and Affective Speech AED</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171672.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-13|PAPER Tue-P-5-4-13 — Speech Enhancement Using Bayesian Wavenet]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Enhancement Using Bayesian Wavenet</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-1|PAPER Wed-SS-6-2-1 — Team ELISA System for DARPA LORELEI Speech Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team ELISA System for DARPA LORELEI Speech Evaluation 2016</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170582.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-5|PAPER Wed-SS-6-2-5 — Deep Auto-Encoder Based Multi-Task Learning Using Probabilistic Transcriptions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Auto-Encoder Based Multi-Task Learning Using Probabilistic Transcriptions</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171659.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-6-6|PAPER Wed-O-8-6-6 — Glottal Model Based Speech Beamforming for ad-hoc Microphone Arrays]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glottal Model Based Speech Beamforming for ad-hoc Microphone Arrays</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171567.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-11|PAPER Wed-P-6-1-11 — Mismatched Crowdsourcing from Multiple Annotator Languages for Recognizing Zero-Resourced Languages: A Nullspace Clustering Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mismatched Crowdsourcing from Multiple Annotator Languages for Recognizing Zero-Resourced Languages: A Nullspace Clustering Approach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171261.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-5|PAPER Thu-SS-9-10-5 — It Sounds Like You Have a Cold! Testing Voice Features for the Interspeech 2017 Computational Paralinguistics Cold Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">It Sounds Like You Have a Cold! Testing Voice Features for the Interspeech 2017 Computational Paralinguistics Cold Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170145.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-2|PAPER Mon-O-1-1-2 — Student-Teacher Training with Diverse Decision Tree Ensembles]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Student-Teacher Training with Diverse Decision Tree Ensembles</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170513.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-1-3|PAPER Mon-O-2-1-3 — Investigating Bidirectional Recurrent Neural Network Language Models for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Bidirectional Recurrent Neural Network Language Models for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171233.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-6|PAPER Tue-P-4-1-6 — Deep Activation Mixture Model for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Activation Mixture Model for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-8|PAPER Wed-P-6-1-8 — Use of Graphemic Lexicons for Spoken Language Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Use of Graphemic Lexicons for Spoken Language Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172052.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-A-5|PAPER Mon-S&T-2-A-5 — A System for Real Time Collaborative Transcription Correction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A System for Real Time Collaborative Transcription Correction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172051.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-A-4|PAPER Mon-S&T-2-A-4 — HomeBank: A Repository for Long-Form Real-World Audio Recordings of Children]]</div>|^<div class="cpauthorindexpersoncardpapertitle">HomeBank: A Repository for Long-Form Real-World Audio Recordings of Children</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171418.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-4|PAPER Wed-SS-6-11-4 — A New Workflow for Semi-Automatized Annotations: Tests with Long-Form Naturalistic Recordings of Children’s Language Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A New Workflow for Semi-Automatized Annotations: Tests with Long-Form Naturalistic Recordings of Children’s Language Environments</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172026.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-4|PAPER Wed-S&T-6-A-4 — Voice Conservation and TTS System for People Facing Total Laryngectomy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Conservation and TTS System for People Facing Total Laryngectomy</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171254.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-4-5|PAPER Wed-O-6-4-5 — Low-Complexity Pitch Estimation Based on Phase Differences Between Low-Resolution Spectra]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low-Complexity Pitch Estimation Based on Phase Differences Between Low-Resolution Spectra</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170516.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-9|PAPER Wed-P-6-3-9 — Zero-Shot Learning Across Heterogeneous Overlapping Domains]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Learning Across Heterogeneous Overlapping Domains</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-3|PAPER Wed-S&T-6-B-3 — Towards an Autarkic Embedded Cognitive User Interface]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards an Autarkic Embedded Cognitive User Interface</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172049.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-A-3|PAPER Mon-S&T-2-A-3 — Extending the EMU Speech Database Management System: Cloud Hosting, Team Collaboration, Automatic Revision Control]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extending the EMU Speech Database Management System: Cloud Hosting, Team Collaboration, Automatic Revision Control</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171285.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-10|PAPER Wed-P-7-2-10 — What do Finnish and Central Bavarian Have in Common? Towards an Acoustically Based Quantity Typology]]</div>|^<div class="cpauthorindexpersoncardpapertitle">What do Finnish and Central Bavarian Have in Common? Towards an Acoustically Based Quantity Typology</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171737.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-5|PAPER Tue-P-4-1-5 — Convolutional Recurrent Neural Networks for Small-Footprint Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Convolutional Recurrent Neural Networks for Small-Footprint Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171606.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-4-11-4|PAPER Tue-SS-4-11-4 — Enhancing Backchannel Prediction Using Word Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enhancing Backchannel Prediction Using Word Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-7|PAPER Wed-P-7-4-7 — An Avatar-Based System for Identifying Individuals Likely to Develop Dementia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Avatar-Based System for Identifying Individuals Likely to Develop Dementia</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170424.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-6-5|PAPER Wed-O-6-6-5 — The Role of Linguistic and Prosodic Cues on the Prediction of Self-Reported Satisfaction in Contact Centre Phone Calls]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Role of Linguistic and Prosodic Cues on the Prediction of Self-Reported Satisfaction in Contact Centre Phone Calls</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170260.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-2-2|PAPER Tue-O-3-2-2 — Analysis of Acoustic-to-Articulatory Speech Inversion Across Different Accents and Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of Acoustic-to-Articulatory Speech Inversion Across Different Accents and Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171251.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-5|PAPER Wed-O-8-8-5 — Glottal Source Features for Automatic Speech-Based Depression Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glottal Source Features for Automatic Speech-Based Depression Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172024.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-3|PAPER Wed-S&T-6-A-3 — WebSubDub — Experimental System for Creating High-Quality Alternative Audio Track for TV Broadcasting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WebSubDub — Experimental System for Creating High-Quality Alternative Audio Track for TV Broadcasting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170264.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-6-3|PAPER Tue-O-4-6-3 — Intonation Facilitates Prediction of Focus Even in the Presence of Lexical Tones]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Intonation Facilitates Prediction of Focus Even in the Presence of Lexical Tones</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171139.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-14|PAPER Wed-SS-7-1-14 — Rapid Development of TTS Corpora for Four South African Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rapid Development of TTS Corpora for Four South African Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-4|PAPER Mon-P-2-3-4 — 2016 BUT Babel System: Multilingual BLSTM Acoustic Model with i-Vector Based Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">2016 BUT Babel System: Multilingual BLSTM Acoustic Model with i-Vector Based Adaptation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-1|PAPER Wed-SS-6-2-1 — Team ELISA System for DARPA LORELEI Speech Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team ELISA System for DARPA LORELEI Speech Evaluation 2016</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172059.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-B-5|PAPER Mon-S&T-2-B-5 — Mylly — The Mill: A New Platform for Processing Speech and Text Corpora Easily and Efficiently]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mylly — The Mill: A New Platform for Processing Speech and Text Corpora Easily and Efficiently</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171064.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-1-5|PAPER Tue-O-5-1-5 — Frame and Segment Level Recurrent Neural Networks for Phone Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Frame and Segment Level Recurrent Neural Networks for Phone Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171179.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-4-1|PAPER Mon-O-2-4-1 — Phone Classification Using a Non-Linear Manifold with Broad Phone Class Dependent DNNs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phone Classification Using a Non-Linear Manifold with Broad Phone Class Dependent DNNs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171373.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-6|PAPER Mon-SS-1-11-6 — Addressing Code-Switching in French/Algerian Arabic Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Addressing Code-Switching in French/Algerian Arabic Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171482.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-3|PAPER Wed-O-7-4-3 — Investigating the Effect of ASR Tuning on Named Entity Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Effect of ASR Tuning on Named Entity Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170470.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-3|PAPER Thu-O-10-8-3 — Schwa Realization in French: Using Automatic Speech Processing to Study Phonological and Socio-Linguistic Factors in Large Corpora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Schwa Realization in French: Using Automatic Speech Processing to Study Phonological and Socio-Linguistic Factors in Large Corpora</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-7|PAPER Thu-P-9-3-7 — Developing an Embosi (Bantu C25) Speech Variant Dictionary to Model Vowel Elision and Morpheme Deletion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Developing an Embosi (Bantu C25) Speech Variant Dictionary to Model Vowel Elision and Morpheme Deletion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171044.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-6-2|PAPER Tue-O-4-6-2 — Comparing Languages Using Hierarchical Prosodic Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing Languages Using Hierarchical Prosodic Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171607.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-4|PAPER Tue-P-5-1-4 — A Data-Driven Approach for Perceptually Validated Acoustic Features for Children’s Sibilant Fricative Productions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Data-Driven Approach for Perceptually Validated Acoustic Features for Children’s Sibilant Fricative Productions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171391.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-6|PAPER Wed-O-7-10-6 — QMDIS: QCRI-MIT Advanced Dialect Identification System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">QMDIS: QCRI-MIT Advanced Dialect Identification System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171635.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-6-1|PAPER Tue-O-4-6-1 — The Vocative Chant and Beyond: German Calling Melodies Under Routine and Urgent Contexts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Vocative Chant and Beyond: German Calling Melodies Under Routine and Urgent Contexts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170841.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-5|PAPER Wed-P-8-4-5 — Speaker Dependent Approach for Enhancing a Glossectomy Patient’s Speech via GMM-Based Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Dependent Approach for Enhancing a Glossectomy Patient’s Speech via GMM-Based Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170225.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-2|PAPER Thu-P-9-1-2 — Factored Deep Convolutional Neural Networks for Noise Robust Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Factored Deep Convolutional Neural Networks for Noise Robust Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170841.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-5|PAPER Wed-P-8-4-5 — Speaker Dependent Approach for Enhancing a Glossectomy Patient’s Speech via GMM-Based Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Dependent Approach for Enhancing a Glossectomy Patient’s Speech via GMM-Based Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-1|PAPER Mon-P-1-1-1 — Low-Dimensional Representation of Spectral Envelope Without Deterioration for Full-Band Speech Analysis/Synthesis System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low-Dimensional Representation of Spectral Envelope Without Deterioration for Full-Band Speech Analysis/Synthesis System</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170436.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-4|PAPER Mon-P-1-1-4 — A Modulation Property of Time-Frequency Derivatives of Filtered Phase and its Application to Aperiodicity and f,,o,, Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Modulation Property of Time-Frequency Derivatives of Filtered Phase and its Application to Aperiodicity and f,,o,, Estimation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170015.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-1|PAPER Tue-O-5-4-1 — A New Cosine Series Antialiasing Function and its Application to Aliasing-Free Glottal Source Models for Speech and Singing Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A New Cosine Series Antialiasing Function and its Application to Aliasing-Free Glottal Source Models for Speech and Singing Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170068.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-4-6|PAPER Wed-O-6-4-6 — Harvest: A High-Performance Fundamental Frequency Estimator from Speech Signals]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Harvest: A High-Performance Fundamental Frequency Estimator from Speech Signals</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171061.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-4-4|PAPER Wed-O-6-4-4 — Robust Method for Estimating F,,0,, of Complex Tone Based on Pitch Perception of Amplitude Modulated Signal]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Method for Estimating F,,0,, of Complex Tone Based on Pitch Perception of Amplitude Modulated Signal</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170457.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-7|PAPER Tue-P-4-3-7 — Social Signal Detection in Spontaneous Dialogue Using Bidirectional LSTM-CTC]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Social Signal Detection in Spontaneous Dialogue Using Bidirectional LSTM-CTC</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170642.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-2|PAPER Wed-O-7-2-2 — Combined Multi-Channel NMF-Based Robust Beamforming for Noisy Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Combined Multi-Channel NMF-Based Robust Beamforming for Noisy Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171752.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-7|PAPER Wed-P-6-3-7 — Automatic Alignment Between Classroom Lecture Utterances and Slide Components]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Alignment Between Classroom Lecture Utterances and Slide Components</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170920.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-7|PAPER Tue-P-4-1-7 — Ensembles of Multi-Scale VGG Acoustic Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ensembles of Multi-Scale VGG Acoustic Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170904.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-5|PAPER Thu-O-9-4-5 — Symbol Sequence Search from Telephone Conversation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Symbol Sequence Search from Telephone Conversation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170614.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-4|PAPER Thu-O-10-1-4 — Efficient Knowledge Distillation from an Ensemble of Teachers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Efficient Knowledge Distillation from an Ensemble of Teachers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170804.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-2-4|PAPER Tue-O-3-2-4 — A Speaker Adaptive DNN Training Approach for Speaker-Independent Acoustic Inversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Speaker Adaptive DNN Training Approach for Speaker-Independent Acoustic Inversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-1|PAPER Mon-SS-1-8-1 — The ASVspoof 2017 Challenge: Assessing the Limits of Replay Spoofing Attack Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The ASVspoof 2017 Challenge: Assessing the Limits of Replay Spoofing Attack Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170850.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-12|PAPER Wed-P-7-4-12 — Rhythmic Characteristics of Parkinsonian Speech: A Study on Mandarin and Polish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rhythmic Characteristics of Parkinsonian Speech: A Study on Mandarin and Polish</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170971.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-12|PAPER Thu-P-9-3-12 — Automatic Labelling of Prosodic Prominence, Phrasing and Disfluencies in French Speech by Simulating the Perception of Naïve and Expert Listeners]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Labelling of Prosodic Prominence, Phrasing and Disfluencies in French Speech by Simulating the Perception of Naïve and Expert Listeners</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170112.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-1|PAPER Wed-P-7-4-1 — Manual and Automatic Transcriptions in Dementia Detection from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Manual and Automatic Transcriptions in Dementia Detection from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171722.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-5|PAPER Tue-O-5-4-5 — Modeling Laryngeal Muscle Activation Noise for Low-Order Physiological Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Laryngeal Muscle Activation Noise for Low-Order Physiological Based Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171143.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-11-3|PAPER Wed-SS-7-11-3 — Modelling the Informativeness of Non-Verbal Cues in Parent-Child Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modelling the Informativeness of Non-Verbal Cues in Parent-Child Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171377.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-4|PAPER Mon-SS-2-8-4 — Replay Attack Detection Using DNN for Channel Discrimination]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Replay Attack Detection Using DNN for Channel Discrimination</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-5|PAPER Mon-O-2-10-5 — Acoustic Modeling for Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Modeling for Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171705.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-1-1|PAPER Tue-O-5-1-1 — Recurrent Neural Aligner: An Encoder-Decoder Neural Network Model for Sequence to Sequence Mapping]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Recurrent Neural Aligner: An Encoder-Decoder Neural Network Model for Sequence to Sequence Mapping</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170496.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-8|PAPER Tue-P-5-3-8 — Improved End-of-Query Detection for Streaming Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved End-of-Query Detection for Streaming Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170639.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-1-2|PAPER Thu-O-9-1-2 — Optimizing Expected Word Error Rate via Sampling for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Optimizing Expected Word Error Rate via Sampling for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170621.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-6|PAPER Wed-P-7-4-6 — Phonological Markers of Oxytocin and MDMA Ingestion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonological Markers of Oxytocin and MDMA Ingestion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170621.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-6|PAPER Wed-P-7-4-6 — Phonological Markers of Oxytocin and MDMA Ingestion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonological Markers of Oxytocin and MDMA Ingestion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170250.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-1|PAPER Tue-O-5-8-1 — Bidirectional LSTM-RNN for Improving Automated Assessment of Non-Native Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bidirectional LSTM-RNN for Improving Automated Assessment of Non-Native Children’s Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171174.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-3|PAPER Tue-O-5-8-3 — Off-Topic Spoken Response Detection Using Siamese Convolutional Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Off-Topic Spoken Response Detection Using Siamese Convolutional Neural Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170388.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-4|PAPER Wed-P-6-1-4 — Off-Topic Spoken Response Detection with Word Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Off-Topic Spoken Response Detection with Word Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171438.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-5|PAPER Mon-O-1-10-5 — Beyond the Listening Test: An Interactive Approach to TTS Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Beyond the Listening Test: An Interactive Approach to TTS Evaluation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171250.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-9|PAPER Thu-P-9-4-9 — Real-Time Reactive Speech Synthesis: Incorporating Interruptions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Reactive Speech Synthesis: Incorporating Interruptions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171093.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-6|PAPER Wed-O-7-4-6 — Topic Identification for Speech Without ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Topic Identification for Speech Without ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170601.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-2|PAPER Thu-O-9-4-2 — The Kaldi OpenKWS System: Improving Low Resource Keyword Search]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Kaldi OpenKWS System: Improving Low Resource Keyword Search</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170122.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-7|PAPER Mon-P-2-2-7 — Audiovisual Recalibration of Vowel Categories]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audiovisual Recalibration of Vowel Categories</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-12|PAPER Thu-P-9-4-12 — Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171129.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-8|PAPER Wed-SS-7-1-8 — Improving DNN Bluetooth Narrowband Acoustic Models by Cross-Bandwidth and Cross-Lingual Initialization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving DNN Bluetooth Narrowband Acoustic Models by Cross-Bandwidth and Cross-Lingual Initialization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171352.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-13|PAPER Wed-SS-7-1-13 — Building ASR Corpora Using Eyra]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Building ASR Corpora Using Eyra</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171683.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-2|PAPER Mon-P-1-4-2 — Comparison of Decoding Strategies for CTC Acoustic Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Decoding Strategies for CTC Acoustic Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-3|PAPER Wed-S&T-6-B-3 — Towards an Autarkic Embedded Cognitive User Interface]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards an Autarkic Embedded Cognitive User Interface</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171238.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-8|PAPER Mon-P-1-2-8 — Virtual Adversarial Training and Data Augmentation for Acoustic Event Detection with Gated Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Virtual Adversarial Training and Data Augmentation for Acoustic Event Detection with Gated Recurrent Neural Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171186.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-6-3|PAPER Wed-O-8-6-3 — Eigenvector-Based Speech Mask Estimation Using Logistic Regression]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Eigenvector-Based Speech Mask Estimation Using Logistic Regression</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-9|PAPER Wed-P-8-3-9 — Online Adaptation of an Attention-Based Neural Network for Natural Language Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online Adaptation of an Attention-Based Neural Network for Natural Language Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-4-3|PAPER Wed-O-8-4-3 — Assessing the Tolerance of Neural Machine Translation Systems Against Speech Recognition Errors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Assessing the Tolerance of Neural Machine Translation Systems Against Speech Recognition Errors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171176.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-2|PAPER Tue-P-4-3-2 — Improving Prediction of Speech Activity Using Multi-Participant Respiratory State]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Prediction of Speech Activity Using Multi-Participant Respiratory State</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171430.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-4|PAPER Wed-P-8-1-4 — How Does the Absence of Shared Knowledge Between Interlocutors Affect the Production of French Prosodic Forms?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">How Does the Absence of Shared Knowledge Between Interlocutors Affect the Production of French Prosodic Forms?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171576.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-4|PAPER Wed-O-6-1-4 — Speaker-Specific Biomechanical Model-Based Investigation of a Simple Speech Task Based on Tagged-MRI]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker-Specific Biomechanical Model-Based Investigation of a Simple Speech Task Based on Tagged-MRI</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170730.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-4|PAPER Tue-SS-3-11-4 — Automatic Classification of Autistic Child Vocalisations: A Novel Database and Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Classification of Autistic Child Vocalisations: A Novel Database and Results</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-2|PAPER Thu-SS-10-10-2 — An ‘End-to-Evolution’ Hybrid Approach for Snore Sound Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An ‘End-to-Evolution’ Hybrid Approach for Snore Sound Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-3|PAPER Thu-SS-10-10-3 — Snore Sound Classification Using Image-Based Deep Spectrum Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Snore Sound Classification Using Image-Based Deep Spectrum Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-1-3|PAPER Tue-O-5-1-3 — Improving Speech Recognition by Revising Gated Recurrent Units]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Speech Recognition by Revising Gated Recurrent Units</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-1|PAPER Mon-O-2-2-1 — Dominant Distortion Classification for Pre-Processing of Vowels in Remote Biomedical Voice Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dominant Distortion Classification for Pre-Processing of Vowels in Remote Biomedical Voice Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170567.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-2-1|PAPER Tue-O-4-2-1 — A Comparison of Sentence-Level Speech Intelligibility Metrics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Sentence-Level Speech Intelligibility Metrics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171212.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-3|PAPER Thu-O-9-4-3 — The STC Keyword Search System for OpenKWS 2016 Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The STC Keyword Search System for OpenKWS 2016 Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170501.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-6-4|PAPER Wed-O-7-6-4 — Speech and Text Analysis for Multimodal Addressee Detection in Human-Human-Computer Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech and Text Analysis for Multimodal Addressee Detection in Human-Human-Computer Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170635.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-8-4|PAPER Wed-O-6-8-4 — Spotting Social Signals in Conversational Speech over IP: A Deep Learning Perspective]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spotting Social Signals in Conversational Speech over IP: A Deep Learning Perspective</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170668.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-10|PAPER Tue-P-3-2-10 — Domain Mismatch Modeling of Out-Domain i-Vectors for PLDA Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain Mismatch Modeling of Out-Domain i-Vectors for PLDA Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171599.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-11|PAPER Wed-P-7-4-11 — Exploiting Intra-Annotator Rating Consistency Through Copeland’s Method for Estimation of Ground Truth Labels in Couples’ Therapy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploiting Intra-Annotator Rating Consistency Through Copeland’s Method for Estimation of Ground Truth Labels in Couples’ Therapy</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171641.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-11|PAPER Wed-P-8-2-11 — Complexity in Speech and its Relation to Emotional Bond in Therapist-Patient Interactions During Suicide Risk Assessment Interviews]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Complexity in Speech and its Relation to Emotional Bond in Therapist-Patient Interactions During Suicide Risk Assessment Interviews</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-1|PAPER Mon-SS-1-8-1 — The ASVspoof 2017 Challenge: Assessing the Limits of Replay Spoofing Attack Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The ASVspoof 2017 Challenge: Assessing the Limits of Replay Spoofing Attack Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170734.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-8|PAPER Tue-P-3-1-8 — Time-Varying Autoregressions for Speaker Verification in Reverberant Conditions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time-Varying Autoregressions for Speaker Verification in Reverberant Conditions</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171758.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-4|PAPER Wed-O-8-1-4 — Improving Speaker Verification Performance in Presence of Spoofing Attacks Using Out-of-Domain Spoofed Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Speaker Verification Performance in Presence of Spoofing Attacks Using Out-of-Domain Spoofed Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170840.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-2|PAPER Wed-P-8-1-2 — Intonation of Contrastive Topic in Estonian]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Intonation of Contrastive Topic in Estonian</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171362.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-3|PAPER Mon-SS-1-8-3 — Novel Variable Length Teager Energy Separation Based Instantaneous Frequency Features for Replay Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Novel Variable Length Teager Energy Separation Based Instantaneous Frequency Features for Replay Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171422.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-16|PAPER Wed-P-7-3-16 — Novel Shifted Real Spectrum for Exact Signal Reconstruction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Novel Shifted Real Spectrum for Exact Signal Reconstruction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171409.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-3|PAPER Wed-SS-6-11-3 — What do Babies Hear? Analyses of Child- and Adult-Directed Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">What do Babies Hear? Analyses of Child- and Adult-Directed Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171418.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-4|PAPER Wed-SS-6-11-4 — A New Workflow for Semi-Automatized Annotations: Tests with Long-Form Naturalistic Recordings of Children’s Language Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A New Workflow for Semi-Automatized Annotations: Tests with Long-Form Naturalistic Recordings of Children’s Language Environments</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-4|PAPER Thu-SS-9-10-4 — Description of the Homebank Child/Adult Addressee Corpus (HB-CHAAC)]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Description of the Homebank Child/Adult Addressee Corpus (HB-CHAAC)</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171394.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-7|PAPER Tue-SS-5-11-7 — Acoustic Correlates of Parental Role and Gender Identity in the Speech of Expecting Parents]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Correlates of Parental Role and Gender Identity in the Speech of Expecting Parents</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170548.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-4|PAPER Tue-O-4-8-4 — Capturing Long-Term Temporal Dependencies with Convolutional Networks for Continuous Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Capturing Long-Term Temporal Dependencies with Convolutional Networks for Continuous Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-12|PAPER Thu-P-9-4-12 — Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171517.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-6|PAPER Mon-P-2-1-6 — Whether Long-Term Tracking of Speech Rate Affects Perception Depends on Who is Talking]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Whether Long-Term Tracking of Speech Rate Affects Perception Depends on Who is Talking</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171420.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-10|PAPER Thu-P-9-4-10 — A Neural Parametric Singing Synthesizer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Neural Parametric Singing Synthesizer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171357.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-4-6|PAPER Thu-O-10-4-6 — Building Audio-Visual Phonetically Annotated Arabic Corpus for Expressive Text to Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Building Audio-Visual Phonetically Annotated Arabic Corpus for Expressive Text to Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171395.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-3|PAPER Tue-SS-3-11-3 — Analysis of Engagement and User Experience with a Laughter Responsive Social Robot]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of Engagement and User Experience with a Laughter Responsive Social Robot</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170379.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-1|PAPER Tue-P-5-1-1 — An Automatically Aligned Corpus of Child-Directed Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Automatically Aligned Corpus of Child-Directed Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172053.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-6|PAPER Tue-S&T-3-A-6 — MetaLab: A Repository for Meta-Analyses on Language Development, and More]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MetaLab: A Repository for Meta-Analyses on Language Development, and More</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-5|PAPER Tue-S&T-3-A-5 — Speech Recognition and Understanding on Hardware-Accelerated DSP]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Recognition and Understanding on Hardware-Accelerated DSP</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170730.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-4|PAPER Tue-SS-3-11-4 — Automatic Classification of Autistic Child Vocalisations: A Novel Database and Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Classification of Autistic Child Vocalisations: A Novel Database and Results</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-2|PAPER Thu-SS-10-10-2 — An ‘End-to-Evolution’ Hybrid Approach for Snore Sound Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An ‘End-to-Evolution’ Hybrid Approach for Snore Sound Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-3|PAPER Thu-SS-10-10-3 — Snore Sound Classification Using Image-Based Deep Spectrum Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Snore Sound Classification Using Image-Based Deep Spectrum Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170636.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-1|PAPER Wed-SS-6-11-1 — SLPAnnotator: Tools for Implementing Sign Language Phonetic Annotation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SLPAnnotator: Tools for Implementing Sign Language Phonetic Annotation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-6|PAPER Tue-P-5-4-6 — On the Influence of Modifying Magnitude and Phase Spectrum to Enhance Noisy Speech Signals]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Influence of Modifying Magnitude and Phase Spectrum to Enhance Noisy Speech Signals</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170920.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-7|PAPER Tue-P-4-1-7 — Ensembles of Multi-Scale VGG Acoustic Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ensembles of Multi-Scale VGG Acoustic Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170873.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-3|PAPER Tue-P-4-1-3 — Unfolded Deep Recurrent Convolutional Neural Network with Jump Ahead Connections for Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unfolded Deep Recurrent Convolutional Neural Network with Jump Ahead Connections for Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171653.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-6-6|PAPER Thu-O-9-6-6 — Concatenative Resynthesis Using Twin Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Concatenative Resynthesis Using Twin Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171023.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-10|PAPER Wed-P-6-2-10 — Null-Hypothesis LLR: A Proposal for Forensic Automatic Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Null-Hypothesis LLR: A Proposal for Forensic Automatic Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170519.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-1|PAPER Wed-O-6-10-1 — Large-Scale Domain Adaptation via Teacher-Student Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Large-Scale Domain Adaptation via Teacher-Student Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170164.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-13|PAPER Thu-P-9-3-13 — Don’t Count on ASR to Transcribe for You: Breaking Bias with Two Crowds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Don’t Count on ASR to Transcribe for You: Breaking Bias with Two Crowds</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171386.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-9|PAPER Mon-P-1-2-9 — Montreal Forced Aligner: Trainable Text-Speech Alignment Using Kaldi]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Montreal Forced Aligner: Trainable Text-Speech Alignment Using Kaldi</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171500.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-5|PAPER Wed-P-8-1-5 — Three Dimensions of Sentence Prosody and Their (Non-)Interactions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Three Dimensions of Sentence Prosody and Their (Non-)Interactions</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171390.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-2|PAPER Thu-P-9-3-2 — Polyglot and Speech Corpus Tools: A System for Representing, Integrating, and Querying Speech Corpora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Polyglot and Speech Corpus Tools: A System for Representing, Integrating, and Querying Speech Corpora</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170917.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-6|PAPER Tue-O-4-8-6 — Attentive Convolutional Neural Network Based Speech Emotion Recognition: A Study on the Impact of Input Features, Signal Length, and Acted Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attentive Convolutional Neural Network Based Speech Emotion Recognition: A Study on the Impact of Input Features, Signal Length, and Acted Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171310.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-1-1|PAPER Mon-O-2-1-1 — Approaches for Neural-Network Language Model Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Approaches for Neural-Network Language Model Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-5|PAPER Mon-O-1-1-5 — English Conversational Telephone Speech Recognition by Humans and Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">English Conversational Telephone Speech Recognition by Humans and Machines</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170546.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-5|PAPER Tue-O-3-1-5 — Direct Acoustics-to-Word Models for English Conversational Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Direct Acoustics-to-Word Models for English Conversational Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170765.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-2|PAPER Wed-P-7-2-2 — Temporal Dynamics of Lateral Channel Formation in /l/: 3D EMA Data from Australian English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Temporal Dynamics of Lateral Channel Formation in /l/: 3D EMA Data from Australian English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-1|PAPER Wed-SS-6-2-1 — Team ELISA System for DARPA LORELEI Speech Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team ELISA System for DARPA LORELEI Speech Evaluation 2016</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171522.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-5|PAPER Tue-O-5-8-5 — Detection of Mispronunciations and Disfluencies in Children Reading Aloud]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Mispronunciations and Disfluencies in Children Reading Aloud</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171541.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-3|PAPER Wed-P-6-1-3 — Automatic Evaluation of Children Reading Aloud on Sentences and Pseudowords]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Evaluation of Children Reading Aloud on Sentences and Pseudowords</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171386.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-9|PAPER Mon-P-1-2-9 — Montreal Forced Aligner: Trainable Text-Speech Alignment Using Kaldi]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Montreal Forced Aligner: Trainable Text-Speech Alignment Using Kaldi</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171500.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-5|PAPER Wed-P-8-1-5 — Three Dimensions of Sentence Prosody and Their (Non-)Interactions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Three Dimensions of Sentence Prosody and Their (Non-)Interactions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171248.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-6|PAPER Tue-SS-5-11-6 — To See or not to See: Interlocutor Visibility and Likeability Influence Convergence in Intonation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">To See or not to See: Interlocutor Visibility and Likeability Influence Convergence in Intonation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170421.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-8-3|PAPER Thu-O-9-8-3 — Improving Speaker-Independent Lipreading with Domain-Adversarial Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Speaker-Independent Lipreading with Domain-Adversarial Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171386.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-9|PAPER Mon-P-1-2-9 — Montreal Forced Aligner: Trainable Text-Speech Alignment Using Kaldi]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Montreal Forced Aligner: Trainable Text-Speech Alignment Using Kaldi</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171390.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-2|PAPER Thu-P-9-3-2 — Polyglot and Speech Corpus Tools: A System for Representing, Integrating, and Querying Speech Corpora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Polyglot and Speech Corpus Tools: A System for Representing, Integrating, and Querying Speech Corpora</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170138.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-4|PAPER Mon-O-2-2-4 — Objective Severity Assessment from Disordered Voice Using Estimated Glottal Airflow]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Objective Severity Assessment from Disordered Voice Using Estimated Glottal Airflow</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-3|PAPER Tue-S&T-3-A-3 — Real Time Pitch Shifting with Formant Structure Preservation Using the Phase Vocoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real Time Pitch Shifting with Formant Structure Preservation Using the Phase Vocoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170804.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-2-4|PAPER Tue-O-3-2-4 — A Speaker Adaptive DNN Training Approach for Speaker-Independent Acoustic Inversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Speaker Adaptive DNN Training Approach for Speaker-Independent Acoustic Inversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171382.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-11|PAPER Wed-P-8-3-11 — OpenMM: An Open-Source Multimodal Feature Extraction Tool]]</div>|^<div class="cpauthorindexpersoncardpapertitle">OpenMM: An Open-Source Multimodal Feature Extraction Tool</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171510.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-1|PAPER Mon-O-2-10-1 — Generation of Large-Scale Simulated Utterances in Virtual Rooms to Train Deep-Neural Networks for Far-Field Speech Recognition in Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generation of Large-Scale Simulated Utterances in Virtual Rooms to Train Deep-Neural Networks for Far-Field Speech Recognition in Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-5|PAPER Mon-O-2-10-5 — Acoustic Modeling for Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Modeling for Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171284.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-3|PAPER Tue-P-4-2-3 — End-to-End Training of Acoustic Models for Large Vocabulary Continuous Speech Recognition with TensorFlow]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Training of Acoustic Models for Large Vocabulary Continuous Speech Recognition with TensorFlow</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170093.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-3|PAPER Tue-P-3-2-3 — Duration Mismatch Compensation Using Four-Covariance Model and Deep Neural Network for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Duration Mismatch Compensation Using Four-Covariance Model and Deep Neural Network for Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171311.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-8|PAPER Wed-P-6-2-8 — Acoustic Pairing of Original and Dubbed Voices in the Context of Video Game Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Pairing of Original and Dubbed Voices in the Context of Video Game Localization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172059.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-B-5|PAPER Mon-S&T-2-B-5 — Mylly — The Mill: A New Platform for Processing Speech and Text Corpora Easily and Efficiently]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mylly — The Mill: A New Platform for Processing Speech and Text Corpora Easily and Efficiently</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171538.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-11|PAPER Wed-P-8-4-11 — Segment Level Voice Conversion with Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Segment Level Voice Conversion with Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172053.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-6|PAPER Tue-S&T-3-A-6 — MetaLab: A Repository for Meta-Analyses on Language Development, and More]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MetaLab: A Repository for Meta-Analyses on Language Development, and More</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170837.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-5|PAPER Tue-P-4-3-5 — End-of-Utterance Prediction by Prosodic Features and Phrase-Dependency Structure in Spontaneous Japanese Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-of-Utterance Prediction by Prosodic Features and Phrase-Dependency Structure in Spontaneous Japanese Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170480.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-4|PAPER Thu-O-9-4-4 — Compressed Time Delay Neural Network for Small-Footprint Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Compressed Time Delay Neural Network for Small-Footprint Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172033.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-B-6|PAPER Tue-S&T-3-B-6 — Reading Validation for Pronunciation Evaluation in the Digitala Project]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reading Validation for Pronunciation Evaluation in the Digitala Project</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170103.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-4|PAPER Wed-O-7-8-4 — Improved Subword Modeling for WFST-Based Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Subword Modeling for WFST-Based Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-6|PAPER Wed-S&T-6-A-6 — SIAK — A Game for Foreign Language Pronunciation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SIAK — A Game for Foreign Language Pronunciation Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171115.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-4-5|PAPER Thu-O-10-4-5 — Automatic Construction of the Finnish Parliament Speech Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Construction of the Finnish Parliament Speech Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172032.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-B-2|PAPER Tue-S&T-3-B-2 — Combining Gaussian Mixture Models and Segmental Feature Models for Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Combining Gaussian Mixture Models and Segmental Feature Models for Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171032.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-13|PAPER Tue-P-4-3-13 — Domain-Independent User Satisfaction Reward Estimation for Dialogue Policy Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Independent User Satisfaction Reward Estimation for Dialogue Policy Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-2|PAPER Tue-S&T-3-A-2 — Bob Speaks Kaldi]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bob Speaks Kaldi</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171310.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-1-1|PAPER Mon-O-2-1-1 — Approaches for Neural-Network Language Model Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Approaches for Neural-Network Language Model Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170906.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-4|PAPER Mon-SS-1-8-4 — Countermeasures for Automatic Speaker Verification Replay Spoofing Attack : On Data Augmentation, Feature Representation, Classification and Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Countermeasures for Automatic Speaker Verification Replay Spoofing Attack : On Data Augmentation, Feature Representation, Classification and Fusion</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170866.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-11|PAPER Wed-P-7-3-11 — An Audio Based Piano Performance Evaluation Method Using Deep Neural Network Based Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Audio Based Piano Performance Evaluation Method Using Deep Neural Network Based Acoustic Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171445.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-6|PAPER Thu-SS-9-10-6 — End-to-End Deep Learning Framework for Speech Paralinguistics Detection Based on Perception Aware Spectrum]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Deep Learning Framework for Speech Paralinguistics Detection Based on Perception Aware Spectrum</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170480.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-4|PAPER Thu-O-9-4-4 — Compressed Time Delay Neural Network for Small-Footprint Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Compressed Time Delay Neural Network for Small-Footprint Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171222.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-9|PAPER Tue-P-5-2-9 — Interpretable Objective Assessment of Dysarthric Speech Based on Deep Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Interpretable Objective Assessment of Dysarthric Speech Based on Deep Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170221.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-1-6|PAPER Thu-O-9-1-6 — Discriminative Autoencoders for Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discriminative Autoencoders for Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171321.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-7|PAPER Wed-P-8-3-7 — Jointly Trained Sequential Labeling and Classification by Sparse Attention Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Jointly Trained Sequential Labeling and Classification by Sparse Attention Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171088.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-5|PAPER Mon-O-1-2-5 — Multimodal Prediction of Affective Dimensions via Fusing Multiple Regression Techniques]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multimodal Prediction of Affective Dimensions via Fusing Multiple Regression Techniques</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170619.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-1|PAPER Tue-O-4-8-1 — Speech Emotion Recognition with Emotion-Pair Based Framework Considering Emotion Distribution Information in Dimensional Emotion Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Emotion Recognition with Emotion-Pair Based Framework Considering Emotion Distribution Information in Dimensional Emotion Space</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171088.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-5|PAPER Mon-O-1-2-5 — Multimodal Prediction of Affective Dimensions via Fusing Multiple Regression Techniques]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multimodal Prediction of Affective Dimensions via Fusing Multiple Regression Techniques</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171568.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-8|PAPER Tue-P-4-3-8 — Entrainment in Multi-Party Spoken Dialogues at Multiple Linguistic Levels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Entrainment in Multi-Party Spoken Dialogues at Multiple Linguistic Levels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170407.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-7|PAPER Wed-P-6-2-7 — LSTM Neural Network-Based Speaker Segmentation Using Acoustic and Language Modelling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LSTM Neural Network-Based Speaker Segmentation Using Acoustic and Language Modelling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-1-3|PAPER Tue-O-5-1-3 — Improving Speech Recognition by Revising Gated Recurrent Units]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Speech Recognition by Revising Gated Recurrent Units</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171498.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-5|PAPER Tue-O-5-2-5 — Analysis and Description of ABC Submission to NIST SRE 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Description of ABC Submission to NIST SRE 2016</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170803.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-7|PAPER Tue-P-3-2-7 — Analysis of Score Normalization in Multilingual Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of Score Normalization in Multilingual Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172034.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-A-1|PAPER Mon-S&T-2-A-1 — Prosograph: A Tool for Prosody Visualisation of Large Speech Corpora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosograph: A Tool for Prosody Visualisation of Large Speech Corpora</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170710.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-6|PAPER Wed-P-8-1-6 — Using Prosody to Classify Discourse Relations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Prosody to Classify Discourse Relations</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172023.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-2|PAPER Wed-S&T-6-A-2 — A Thematicity-Based Prosody Enrichment Tool for CTS]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Thematicity-Based Prosody Enrichment Tool for CTS</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171353.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-6-5|PAPER Tue-O-4-6-5 — Pashto Intonation Patterns]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Pashto Intonation Patterns</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171250.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-9|PAPER Thu-P-9-4-9 — Real-Time Reactive Speech Synthesis: Incorporating Interruptions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Reactive Speech Synthesis: Incorporating Interruptions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170524.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-1|PAPER Thu-O-10-11-1 — Occupancy Detection in Commercial and Residential Environments Using Audio Signal]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Occupancy Detection in Commercial and Residential Environments Using Audio Signal</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170870.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-6-5|PAPER Tue-O-3-6-5 — Automatic Measurement of Pre-Aspiration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Measurement of Pre-Aspiration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-5|PAPER Mon-O-2-10-5 — Acoustic Modeling for Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Modeling for Google Home</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170530.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-1|PAPER Wed-P-6-2-1 — Calibration Approaches for Language Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Calibration Approaches for Language Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170605.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-2-6|PAPER Thu-O-10-2-6 — Improving Robustness of Speaker Recognition to New Conditions Using Unlabeled Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Robustness of Speaker Recognition to New Conditions Using Unlabeled Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170729.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-10|PAPER Mon-P-1-4-10 — Estimation of Gap Between Current Language Models and Human Performance]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Estimation of Gap Between Current Language Models and Human Performance</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170147.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-10-3|PAPER Wed-O-8-10-3 — Approximated and Domain-Adapted LSTM Language Models for First-Pass Decoding in Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Approximated and Domain-Adapted LSTM Language Models for First-Pass Decoding in Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170152.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-9|PAPER Wed-P-6-2-9 — Homogeneity Measure Impact on Target and Non-Target Trials in Forensic Voice Comparison]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Homogeneity Measure Impact on Target and Non-Target Trials in Forensic Voice Comparison</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171482.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-3|PAPER Wed-O-7-4-3 — Investigating the Effect of ASR Tuning on Named Entity Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Effect of ASR Tuning on Named Entity Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170357.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-3|PAPER Wed-P-8-3-3 — Internal Memory Gate for Recurrent Neural Networks with Application to Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Internal Memory Gate for Recurrent Neural Networks with Application to Spoken Language Understanding</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171029.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-5|PAPER Wed-P-8-3-5 — Quaternion Denoising Encoder-Decoder for Theme Identification of Telephone Conversations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Quaternion Denoising Encoder-Decoder for Theme Identification of Telephone Conversations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171292.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-6-3|PAPER Mon-O-1-6-3 — Acoustics and Articulation of Medial versus Final Coronal Stop Gemination Contrasts in Moroccan Arabic]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustics and Articulation of Medial versus Final Coronal Stop Gemination Contrasts in Moroccan Arabic</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170829.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-4-5|PAPER Tue-O-3-4-5 — Nonparametrically Trained Probabilistic Linear Discriminant Analysis for i-Vector Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nonparametrically Trained Probabilistic Linear Discriminant Analysis for i-Vector Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171203.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-10-1|PAPER Wed-O-8-10-1 — Effectively Building Tera Scale MaxEnt Language Models Incorporating Non-Linguistic Signals]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effectively Building Tera Scale MaxEnt Language Models Incorporating Non-Linguistic Signals</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170678.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-6|PAPER Mon-P-1-1-6 — Time-Domain Envelope Modulating the Noise Component of Excitation in a Continuous Residual-Based Vocoder for Statistical Parametric Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time-Domain Envelope Modulating the Noise Component of Excitation in a Continuous Residual-Based Vocoder for Statistical Parametric Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171158.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-2-4|PAPER Tue-O-4-2-4 — Humans do not Maximize the Probability of Correct Decision When Recognizing DANTALE Words in Noise]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Humans do not Maximize the Probability of Correct Decision When Recognizing DANTALE Words in Noise</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172053.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-6|PAPER Tue-S&T-3-A-6 — MetaLab: A Repository for Meta-Analyses on Language Development, and More]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MetaLab: A Repository for Meta-Analyses on Language Development, and More</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170722.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-9|PAPER Wed-P-7-2-9 — Kinematic Signatures of Prosody in Lombard Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Kinematic Signatures of Prosody in Lombard Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172023.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-2|PAPER Wed-S&T-6-A-2 — A Thematicity-Based Prosody Enrichment Tool for CTS]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Thematicity-Based Prosody Enrichment Tool for CTS</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171386.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-9|PAPER Mon-P-1-2-9 — Montreal Forced Aligner: Trainable Text-Speech Alignment Using Kaldi]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Montreal Forced Aligner: Trainable Text-Speech Alignment Using Kaldi</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171390.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-2|PAPER Thu-P-9-3-2 — Polyglot and Speech Corpus Tools: A System for Representing, Integrating, and Querying Speech Corpora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Polyglot and Speech Corpus Tools: A System for Representing, Integrating, and Querying Speech Corpora</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170477.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-2|PAPER Tue-P-4-1-2 — Residual LSTM: Design of a Deep Recurrent Architecture for Distant Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Residual LSTM: Design of a Deep Recurrent Architecture for Distant Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170842.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-3|PAPER Wed-O-7-1-3 — Discovering Language in Marmoset Vocalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discovering Language in Marmoset Vocalization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170650.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-12|PAPER Wed-P-6-3-12 — Improving Speech Recognizers by Refining Broadcast Data with Inaccurate Subtitle Timestamps]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Speech Recognizers by Refining Broadcast Data with Inaccurate Subtitle Timestamps</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-5|PAPER Tue-S&T-3-A-5 — Speech Recognition and Understanding on Hardware-Accelerated DSP]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Recognition and Understanding on Hardware-Accelerated DSP</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171442.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-1-6|PAPER Mon-O-2-1-6 — Residual Memory Networks in Language Modeling: Improving the Reputation of Feed-Forward Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Residual Memory Networks in Language Modeling: Improving the Reputation of Feed-Forward Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-4|PAPER Mon-P-2-3-4 — 2016 BUT Babel System: Multilingual BLSTM Acoustic Model with i-Vector Based Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">2016 BUT Babel System: Multilingual BLSTM Acoustic Model with i-Vector Based Adaptation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-1|PAPER Wed-SS-6-2-1 — Team ELISA System for DARPA LORELEI Speech Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team ELISA System for DARPA LORELEI Speech Evaluation 2016</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171273.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-6|PAPER Thu-O-9-4-6 — Similarity Learning Based Query Modeling for Keyword Search]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Similarity Learning Based Query Modeling for Keyword Search</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171080.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-2|PAPER Thu-O-10-8-2 — Voice Disguise vs. Impersonation: Acoustic and Perceptual Measurements of Vocal Flexibility in Non Experts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Disguise vs. Impersonation: Acoustic and Perceptual Measurements of Vocal Flexibility in Non Experts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171762.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-6|PAPER Mon-O-1-10-6 — Integrating Articulatory Information in Deep Learning-Based Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Integrating Articulatory Information in Deep Learning-Based Text-to-Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170952.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-7|PAPER Wed-P-6-1-7 — Multiview Representation Learning via Deep CCA for Silent Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multiview Representation Learning via Deep CCA for Silent Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170819.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-3|PAPER Mon-O-2-2-3 — Evaluation of the Neurological State of People with Parkinson’s Disease Using i-Vectors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluation of the Neurological State of People with Parkinson’s Disease Using i-Vectors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170882.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-11|PAPER Thu-P-9-3-11 — Glottal Source Estimation from Coded Telephone Speech Using a Deep Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glottal Source Estimation from Coded Telephone Speech Using a Deep Neural Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170589.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-4|PAPER Tue-P-5-2-4 — Zero Frequency Filter Based Analysis of Voice Disorders]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero Frequency Filter Based Analysis of Voice Disorders</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171138.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-4-3|PAPER Wed-O-6-4-3 — A Spectro-Temporal Demodulation Technique for Pitch Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Spectro-Temporal Demodulation Technique for Pitch Estimation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170587.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-6|PAPER Thu-P-9-4-6 — Phase Modeling Using Integrated Linear Prediction Residual for Statistical Parametric Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phase Modeling Using Integrated Linear Prediction Residual for Statistical Parametric Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171307.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-12|PAPER Wed-P-6-2-12 — IITG-Indigo System for NIST 2016 SRE Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">IITG-Indigo System for NIST 2016 SRE Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170819.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-3|PAPER Mon-O-2-2-3 — Evaluation of the Neurological State of People with Parkinson’s Disease Using i-Vectors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluation of the Neurological State of People with Parkinson’s Disease Using i-Vectors</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171018.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-4-2|PAPER Tue-O-3-4-2 — Tied Variational Autoencoder Backends for i-Vector Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tied Variational Autoencoder Backends for i-Vector Speaker Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170537.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-2|PAPER Tue-O-5-2-2 — The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172037.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-B-4|PAPER Tue-S&T-3-B-4 — Low-Frequency Ultrasonic Communication for Speech Broadcasting in Public Transportation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low-Frequency Ultrasonic Communication for Speech Broadcasting in Public Transportation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170982.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-6|PAPER Mon-P-1-2-6 — Audio Classification Using Class-Specific Learned Descriptors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Classification Using Class-Specific Learned Descriptors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170788.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-7|PAPER Mon-P-2-3-7 — Multi-Task Learning Using Mismatched Transcription for Under-Resourced Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Task Learning Using Mismatched Transcription for Under-Resourced Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170520.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-11-1|PAPER Wed-SS-7-11-1 — Multi-Task Learning for Mispronunciation Detection on Singapore Children’s Mandarin Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Task Learning for Mispronunciation Detection on Singapore Children’s Mandarin Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170464.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-5|PAPER Wed-P-6-1-5 — Improving Mispronunciation Detection for Non-Native Learners with Multisource Information and LSTM-Based Deep Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Mispronunciation Detection for Non-Native Learners with Multisource Information and LSTM-Based Deep Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171567.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-11|PAPER Wed-P-6-1-11 — Mismatched Crowdsourcing from Multiple Annotator Languages for Recognizing Zero-Resourced Languages: A Nullspace Clustering Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mismatched Crowdsourcing from Multiple Annotator Languages for Recognizing Zero-Resourced Languages: A Nullspace Clustering Approach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-12|PAPER Thu-P-9-4-12 — Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170269.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-2|PAPER Wed-P-8-3-2 — Parallel Hierarchical Attention Networks with Shared Memory Reader for Multi-Stream Conversational Document Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parallel Hierarchical Attention Networks with Shared Memory Reader for Multi-Stream Conversational Document Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171367.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-3|PAPER Wed-P-6-3-3 — Fast and Accurate OOV Decoder on High-Level Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fast and Accurate OOV Decoder on High-Level Features</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171212.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-3|PAPER Thu-O-9-4-3 — The STC Keyword Search System for OpenKWS 2016 Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The STC Keyword Search System for OpenKWS 2016 Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170833.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-8-11-5|PAPER Wed-SS-8-11-5 — Social Attractiveness in Dialogs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Social Attractiveness in Dialogs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170177.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-11|PAPER Wed-P-8-1-11 — What You See is What You Get Prosodically Less — Visibility Shapes Prosodic Prominence Production in Spontaneous Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">What You See is What You Get Prosodically Less — Visibility Shapes Prosodic Prominence Production in Spontaneous Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171231.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-11|PAPER Wed-P-6-3-11 — Evaluating Automatic Topic Segmentation as a Segment Retrieval Task]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluating Automatic Topic Segmentation as a Segment Retrieval Task</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171178.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-6|PAPER Wed-P-8-3-6 — ASR Error Management for Improving Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ASR Error Management for Improving Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170061.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-4|PAPER Wed-O-7-2-4 — Coupled Initialization of Multi-Channel Non-Negative Matrix Factorization Based on Spatial and Spectral Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coupled Initialization of Multi-Channel Non-Negative Matrix Factorization Based on Spatial and Spectral Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170842.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-3|PAPER Wed-O-7-1-3 — Discovering Language in Marmoset Vocalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discovering Language in Marmoset Vocalization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170343.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-4|PAPER Mon-P-1-4-4 — Towards Better Decoding and Language Model Integration in Sequence to Sequence Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Better Decoding and Language Model Integration in Sequence to Sequence Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170035.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-1|PAPER Mon-P-2-4-1 — An RNN Model of Text Normalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An RNN Model of Text Normalization</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170233.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-1|PAPER Tue-O-3-1-1 — A Comparison of Sequence-to-Sequence Models for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Sequence-to-Sequence Models for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170503.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-4-1|PAPER Wed-O-8-4-1 — Sequence-to-Sequence Models Can Directly Translate Foreign Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sequence-to-Sequence Models Can Directly Translate Foreign Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170232.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-5|PAPER Thu-O-10-1-5 — An Analysis of “Attention” in Sequence-to-Sequence Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Analysis of “Attention” in Sequence-to-Sequence Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-11|PAPER Thu-P-9-4-11 — Tacotron: Towards End-to-End Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tacotron: Towards End-to-End Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170555.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-4|PAPER Tue-O-5-2-4 — UTD-CRSS Systems for 2016 NIST Speaker Recognition Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">UTD-CRSS Systems for 2016 NIST Speaker Recognition Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172047.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-B-4|PAPER Mon-S&T-2-B-4 — Emojive! Collecting Emotion Data from Speech and Facial Expression Using Mobile Game App]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emojive! Collecting Emotion Data from Speech and Facial Expression Using Mobile Game App</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171590.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-14|PAPER Wed-P-7-3-14 — A Note Based Query By Humming System Using Convolutional Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Note Based Query By Humming System Using Convolutional Neural Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171407.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-1|PAPER Wed-SS-7-1-1 — The ABAIR Initiative: Bringing Spoken Irish into the Digital Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The ABAIR Initiative: Bringing Spoken Irish into the Digital Space</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170874.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-8|PAPER Mon-P-2-3-8 — Generalized Distillation Framework for Speaker Normalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generalized Distillation Framework for Speaker Normalization</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-9|PAPER Wed-SS-7-1-9 — Joint Estimation of Articulatory Features and Acoustic Models for Low-Resource Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Estimation of Articulatory Features and Acoustic Models for Low-Resource Languages</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170878.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-4|PAPER Wed-O-8-8-4 — On Improving Acoustic Models for TORGO Dysarthric Speech Database]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Improving Acoustic Models for TORGO Dysarthric Speech Database</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171576.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-4|PAPER Wed-O-6-1-4 — Speaker-Specific Biomechanical Model-Based Investigation of a Simple Speech Task Based on Tagged-MRI]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker-Specific Biomechanical Model-Based Investigation of a Simple Speech Task Based on Tagged-MRI</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171525.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-8|PAPER Wed-P-8-3-8 — To Plan or not to Plan? Discourse Planning in Slot-Value Informed Sequence to Sequence Models for Language Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">To Plan or not to Plan? Discourse Planning in Slot-Value Informed Sequence to Sequence Models for Language Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171139.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-14|PAPER Wed-SS-7-1-14 — Rapid Development of TTS Corpora for Four South African Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rapid Development of TTS Corpora for Four South African Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171689.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-11-6|PAPER Wed-SS-7-11-6 — Learning Weakly Supervised Multimodal Phoneme Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Weakly Supervised Multimodal Phoneme Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171308.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-2|PAPER Tue-SS-3-11-2 — Robustness Over Time-Varying Channels in DNN-HMM ASR Based Human-Robot Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robustness Over Time-Varying Channels in DNN-HMM ASR Based Human-Robot Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170917.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-6|PAPER Tue-O-4-8-6 — Attentive Convolutional Neural Network Based Speech Emotion Recognition: A Study on the Impact of Input Features, Signal Length, and Acted Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attentive Convolutional Neural Network Based Speech Emotion Recognition: A Study on the Impact of Input Features, Signal Length, and Acted Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171159.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-6-1|PAPER Wed-O-6-6-1 — Prosodic Event Recognition Using Convolutional Neural Networks with Context Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Event Recognition Using Convolutional Neural Networks with Context Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-5|PAPER Mon-O-2-2-5 — Earlier Identification of Children with Autism Spectrum Disorder: An Automatic Vocalisation-Based Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Earlier Identification of Children with Autism Spectrum Disorder: An Automatic Vocalisation-Based Approach</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170730.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-4|PAPER Tue-SS-3-11-4 — Automatic Classification of Autistic Child Vocalisations: A Novel Database and Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Classification of Autistic Child Vocalisations: A Novel Database and Results</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172036.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-B-3|PAPER Tue-S&T-3-B-3 — “Did you laugh enough today?” — Deep Neural Networks for Mobile and Wearable Laughter Trackers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">“Did you laugh enough today?” — Deep Neural Networks for Mobile and Wearable Laughter Trackers</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170409.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-5|PAPER Wed-P-7-4-5 — Emotional Speech of Mentally and Physically Disabled Individuals: Introducing the EmotAsS Database and First Findings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotional Speech of Mentally and Physically Disabled Individuals: Introducing the EmotAsS Database and First Findings</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170104.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-1|PAPER Wed-P-8-2-1 — The Perception of Emotions in Noisified Nonsense Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Perception of Emotions in Noisified Nonsense Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-5|PAPER Wed-P-8-2-5 — Implementing Gender-Dependent Vowel-Level Analysis for Boosting Speech-Based Depression Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Implementing Gender-Dependent Vowel-Level Analysis for Boosting Speech-Based Depression Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-2|PAPER Thu-SS-10-10-2 — An ‘End-to-Evolution’ Hybrid Approach for Snore Sound Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An ‘End-to-Evolution’ Hybrid Approach for Snore Sound Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-3|PAPER Thu-SS-10-10-3 — Snore Sound Classification Using Image-Based Deep Spectrum Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Snore Sound Classification Using Image-Based Deep Spectrum Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-1|PAPER Mon-SS-1-8-1 — The ASVspoof 2017 Challenge: Assessing the Limits of Replay Spoofing Attack Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The ASVspoof 2017 Challenge: Assessing the Limits of Replay Spoofing Attack Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-4-3|PAPER Wed-O-8-4-3 — Assessing the Tolerance of Neural Machine Translation Systems Against Speech Recognition Errors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Assessing the Tolerance of Neural Machine Translation Systems Against Speech Recognition Errors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171633.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-8-6|PAPER Wed-O-6-8-6 — Visual, Laughter, Applause and Spoken Expression Features for Predicting Engagement Within TED Talks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visual, Laughter, Applause and Spoken Expression Features for Predicting Engagement Within TED Talks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171584.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-9|PAPER Wed-P-8-2-9 — Speech Rate Comparison When Talking to a System and Talking to a Human: A Study from a Speech-to-Speech, Machine Translation Mediated Map Task]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Rate Comparison When Talking to a System and Talking to a Human: A Study from a Speech-to-Speech, Machine Translation Mediated Map Task</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171496.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-12|PAPER Wed-P-8-3-12 — Speaker Dependency Analysis, Audiovisual Fusion Cues and a Multimodal BLSTM for Conversational Engagement Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Dependency Analysis, Audiovisual Fusion Cues and a Multimodal BLSTM for Conversational Engagement Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-4-3|PAPER Wed-O-8-4-3 — Assessing the Tolerance of Neural Machine Translation Systems Against Speech Recognition Errors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Assessing the Tolerance of Neural Machine Translation Systems Against Speech Recognition Errors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171154.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-3|PAPER Wed-P-7-2-3 — Vowel and Consonant Sequences in three Bavarian Dialects of Austria]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Vowel and Consonant Sequences in three Bavarian Dialects of Austria</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170293.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-2|PAPER Tue-SS-5-11-2 — Relationships Between Speech Timing and Perceived Hostility in a French Corpus of Political Debates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Relationships Between Speech Timing and Perceived Hostility in a French Corpus of Political Debates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170159.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-10-1|PAPER Tue-O-5-10-1 — Inferring Stance from Prosody]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Inferring Stance from Prosody</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-5|PAPER Tue-P-5-2-5 — Hypernasality Severity Analysis in Cleft Lip and Palate Speech Using Vowel Space Area]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hypernasality Severity Analysis in Cleft Lip and Palate Speech Using Vowel Space Area</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170480.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-4|PAPER Thu-O-9-4-4 — Compressed Time Delay Neural Network for Small-Footprint Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Compressed Time Delay Neural Network for Small-Footprint Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171018.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-4-2|PAPER Tue-O-3-4-2 — Tied Variational Autoencoder Backends for i-Vector Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tied Variational Autoencoder Backends for i-Vector Speaker Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171498.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-5|PAPER Tue-O-5-2-5 — Analysis and Description of ABC Submission to NIST SRE 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Description of ABC Submission to NIST SRE 2016</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170137.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-1|PAPER Tue-P-3-1-1 — A Generative Model for Score Normalization in Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Generative Model for Score Normalization in Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171032.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-13|PAPER Tue-P-4-3-13 — Domain-Independent User Satisfaction Reward Estimation for Dialogue Policy Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Independent User Satisfaction Reward Estimation for Dialogue Policy Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-1|PAPER Wed-SS-6-2-1 — Team ELISA System for DARPA LORELEI Speech Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team ELISA System for DARPA LORELEI Speech Evaluation 2016</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170226.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-3|PAPER Wed-SS-7-1-3 — Extracting Situation Frames from Non-English Speech: Evaluation Framework and Pilot Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extracting Situation Frames from Non-English Speech: Evaluation Framework and Pilot Results</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172031.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-2|PAPER Wed-S&T-6-B-2 — A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-1|PAPER Wed-SS-6-2-1 — Team ELISA System for DARPA LORELEI Speech Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team ELISA System for DARPA LORELEI Speech Evaluation 2016</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-3|PAPER Wed-O-6-1-3 — Comparison of Basic Beatboxing Articulations Between Expert and Novice Artists Using Real-Time Magnetic Resonance Imaging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Basic Beatboxing Articulations Between Expert and Novice Artists Using Real-Time Magnetic Resonance Imaging</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171631.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-5|PAPER Wed-O-6-1-5 — Sounds of the Human Vocal Tract]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sounds of the Human Vocal Tract</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172048.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-A-2|PAPER Mon-S&T-2-A-2 — ChunkitApp: Investigating the Relevant Units of Online Speech Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ChunkitApp: Investigating the Relevant Units of Online Speech Processing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170440.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-3|PAPER Mon-P-1-2-3 — Attention Based CLDNNs for Short-Duration Acoustic Scene Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention Based CLDNNs for Short-Duration Acoustic Scene Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-4|PAPER Tue-S&T-3-A-4 — A Signal Processing Approach for Speaker Separation Using SFF Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Signal Processing Approach for Speaker Separation Using SFF Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171239.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-5|PAPER Thu-SS-9-11-5 — A Unified Numerical Simulation of Vowel Production That Comprises Phonation and the Emitted Sound]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Unified Numerical Simulation of Vowel Production That Comprises Phonation and the Emitted Sound</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170071.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-4|PAPER Tue-O-3-1-4 — Multitask Learning with CTC and Segmental CRF for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multitask Learning with CTC and Segmental CRF for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170717.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-8|PAPER Mon-P-1-4-8 — Use of Global and Acoustic Features Associated with Contextual Factors to Adapt Language Models for Spontaneous Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Use of Global and Acoustic Features Associated with Contextual Factors to Adapt Language Models for Spontaneous Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171010.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-2-5|PAPER Tue-O-3-2-5 — Acoustic-to-Articulatory Mapping Based on Mixture of Probabilistic Canonical Correlation Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic-to-Articulatory Mapping Based on Mixture of Probabilistic Canonical Correlation Analysis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170961.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-10-3|PAPER Tue-O-4-10-3 — Parallel-Data-Free Many-to-Many Voice Conversion Based on DNN Integrated with Eigenspace Using a Non-Parallel Speech Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parallel-Data-Free Many-to-Many Voice Conversion Based on DNN Integrated with Eigenspace Using a Non-Parallel Speech Corpus</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170728.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-2|PAPER Tue-O-5-8-2 — Automatic Scoring of Shadowing Speech Based on DNN Posteriors and Their DTW]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Scoring of Shadowing Speech Based on DNN Posteriors and Their DTW</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170521.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-3|PAPER Mon-P-2-4-3 — Prosody Aware Word-Level Encoder Based on BLSTM-RNNs for DNN-Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosody Aware Word-Level Encoder Based on BLSTM-RNNs for DNN-Based Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170719.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-4|PAPER Tue-O-3-8-4 — DNN-SPACE: DNN-HMM-Based Generative Model of Voice F,,0,, Contours for Statistical Phrase/Accent Command Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DNN-SPACE: DNN-HMM-Based Generative Model of Voice F,,0,, Contours for Statistical Phrase/Accent Command Estimation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171431.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-6|PAPER Tue-SS-3-11-6 — Studying the Link Between Inter-Speaker Coordination and Speech Imitation Through Human-Machine Interactions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Studying the Link Between Inter-Speaker Coordination and Speech Imitation Through Human-Machine Interactions</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170795.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-6-4|PAPER Wed-O-6-6-4 — The Relationship Between F0 Synchrony and Speech Convergence in Dyadic Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Relationship Between F0 Synchrony and Speech Convergence in Dyadic Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170934.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-5|PAPER Wed-O-7-1-5 — The Phonological Status of the French Initial Accent and its Role in Semantic Processing: An Event-Related Potentials Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Phonological Status of the French Initial Accent and its Role in Semantic Processing: An Event-Related Potentials Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171287.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-2|PAPER Wed-SS-6-11-2 — The LENA System Applied to Swedish: Reliability of the Adult Word Count Estimate]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The LENA System Applied to Swedish: Reliability of the Adult Word Count Estimate</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171038.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-9|PAPER Wed-P-8-4-9 — Speaker Adaptation in DNN-Based Speech Synthesis Using d-Vectors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Adaptation in DNN-Based Speech Synthesis Using d-Vectors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170623.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-4|PAPER Tue-SS-5-11-4 — Prosodic Analysis of Attention-Drawing Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Analysis of Attention-Drawing Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171538.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-11|PAPER Wed-P-8-4-11 — Segment Level Voice Conversion with Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Segment Level Voice Conversion with Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170009.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-2|PAPER Tue-P-5-1-2 — A Comparison of Danish Listeners’ Processing Cost in Judging the Truth Value of Norwegian, Swedish, and English Sentences]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Danish Listeners’ Processing Cost in Judging the Truth Value of Norwegian, Swedish, and English Sentences</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171139.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-14|PAPER Wed-SS-7-1-14 — Rapid Development of TTS Corpora for Four South African Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rapid Development of TTS Corpora for Four South African Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170650.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-12|PAPER Wed-P-6-3-12 — Improving Speech Recognizers by Refining Broadcast Data with Inaccurate Subtitle Timestamps]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Speech Recognizers by Refining Broadcast Data with Inaccurate Subtitle Timestamps</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-5|PAPER Tue-S&T-3-A-5 — Speech Recognition and Understanding on Hardware-Accelerated DSP]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Recognition and Understanding on Hardware-Accelerated DSP</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170400.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-2|PAPER Tue-O-5-4-2 — Speaking Style Conversion from Normal to Lombard Speech Using a Glottal Vocoder and Bayesian GMMs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaking Style Conversion from Normal to Lombard Speech Using a Glottal Vocoder and Bayesian GMMs</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-2|PAPER Wed-P-6-1-2 — Comparison of Non-Parametric Bayesian Mixture Models for Syllable Clustering and Zero-Resource Speech Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Non-Parametric Bayesian Mixture Models for Syllable Clustering and Zero-Resource Speech Processing</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171237.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-8|PAPER Wed-P-8-1-8 — Evaluation of Spectral Tilt Measures for Sentence Prominence Under Different Noise Conditions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluation of Spectral Tilt Measures for Sentence Prominence Under Different Noise Conditions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170636.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-1|PAPER Wed-SS-6-11-1 — SLPAnnotator: Tools for Implementing Sign Language Phonetic Annotation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SLPAnnotator: Tools for Implementing Sign Language Phonetic Annotation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170159.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-10-1|PAPER Tue-O-5-10-1 — Inferring Stance from Prosody]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Inferring Stance from Prosody</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171498.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-5|PAPER Tue-O-5-2-5 — Analysis and Description of ABC Submission to NIST SRE 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Description of ABC Submission to NIST SRE 2016</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170803.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-7|PAPER Tue-P-3-2-7 — Analysis of Score Normalization in Multilingual Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of Score Normalization in Multilingual Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170501.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-6-4|PAPER Wed-O-7-6-4 — Speech and Text Analysis for Multimodal Addressee Detection in Human-Human-Computer Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech and Text Analysis for Multimodal Addressee Detection in Human-Human-Computer Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170360.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-1|PAPER Mon-SS-2-8-1 — Audio Replay Attack Detection with Deep Learning Frameworks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Replay Attack Detection with Deep Learning Frameworks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170087.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-8-1|PAPER Wed-O-6-8-1 — Emotional Features for Speech Overlaps Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotional Features for Speech Overlaps Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-3|PAPER Wed-S&T-6-B-3 — Towards an Autarkic Embedded Cognitive User Interface]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards an Autarkic Embedded Cognitive User Interface</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170375.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-11|PAPER Mon-P-2-1-11 — The Relative Cueing Power of F0 and Duration in German Prominence Perception]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Relative Cueing Power of F0 and Duration in German Prominence Perception</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-1|PAPER Tue-SS-5-11-1 — Clear Speech — Mere Speech? How Segmental and Prosodic Speech Reduction Shape the Impression That Speakers Create on Listeners]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Clear Speech — Mere Speech? How Segmental and Prosodic Speech Reduction Shape the Impression That Speakers Create on Listeners</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171349.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-8-11-6|PAPER Wed-SS-8-11-6 — A Gender Bias in the Acoustic-Melodic Features of Charismatic Speech?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Gender Bias in the Acoustic-Melodic Features of Charismatic Speech?</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-5|PAPER Thu-O-10-8-5 — How Long is Too Long? How Pause Features After Requests Affect the Perceived Willingness of Affirmative Answers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">How Long is Too Long? How Pause Features After Requests Affect the Perceived Willingness of Affirmative Answers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171262.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-7|PAPER Wed-SS-7-1-7 — Leveraging Text Data for Word Segmentation for Underresourced Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging Text Data for Word Segmentation for Underresourced Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171340.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-10|PAPER Mon-P-2-4-10 — Learning Word Vector Representations Based on Acoustic Counts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Word Vector Representations Based on Acoustic Counts</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170628.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-1-4|PAPER Tue-O-4-1-4 — A Hierarchical Encoder-Decoder Model for Statistical Parametric Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Hierarchical Encoder-Decoder Model for Statistical Parametric Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171398.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-16|PAPER Wed-SS-7-1-16 — Nativization of Foreign Names in TTS for Automatic Reading of World News in Swahili]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nativization of Foreign Names in TTS for Automatic Reading of World News in Swahili</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171482.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-3|PAPER Wed-O-7-4-3 — Investigating the Effect of ASR Tuning on Named Entity Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Effect of ASR Tuning on Named Entity Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-5|PAPER Mon-O-2-10-5 — Acoustic Modeling for Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Modeling for Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170505.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-2|PAPER Mon-P-2-3-2 — CTC Training of Multi-Phone Acoustic Models for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CTC Training of Multi-Phone Acoustic Models for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170231.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-1-3|PAPER Thu-O-9-1-3 — Annealed f-Smoothing as a Mechanism to Speed up Neural Network Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Annealed f-Smoothing as a Mechanism to Speed up Neural Network Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-6|PAPER Wed-S&T-6-A-6 — SIAK — A Game for Foreign Language Pronunciation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SIAK — A Game for Foreign Language Pronunciation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170448.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-2|PAPER Thu-SS-9-11-2 — A Semi-Polar Grid Strategy for the Three-Dimensional Finite Element Simulation of Vowel-Vowel Sequences]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Semi-Polar Grid Strategy for the Three-Dimensional Finite Element Simulation of Vowel-Vowel Sequences</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171614.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-6|PAPER Thu-SS-9-11-6 — Synthesis of VV Utterances from Muscle Activation to Sound with a 3D Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Synthesis of VV Utterances from Muscle Activation to Sound with a 3D Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171251.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-5|PAPER Wed-O-8-8-5 — Glottal Source Features for Automatic Speech-Based Depression Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glottal Source Features for Automatic Speech-Based Depression Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171357.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-4-6|PAPER Thu-O-10-4-6 — Building Audio-Visual Phonetically Annotated Arabic Corpus for Expressive Text to Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Building Audio-Visual Phonetically Annotated Arabic Corpus for Expressive Text to Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171498.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-5|PAPER Tue-O-5-2-5 — Analysis and Description of ABC Submission to NIST SRE 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Description of ABC Submission to NIST SRE 2016</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-1|PAPER Wed-SS-6-2-1 — Team ELISA System for DARPA LORELEI Speech Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team ELISA System for DARPA LORELEI Speech Evaluation 2016</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170226.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-3|PAPER Wed-SS-7-1-3 — Extracting Situation Frames from Non-English Speech: Evaluation Framework and Pilot Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extracting Situation Frames from Non-English Speech: Evaluation Framework and Pilot Results</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171498.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-5|PAPER Tue-O-5-2-5 — Analysis and Description of ABC Submission to NIST SRE 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Description of ABC Submission to NIST SRE 2016</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170803.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-7|PAPER Tue-P-3-2-7 — Analysis of Score Normalization in Multilingual Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of Score Normalization in Multilingual Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171274.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-10|PAPER Wed-P-6-1-10 — A Mostly Data-Driven Approach to Inverse Text Normalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Mostly Data-Driven Approach to Inverse Text Normalization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-4|PAPER Wed-S&T-6-B-4 — Nora the Empathetic Psychologist]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nora the Empathetic Psychologist</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170448.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-2|PAPER Thu-SS-9-11-2 — A Semi-Polar Grid Strategy for the Three-Dimensional Finite Element Simulation of Vowel-Vowel Sequences]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Semi-Polar Grid Strategy for the Three-Dimensional Finite Element Simulation of Vowel-Vowel Sequences</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171239.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-5|PAPER Thu-SS-9-11-5 — A Unified Numerical Simulation of Vowel Production That Comprises Phonation and the Emitted Sound]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Unified Numerical Simulation of Vowel Production That Comprises Phonation and the Emitted Sound</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171614.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-6|PAPER Thu-SS-9-11-6 — Synthesis of VV Utterances from Muscle Activation to Sound with a 3D Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Synthesis of VV Utterances from Muscle Activation to Sound with a 3D Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170852.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-3|PAPER Mon-O-2-10-3 — Factorial Modeling for Effective Suppression of Directional Noise]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Factorial Modeling for Effective Suppression of Directional Noise</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170210.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-2|PAPER Mon-P-1-1-2 — Robust Source-Filter Separation of Speech Signal in the Phase Domain]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Source-Filter Separation of Speech Signal in the Phase Domain</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171134.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-6-6|PAPER Tue-O-5-6-6 — The Formant Dynamics of Long Close Vowels in Three Varieties of Swedish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Formant Dynamics of Long Close Vowels in Three Varieties of Swedish</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171502.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-13|PAPER Wed-P-8-1-13 — Exploring Multidimensionality: Acoustic and Articulatory Correlates of Swedish Word Accents]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Multidimensionality: Acoustic and Articulatory Correlates of Swedish Word Accents</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171633.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-8-6|PAPER Wed-O-6-8-6 — Visual, Laughter, Applause and Spoken Expression Features for Predicting Engagement Within TED Talks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visual, Laughter, Applause and Spoken Expression Features for Predicting Engagement Within TED Talks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171058.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-4|PAPER Mon-O-1-1-4 — Improving Deliverable Speech-to-Text Systems with Multilingual Knowledge Transfer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Deliverable Speech-to-Text Systems with Multilingual Knowledge Transfer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171172.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-4-1|PAPER Wed-O-6-4-1 — A Robust and Alternative Approach to Zero Frequency Filtering Method for Epoch Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Robust and Alternative Approach to Zero Frequency Filtering Method for Epoch Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-1|PAPER Wed-O-8-8-1 — Acoustic Assessment of Disordered Voice with Continuous Speech Based on Utterance-Level ASR Posterior Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Assessment of Disordered Voice with Continuous Speech Based on Utterance-Level ASR Posterior Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170215.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-3|PAPER Wed-SS-6-2-3 — The Motivation and Development of MPAi, a Māori Pronunciation Aid]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Motivation and Development of MPAi, a Māori Pronunciation Aid</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170400.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-2|PAPER Tue-O-5-4-2 — Speaking Style Conversion from Normal to Lombard Speech Using a Glottal Vocoder and Bayesian GMMs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaking Style Conversion from Normal to Lombard Speech Using a Glottal Vocoder and Bayesian GMMs</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170848.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-3|PAPER Tue-O-5-4-3 — Reducing Mismatch in Training of DNN-Based Glottal Excitation Models in a Statistical Parametric Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reducing Mismatch in Training of DNN-Based Glottal Excitation Models in a Statistical Parametric Text-to-Speech System</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170734.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-8|PAPER Tue-P-3-1-8 — Time-Varying Autoregressions for Speaker Verification in Reverberant Conditions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time-Varying Autoregressions for Speaker Verification in Reverberant Conditions</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171237.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-8|PAPER Wed-P-8-1-8 — Evaluation of Spectral Tilt Measures for Sentence Prominence Under Different Noise Conditions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluation of Spectral Tilt Measures for Sentence Prominence Under Different Noise Conditions</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171288.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-7|PAPER Wed-P-8-4-7 — Generative Adversarial Network-Based Glottal Waveform Model for Statistical Parametric Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generative Adversarial Network-Based Glottal Waveform Model for Statistical Parametric Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170882.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-11|PAPER Thu-P-9-3-11 — Glottal Source Estimation from Coded Telephone Speech Using a Deep Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glottal Source Estimation from Coded Telephone Speech Using a Deep Neural Network</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170363.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-14|PAPER Thu-P-9-3-14 — Effects of Training Data Variety in Generating Glottal Pulses from Acoustic Features with DNNs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Training Data Variety in Generating Glottal Pulses from Acoustic Features with DNNs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170449.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-4|PAPER Thu-P-9-3-4 — Effect of Language, Speaking Style and Speaker on Long-Term F0 Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effect of Language, Speaking Style and Speaker on Long-Term F0 Estimation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170124.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-6-6|PAPER Wed-O-6-6-6 — Cross-Linguistic Study of the Production of Turn-Taking Cues in American English and Argentine Spanish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Linguistic Study of the Production of Turn-Taking Cues in American English and Argentine Spanish</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172053.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-6|PAPER Tue-S&T-3-A-6 — MetaLab: A Repository for Meta-Analyses on Language Development, and More]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MetaLab: A Repository for Meta-Analyses on Language Development, and More</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171304.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-6-6|PAPER Mon-O-1-6-6 — Nature of Contrast and Coarticulation: Evidence from Mizo Tones and Assamese Vowel Harmony]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nature of Contrast and Coarticulation: Evidence from Mizo Tones and Assamese Vowel Harmony</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171599.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-11|PAPER Wed-P-7-4-11 — Exploiting Intra-Annotator Rating Consistency Through Copeland’s Method for Estimation of Ground Truth Labels in Couples’ Therapy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploiting Intra-Annotator Rating Consistency Through Copeland’s Method for Estimation of Ground Truth Labels in Couples’ Therapy</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170218.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-2|PAPER Wed-P-8-2-2 — Attention Networks for Modeling Behaviors in Addiction Counseling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention Networks for Modeling Behaviors in Addiction Counseling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171621.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-10|PAPER Wed-P-8-2-10 — Approaching Human Performance in Behavior Estimation in Couples Therapy Using Deep Sentence Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Approaching Human Performance in Behavior Estimation in Couples Therapy Using Deep Sentence Embeddings</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171641.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-11|PAPER Wed-P-8-2-11 — Complexity in Speech and its Relation to Emotional Bond in Therapist-Patient Interactions During Suicide Risk Assessment Interviews]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Complexity in Speech and its Relation to Emotional Bond in Therapist-Patient Interactions During Suicide Risk Assessment Interviews</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171650.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-2-2|PAPER Thu-O-9-2-2 — Speaker2Vec: Unsupervised Learning and Adaptation of a Speaker Manifold Using Deep Neural Networks with an Evaluation on Speaker Segmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker2Vec: Unsupervised Learning and Adaptation of a Speaker Manifold Using Deep Neural Networks with an Evaluation on Speaker Segmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170144.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-4-6|PAPER Tue-O-3-4-6 — DNN Bottleneck Features for Speaker Clustering]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DNN Bottleneck Features for Speaker Clustering</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171498.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-5|PAPER Tue-O-5-2-5 — Analysis and Description of ABC Submission to NIST SRE 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Description of ABC Submission to NIST SRE 2016</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-5|PAPER Mon-O-2-2-5 — Earlier Identification of Children with Autism Spectrum Disorder: An Automatic Vocalisation-Based Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Earlier Identification of Children with Autism Spectrum Disorder: An Automatic Vocalisation-Based Approach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171377.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-4|PAPER Mon-SS-2-8-4 — Replay Attack Detection Using DNN for Channel Discrimination]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Replay Attack Detection Using DNN for Channel Discrimination</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170079.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-6-4|PAPER Tue-O-3-6-4 — Iterative Optimal Preemphasis for Improved Glottal-Flow Estimation by Iterative Adaptive Inverse Filtering]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Iterative Optimal Preemphasis for Improved Glottal-Flow Estimation by Iterative Adaptive Inverse Filtering</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171155.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-6-1|PAPER Tue-O-3-6-1 — Creak as a Feature of Lexical Stress in Estonian]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Creak as a Feature of Lexical Stress in Estonian</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172047.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-B-4|PAPER Mon-S&T-2-B-4 — Emojive! Collecting Emotion Data from Speech and Facial Expression Using Mobile Game App]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emojive! Collecting Emotion Data from Speech and Facial Expression Using Mobile Game App</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171590.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-14|PAPER Wed-P-7-3-14 — A Note Based Query By Humming System Using Convolutional Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Note Based Query By Humming System Using Convolutional Neural Network</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171379.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-6|PAPER Wed-P-8-2-6 — Bilingual Word Embeddings for Cross-Lingual Personality Recognition Using Convolutional Neural Nets]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bilingual Word Embeddings for Cross-Lingual Personality Recognition Using Convolutional Neural Nets</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-4|PAPER Wed-S&T-6-B-4 — Nora the Empathetic Psychologist]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nora the Empathetic Psychologist</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170157.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-10|PAPER Tue-P-3-1-10 — Using Voice Quality Features to Improve Short-Utterance, Text-Independent Speaker Verification Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Voice Quality Features to Improve Short-Utterance, Text-Independent Speaker Verification Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171239.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-5|PAPER Thu-SS-9-11-5 — A Unified Numerical Simulation of Vowel Production That Comprises Phonation and the Emitted Sound]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Unified Numerical Simulation of Vowel Production That Comprises Phonation and the Emitted Sound</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171722.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-5|PAPER Tue-O-5-4-5 — Modeling Laryngeal Muscle Activation Noise for Low-Order Physiological Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Laryngeal Muscle Activation Noise for Low-Order Physiological Based Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170048.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-13|PAPER Mon-P-2-2-13 — Wireless Neck-Surface Accelerometer and Microphone on Flex Circuit with Application to Noise-Robust Monitoring of Lombard Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Wireless Neck-Surface Accelerometer and Microphone on Flex Circuit with Application to Noise-Robust Monitoring of Lombard Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171607.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-4|PAPER Tue-P-5-1-4 — A Data-Driven Approach for Perceptually Validated Acoustic Features for Children’s Sibilant Fricative Productions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Data-Driven Approach for Perceptually Validated Acoustic Features for Children’s Sibilant Fricative Productions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171498.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-5|PAPER Tue-O-5-2-5 — Analysis and Description of ABC Submission to NIST SRE 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Description of ABC Submission to NIST SRE 2016</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171575.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-9|PAPER Tue-P-3-1-9 — Deep Speaker Embeddings for Short-Duration Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Speaker Embeddings for Short-Duration Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-2-5|PAPER Thu-O-10-2-5 — Speaker Verification Under Adverse Conditions Using i-Vector Adaptation and Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Verification Under Adverse Conditions Using i-Vector Adaptation and Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171213.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-11|PAPER Tue-P-4-3-11 — Human and Automated Scoring of Fluency, Pronunciation and Intonation During Human–Machine Spoken Dialog Interactions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human and Automated Scoring of Fluency, Pronunciation and Intonation During Human–Machine Spoken Dialog Interactions</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-5|PAPER Wed-O-7-10-5 — Improving Sub-Phone Modeling for Better Native Language Identification with Non-Native English Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Sub-Phone Modeling for Better Native Language Identification with Non-Native English Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170926.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-5|PAPER Tue-SS-3-11-5 — Crowd-Sourced Design of Artificial Attentive Listeners]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Crowd-Sourced Design of Artificial Attentive Listeners</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-12|PAPER Thu-P-9-4-12 — Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-10|PAPER Tue-P-4-3-10 — Sequence to Sequence Modeling for User Simulation in Dialog Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sequence to Sequence Modeling for User Simulation in Dialog Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171368.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-1|PAPER Thu-O-10-8-1 — What is the Relevant Population? Considerations for the Computation of Likelihood Ratios in Forensic Voice Comparison]]</div>|^<div class="cpauthorindexpersoncardpapertitle">What is the Relevant Population? Considerations for the Computation of Likelihood Ratios in Forensic Voice Comparison</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171508.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-3|PAPER Thu-P-9-3-3 — Mapping Across Feature Spaces in Forensic Voice Comparison: The Contribution of Auditory-Based Voice Quality to (Semi-)Automatic System Testing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mapping Across Feature Spaces in Forensic Voice Comparison: The Contribution of Auditory-Based Voice Quality to (Semi-)Automatic System Testing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171183.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-8|PAPER Wed-P-6-3-8 — Compensating Gender Variability in Query-by-Example Search on Speech Using Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Compensating Gender Variability in Query-by-Example Search on Speech Using Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171201.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-9|PAPER Wed-P-7-4-9 — Depression Detection Using Automatic Transcriptions of De-Identified Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Depression Detection Using Automatic Transcriptions of De-Identified Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171251.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-5|PAPER Wed-O-8-8-5 — Glottal Source Features for Automatic Speech-Based Depression Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glottal Source Features for Automatic Speech-Based Depression Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170516.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-9|PAPER Wed-P-6-3-9 — Zero-Shot Learning Across Heterogeneous Overlapping Domains]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Learning Across Heterogeneous Overlapping Domains</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-4|PAPER Mon-P-2-3-4 — 2016 BUT Babel System: Multilingual BLSTM Acoustic Model with i-Vector Based Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">2016 BUT Babel System: Multilingual BLSTM Acoustic Model with i-Vector Based Adaptation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171498.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-5|PAPER Tue-O-5-2-5 — Analysis and Description of ABC Submission to NIST SRE 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Description of ABC Submission to NIST SRE 2016</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170803.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-7|PAPER Tue-P-3-2-7 — Analysis of Score Normalization in Multilingual Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of Score Normalization in Multilingual Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171722.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-5|PAPER Tue-O-5-4-5 — Modeling Laryngeal Muscle Activation Noise for Low-Order Physiological Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Laryngeal Muscle Activation Noise for Low-Order Physiological Based Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170855.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-4|PAPER Wed-SS-7-1-4 — Eliciting Meaningful Units from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Eliciting Meaningful Units from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-6|PAPER Wed-P-7-2-6 — Effect of Formant and F0 Discontinuity on Perceived Vowel Duration: Impacts for Concatenative Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effect of Formant and F0 Discontinuity on Perceived Vowel Duration: Impacts for Concatenative Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-1|PAPER Wed-SS-6-2-1 — Team ELISA System for DARPA LORELEI Speech Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team ELISA System for DARPA LORELEI Speech Evaluation 2016</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170230.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-3|PAPER Thu-P-9-1-3 — Global SNR Estimation of Speech Signals for Unknown Noise Conditions Using Noise Adapted Non-Linear Regression]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Global SNR Estimation of Speech Signals for Unknown Noise Conditions Using Noise Adapted Non-Linear Regression</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171032.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-13|PAPER Tue-P-4-3-13 — Domain-Independent User Satisfaction Reward Estimation for Dialogue Policy Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Independent User Satisfaction Reward Estimation for Dialogue Policy Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170850.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-12|PAPER Wed-P-7-4-12 — Rhythmic Characteristics of Parkinsonian Speech: A Study on Mandarin and Polish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rhythmic Characteristics of Parkinsonian Speech: A Study on Mandarin and Polish</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170537.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-2|PAPER Tue-O-5-2-2 — The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170601.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-2|PAPER Thu-O-9-4-2 — The Kaldi OpenKWS System: Improving Low Resource Keyword Search]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Kaldi OpenKWS System: Improving Low Resource Keyword Search</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171279.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-14|PAPER Wed-P-8-1-14 — The Perception of English Intonation Patterns by German L2 Speakers of English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Perception of English Intonation Patterns by German L2 Speakers of English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171032.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-13|PAPER Tue-P-4-3-13 — Domain-Independent User Satisfaction Reward Estimation for Dialogue Policy Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Independent User Satisfaction Reward Estimation for Dialogue Policy Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-2|PAPER Mon-SS-2-8-2 — Ensemble Learning for Countermeasure of Audio Replay Spoofing Attack in ASVspoof2017]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ensemble Learning for Countermeasure of Audio Replay Spoofing Attack in ASVspoof2017</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170553.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-3|PAPER Wed-P-6-2-3 — Conditional Generative Adversarial Nets Classifier for Spoken Language Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Conditional Generative Adversarial Nets Classifier for Spoken Language Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170395.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-7|PAPER Wed-P-7-3-7 — Matrix of Polynomials Model Based Polynomial Dictionary Learning Method for Acoustic Impulse Response Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Matrix of Polynomials Model Based Polynomial Dictionary Learning Method for Acoustic Impulse Response Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170805.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-6|PAPER Thu-P-9-1-6 — Attention-Based LSTM with Multi-Task Learning for Distant Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention-Based LSTM with Multi-Task Learning for Distant Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-6|PAPER Wed-S&T-6-A-6 — SIAK — A Game for Foreign Language Pronunciation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SIAK — A Game for Foreign Language Pronunciation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171671.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-1|PAPER Mon-P-1-4-1 — Rescoring-Aware Beam Search for Reduced Search Errors in Contextual Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rescoring-Aware Beam Search for Reduced Search Errors in Contextual Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171495.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-3|PAPER Tue-P-4-3-3 — Turn-Taking Offsets and Dialogue Context]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Turn-Taking Offsets and Dialogue Context</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170844.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-3|PAPER Thu-SS-9-11-3 — A Fast Robust 1D Flow Model for a Self-Oscillating Coupled 2D FEM Vocal Fold Simulation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Fast Robust 1D Flow Model for a Self-Oscillating Coupled 2D FEM Vocal Fold Simulation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-5|PAPER Mon-O-2-2-5 — Earlier Identification of Children with Autism Spectrum Disorder: An Automatic Vocalisation-Based Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Earlier Identification of Children with Autism Spectrum Disorder: An Automatic Vocalisation-Based Approach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171365.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-10|PAPER Mon-P-2-3-10 — Factorised Representations for Neural Network Adaptation to Diverse Acoustic Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Factorised Representations for Neural Network Adaptation to Diverse Acoustic Environments</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172052.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-A-5|PAPER Mon-S&T-2-A-5 — A System for Real Time Collaborative Transcription Correction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A System for Real Time Collaborative Transcription Correction</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170392.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-10|PAPER Wed-P-6-3-10 — Hierarchical Recurrent Neural Network for Story Segmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hierarchical Recurrent Neural Network for Story Segmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170382.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-3|PAPER Mon-P-1-1-3 — A Time-Warping Pitch Tracking Algorithm Considering Fast f₀ Changes]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Time-Warping Pitch Tracking Algorithm Considering Fast f₀ Changes</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-10-5|PAPER Tue-O-5-10-5 — The Sound of Deception — What Makes a Speaker Credible?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Sound of Deception — What Makes a Speaker Credible?</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171691.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-8-11-8|PAPER Wed-SS-8-11-8 — Does Posh English Sound Attractive?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Does Posh English Sound Attractive?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171508.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-3|PAPER Thu-P-9-3-3 — Mapping Across Feature Spaces in Forensic Voice Comparison: The Contribution of Auditory-Based Voice Quality to (Semi-)Automatic System Testing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mapping Across Feature Spaces in Forensic Voice Comparison: The Contribution of Auditory-Based Voice Quality to (Semi-)Automatic System Testing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170461.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-4-1|PAPER Mon-O-1-4-1 — Improving Speaker Verification for Reverberant Conditions with Deep Neural Network Dereverberation Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Speaker Verification for Reverberant Conditions with Deep Neural Network Dereverberation Processing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170122.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-7|PAPER Mon-P-2-2-7 — Audiovisual Recalibration of Vowel Categories]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audiovisual Recalibration of Vowel Categories</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171179.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-4-1|PAPER Mon-O-2-4-1 — Phone Classification Using a Non-Linear Manifold with Broad Phone Class Dependent DNNs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phone Classification Using a Non-Linear Manifold with Broad Phone Class Dependent DNNs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171558.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-2|PAPER Wed-SS-6-2-2 — First Results in Developing a Medieval Latin Language Charter Dictation System for the East-Central Europe Region]]</div>|^<div class="cpauthorindexpersoncardpapertitle">First Results in Developing a Medieval Latin Language Charter Dictation System for the East-Central Europe Region</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172033.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-B-6|PAPER Tue-S&T-3-B-6 — Reading Validation for Pronunciation Evaluation in the Digitala Project]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reading Validation for Pronunciation Evaluation in the Digitala Project</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170103.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-4|PAPER Wed-O-7-8-4 — Improved Subword Modeling for WFST-Based Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Subword Modeling for WFST-Based Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171115.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-4-5|PAPER Thu-O-10-4-5 — Automatic Construction of the Finnish Parliament Speech Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Construction of the Finnish Parliament Speech Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170382.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-3|PAPER Mon-P-1-1-3 — A Time-Warping Pitch Tracking Algorithm Considering Fast f₀ Changes]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Time-Warping Pitch Tracking Algorithm Considering Fast f₀ Changes</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170166.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-1|PAPER Wed-P-6-1-1 — Developing On-Line Speaker Diarization System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Developing On-Line Speaker Diarization System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171419.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-2|PAPER Tue-P-3-1-2 — Content Normalization for Text-Dependent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Content Normalization for Text-Dependent Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171446.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-5|PAPER Wed-O-6-10-5 — Semi-Supervised Learning with Semantic Knowledge Extraction for Improved Speech Recognition in Air Traffic Control]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised Learning with Semantic Knowledge Extraction for Improved Speech Recognition in Air Traffic Control</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171528.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-6|PAPER Tue-O-3-8-6 — Increasing Recall of Lengthening Detection via Semi-Automatic Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Increasing Recall of Lengthening Detection via Semi-Automatic Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170177.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-11|PAPER Wed-P-8-1-11 — What You See is What You Get Prosodically Less — Visibility Shapes Prosodic Prominence Production in Spontaneous Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">What You See is What You Get Prosodically Less — Visibility Shapes Prosodic Prominence Production in Spontaneous Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170537.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-2|PAPER Tue-O-5-2-2 — The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170802.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-7|PAPER Thu-P-9-4-7 — Evaluation of a Silent Speech Interface Based on Magnetic Sensing and Deep Learning for a Phonetically Rich Vocabulary]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluation of a Silent Speech Interface Based on Magnetic Sensing and Deep Learning for a Phonetically Rich Vocabulary</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-5|PAPER Mon-O-1-1-5 — English Conversational Telephone Speech Recognition by Humans and Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">English Conversational Telephone Speech Recognition by Humans and Machines</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-1-3|PAPER Tue-O-5-1-3 — Improving Speech Recognition by Revising Gated Recurrent Units]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Speech Recognition by Revising Gated Recurrent Units</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171508.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-3|PAPER Thu-P-9-3-3 — Mapping Across Feature Spaces in Forensic Voice Comparison: The Contribution of Auditory-Based Voice Quality to (Semi-)Automatic System Testing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mapping Across Feature Spaces in Forensic Voice Comparison: The Contribution of Auditory-Based Voice Quality to (Semi-)Automatic System Testing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171242.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-3|PAPER Mon-P-2-3-3 — An Investigation of Deep Neural Networks for Multilingual Speech Recognition Training and Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Investigation of Deep Neural Networks for Multilingual Speech Recognition Training and Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171179.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-4-1|PAPER Mon-O-2-4-1 — Phone Classification Using a Non-Linear Manifold with Broad Phone Class Dependent DNNs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phone Classification Using a Non-Linear Manifold with Broad Phone Class Dependent DNNs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-4-2|PAPER Mon-O-1-4-2 — Stepsize Control for Acoustic Feedback Cancellation Based on the Detection of Reverberant Signal Periods and the Estimated System Distance]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stepsize Control for Acoustic Feedback Cancellation Based on the Detection of Reverberant Signal Periods and the Estimated System Distance</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170416.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-7|PAPER Tue-P-5-2-7 — Apkinson — A Mobile Monitoring Solution for Parkinson’s Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Apkinson — A Mobile Monitoring Solution for Parkinson’s Disease</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170101.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-2|PAPER Wed-P-7-3-2 — Audio Scene Classification with Deep Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Scene Classification with Deep Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-9|PAPER Thu-P-9-3-9 — Cross-Linguistic Distinctions Between Professional and Non-Professional Speaking Styles]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Linguistic Distinctions Between Professional and Non-Professional Speaking Styles</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170070.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-4-2|PAPER Mon-O-2-4-2 — An Investigation of Crowd Speech for Room Occupancy Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Investigation of Crowd Speech for Room Occupancy Estimation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170428.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-3|PAPER Thu-P-9-4-3 — Unit Selection with Hierarchical Cascaded Long Short Term Memory Bidirectional Recurrent Neural Nets]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unit Selection with Hierarchical Cascaded Long Short Term Memory Bidirectional Recurrent Neural Nets</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171126.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-2|PAPER Wed-O-6-1-2 — Inter-Speaker Variability: Speaker Normalisation and Quantitative Estimation of Articulatory Invariants in Speech Production for French]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Inter-Speaker Variability: Speaker Normalisation and Quantitative Estimation of Articulatory Invariants in Speech Production for French</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171194.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-8|PAPER Wed-P-8-2-8 — Big Five vs. Prosodic Features as Cues to Detect Abnormality in SSPNET-Personality Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Big Five vs. Prosodic Features as Cues to Detect Abnormality in SSPNET-Personality Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170093.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-3|PAPER Tue-P-3-2-3 — Duration Mismatch Compensation Using Four-Covariance Model and Deep Neural Network for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Duration Mismatch Compensation Using Four-Covariance Model and Deep Neural Network for Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172031.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-2|PAPER Wed-S&T-6-B-2 — A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170797.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-3|PAPER Tue-O-5-2-3 — Nuance - Politecnico di Torino’s 2016 NIST Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nuance - Politecnico di Torino’s 2016 NIST Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171398.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-16|PAPER Wed-SS-7-1-16 — Nativization of Foreign Names in TTS for Automatic Reading of World News in Swahili]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nativization of Foreign Names in TTS for Automatic Reading of World News in Swahili</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170850.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-12|PAPER Wed-P-7-4-12 — Rhythmic Characteristics of Parkinsonian Speech: A Study on Mandarin and Polish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rhythmic Characteristics of Parkinsonian Speech: A Study on Mandarin and Polish</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-5|PAPER Tue-S&T-3-A-5 — Speech Recognition and Understanding on Hardware-Accelerated DSP]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Recognition and Understanding on Hardware-Accelerated DSP</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170776.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-6|PAPER Mon-SS-1-8-6 — Audio Replay Attack Detection Using High-Frequency Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Replay Attack Detection Using High-Frequency Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-9|PAPER Thu-P-9-3-9 — Cross-Linguistic Distinctions Between Professional and Non-Professional Speaking Styles]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Linguistic Distinctions Between Professional and Non-Professional Speaking Styles</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171598.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-10-2|PAPER Wed-O-8-10-2 — Semi-Supervised Adaptation of RNNLMs by Fine-Tuning with Domain-Specific Auxiliary Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised Adaptation of RNNLMs by Fine-Tuning with Domain-Specific Auxiliary Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172042.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-5|PAPER Wed-S&T-6-A-5 — TBT (Toolkit to Build TTS): A High Performance Framework to Build Multiple Language HTS Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TBT (Toolkit to Build TTS): A High Performance Framework to Build Multiple Language HTS Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171784.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-1-5|PAPER Thu-O-9-1-5 — Exploiting Eigenposteriors for Semi-Supervised Training of DNN Acoustic Models with Sequence Discrimination]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploiting Eigenposteriors for Semi-Supervised Training of DNN Acoustic Models with Sequence Discrimination</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170999.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-4|PAPER Mon-O-1-2-4 — An Information Theoretic Analysis of the Temporal Synchrony Between Head Gestures and Prosodic Patterns in Spontaneous Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Information Theoretic Analysis of the Temporal Synchrony Between Head Gestures and Prosodic Patterns in Spontaneous Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171388.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-10|PAPER Mon-P-1-2-10 — A Robust Voiced/Unvoiced Phoneme Classification from Whispered Speech Using the ‘Color’ of Whispered Phonemes and Deep Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Robust Voiced/Unvoiced Phoneme Classification from Whispered Speech Using the ‘Color’ of Whispered Phonemes and Deep Neural Network</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170954.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-12|PAPER Tue-P-5-3-12 — Subband Selection for Binaural Speech Source Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Subband Selection for Binaural Speech Source Localization</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171550.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-8|PAPER Thu-SS-9-10-8 — Phoneme State Posteriorgram Features for Speech Based Automatic Classification of Speakers in Cold and Healthy Condition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phoneme State Posteriorgram Features for Speech Based Automatic Classification of Speakers in Cold and Healthy Condition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171211.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-1|PAPER Thu-SS-10-10-1 — A Dual Source-Filter Model of Snore Audio for Snorer Group Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Dual Source-Filter Model of Snore Audio for Snorer Group Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170242.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-4-2|PAPER Thu-O-10-4-2 — PRAV: A Phonetically Rich Audio Visual Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PRAV: A Phonetically Rich Audio Visual Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171074.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-4-5|PAPER Mon-O-2-4-5 — Estimation of Place of Articulation of Fricatives from Spectral Characteristics for Speech Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Estimation of Place of Articulation of Fricatives from Spectral Characteristics for Speech Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171304.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-6-6|PAPER Mon-O-1-6-6 — Nature of Contrast and Coarticulation: Evidence from Mizo Tones and Assamese Vowel Harmony]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nature of Contrast and Coarticulation: Evidence from Mizo Tones and Assamese Vowel Harmony</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170604.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-6-3|PAPER Tue-O-3-6-3 — Acoustic Characterization of Word-Final Glottal Stops in Mizo and Assam Sora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Characterization of Word-Final Glottal Stops in Mizo and Assam Sora</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170901.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-1|PAPER Wed-O-7-2-1 — Speech Representation Learning Using Unsupervised Data-Driven Modulation Filtering for Robust ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Representation Learning Using Unsupervised Data-Driven Modulation Filtering for Robust ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170997.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-11|PAPER Wed-P-6-2-11 — The Opensesame NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Opensesame NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171267.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-6|PAPER Mon-P-2-2-6 — The Influence on Realization and Perception of Lexical Tones from Affricate’s Aspiration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Influence on Realization and Perception of Lexical Tones from Affricate’s Aspiration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170576.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-3|PAPER Wed-O-7-10-3 — Dialect Recognition Based on Unsupervised Bottleneck Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dialect Recognition Based on Unsupervised Bottleneck Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170486.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-10|PAPER Wed-P-7-3-10 — Attention and Localization Based on a Deep Convolutional Recurrent Model for Weakly Supervised Audio Tagging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention and Localization Based on a Deep Convolutional Recurrent Model for Weakly Supervised Audio Tagging</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170997.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-11|PAPER Wed-P-6-2-11 — The Opensesame NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Opensesame NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-6|PAPER Tue-P-4-2-6 — Acoustic Feature Learning via Deep Variational Canonical Correlation Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Feature Learning via Deep Variational Canonical Correlation Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170121.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-10-4|PAPER Tue-O-5-10-4 — Transfer Learning Between Concepts for Human Behavior Modeling: An Application to Sincerity and Deception Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning Between Concepts for Human Behavior Modeling: An Application to Sincerity and Deception Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-12|PAPER Thu-P-9-4-12 — Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170727.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-2-4|PAPER Thu-O-10-2-4 — Unsupervised Discriminative Training of PLDA for Domain Adaptation in Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Discriminative Training of PLDA for Domain Adaptation in Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170486.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-10|PAPER Wed-P-7-3-10 — Attention and Localization Based on a Deep Convolutional Recurrent Model for Weakly Supervised Audio Tagging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention and Localization Based on a Deep Convolutional Recurrent Model for Weakly Supervised Audio Tagging</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170109.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-6-2|PAPER Thu-O-9-6-2 — Improved Codebook-Based Speech Enhancement Based on MBE Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Codebook-Based Speech Enhancement Based on MBE Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-11|PAPER Thu-P-9-4-11 — Tacotron: Towards End-to-End Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tacotron: Towards End-to-End Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170896.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-4-4|PAPER Wed-O-8-4-4 — Toward Expressive Speech Translation: A Unified Sequence-to-Sequence LSTMs Approach for Translating Words and Emphasis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Toward Expressive Speech Translation: A Unified Sequence-to-Sequence LSTMs Approach for Translating Words and Emphasis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170215.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-3|PAPER Wed-SS-6-2-3 — The Motivation and Development of MPAi, a Māori Pronunciation Aid]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Motivation and Development of MPAi, a Māori Pronunciation Aid</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-11|PAPER Thu-P-9-4-11 — Tacotron: Towards End-to-End Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tacotron: Towards End-to-End Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171746.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-9|PAPER Tue-SS-5-11-9 — Effects of Talker Dialect, Gender & Race on Accuracy of Bing Speech and YouTube Automatic Captions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Talker Dialect, Gender & Race on Accuracy of Bing Speech and YouTube Automatic Captions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172042.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-5|PAPER Wed-S&T-6-A-5 — TBT (Toolkit to Build TTS): A High Performance Framework to Build Multiple Language HTS Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TBT (Toolkit to Build TTS): A High Performance Framework to Build Multiple Language HTS Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171410.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-4|PAPER Mon-O-1-10-4 — VCV Synthesis Using Task Dynamics to Animate a Factor-Based Articulatory Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VCV Synthesis Using Task Dynamics to Animate a Factor-Based Articulatory Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170325.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-1|PAPER Mon-O-1-10-1 — The Influence of Synthetic Voice on the Evaluation of a Virtual Character]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Influence of Synthetic Voice on the Evaluation of a Virtual Character</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170621.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-6|PAPER Wed-P-7-4-6 — Phonological Markers of Oxytocin and MDMA Ingestion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonological Markers of Oxytocin and MDMA Ingestion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170987.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-6-4|PAPER Tue-O-5-6-4 — Phonological Complexity, Segment Rate and Speech Tempo Perception]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonological Complexity, Segment Rate and Speech Tempo Perception</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171553.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-6-4|PAPER Mon-O-1-6-4 — How are Four-Level Length Distinctions Produced? Evidence from Moroccan Arabic]]</div>|^<div class="cpauthorindexpersoncardpapertitle">How are Four-Level Length Distinctions Produced? Evidence from Moroccan Arabic</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172057.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-5|PAPER Wed-S&T-6-B-5 — Modifying Amazon’s Alexa ASR Grammar and Lexicon — A Case Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modifying Amazon’s Alexa ASR Grammar and Lexicon — A Case Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-6|PAPER Wed-P-7-2-6 — Effect of Formant and F0 Discontinuity on Perceived Vowel Duration: Impacts for Concatenative Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effect of Formant and F0 Discontinuity on Perceived Vowel Duration: Impacts for Concatenative Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170417.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-10|PAPER Wed-P-8-1-10 — The Acoustics of Word Stress in Czech as a Function of Speaking Style]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Acoustics of Word Stress in Czech as a Function of Speaking Style</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170101.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-2|PAPER Wed-P-7-3-2 — Audio Scene Classification with Deep Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Scene Classification with Deep Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170934.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-5|PAPER Wed-O-7-1-5 — The Phonological Status of the French Initial Accent and its Role in Semantic Processing: An Event-Related Potentials Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Phonological Status of the French Initial Accent and its Role in Semantic Processing: An Event-Related Potentials Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170326.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-8-11-3|PAPER Wed-SS-8-11-3 — Perceptual Ratings of Voice Likability Collected Through In-Lab Listening Tests vs. Mobile-Based Crowdsourcing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perceptual Ratings of Voice Likability Collected Through In-Lab Listening Tests vs. Mobile-Based Crowdsourcing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170880.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-2|PAPER Wed-SS-7-1-2 — Very Low Resource Radio Browsing for Agile Developmental and Humanitarian Monitoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Very Low Resource Radio Browsing for Agile Developmental and Humanitarian Monitoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-4-11-2|PAPER Tue-SS-4-11-2 — Attentional Factors in Listeners’ Uptake of Gesture Cues During Speech Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attentional Factors in Listeners’ Uptake of Gesture Cues During Speech Processing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171689.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-11-6|PAPER Wed-SS-7-11-6 — Learning Weakly Supervised Multimodal Phoneme Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Weakly Supervised Multimodal Phoneme Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171421.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-2|PAPER Tue-O-4-8-2 — Adversarial Auto-Encoders for Speech Based Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Auto-Encoders for Speech Based Emotion Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170121.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-10-4|PAPER Tue-O-5-10-4 — Transfer Learning Between Concepts for Human Behavior Modeling: An Application to Sincerity and Deception Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning Between Concepts for Human Behavior Modeling: An Application to Sincerity and Deception Prediction</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170120.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-2|PAPER Wed-P-7-4-2 — An Affect Prediction Approach Through Depression Severity Parameter Incorporation in Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Affect Prediction Approach Through Depression Severity Parameter Incorporation in Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171360.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-2-5|PAPER Tue-O-4-2-5 — Single-Ended Prediction of Listening Effort Based on Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Single-Ended Prediction of Listening Effort Based on Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-12|PAPER Wed-P-7-3-12 — Music Tempo Estimation Using Sub-Band Synchrony]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Music Tempo Estimation Using Sub-Band Synchrony</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170431.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-8|PAPER Wed-P-7-3-8 — Acoustic Scene Classification Using a CNN-SuperVector System Trained with Auditory and Spectrogram Image Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Scene Classification Using a CNN-SuperVector System Trained with Auditory and Spectrogram Image Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-1|PAPER Tue-S&T-3-A-1 — Applications of the BBN Sage Speech Processing Platform]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Applications of the BBN Sage Speech Processing Platform</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171073.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-2|PAPER Tue-O-3-1-2 — CTC in the Context of Generalized Full-Sum HMM Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CTC in the Context of Generalized Full-Sum HMM Training</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171747.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-5|PAPER Tue-P-4-2-5 — Parallel Neural Network Features for Improved Tandem Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parallel Neural Network Features for Improved Tandem Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171038.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-9|PAPER Wed-P-8-4-9 — Speaker Adaptation in DNN-Based Speech Synthesis Using d-Vectors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Adaptation in DNN-Based Speech Synthesis Using d-Vectors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170804.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-2-4|PAPER Tue-O-3-2-4 — A Speaker Adaptive DNN Training Approach for Speaker-Independent Acoustic Inversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Speaker Adaptive DNN Training Approach for Speaker-Independent Acoustic Inversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170453.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-6-2|PAPER Wed-O-6-6-2 — Prosodic Facilitation and Interference While Judging on the Veracity of Synthesized Statements]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Facilitation and Interference While Judging on the Veracity of Synthesized Statements</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171732.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-8|PAPER Tue-SS-5-11-8 — A Semi-Supervised Learning Approach for Acoustic-Prosodic Personality Perception in Under-Resourced Domains]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Semi-Supervised Learning Approach for Acoustic-Prosodic Personality Perception in Under-Resourced Domains</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171538.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-11|PAPER Wed-P-8-4-11 — Segment Level Voice Conversion with Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Segment Level Voice Conversion with Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171683.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-2|PAPER Mon-P-1-4-2 — Comparison of Decoding Strategies for CTC Acoustic Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Decoding Strategies for CTC Acoustic Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-12|PAPER Thu-P-9-4-12 — Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171038.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-9|PAPER Wed-P-8-4-9 — Speaker Adaptation in DNN-Based Speech Synthesis Using d-Vectors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Adaptation in DNN-Based Speech Synthesis Using d-Vectors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170621.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-6|PAPER Wed-P-7-4-6 — Phonological Markers of Oxytocin and MDMA Ingestion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonological Markers of Oxytocin and MDMA Ingestion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170487.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-2|PAPER Mon-P-2-4-2 — Weakly-Supervised Phrase Assignment from Text in a Speech-Synthesis System Using Noisy Labels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weakly-Supervised Phrase Assignment from Text in a Speech-Synthesis System Using Noisy Labels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171027.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-11|PAPER Wed-P-7-2-11 — Locating Burst Onsets Using SFF Envelope and Phase Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Locating Burst Onsets Using SFF Envelope and Phase Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-5|PAPER Mon-O-2-2-5 — Earlier Identification of Children with Autism Spectrum Disorder: An Automatic Vocalisation-Based Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Earlier Identification of Children with Autism Spectrum Disorder: An Automatic Vocalisation-Based Approach</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170635.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-8-4|PAPER Wed-O-6-8-4 — Spotting Social Signals in Conversational Speech over IP: A Deep Learning Perspective]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spotting Social Signals in Conversational Speech over IP: A Deep Learning Perspective</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171598.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-10-2|PAPER Wed-O-8-10-2 — Semi-Supervised Adaptation of RNNLMs by Fine-Tuning with Domain-Specific Auxiliary Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised Adaptation of RNNLMs by Fine-Tuning with Domain-Specific Auxiliary Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171495.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-3|PAPER Tue-P-4-3-3 — Turn-Taking Offsets and Dialogue Context]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Turn-Taking Offsets and Dialogue Context</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-12|PAPER Mon-P-2-1-12 — Perception and Acoustics of Vowel Nasality in Brazilian Portuguese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perception and Acoustics of Vowel Nasality in Brazilian Portuguese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170537.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-2|PAPER Tue-O-5-2-2 — The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-3|PAPER Wed-O-6-1-3 — Comparison of Basic Beatboxing Articulations Between Expert and Novice Artists Using Real-Time Magnetic Resonance Imaging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Basic Beatboxing Articulations Between Expert and Novice Artists Using Real-Time Magnetic Resonance Imaging</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171631.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-5|PAPER Wed-O-6-1-5 — Sounds of the Human Vocal Tract]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sounds of the Human Vocal Tract</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172033.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-B-6|PAPER Tue-S&T-3-B-6 — Reading Validation for Pronunciation Evaluation in the Digitala Project]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reading Validation for Pronunciation Evaluation in the Digitala Project</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-6|PAPER Wed-S&T-6-A-6 — SIAK — A Game for Foreign Language Pronunciation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SIAK — A Game for Foreign Language Pronunciation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171160.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-7|PAPER Mon-P-1-2-7 — Hidden Markov Model Variational Autoencoder for Acoustic Unit Discovery]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hidden Markov Model Variational Autoencoder for Acoustic Unit Discovery</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171262.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-7|PAPER Wed-SS-7-1-7 — Leveraging Text Data for Word Segmentation for Underresourced Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging Text Data for Word Segmentation for Underresourced Languages</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170187.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-6-1|PAPER Wed-O-8-6-1 — Tight Integration of Spatial and Spectral Features for BSS with Deep Clustering Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tight Integration of Spatial and Spectral Features for BSS with Deep Clustering Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171178.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-6|PAPER Wed-P-8-3-6 — ASR Error Management for Improving Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ASR Error Management for Improving Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171214.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-6-2|PAPER Tue-O-5-6-2 — Disambiguate or not? — The Role of Prosody in Unambiguous and Potentially Ambiguous Anaphora Production in Strictly Mandarin Parallel Structures]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Disambiguate or not? — The Role of Prosody in Unambiguous and Potentially Ambiguous Anaphora Production in Strictly Mandarin Parallel Structures</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171737.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-5|PAPER Tue-P-4-1-5 — Convolutional Recurrent Neural Networks for Small-Footprint Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Convolutional Recurrent Neural Networks for Small-Footprint Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171257.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-8|PAPER Tue-P-5-4-8 — Binary Mask Estimation Strategies for Constrained Imputation-Based Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Binary Mask Estimation Strategies for Constrained Imputation-Based Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171055.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-5|PAPER Tue-SS-5-11-5 — Perceptual and Acoustic Correlates of Gender in the Prepubertal Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perceptual and Acoustic Correlates of Gender in the Prepubertal Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171706.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-10-2|PAPER Tue-O-5-10-2 — Exploring Dynamic Measures of Stance in Spoken Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Dynamic Measures of Stance in Spoken Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170578.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-7|PAPER Wed-P-7-2-7 — An Ultrasound Study of Alveolar and Retroflex Consonants in Arrernte: Stressed and Unstressed Syllables]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Ultrasound Study of Alveolar and Retroflex Consonants in Arrernte: Stressed and Unstressed Syllables</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171311.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-8|PAPER Wed-P-6-2-8 — Acoustic Pairing of Original and Dubbed Voices in the Context of Video Game Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Pairing of Original and Dubbed Voices in the Context of Video Game Localization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170106.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-8-2|PAPER Thu-O-9-8-2 — Improving Computer Lipreading via DNN Sequence Discriminative Training Techniques]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Computer Lipreading via DNN Sequence Discriminative Training Techniques</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171308.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-2|PAPER Tue-SS-3-11-2 — Robustness Over Time-Varying Channels in DNN-HMM ASR Based Human-Robot Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robustness Over Time-Varying Channels in DNN-HMM ASR Based Human-Robot Interaction</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171665.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-9|PAPER Thu-P-9-1-9 — Robust Speech Recognition Based on Binaural Auditory Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Speech Recognition Based on Binaural Auditory Processing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-5|PAPER Mon-O-2-10-5 — Acoustic Modeling for Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Modeling for Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171791.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-10|PAPER Thu-P-9-1-10 — Adaptive Multichannel Dereverberation for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Multichannel Dereverberation for Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170035.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-1|PAPER Mon-P-2-4-1 — An RNN Model of Text Normalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An RNN Model of Text Normalization</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170160.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-6|PAPER Wed-SS-6-2-6 — Areal and Phylogenetic Features for Multilingual Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Areal and Phylogenetic Features for Multilingual Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170402.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-13|PAPER Thu-P-9-4-13 — An Expanded Taxonomy of Semiotic Classes for Text Normalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Expanded Taxonomy of Semiotic Classes for Text Normalization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-11|PAPER Thu-P-9-4-11 — Tacotron: Towards End-to-End Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tacotron: Towards End-to-End Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171422.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-16|PAPER Wed-P-7-3-16 — Novel Shifted Real Spectrum for Exact Signal Reconstruction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Novel Shifted Real Spectrum for Exact Signal Reconstruction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171382.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-11|PAPER Wed-P-8-3-11 — OpenMM: An Open-Source Multimodal Feature Extraction Tool]]</div>|^<div class="cpauthorindexpersoncardpapertitle">OpenMM: An Open-Source Multimodal Feature Extraction Tool</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-11|PAPER Thu-P-9-4-11 — Tacotron: Towards End-to-End Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tacotron: Towards End-to-End Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170018.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-6-2|PAPER Mon-O-2-6-2 — Dialect Perception by Older Children]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dialect Perception by Older Children</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-5|PAPER Wed-O-7-10-5 — Improving Sub-Phone Modeling for Better Native Language Identification with Non-Native English Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Sub-Phone Modeling for Better Native Language Identification with Non-Native English Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170905.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-5|PAPER Thu-SS-10-10-5 — DNN-Based Feature Extraction and Classifier Combination for Child-Directed Speech, Cold and Snoring Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DNN-Based Feature Extraction and Classifier Combination for Child-Directed Speech, Cold and Snoring Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170524.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-1|PAPER Thu-O-10-11-1 — Occupancy Detection in Commercial and Residential Environments Using Audio Signal]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Occupancy Detection in Commercial and Residential Environments Using Audio Signal</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171279.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-14|PAPER Wed-P-8-1-14 — The Perception of English Intonation Patterns by German L2 Speakers of English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Perception of English Intonation Patterns by German L2 Speakers of English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170903.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-11|PAPER Wed-SS-7-1-11 — Building an ASR Corpus Using Althingi’s Parliamentary Speeches]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Building an ASR Corpus Using Althingi’s Parliamentary Speeches</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171352.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-13|PAPER Wed-SS-7-1-13 — Building ASR Corpora Using Eyra]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Building ASR Corpora Using Eyra</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171052.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-13|PAPER Tue-P-5-1-13 — Articulation Rate in Swedish Child-Directed Speech Increases as a Function of the Age of the Child Even When Surprisal is Controlled for]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulation Rate in Swedish Child-Directed Speech Increases as a Function of the Age of the Child Even When Surprisal is Controlled for</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171143.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-11-3|PAPER Wed-SS-7-11-3 — Modelling the Informativeness of Non-Verbal Cues in Parent-Child Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modelling the Informativeness of Non-Verbal Cues in Parent-Child Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171243.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-7|PAPER Tue-P-5-4-7 — MixMax Approximation as a Super-Gaussian Log-Spectral Amplitude Estimator for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MixMax Approximation as a Super-Gaussian Log-Spectral Amplitude Estimator for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170419.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-2|PAPER Tue-O-3-8-2 — Phrase Break Prediction for Long-Form Reading TTS: Exploiting Text Structure Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phrase Break Prediction for Long-Form Reading TTS: Exploiting Text Structure Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170450.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-2|PAPER Mon-SS-1-8-2 — Experimental Analysis of Features for Replay Attack Detection — Results on the ASVspoof 2017 Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Experimental Analysis of Features for Replay Attack Detection — Results on the ASVspoof 2017 Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-4-5|PAPER Mon-O-1-4-5 — Improving Speech Intelligibility in Binaural Hearing Aids by Estimating a Time-Frequency Mask with a Weighted Least Squares Classifier]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Speech Intelligibility in Binaural Hearing Aids by Estimating a Time-Frequency Mask with a Weighted Least Squares Classifier</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170746.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-4|PAPER Mon-P-1-2-4 — Frame-Wise Dynamic Threshold Based Polyphonic Acoustic Event Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Frame-Wise Dynamic Threshold Based Polyphonic Acoustic Event Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170189.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-4|PAPER Tue-P-5-3-4 — A Contrast Function and Algorithm for Blind Separation of Audio Signals]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Contrast Function and Algorithm for Blind Separation of Audio Signals</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171606.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-4-11-4|PAPER Tue-SS-4-11-4 — Enhancing Backchannel Prediction Using Word Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enhancing Backchannel Prediction Using Word Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171308.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-2|PAPER Tue-SS-3-11-2 — Robustness Over Time-Varying Channels in DNN-HMM ASR Based Human-Robot Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robustness Over Time-Varying Channels in DNN-HMM ASR Based Human-Robot Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171722.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-5|PAPER Tue-O-5-4-5 — Modeling Laryngeal Muscle Activation Noise for Low-Order Physiological Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Laryngeal Muscle Activation Noise for Low-Order Physiological Based Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171513.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-1|PAPER Mon-O-1-1-1 — Improved Single System Conversational Telephone Speech Recognition with VGG Bottleneck Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Single System Conversational Telephone Speech Recognition with VGG Bottleneck Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172022.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-1|PAPER Wed-S&T-6-A-1 — Creating a Voice for MiRo, the World’s First Commercial Biomimetic Robot]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Creating a Voice for MiRo, the World’s First Commercial Biomimetic Robot</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170802.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-7|PAPER Thu-P-9-4-7 — Evaluation of a Silent Speech Interface Based on Magnetic Sensing and Deep Learning for a Phonetically Rich Vocabulary]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluation of a Silent Speech Interface Based on Magnetic Sensing and Deep Learning for a Phonetically Rich Vocabulary</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170842.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-3|PAPER Wed-O-7-1-3 — Discovering Language in Marmoset Vocalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discovering Language in Marmoset Vocalization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170930.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-5|PAPER Mon-SS-1-8-5 — Spoof Detection Using Source, Instantaneous Frequency and Cepstral Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spoof Detection Using Source, Instantaneous Frequency and Cepstral Features</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171307.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-12|PAPER Wed-P-6-2-12 — IITG-Indigo System for NIST 2016 SRE Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">IITG-Indigo System for NIST 2016 SRE Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170233.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-1|PAPER Tue-O-3-1-1 — A Comparison of Sequence-to-Sequence Models for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Sequence-to-Sequence Models for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170232.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-5|PAPER Thu-O-10-1-5 — An Analysis of “Attention” in Sequence-to-Sequence Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Analysis of “Attention” in Sequence-to-Sequence Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170930.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-5|PAPER Mon-SS-1-8-5 — Spoof Detection Using Source, Instantaneous Frequency and Cepstral Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spoof Detection Using Source, Instantaneous Frequency and Cepstral Features</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171307.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-12|PAPER Wed-P-6-2-12 — IITG-Indigo System for NIST 2016 SRE Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">IITG-Indigo System for NIST 2016 SRE Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171223.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-1|PAPER Tue-SS-3-11-1 — Elicitation Design for Acoustic Depression Classification: An Investigation of Articulation Effort, Linguistic Complexity, and Word Affect]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Elicitation Design for Acoustic Depression Classification: An Investigation of Articulation Effort, Linguistic Complexity, and Word Affect</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171673.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-15|PAPER Tue-P-5-3-15 — Domain-Specific Utterance End-Point Detection for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Specific Utterance End-Point Detection for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-6|PAPER Wed-O-7-2-6 — Robust Speech Recognition via Anchor Word Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Speech Recognition via Anchor Word Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170762.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-8|PAPER Tue-P-5-2-8 — Dysprosody Differentiate Between Parkinson’s Disease, Progressive Supranuclear Palsy, and Multiple System Atrophy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dysprosody Differentiate Between Parkinson’s Disease, Progressive Supranuclear Palsy, and Multiple System Atrophy</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170487.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-2|PAPER Mon-P-2-4-2 — Weakly-Supervised Phrase Assignment from Text in a Speech-Synthesis System Using Noisy Labels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weakly-Supervised Phrase Assignment from Text in a Speech-Synthesis System Using Noisy Labels</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170200.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-10-1|PAPER Tue-O-3-10-1 — Efficient Emotion Recognition from Speech Using Deep Learning on Spectrograms]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Efficient Emotion Recognition from Speech Using Deep Learning on Spectrograms</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-5|PAPER Mon-O-2-10-5 — Acoustic Modeling for Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Modeling for Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170503.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-4-1|PAPER Wed-O-8-4-1 — Sequence-to-Sequence Models Can Directly Translate Foreign Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sequence-to-Sequence Models Can Directly Translate Foreign Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-11|PAPER Thu-P-9-4-11 — Tacotron: Towards End-to-End Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tacotron: Towards End-to-End Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170997.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-11|PAPER Wed-P-6-2-11 — The Opensesame NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Opensesame NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170520.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-11-1|PAPER Wed-SS-7-11-1 — Multi-Task Learning for Mispronunciation Detection on Singapore Children’s Mandarin Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Task Learning for Mispronunciation Detection on Singapore Children’s Mandarin Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171712.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-6|PAPER Wed-O-8-8-6 — Speech Processing Approach for Diagnosing Dementia in an Early Stage]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Processing Approach for Diagnosing Dementia in an Early Stage</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171306.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-13|PAPER Wed-P-7-2-13 — A Quantitative Measure of the Impact of Coarticulation on Phone Discriminability]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Quantitative Measure of the Impact of Coarticulation on Phone Discriminability</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170285.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-1|PAPER Wed-O-6-1-1 — Aerodynamic Features of French Fricatives]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Aerodynamic Features of French Fricatives</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170282.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-9|PAPER Mon-P-2-1-9 — The Effect of Spectral Tilt on Size Discrimination of Voiced Speech Sounds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Effect of Spectral Tilt on Size Discrimination of Voiced Speech Sounds</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170196.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-2-2|PAPER Tue-O-4-2-2 — An Auditory Model of Speaker Size Perception for Voiced Speech Sounds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Auditory Model of Speaker Size Perception for Voiced Speech Sounds</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171732.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-8|PAPER Tue-SS-5-11-8 — A Semi-Supervised Learning Approach for Acoustic-Prosodic Personality Perception in Under-Resourced Domains]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Semi-Supervised Learning Approach for Acoustic-Prosodic Personality Perception in Under-Resourced Domains</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170219.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-9|PAPER Tue-P-3-2-9 — A Distribution Free Formulation of the Total Variability Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Distribution Free Formulation of the Total Variability Model</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-1|PAPER Wed-SS-6-2-1 — Team ELISA System for DARPA LORELEI Speech Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team ELISA System for DARPA LORELEI Speech Evaluation 2016</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170230.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-3|PAPER Thu-P-9-1-3 — Global SNR Estimation of Speech Signals for Unknown Noise Conditions Using Noise Adapted Non-Linear Regression]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Global SNR Estimation of Speech Signals for Unknown Noise Conditions Using Noise Adapted Non-Linear Regression</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170519.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-1|PAPER Wed-O-6-10-1 — Large-Scale Domain Adaptation via Teacher-Student Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Large-Scale Domain Adaptation via Teacher-Student Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170065.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-6|PAPER Thu-O-10-11-6 — Speaker Change Detection in Broadcast TV Using Bidirectional Long Short-Term Memory Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Change Detection in Broadcast TV Using Bidirectional Long Short-Term Memory Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170537.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-2|PAPER Tue-O-5-2-2 — The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170949.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-6|PAPER Mon-P-2-4-6 — Multi-Task Learning for Prosodic Structure Generation Using BLSTM RNN with Structured Output Layer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Task Learning for Prosodic Structure Generation Using BLSTM RNN with Structured Output Layer</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171122.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-10|PAPER Wed-P-8-4-10 — Spectro-Temporal Modelling with Time-Frequency LSTM and Structured Output Layer for Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spectro-Temporal Modelling with Time-Frequency LSTM and Structured Output Layer for Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170713.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-10-2|PAPER Tue-O-3-10-2 — Interaction and Transition Model for Speech Emotion Recognition in Dialogue]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Interaction and Transition Model for Speech Emotion Recognition in Dialogue</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170666.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-4|PAPER Thu-O-10-11-4 — Deep Learning Techniques in Tandem with Signal Processing Cues for Phonetic Segmentation for Text to Speech Synthesis in Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Learning Techniques in Tandem with Signal Processing Cues for Phonetic Segmentation for Text to Speech Synthesis in Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170216.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-3|PAPER Wed-P-7-4-3 — Cross-Database Models for the Classification of Dysarthria Presence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Database Models for the Classification of Dysarthria Presence</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170179.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-2|PAPER Mon-O-1-2-2 — Acoustic-Prosodic and Physiological Response to Stressful Interactions in Children with Autism Spectrum Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic-Prosodic and Physiological Response to Stressful Interactions in Children with Autism Spectrum Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170332.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-9|PAPER Tue-P-5-1-9 — Prosody Analysis of L2 English for Naturalness Evaluation Through Speech Modification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosody Analysis of L2 English for Naturalness Evaluation Through Speech Modification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171737.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-5|PAPER Tue-P-4-1-5 — Convolutional Recurrent Neural Networks for Small-Footprint Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Convolutional Recurrent Neural Networks for Small-Footprint Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171601.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-6-1|PAPER Mon-O-1-6-1 — Phonetic Correlates of Pharyngeal and Pharyngealized Consonants in Saudi, Lebanese, and Jordanian Arabic: An rt-MRI Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetic Correlates of Pharyngeal and Pharyngealized Consonants in Saudi, Lebanese, and Jordanian Arabic: An rt-MRI Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170664.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-3|PAPER Wed-P-8-4-3 — Phoneme-Discriminative Features for Dysarthric Speech Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phoneme-Discriminative Features for Dysarthric Speech Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170651.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-1|PAPER Tue-P-4-3-1 — Online End-of-Turn Detection from Speech Based on Stacked Time-Asynchronous Sequential Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online End-of-Turn Detection from Speech Based on Stacked Time-Asynchronous Sequential Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170521.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-3|PAPER Mon-P-2-4-3 — Prosody Aware Word-Level Encoder Based on BLSTM-RNNs for DNN-Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosody Aware Word-Level Encoder Based on BLSTM-RNNs for DNN-Based Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170651.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-1|PAPER Tue-P-4-3-1 — Online End-of-Turn Detection from Speech Based on Stacked Time-Asynchronous Sequential Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online End-of-Turn Detection from Speech Based on Stacked Time-Asynchronous Sequential Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170725.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-12|PAPER Tue-P-4-3-12 — Hierarchical LSTMs with Joint Learning for Estimating Customer Satisfaction from Contact Center Calls]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hierarchical LSTMs with Joint Learning for Estimating Customer Satisfaction from Contact Center Calls</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170269.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-2|PAPER Wed-P-8-3-2 — Parallel Hierarchical Attention Networks with Shared Memory Reader for Multi-Stream Conversational Document Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parallel Hierarchical Attention Networks with Shared Memory Reader for Multi-Stream Conversational Document Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171752.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-7|PAPER Wed-P-6-3-7 — Automatic Alignment Between Classroom Lecture Utterances and Slide Components]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Alignment Between Classroom Lecture Utterances and Slide Components</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-14|PAPER Tue-P-4-3-14 — Analysis of the Relationship Between Prosodic Features of Fillers and its Forms or Occurrence Positions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of the Relationship Between Prosodic Features of Fillers and its Forms or Occurrence Positions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170634.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-2|PAPER Wed-P-6-3-2 — Constructing Acoustic Distances Between Subwords and States Obtained from a Deep Neural Network for Spoken Term Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Constructing Acoustic Distances Between Subwords and States Obtained from a Deep Neural Network for Spoken Term Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170779.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-2|PAPER Tue-P-4-2-2 — Node Pruning Based on Entropy of Weights and Node Activity for Small-Footprint Acoustic Model Based on Deep Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Node Pruning Based on Entropy of Weights and Node Activity for Small-Footprint Acoustic Model Based on Deep Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170651.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-1|PAPER Tue-P-4-3-1 — Online End-of-Turn Detection from Speech Based on Stacked Time-Asynchronous Sequential Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online End-of-Turn Detection from Speech Based on Stacked Time-Asynchronous Sequential Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170638.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-1|PAPER Wed-P-8-3-1 — Zero-Shot Learning for Natural Language Understanding Using Domain-Independent Sequential Structure and Question Types]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Learning for Natural Language Understanding Using Domain-Independent Sequential Structure and Question Types</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170624.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-5|PAPER Mon-P-1-1-5 — Non-Local Estimation of Speech Signal for Vowel Onset Point Detection in Varied Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Local Estimation of Speech Signal for Vowel Onset Point Detection in Varied Environments</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170135.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-3|PAPER Tue-P-5-3-3 — Excitation Source Features for Improving the Detection of Vowel Onset and Offset Points in a Speech Sequence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Excitation Source Features for Improving the Detection of Vowel Onset and Offset Points in a Speech Sequence</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170302.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-2|PAPER Wed-O-6-10-2 — Improving Children’s Speech Recognition Through Explicit Pitch Scaling Based on Iterative Spectrogram Inversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Children’s Speech Recognition Through Explicit Pitch Scaling Based on Iterative Spectrogram Inversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170874.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-8|PAPER Mon-P-2-3-8 — Generalized Distillation Framework for Speaker Normalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generalized Distillation Framework for Speaker Normalization</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-9|PAPER Wed-SS-7-1-9 — Joint Estimation of Articulatory Features and Acoustic Models for Low-Resource Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Estimation of Articulatory Features and Acoustic Models for Low-Resource Languages</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171009.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-10|PAPER Wed-SS-7-1-10 — Transfer Learning and Distillation Techniques to Improve the Acoustic Modeling of Low Resource Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning and Distillation Techniques to Improve the Acoustic Modeling of Low Resource Languages</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170878.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-4|PAPER Wed-O-8-8-4 — On Improving Acoustic Models for TORGO Dysarthric Speech Database]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Improving Acoustic Models for TORGO Dysarthric Speech Database</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170930.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-5|PAPER Mon-SS-1-8-5 — Spoof Detection Using Source, Instantaneous Frequency and Cepstral Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spoof Detection Using Source, Instantaneous Frequency and Cepstral Features</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170790.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-8|PAPER Mon-P-1-1-8 — Vowel Onset Point Detection Using Sonority Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Vowel Onset Point Detection Using Sonority Information</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170309.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-2|PAPER Mon-P-1-2-2 — Indoor/Outdoor Audio Classification Using Foreground Speech Segmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Indoor/Outdoor Audio Classification Using Foreground Speech Segmentation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170604.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-6-3|PAPER Tue-O-3-6-3 — Acoustic Characterization of Word-Final Glottal Stops in Mizo and Assam Sora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Characterization of Word-Final Glottal Stops in Mizo and Assam Sora</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170589.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-4|PAPER Tue-P-5-2-4 — Zero Frequency Filter Based Analysis of Voice Disorders]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero Frequency Filter Based Analysis of Voice Disorders</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-5|PAPER Tue-P-5-2-5 — Hypernasality Severity Analysis in Cleft Lip and Palate Speech Using Vowel Space Area]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hypernasality Severity Analysis in Cleft Lip and Palate Speech Using Vowel Space Area</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171307.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-12|PAPER Wed-P-6-2-12 — IITG-Indigo System for NIST 2016 SRE Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">IITG-Indigo System for NIST 2016 SRE Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170587.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-6|PAPER Thu-P-9-4-6 — Phase Modeling Using Integrated Linear Prediction Residual for Statistical Parametric Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phase Modeling Using Integrated Linear Prediction Residual for Statistical Parametric Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170596.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-4|PAPER Wed-O-7-10-4 — Investigating Scalability in Hierarchical Language Identification System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Scalability in Hierarchical Language Identification System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170464.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-5|PAPER Wed-P-6-1-5 — Improving Mispronunciation Detection for Non-Native Learners with Multisource Information and LSTM-Based Deep Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Mispronunciation Detection for Non-Native Learners with Multisource Information and LSTM-Based Deep Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170579.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-4|PAPER Thu-P-9-1-4 — Joint Training of Multi-Channel-Condition Dereverberation and Acoustic Modeling of Microphone Array Speech for Robust Distant Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Training of Multi-Channel-Condition Dereverberation and Acoustic Modeling of Microphone Array Speech for Robust Distant Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171159.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-6-1|PAPER Wed-O-6-6-1 — Prosodic Event Recognition Using Convolutional Neural Networks with Context Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Event Recognition Using Convolutional Neural Networks with Context Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170448.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-2|PAPER Thu-SS-9-11-2 — A Semi-Polar Grid Strategy for the Three-Dimensional Finite Element Simulation of Vowel-Vowel Sequences]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Semi-Polar Grid Strategy for the Three-Dimensional Finite Element Simulation of Vowel-Vowel Sequences</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171614.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-6|PAPER Thu-SS-9-11-6 — Synthesis of VV Utterances from Muscle Activation to Sound with a 3D Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Synthesis of VV Utterances from Muscle Activation to Sound with a 3D Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171178.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-6|PAPER Wed-P-8-3-6 — ASR Error Management for Improving Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ASR Error Management for Improving Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171244.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-4|PAPER Mon-SS-1-11-4 — On Building Mixed Lingual Speech Synthesis Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Building Mixed Lingual Speech Synthesis Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-5|PAPER Mon-SS-1-11-5 — Speech Synthesis for Mixed-Language Navigation Instructions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Synthesis for Mixed-Language Navigation Instructions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170854.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-4|PAPER Wed-O-7-1-4 — Subject-Independent Classification of Japanese Spoken Sentences by Multiple Frequency Bands Phase Pattern of EEG Response During Speech Perception]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Subject-Independent Classification of Japanese Spoken Sentences by Multiple Frequency Bands Phase Pattern of EEG Response During Speech Perception</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170944.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-4-2|PAPER Wed-O-8-4-2 — Structured-Based Curriculum Learning for End-to-End English-Japanese Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Structured-Based Curriculum Learning for End-to-End English-Japanese Speech Translation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170896.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-4-4|PAPER Wed-O-8-4-4 — Toward Expressive Speech Translation: A Unified Sequence-to-Sequence LSTMs Approach for Translating Words and Emphasis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Toward Expressive Speech Translation: A Unified Sequence-to-Sequence LSTMs Approach for Translating Words and Emphasis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170842.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-3|PAPER Wed-O-7-1-3 — Discovering Language in Marmoset Vocalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discovering Language in Marmoset Vocalization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171598.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-10-2|PAPER Wed-O-8-10-2 — Semi-Supervised Adaptation of RNNLMs by Fine-Tuning with Domain-Specific Auxiliary Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised Adaptation of RNNLMs by Fine-Tuning with Domain-Specific Auxiliary Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170604.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-6-3|PAPER Tue-O-3-6-3 — Acoustic Characterization of Word-Final Glottal Stops in Mizo and Assam Sora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Characterization of Word-Final Glottal Stops in Mizo and Assam Sora</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171391.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-6|PAPER Wed-O-7-10-6 — QMDIS: QCRI-MIT Advanced Dialect Identification System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">QMDIS: QCRI-MIT Advanced Dialect Identification System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170103.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-4|PAPER Wed-O-7-8-4 — Improved Subword Modeling for WFST-Based Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Subword Modeling for WFST-Based Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170396.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-4-11-1|PAPER Tue-SS-4-11-1 — Adjusting the Frame: Biphasic Performative Control of Speech Rhythm]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adjusting the Frame: Biphasic Performative Control of Speech Rhythm</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170742.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-1|PAPER Mon-P-2-2-1 — Critical Articulators Identification from RT-MRI of the Vocal Tract]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Critical Articulators Identification from RT-MRI of the Vocal Tract</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-5|PAPER Mon-O-1-1-5 — English Conversational Telephone Speech Recognition by Humans and Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">English Conversational Telephone Speech Recognition by Humans and Machines</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170614.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-4|PAPER Thu-O-10-1-4 — Efficient Knowledge Distillation from an Ensemble of Teachers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Efficient Knowledge Distillation from an Ensemble of Teachers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-11|PAPER Thu-P-9-4-11 — Tacotron: Towards End-to-End Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tacotron: Towards End-to-End Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172035.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-B-1|PAPER Mon-S&T-2-B-1 — An Apparatus to Investigate Western Opera Singing Skill Learning Using Performance and Result Biofeedback, and Measuring its Neural Correlates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Apparatus to Investigate Western Opera Singing Skill Learning Using Performance and Result Biofeedback, and Measuring its Neural Correlates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170618.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-4|PAPER Mon-P-2-1-4 — Lexically Guided Perceptual Learning in Mandarin Chinese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lexically Guided Perceptual Learning in Mandarin Chinese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170874.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-8|PAPER Mon-P-2-3-8 — Generalized Distillation Framework for Speaker Normalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generalized Distillation Framework for Speaker Normalization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-9|PAPER Thu-P-9-3-9 — Cross-Linguistic Distinctions Between Professional and Non-Professional Speaking Styles]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Linguistic Distinctions Between Professional and Non-Professional Speaking Styles</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-3|PAPER Thu-SS-10-10-3 — Snore Sound Classification Using Image-Based Deep Spectrum Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Snore Sound Classification Using Image-Based Deep Spectrum Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170797.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-3|PAPER Tue-O-5-2-3 — Nuance - Politecnico di Torino’s 2016 NIST Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nuance - Politecnico di Torino’s 2016 NIST Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170650.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-12|PAPER Wed-P-6-3-12 — Improving Speech Recognizers by Refining Broadcast Data with Inaccurate Subtitle Timestamps]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Speech Recognizers by Refining Broadcast Data with Inaccurate Subtitle Timestamps</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171680.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-3|PAPER Mon-P-1-4-3 — Phone Duration Modeling for LVCSR Using Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phone Duration Modeling for LVCSR Using Neural Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170620.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-4-1|PAPER Tue-O-3-4-1 — Deep Neural Network Embeddings for Text-Independent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Neural Network Embeddings for Text-Independent Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170129.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-1|PAPER Tue-P-4-1-1 — An Exploration of Dropout with LSTMs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Exploration of Dropout with LSTMs</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171323.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-1|PAPER Tue-P-4-2-1 — Backstitch: Counteracting Finite-Sample Bias via Negative Steps]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Backstitch: Counteracting Finite-Sample Bias via Negative Steps</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171093.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-6|PAPER Wed-O-7-4-6 — Topic Identification for Speech Without ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Topic Identification for Speech Without ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170588.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-2|PAPER Wed-O-7-8-2 — Acoustic Data-Driven Lexicon Learning Based on a Greedy Pronunciation Selection Framework]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Data-Driven Lexicon Learning Based on a Greedy Pronunciation Selection Framework</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170601.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-2|PAPER Thu-O-9-4-2 — The Kaldi OpenKWS System: Improving Low Resource Keyword Search]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Kaldi OpenKWS System: Improving Low Resource Keyword Search</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170795.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-6-4|PAPER Wed-O-6-6-4 — The Relationship Between F0 Synchrony and Speech Convergence in Dyadic Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Relationship Between F0 Synchrony and Speech Convergence in Dyadic Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170480.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-4|PAPER Thu-O-9-4-4 — Compressed Time Delay Neural Network for Small-Footprint Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Compressed Time Delay Neural Network for Small-Footprint Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171428.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-6-5|PAPER Thu-O-9-6-5 — SEGAN: Speech Enhancement Generative Adversarial Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SEGAN: Speech Enhancement Generative Adversarial Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171522.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-5|PAPER Tue-O-5-8-5 — Detection of Mispronunciations and Disfluencies in Children Reading Aloud]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Mispronunciations and Disfluencies in Children Reading Aloud</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171541.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-3|PAPER Wed-P-6-1-3 — Automatic Evaluation of Children Reading Aloud on Sentences and Pseudowords]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Evaluation of Children Reading Aloud on Sentences and Pseudowords</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-10-6|PAPER Tue-O-5-10-6 — Hybrid Acoustic-Lexical Deep Learning Approach for Deception Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hybrid Acoustic-Lexical Deep Learning Approach for Deception Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171386.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-9|PAPER Mon-P-1-2-9 — Montreal Forced Aligner: Trainable Text-Speech Alignment Using Kaldi]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Montreal Forced Aligner: Trainable Text-Speech Alignment Using Kaldi</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171710.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-12|PAPER Wed-P-6-1-12 — Experiments in Character-Level Neural Network Models for Punctuation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Experiments in Character-Level Neural Network Models for Punctuation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170930.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-5|PAPER Mon-SS-1-8-5 — Spoof Detection Using Source, Instantaneous Frequency and Cepstral Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spoof Detection Using Source, Instantaneous Frequency and Cepstral Features</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171307.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-12|PAPER Wed-P-6-2-12 — IITG-Indigo System for NIST 2016 SRE Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">IITG-Indigo System for NIST 2016 SRE Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-6|PAPER Wed-S&T-6-A-6 — SIAK — A Game for Foreign Language Pronunciation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SIAK — A Game for Foreign Language Pronunciation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172058.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-A-6|PAPER Mon-S&T-2-A-6 — MoPAReST — Mobile Phone Assisted Remote Speech Therapy Platform]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MoPAReST — Mobile Phone Assisted Remote Speech Therapy Platform</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170286.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-2|PAPER Wed-P-6-2-2 — Bidirectional Modelling for Short Duration Language Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bidirectional Modelling for Short Duration Language Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-3|PAPER Mon-P-2-2-3 — Vocal Tract Airway Tissue Boundary Tracking for rtMRI Using Shape and Appearance Priors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Vocal Tract Airway Tissue Boundary Tracking for rtMRI Using Shape and Appearance Priors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172042.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-5|PAPER Wed-S&T-6-A-5 — TBT (Toolkit to Build TTS): A High Performance Framework to Build Multiple Language HTS Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TBT (Toolkit to Build TTS): A High Performance Framework to Build Multiple Language HTS Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170713.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-10-2|PAPER Tue-O-3-10-2 — Interaction and Transition Model for Speech Emotion Recognition in Dialogue]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Interaction and Transition Model for Speech Emotion Recognition in Dialogue</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170725.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-12|PAPER Tue-P-4-3-12 — Hierarchical LSTMs with Joint Learning for Estimating Customer Satisfaction from Contact Center Calls]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hierarchical LSTMs with Joint Learning for Estimating Customer Satisfaction from Contact Center Calls</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170688.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-3|PAPER Tue-O-3-8-3 — Physically Constrained Statistical F,,0,, Prediction for Electrolaryngeal Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Physically Constrained Statistical F,,0,, Prediction for Electrolaryngeal Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170920.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-7|PAPER Tue-P-4-1-7 — Ensembles of Multi-Scale VGG Acoustic Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ensembles of Multi-Scale VGG Acoustic Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170854.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-4|PAPER Wed-O-7-1-4 — Subject-Independent Classification of Japanese Spoken Sentences by Multiple Frequency Bands Phase Pattern of EEG Response During Speech Perception]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Subject-Independent Classification of Japanese Spoken Sentences by Multiple Frequency Bands Phase Pattern of EEG Response During Speech Perception</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170944.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-4-2|PAPER Wed-O-8-4-2 — Structured-Based Curriculum Learning for End-to-End English-Japanese Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Structured-Based Curriculum Learning for End-to-End English-Japanese Speech Translation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170896.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-4-4|PAPER Wed-O-8-4-4 — Toward Expressive Speech Translation: A Unified Sequence-to-Sequence LSTMs Approach for Translating Words and Emphasis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Toward Expressive Speech Translation: A Unified Sequence-to-Sequence LSTMs Approach for Translating Words and Emphasis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171633.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-8-6|PAPER Wed-O-6-8-6 — Visual, Laughter, Applause and Spoken Expression Features for Predicting Engagement Within TED Talks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visual, Laughter, Applause and Spoken Expression Features for Predicting Engagement Within TED Talks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171584.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-9|PAPER Wed-P-8-2-9 — Speech Rate Comparison When Talking to a System and Talking to a Human: A Study from a Speech-to-Speech, Machine Translation Mediated Map Task]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Rate Comparison When Talking to a System and Talking to a Human: A Study from a Speech-to-Speech, Machine Translation Mediated Map Task</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171421.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-2|PAPER Tue-O-4-8-2 — Adversarial Auto-Encoders for Speech Based Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Auto-Encoders for Speech Based Emotion Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170120.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-2|PAPER Wed-P-7-4-2 — An Affect Prediction Approach Through Depression Severity Parameter Incorporation in Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Affect Prediction Approach Through Depression Severity Parameter Incorporation in Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171476.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-5|PAPER Wed-SS-7-1-5 — Unsupervised Speech Signal to Symbol Transformation for Zero Resource Speech Applications]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Speech Signal to Symbol Transformation for Zero Resource Speech Applications</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170485.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-9|PAPER Wed-P-7-3-9 — An Environmental Feature Representation for Robust Speech Recognition and for Environment Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Environmental Feature Representation for Robust Speech Recognition and for Environment Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170636.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-1|PAPER Wed-SS-6-11-1 — SLPAnnotator: Tools for Implementing Sign Language Phonetic Annotation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SLPAnnotator: Tools for Implementing Sign Language Phonetic Annotation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170216.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-3|PAPER Wed-P-7-4-3 — Cross-Database Models for the Classification of Dysarthria Presence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Database Models for the Classification of Dysarthria Presence</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-9|PAPER Tue-P-5-4-9 — A Fully Convolutional Neural Network for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Fully Convolutional Neural Network for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-1|PAPER Tue-S&T-3-A-1 — Applications of the BBN Sage Speech Processing Platform]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Applications of the BBN Sage Speech Processing Platform</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171722.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-5|PAPER Tue-O-5-4-5 — Modeling Laryngeal Muscle Activation Noise for Low-Order Physiological Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Laryngeal Muscle Activation Noise for Low-Order Physiological Based Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172039.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-B-5|PAPER Tue-S&T-3-B-5 — Real-Time Speech Enhancement with GCC-NMF: Demonstration on the Raspberry Pi and NVIDIA Jetson]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Speech Enhancement with GCC-NMF: Demonstration on the Raspberry Pi and NVIDIA Jetson</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171458.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-6-4|PAPER Wed-O-8-6-4 — Real-Time Speech Enhancement with GCC-NMF]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Speech Enhancement with GCC-NMF</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170326.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-8-11-3|PAPER Wed-SS-8-11-3 — Perceptual Ratings of Voice Likability Collected Through In-Lab Listening Tests vs. Mobile-Based Crowdsourcing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perceptual Ratings of Voice Likability Collected Through In-Lab Listening Tests vs. Mobile-Based Crowdsourcing</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170036.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-1|PAPER Wed-P-6-4-1 — Predicting Automatic Speech Recognition Performance Over Communication Channels from Instrumental Speech Quality and Intelligibility Scores]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Automatic Speech Recognition Performance Over Communication Channels from Instrumental Speech Quality and Intelligibility Scores</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-2|PAPER Thu-SS-9-10-2 — Description of the Upper Respiratory Tract Infection Corpus (URTIC)]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Description of the Upper Respiratory Tract Infection Corpus (URTIC)</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171683.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-2|PAPER Mon-P-1-4-2 — Comparison of Decoding Strategies for CTC Acoustic Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Decoding Strategies for CTC Acoustic Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171606.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-4-11-4|PAPER Tue-SS-4-11-4 — Enhancing Backchannel Prediction Using Word Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enhancing Backchannel Prediction Using Word Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171064.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-1-5|PAPER Tue-O-5-1-5 — Frame and Segment Level Recurrent Neural Networks for Phone Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Frame and Segment Level Recurrent Neural Networks for Phone Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171572.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-10|PAPER Wed-P-7-4-10 — An N-Gram Based Approach to the Automatic Diagnosis of Alzheimer’s Disease from Spoken Language]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An N-Gram Based Approach to the Automatic Diagnosis of Alzheimer’s Disease from Spoken Language</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170936.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-3|PAPER Mon-O-1-10-3 — An HMM/DNN Comparison for Synchronized Text-to-Speech and Tongue Motion Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An HMM/DNN Comparison for Synchronized Text-to-Speech and Tongue Motion Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171433.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-6|PAPER Thu-O-10-8-6 — Shadowing Synthesized Speech — Segmental Analysis of Phonetic Convergence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Shadowing Synthesized Speech — Segmental Analysis of Phonetic Convergence</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-2|PAPER Tue-S&T-3-A-2 — Bob Speaks Kaldi]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bob Speaks Kaldi</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170750.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-6|PAPER Wed-P-6-1-6 — Automatic Explanation Spot Estimation Method Targeted at Text and Figures in Lecture Slides]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Explanation Spot Estimation Method Targeted at Text and Figures in Lecture Slides</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171695.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-1-6|PAPER Tue-O-5-1-6 — Deep Learning-Based Telephony Speech Recognition in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Learning-Based Telephony Speech Recognition in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170049.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-4-4|PAPER Tue-O-3-4-4 — Autoencoder Based Domain Adaptation for Speaker Recognition Under Insufficient Channel Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Autoencoder Based Domain Adaptation for Speaker Recognition Under Insufficient Channel Information</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170545.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-14|PAPER Wed-P-6-2-14 — Recursive Whitening Transformation for Speaker Recognition on Language Mismatched Condition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Recursive Whitening Transformation for Speaker Recognition on Language Mismatched Condition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-6|PAPER Wed-S&T-6-A-6 — SIAK — A Game for Foreign Language Pronunciation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SIAK — A Game for Foreign Language Pronunciation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171737.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-5|PAPER Tue-P-4-1-5 — Convolutional Recurrent Neural Networks for Small-Footprint Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Convolutional Recurrent Neural Networks for Small-Footprint Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170360.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-1|PAPER Mon-SS-2-8-1 — Audio Replay Attack Detection with Deep Learning Frameworks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Replay Attack Detection with Deep Learning Frameworks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170730.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-4|PAPER Tue-SS-3-11-4 — Automatic Classification of Autistic Child Vocalisations: A Novel Database and Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Classification of Autistic Child Vocalisations: A Novel Database and Results</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-3|PAPER Thu-SS-10-10-3 — Snore Sound Classification Using Image-Based Deep Spectrum Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Snore Sound Classification Using Image-Based Deep Spectrum Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170285.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-1|PAPER Wed-O-6-1-1 — Aerodynamic Features of French Fricatives]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Aerodynamic Features of French Fricatives</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171635.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-6-1|PAPER Tue-O-4-6-1 — The Vocative Chant and Beyond: German Calling Melodies Under Routine and Urgent Contexts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Vocative Chant and Beyond: German Calling Melodies Under Routine and Urgent Contexts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170289.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-7|PAPER Tue-P-5-1-7 — Changes in Early L2 Cue-Weighting of Non-Native Speech: Evidence from Learners of Mandarin Chinese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Changes in Early L2 Cue-Weighting of Non-Native Speech: Evidence from Learners of Mandarin Chinese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171060.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-6-2|PAPER Wed-O-7-6-2 — Deep Reinforcement Learning of Dialogue Policies with Less Weight Updates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Reinforcement Learning of Dialogue Policies with Less Weight Updates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-10-6|PAPER Tue-O-4-10-6 — Siamese Autoencoders for Speech Style Extraction and Switching Applied to Voice Identification and Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siamese Autoencoders for Speech Style Extraction and Switching Applied to Voice Identification and Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170458.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-6|PAPER Tue-O-5-2-6 — The 2016 NIST Speaker Recognition Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The 2016 NIST Speaker Recognition Evaluation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170438.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-5|PAPER Tue-P-3-2-5 — Improving the Effectiveness of Speaker Verification Domain Adaptation with Inadequate In-Domain Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving the Effectiveness of Speaker Verification Domain Adaptation with Inadequate In-Domain Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170398.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-6|PAPER Mon-O-2-10-6 — On Multi-Domain Training and Adaptation of End-to-End RNN Acoustic Models for Distant Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Multi-Domain Training and Adaptation of End-to-End RNN Acoustic Models for Distant Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170431.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-8|PAPER Wed-P-7-3-8 — Acoustic Scene Classification Using a CNN-SuperVector System Trained with Auditory and Spectrogram Image Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Scene Classification Using a CNN-SuperVector System Trained with Auditory and Spectrogram Image Features</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170524.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-1|PAPER Thu-O-10-11-1 — Occupancy Detection in Commercial and Residential Environments Using Audio Signal]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Occupancy Detection in Commercial and Residential Environments Using Audio Signal</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171247.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-9|PAPER Mon-P-1-4-9 — Joint Learning of Correlated Sequence Labeling Tasks Using Bidirectional Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Learning of Correlated Sequence Labeling Tasks Using Bidirectional Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170066.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-4-3|PAPER Tue-O-4-4-3 — Time-Frequency Masking for Blind Source Separation with Preserved Spatial Cues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time-Frequency Masking for Blind Source Separation with Preserved Spatial Cues</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170537.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-2|PAPER Tue-O-5-2-2 — The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170730.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-4|PAPER Tue-SS-3-11-4 — Automatic Classification of Autistic Child Vocalisations: A Novel Database and Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Classification of Autistic Child Vocalisations: A Novel Database and Results</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-2|PAPER Thu-SS-10-10-2 — An ‘End-to-Evolution’ Hybrid Approach for Snore Sound Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An ‘End-to-Evolution’ Hybrid Approach for Snore Sound Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-3|PAPER Thu-SS-10-10-3 — Snore Sound Classification Using Image-Based Deep Spectrum Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Snore Sound Classification Using Image-Based Deep Spectrum Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170200.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-10-1|PAPER Tue-O-3-10-1 — Efficient Emotion Recognition from Speech Using Deep Learning on Spectrograms]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Efficient Emotion Recognition from Speech Using Deep Learning on Spectrograms</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171592.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-1|PAPER Wed-P-6-3-1 — Query-by-Example Search with Discriminative Neural Acoustic Word Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Query-by-Example Search with Discriminative Neural Acoustic Word Embeddings</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170502.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-8-6|PAPER Thu-O-9-8-6 — Visually Grounded Learning of Keyword Prediction from Untranscribed Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visually Grounded Learning of Keyword Prediction from Untranscribed Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171310.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-1-1|PAPER Mon-O-2-1-1 — Approaches for Neural-Network Language Model Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Approaches for Neural-Network Language Model Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170426.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-10-5|PAPER Wed-O-8-10-5 — Multi-Scale Context Adaptation for Improving Child Automatic Speech Recognition in Child-Adult Spoken Interactions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Scale Context Adaptation for Improving Child Automatic Speech Recognition in Child-Adult Spoken Interactions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171621.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-10|PAPER Wed-P-8-2-10 — Approaching Human Performance in Behavior Estimation in Couples Therapy Using Deep Sentence Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Approaching Human Performance in Behavior Estimation in Couples Therapy Using Deep Sentence Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171735.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-8|PAPER Mon-P-2-1-8 — Predicting Epenthetic Vowel Quality from Acoustics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Epenthetic Vowel Quality from Acoustics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171673.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-15|PAPER Tue-P-5-3-15 — Domain-Specific Utterance End-Point Detection for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Specific Utterance End-Point Detection for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171476.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-5|PAPER Wed-SS-7-1-5 — Unsupervised Speech Signal to Symbol Transformation for Zero Resource Speech Applications]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Speech Signal to Symbol Transformation for Zero Resource Speech Applications</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170553.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-3|PAPER Wed-P-6-2-3 — Conditional Generative Adversarial Nets Classifier for Spoken Language Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Conditional Generative Adversarial Nets Classifier for Spoken Language Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170997.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-11|PAPER Wed-P-6-2-11 — The Opensesame NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Opensesame NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-2|PAPER Mon-SS-2-8-2 — Ensemble Learning for Countermeasure of Audio Replay Spoofing Attack in ASVspoof2017]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ensemble Learning for Countermeasure of Audio Replay Spoofing Attack in ASVspoof2017</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171357.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-4-6|PAPER Thu-O-10-4-6 — Building Audio-Visual Phonetically Annotated Arabic Corpus for Expressive Text to Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Building Audio-Visual Phonetically Annotated Arabic Corpus for Expressive Text to Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171735.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-8|PAPER Mon-P-2-1-8 — Predicting Epenthetic Vowel Quality from Acoustics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Epenthetic Vowel Quality from Acoustics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170634.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-2|PAPER Wed-P-6-3-2 — Constructing Acoustic Distances Between Subwords and States Obtained from a Deep Neural Network for Spoken Term Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Constructing Acoustic Distances Between Subwords and States Obtained from a Deep Neural Network for Spoken Term Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-2|PAPER Mon-P-2-1-2 — Phonetic Restoration of Temporally Reversed Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetic Restoration of Temporally Reversed Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170873.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-3|PAPER Tue-P-4-1-3 — Unfolded Deep Recurrent Convolutional Neural Network with Jump Ahead Connections for Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unfolded Deep Recurrent Convolutional Neural Network with Jump Ahead Connections for Acoustic Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170554.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-4|PAPER Tue-P-4-1-4 — Forward-Backward Convolutional LSTM for Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Forward-Backward Convolutional LSTM for Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170611.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-6-4|PAPER Thu-O-9-6-4 — Exploring Low-Dimensional Structures of Modulation Spectra for Robust Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Low-Dimensional Structures of Modulation Spectra for Robust Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170751.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-3|PAPER Thu-O-10-1-3 — Gaussian Prediction Based Attention for Online End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Gaussian Prediction Based Attention for Online End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170061.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-4|PAPER Wed-O-7-2-4 — Coupled Initialization of Multi-Channel Non-Negative Matrix Factorization Based on Spatial and Spectral Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coupled Initialization of Multi-Channel Non-Negative Matrix Factorization Based on Spatial and Spectral Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-1|PAPER Tue-O-3-8-1 — An RNN-Based Quantized F0 Model with Multi-Tier Feedback Links for Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An RNN-Based Quantized F0 Model with Multi-Tier Feedback Links for Text-to-Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170488.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-1-3|PAPER Tue-O-4-1-3 — Direct Modeling of Frequency Spectra and Waveform Generation Based on Phase Recovery for DNN-Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Direct Modeling of Frequency Spectra and Waveform Generation Based on Phase Recovery for DNN-Based Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170962.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-6|PAPER Wed-P-8-4-6 — Generative Adversarial Network-Based Postfilter for STFT Spectrograms]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generative Adversarial Network-Based Postfilter for STFT Spectrograms</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170584.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-14|PAPER Thu-P-9-4-14 — Complex-Valued Restricted Boltzmann Machine for Direct Learning of Frequency Spectra]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Complex-Valued Restricted Boltzmann Machine for Direct Learning of Frequency Spectra</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171296.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-3|PAPER Tue-O-3-1-3 — Advances in Joint CTC-Attention Based End-to-End Speech Recognition with a Deep CNN Encoder and RNN-LM]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Advances in Joint CTC-Attention Based End-to-End Speech Recognition with a Deep CNN Encoder and RNN-LM</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170061.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-4|PAPER Wed-O-7-2-4 — Coupled Initialization of Multi-Channel Non-Negative Matrix Factorization Based on Spatial and Spectral Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coupled Initialization of Multi-Channel Non-Negative Matrix Factorization Based on Spatial and Spectral Information</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171081.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-3|PAPER Wed-O-7-8-3 — Semi-Supervised Learning of a Pronunciation Dictionary from Disjoint Phonemic Transcripts and Text]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised Learning of a Pronunciation Dictionary from Disjoint Phonemic Transcripts and Text</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170247.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-10-1|PAPER Tue-O-4-10-1 — Voice Conversion Using Sequence-to-Sequence Learning of Context Posterior Probabilities]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Conversion Using Sequence-to-Sequence Learning of Context Posterior Probabilities</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170362.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-2|PAPER Thu-P-9-4-2 — Sampling-Based Speech Parameter Generation Using Moment-Matching Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sampling-Based Speech Parameter Generation Using Moment-Matching Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170642.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-2|PAPER Wed-O-7-2-2 — Combined Multi-Channel NMF-Based Robust Beamforming for Noisy Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Combined Multi-Channel NMF-Based Robust Beamforming for Noisy Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170480.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-4|PAPER Thu-O-9-4-4 — Compressed Time Delay Neural Network for Small-Footprint Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Compressed Time Delay Neural Network for Small-Footprint Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170731.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-2-3|PAPER Thu-O-10-2-3 — i-Vector Transformation Using a Novel Discriminative Denoising Autoencoder for Noise-Robust Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">i-Vector Transformation Using a Novel Discriminative Denoising Autoencoder for Noise-Robust Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171211.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-1|PAPER Thu-SS-10-10-1 — A Dual Source-Filter Model of Snore Audio for Snorer Group Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Dual Source-Filter Model of Snore Audio for Snorer Group Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171182.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-4-3|PAPER Tue-O-3-4-3 — Improved Gender Independent Speaker Recognition Using Convolutional Neural Network Based Bottleneck Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Gender Independent Speaker Recognition Using Convolutional Neural Network Based Bottleneck Features</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170555.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-4|PAPER Tue-O-5-2-4 — UTD-CRSS Systems for 2016 NIST Speaker Recognition Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">UTD-CRSS Systems for 2016 NIST Speaker Recognition Evaluation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-13|PAPER Wed-P-6-2-13 — Locally Weighted Linear Discriminant Analysis for Robust Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Locally Weighted Linear Discriminant Analysis for Robust Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171199.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-2-2|PAPER Thu-O-10-2-2 — Curriculum Learning Based Probabilistic Linear Discriminant Analysis for Noise Robust Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Curriculum Learning Based Probabilistic Linear Discriminant Analysis for Noise Robust Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171672.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-13|PAPER Tue-P-5-4-13 — Speech Enhancement Using Bayesian Wavenet]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Enhancement Using Bayesian Wavenet</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-1|PAPER Mon-P-2-3-1 — Multilingual Recurrent Neural Networks with Residual Learning for Low-Resource Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multilingual Recurrent Neural Networks with Residual Learning for Low-Resource Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-14|PAPER Tue-P-4-3-14 — Analysis of the Relationship Between Prosodic Features of Fillers and its Forms or Occurrence Positions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of the Relationship Between Prosodic Features of Fillers and its Forms or Occurrence Positions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172053.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-6|PAPER Tue-S&T-3-A-6 — MetaLab: A Repository for Meta-Analyses on Language Development, and More]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MetaLab: A Repository for Meta-Analyses on Language Development, and More</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171443.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-5|PAPER Wed-SS-6-11-5 — Top-Down versus Bottom-Up Theories of Phonological Acquisition: A Big Data Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Top-Down versus Bottom-Up Theories of Phonological Acquisition: A Big Data Approach</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171468.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-11-6|PAPER Wed-SS-6-11-6 — Which Acoustic and Phonological Factors Shape Infants’ Vowel Discrimination? Exploiting Natural Variation in InPhonDB]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Which Acoustic and Phonological Factors Shape Infants’ Vowel Discrimination? Exploiting Natural Variation in InPhonDB</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170841.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-5|PAPER Wed-P-8-4-5 — Speaker Dependent Approach for Enhancing a Glossectomy Patient’s Speech via GMM-Based Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Dependent Approach for Enhancing a Glossectomy Patient’s Speech via GMM-Based Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170717.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-8|PAPER Mon-P-1-4-8 — Use of Global and Acoustic Features Associated with Contextual Factors to Adapt Language Models for Spontaneous Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Use of Global and Acoustic Features Associated with Contextual Factors to Adapt Language Models for Spontaneous Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170728.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-2|PAPER Tue-O-5-8-2 — Automatic Scoring of Shadowing Speech Based on DNN Posteriors and Their DTW]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Scoring of Shadowing Speech Based on DNN Posteriors and Their DTW</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171492.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-10|PAPER Tue-P-5-4-10 — Speech Enhancement Using Non-Negative Spectrogram Models with Mel-Generalized Cepstral Regularization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Enhancement Using Non-Negative Spectrogram Models with Mel-Generalized Cepstral Regularization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170170.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-3|PAPER Wed-P-6-4-3 — Predicting Speech Intelligibility Using a Gammachirp Envelope Distortion Index Based on the Signal-to-Distortion Ratio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Speech Intelligibility Using a Gammachirp Envelope Distortion Index Based on the Signal-to-Distortion Ratio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170750.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-6|PAPER Wed-P-6-1-6 — Automatic Explanation Spot Estimation Method Targeted at Text and Figures in Lecture Slides]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Explanation Spot Estimation Method Targeted at Text and Figures in Lecture Slides</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-12|PAPER Wed-P-7-3-12 — Music Tempo Estimation Using Sub-Band Synchrony]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Music Tempo Estimation Using Sub-Band Synchrony</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170400.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-2|PAPER Tue-O-5-4-2 — Speaking Style Conversion from Normal to Lombard Speech Using a Glottal Vocoder and Bayesian GMMs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaking Style Conversion from Normal to Lombard Speech Using a Glottal Vocoder and Bayesian GMMs</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-2|PAPER Wed-P-6-1-2 — Comparison of Non-Parametric Bayesian Mixture Models for Syllable Clustering and Zero-Resource Speech Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Non-Parametric Bayesian Mixture Models for Syllable Clustering and Zero-Resource Speech Processing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170179.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-2|PAPER Mon-O-1-2-2 — Acoustic-Prosodic and Physiological Response to Stressful Interactions in Children with Autism Spectrum Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic-Prosodic and Physiological Response to Stressful Interactions in Children with Autism Spectrum Disorder</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171410.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-4|PAPER Mon-O-1-10-4 — VCV Synthesis Using Task Dynamics to Animate a Factor-Based Articulatory Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VCV Synthesis Using Task Dynamics to Animate a Factor-Based Articulatory Model</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171580.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-2|PAPER Mon-P-2-2-2 — Semantic Edge Detection for Tracking Vocal Tract Air-Tissue Boundaries in Real-Time Magnetic Resonance Images]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Edge Detection for Tracking Vocal Tract Air-Tissue Boundaries in Real-Time Magnetic Resonance Images</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170608.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-5|PAPER Mon-P-2-2-5 — Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171488.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-2-6|PAPER Tue-O-3-2-6 — Test-Retest Repeatability of Articulatory Strategies Using Real-Time Magnetic Resonance Imaging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Test-Retest Repeatability of Articulatory Strategies Using Real-Time Magnetic Resonance Imaging</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170121.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-10-4|PAPER Tue-O-5-10-4 — Transfer Learning Between Concepts for Human Behavior Modeling: An Application to Sincerity and Deception Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning Between Concepts for Human Behavior Modeling: An Application to Sincerity and Deception Prediction</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170219.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-9|PAPER Tue-P-3-2-9 — A Distribution Free Formulation of the Total Variability Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Distribution Free Formulation of the Total Variability Model</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-1|PAPER Wed-SS-6-2-1 — Team ELISA System for DARPA LORELEI Speech Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team ELISA System for DARPA LORELEI Speech Evaluation 2016</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170226.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-3|PAPER Wed-SS-7-1-3 — Extracting Situation Frames from Non-English Speech: Evaluation Framework and Pilot Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extracting Situation Frames from Non-English Speech: Evaluation Framework and Pilot Results</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-3|PAPER Wed-O-6-1-3 — Comparison of Basic Beatboxing Articulations Between Expert and Novice Artists Using Real-Time Magnetic Resonance Imaging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Basic Beatboxing Articulations Between Expert and Novice Artists Using Real-Time Magnetic Resonance Imaging</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171631.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-5|PAPER Wed-O-6-1-5 — Sounds of the Human Vocal Tract]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sounds of the Human Vocal Tract</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170426.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-10-5|PAPER Wed-O-8-10-5 — Multi-Scale Context Adaptation for Improving Child Automatic Speech Recognition in Child-Adult Spoken Interactions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Scale Context Adaptation for Improving Child Automatic Speech Recognition in Child-Adult Spoken Interactions</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170120.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-2|PAPER Wed-P-7-4-2 — An Affect Prediction Approach Through Depression Severity Parameter Incorporation in Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Affect Prediction Approach Through Depression Severity Parameter Incorporation in Neural Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171599.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-11|PAPER Wed-P-7-4-11 — Exploiting Intra-Annotator Rating Consistency Through Copeland’s Method for Estimation of Ground Truth Labels in Couples’ Therapy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploiting Intra-Annotator Rating Consistency Through Copeland’s Method for Estimation of Ground Truth Labels in Couples’ Therapy</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170218.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-2|PAPER Wed-P-8-2-2 — Attention Networks for Modeling Behaviors in Addiction Counseling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention Networks for Modeling Behaviors in Addiction Counseling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171641.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-11|PAPER Wed-P-8-2-11 — Complexity in Speech and its Relation to Emotional Bond in Therapist-Patient Interactions During Suicide Risk Assessment Interviews]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Complexity in Speech and its Relation to Emotional Bond in Therapist-Patient Interactions During Suicide Risk Assessment Interviews</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170230.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-3|PAPER Thu-P-9-1-3 — Global SNR Estimation of Speech Signals for Unknown Noise Conditions Using Noise Adapted Non-Linear Regression]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Global SNR Estimation of Speech Signals for Unknown Noise Conditions Using Noise Adapted Non-Linear Regression</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171125.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-5|PAPER Tue-P-3-1-5 — What Does the Speaker Embedding Encode?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">What Does the Speaker Embedding Encode?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-1|PAPER Mon-P-2-3-1 — Multilingual Recurrent Neural Networks with Residual Learning for Low-Resource Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multilingual Recurrent Neural Networks with Residual Learning for Low-Resource Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170164.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-13|PAPER Thu-P-9-3-13 — Don’t Count on ASR to Transcribe for You: Breaking Bias with Two Crowds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Don’t Count on ASR to Transcribe for You: Breaking Bias with Two Crowds</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170201.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-4|PAPER Mon-P-2-2-4 — An Objective Critical Distance Measure Based on the Relative Level of Spectral Valley]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Objective Critical Distance Measure Based on the Relative Level of Spectral Valley</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171118.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-1-1|PAPER Thu-O-9-1-1 — Multitask Learning with Low-Level Auxiliary Tasks for Encoder-Decoder Based Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multitask Learning with Low-Level Auxiliary Tasks for Encoder-Decoder Based Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170496.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-8|PAPER Tue-P-5-3-8 — Improved End-of-Query Detection for Streaming Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved End-of-Query Detection for Streaming Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170284.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-3|PAPER Thu-O-10-11-3 — Endpoint Detection Using Grid Long Short-Term Memory Networks for Streaming Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Endpoint Detection Using Grid Long Short-Term Memory Networks for Streaming Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170143.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-6|PAPER Tue-P-5-1-6 — Mechanisms of Tone Sandhi Rule Application by Non-Native Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mechanisms of Tone Sandhi Rule Application by Non-Native Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171242.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-3|PAPER Mon-P-2-3-3 — An Investigation of Deep Neural Networks for Multilingual Speech Recognition Training and Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Investigation of Deep Neural Networks for Multilingual Speech Recognition Training and Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171576.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-4|PAPER Wed-O-6-1-4 — Speaker-Specific Biomechanical Model-Based Investigation of a Simple Speech Task Based on Tagged-MRI]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker-Specific Biomechanical Model-Based Investigation of a Simple Speech Task Based on Tagged-MRI</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170844.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-3|PAPER Thu-SS-9-11-3 — A Fast Robust 1D Flow Model for a Self-Oscillating Coupled 2D FEM Vocal Fold Simulation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Fast Robust 1D Flow Model for a Self-Oscillating Coupled 2D FEM Vocal Fold Simulation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171528.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-6|PAPER Tue-O-3-8-6 — Increasing Recall of Lengthening Detection via Semi-Automatic Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Increasing Recall of Lengthening Detection via Semi-Automatic Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171254.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-4-5|PAPER Wed-O-6-4-5 — Low-Complexity Pitch Estimation Based on Phase Differences Between Low-Resolution Spectra]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low-Complexity Pitch Estimation Based on Phase Differences Between Low-Resolution Spectra</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170628.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-1-4|PAPER Tue-O-4-1-4 — A Hierarchical Encoder-Decoder Model for Statistical Parametric Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Hierarchical Encoder-Decoder Model for Statistical Parametric Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171647.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-6|PAPER Tue-O-5-4-6 — Direct Modelling of Magnitude and Phase Spectra for Statistical Parametric Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Direct Modelling of Magnitude and Phase Spectra for Statistical Parametric Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171398.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-16|PAPER Wed-SS-7-1-16 — Nativization of Foreign Names in TTS for Automatic Reading of World News in Swahili]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nativization of Foreign Names in TTS for Automatic Reading of World News in Swahili</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171352.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-13|PAPER Wed-SS-7-1-13 — Building ASR Corpora Using Eyra]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Building ASR Corpora Using Eyra</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-1|PAPER Wed-P-7-3-1 — Sinusoidal Partials Tracking for Singing Analysis Using the Heuristic of the Minimal Frequency and Magnitude Difference]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sinusoidal Partials Tracking for Singing Analysis Using the Heuristic of the Minimal Frequency and Magnitude Difference</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170382.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-3|PAPER Mon-P-1-1-3 — A Time-Warping Pitch Tracking Algorithm Considering Fast f₀ Changes]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Time-Warping Pitch Tracking Algorithm Considering Fast f₀ Changes</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-10-5|PAPER Tue-O-5-10-5 — The Sound of Deception — What Makes a Speaker Credible?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Sound of Deception — What Makes a Speaker Credible?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170409.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-5|PAPER Wed-P-7-4-5 — Emotional Speech of Mentally and Physically Disabled Individuals: Introducing the EmotAsS Database and First Findings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotional Speech of Mentally and Physically Disabled Individuals: Introducing the EmotAsS Database and First Findings</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170104.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-1|PAPER Wed-P-8-2-1 — The Perception of Emotions in Noisified Nonsense Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Perception of Emotions in Noisified Nonsense Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170406.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-15|PAPER Thu-P-9-3-15 — Towards Intelligent Crowdsourcing for Audio Data Annotation: Integrating Active Learning in the Real World]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Intelligent Crowdsourcing for Audio Data Annotation: Integrating Active Learning in the Real World</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171528.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-6|PAPER Tue-O-3-8-6 — Increasing Recall of Lengthening Detection via Semi-Automatic Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Increasing Recall of Lengthening Detection via Semi-Automatic Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170199.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-5|PAPER Tue-P-5-3-5 — Weighted Spatial Covariance Matrix Estimation for MUSIC Based TDOA Estimation of Speech Source]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weighted Spatial Covariance Matrix Estimation for MUSIC Based TDOA Estimation of Speech Source</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170604.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-6-3|PAPER Tue-O-3-6-3 — Acoustic Characterization of Word-Final Glottal Stops in Mizo and Assam Sora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Characterization of Word-Final Glottal Stops in Mizo and Assam Sora</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-5|PAPER Tue-P-5-2-5 — Hypernasality Severity Analysis in Cleft Lip and Palate Speech Using Vowel Space Area]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hypernasality Severity Analysis in Cleft Lip and Palate Speech Using Vowel Space Area</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-6|PAPER Mon-SS-2-8-6 — SFF Anti-Spoofer: IIIT-H Submission for Automatic Speaker Verification Spoofing and Countermeasures Challenge 2017]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SFF Anti-Spoofer: IIIT-H Submission for Automatic Speaker Verification Spoofing and Countermeasures Challenge 2017</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170256.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-1|PAPER Wed-O-8-1-1 — Detection of Replay Attacks Using Single Frequency Filtering Cepstral Coefficients]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Replay Attacks Using Single Frequency Filtering Cepstral Coefficients</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170070.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-4-2|PAPER Mon-O-2-4-2 — An Investigation of Crowd Speech for Room Occupancy Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Investigation of Crowd Speech for Room Occupancy Estimation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170300.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-4|PAPER Wed-SS-6-2-4 — On the Linguistic Relevance of Speech Units Learned by Unsupervised Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Linguistic Relevance of Speech Units Learned by Unsupervised Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171202.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-4|PAPER Tue-O-5-4-4 — Semi Parametric Concatenative TTS with Instant Voice Modification Capabilities]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi Parametric Concatenative TTS with Instant Voice Modification Capabilities</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171035.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-10-3|PAPER Tue-O-5-10-3 — Opinion Dynamics Modeling for Movie Review Transcripts Classification with Hidden Conditional Random Fields]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Opinion Dynamics Modeling for Movie Review Transcripts Classification with Hidden Conditional Random Fields</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171634.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-11-5|PAPER Wed-SS-7-11-5 — Approximating Phonotactic Input in Children’s Linguistic Environments from Orthographic Transcripts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Approximating Phonotactic Input in Children’s Linguistic Environments from Orthographic Transcripts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171237.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-8|PAPER Wed-P-8-1-8 — Evaluation of Spectral Tilt Measures for Sentence Prominence Under Different Noise Conditions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluation of Spectral Tilt Measures for Sentence Prominence Under Different Noise Conditions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171637.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-10-3|PAPER Tue-O-3-10-3 — Progressive Neural Networks for Transfer Learning in Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Progressive Neural Networks for Transfer Learning in Emotion Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170548.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-4|PAPER Tue-O-4-8-4 — Capturing Long-Term Temporal Dependencies with Convolutional Networks for Continuous Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Capturing Long-Term Temporal Dependencies with Convolutional Networks for Continuous Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170152.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-9|PAPER Wed-P-6-2-9 — Homogeneity Measure Impact on Target and Non-Target Trials in Forensic Voice Comparison]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Homogeneity Measure Impact on Target and Non-Target Trials in Forensic Voice Comparison</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172031.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-2|PAPER Wed-S&T-6-B-2 — A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170157.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-10|PAPER Tue-P-3-1-10 — Using Voice Quality Features to Improve Short-Utterance, Text-Independent Speaker Verification Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Voice Quality Features to Improve Short-Utterance, Text-Independent Speaker Verification Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171482.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-3|PAPER Wed-O-7-4-3 — Investigating the Effect of ASR Tuning on Named Entity Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Effect of ASR Tuning on Named Entity Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171653.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-6-6|PAPER Thu-O-9-6-6 — Concatenative Resynthesis Using Twin Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Concatenative Resynthesis Using Twin Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170057.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-6-1|PAPER Thu-O-9-6-1 — Deep Recurrent Neural Network Based Monaural Speech Separation Using Recurrent Temporal Restricted Boltzmann Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Recurrent Neural Network Based Monaural Speech Separation Using Recurrent Temporal Restricted Boltzmann Machines</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170480.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-4|PAPER Thu-O-9-4-4 — Compressed Time Delay Neural Network for Small-Footprint Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Compressed Time Delay Neural Network for Small-Footprint Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170203.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-1|PAPER Tue-O-5-2-1 — The I4U Mega Fusion and Collaboration for NIST Speaker Recognition Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The I4U Mega Fusion and Collaboration for NIST Speaker Recognition Evaluation 2016</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-6|PAPER Wed-O-7-2-6 — Robust Speech Recognition via Anchor Word Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Speech Recognition via Anchor Word Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171342.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-4|PAPER Wed-O-6-10-4 — Robust Online i-Vectors for Unsupervised Adaptation of DNN Acoustic Models: A Study in the Context of Digital Voice Assistants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Online i-Vectors for Unsupervised Adaptation of DNN Acoustic Models: A Study in the Context of Digital Voice Assistants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170668.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-10|PAPER Tue-P-3-2-10 — Domain Mismatch Modeling of Out-Domain i-Vectors for PLDA Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain Mismatch Modeling of Out-Domain i-Vectors for PLDA Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171419.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-2|PAPER Tue-P-3-1-2 — Content Normalization for Text-Dependent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Content Normalization for Text-Dependent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170628.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-1-4|PAPER Tue-O-4-1-4 — A Hierarchical Encoder-Decoder Model for Statistical Parametric Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Hierarchical Encoder-Decoder Model for Statistical Parametric Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171494.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-10-4|PAPER Tue-O-3-10-4 — Jointly Predicting Arousal, Valence and Dominance with Multi-Task Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Jointly Predicting Arousal, Valence and Dominance with Multi-Task Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171550.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-8|PAPER Thu-SS-9-10-8 — Phoneme State Posteriorgram Features for Speech Based Automatic Classification of Speakers in Cold and Healthy Condition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phoneme State Posteriorgram Features for Speech Based Automatic Classification of Speakers in Cold and Healthy Condition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170901.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-1|PAPER Wed-O-7-2-1 — Speech Representation Learning Using Unsupervised Data-Driven Modulation Filtering for Robust ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Representation Learning Using Unsupervised Data-Driven Modulation Filtering for Robust ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171307.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-12|PAPER Wed-P-6-2-12 — IITG-Indigo System for NIST 2016 SRE Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">IITG-Indigo System for NIST 2016 SRE Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172029.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-1|PAPER Wed-S&T-6-B-1 — Integrating the Talkamatic Dialogue Manager with Alexa]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Integrating the Talkamatic Dialogue Manager with Alexa</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170776.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-6|PAPER Mon-SS-1-8-6 — Audio Replay Attack Detection Using High-Frequency Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Replay Attack Detection Using High-Frequency Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170453.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-6-2|PAPER Wed-O-6-6-2 — Prosodic Facilitation and Interference While Judging on the Veracity of Synthesized Statements]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Facilitation and Interference While Judging on the Veracity of Synthesized Statements</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170722.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-9|PAPER Wed-P-7-2-9 — Kinematic Signatures of Prosody in Lombard Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Kinematic Signatures of Prosody in Lombard Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171572.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-10|PAPER Wed-P-7-4-10 — An N-Gram Based Approach to the Automatic Diagnosis of Alzheimer’s Disease from Spoken Language]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An N-Gram Based Approach to the Automatic Diagnosis of Alzheimer’s Disease from Spoken Language</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171382.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-11|PAPER Wed-P-8-3-11 — OpenMM: An Open-Source Multimodal Feature Extraction Tool]]</div>|^<div class="cpauthorindexpersoncardpapertitle">OpenMM: An Open-Source Multimodal Feature Extraction Tool</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-7|PAPER Thu-SS-10-10-7 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: A Summary of Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: A Summary of Results</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171032.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-13|PAPER Tue-P-4-3-13 — Domain-Independent User Satisfaction Reward Estimation for Dialogue Policy Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Independent User Satisfaction Reward Estimation for Dialogue Policy Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171239.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-5|PAPER Thu-SS-9-11-5 — A Unified Numerical Simulation of Vowel Production That Comprises Phonation and the Emitted Sound]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Unified Numerical Simulation of Vowel Production That Comprises Phonation and the Emitted Sound</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-9|PAPER Wed-P-8-3-9 — Online Adaptation of an Attention-Based Neural Network for Natural Language Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online Adaptation of an Attention-Based Neural Network for Natural Language Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170216.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-3|PAPER Wed-P-7-4-3 — Cross-Database Models for the Classification of Dysarthria Presence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Database Models for the Classification of Dysarthria Presence</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171521.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-6|PAPER Wed-O-8-1-6 — Call My Net Corpus: A Multilingual Corpus for Evaluation of Speaker Recognition Technology]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Call My Net Corpus: A Multilingual Corpus for Evaluation of Speaker Recognition Technology</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170461.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-4-1|PAPER Mon-O-1-4-1 — Improving Speaker Verification for Reverberant Conditions with Deep Neural Network Dereverberation Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Speaker Verification for Reverberant Conditions with Deep Neural Network Dereverberation Processing</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171712.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-6|PAPER Wed-O-8-8-6 — Speech Processing Approach for Diagnosing Dementia in an Early Stage]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Processing Approach for Diagnosing Dementia in an Early Stage</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170894.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-8|PAPER Thu-P-9-4-8 — Predicting Head Pose from Speech with a Conditional Variational Autoencoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Head Pose from Speech with a Conditional Variational Autoencoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170802.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-7|PAPER Thu-P-9-4-7 — Evaluation of a Silent Speech Interface Based on Magnetic Sensing and Deep Learning for a Phonetically Rich Vocabulary]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluation of a Silent Speech Interface Based on Magnetic Sensing and Deep Learning for a Phonetically Rich Vocabulary</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171179.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-4-1|PAPER Mon-O-2-4-1 — Phone Classification Using a Non-Linear Manifold with Broad Phone Class Dependent DNNs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phone Classification Using a Non-Linear Manifold with Broad Phone Class Dependent DNNs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171365.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-10|PAPER Mon-P-2-3-10 — Factorised Representations for Neural Network Adaptation to Diverse Acoustic Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Factorised Representations for Neural Network Adaptation to Diverse Acoustic Environments</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170392.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-10|PAPER Wed-P-6-3-10 — Hierarchical Recurrent Neural Network for Story Segmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hierarchical Recurrent Neural Network for Story Segmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171032.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-13|PAPER Tue-P-4-3-13 — Domain-Independent User Satisfaction Reward Estimation for Dialogue Policy Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Independent User Satisfaction Reward Estimation for Dialogue Policy Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170852.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-3|PAPER Mon-O-2-10-3 — Factorial Modeling for Effective Suppression of Directional Noise]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Factorial Modeling for Effective Suppression of Directional Noise</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171174.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-3|PAPER Tue-O-5-8-3 — Off-Topic Spoken Response Detection Using Siamese Convolutional Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Off-Topic Spoken Response Detection Using Siamese Convolutional Neural Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170388.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-4|PAPER Wed-P-6-1-4 — Off-Topic Spoken Response Detection with Word Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Off-Topic Spoken Response Detection with Word Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171419.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-2|PAPER Tue-P-3-1-2 — Content Normalization for Text-Dependent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Content Normalization for Text-Dependent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171031.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-6-1|PAPER Mon-O-2-6-1 — End-to-End Acoustic Feedback in Language Learning for Correcting Devoiced French Final-Fricatives]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Acoustic Feedback in Language Learning for Correcting Devoiced French Final-Fricatives</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-6|PAPER Mon-SS-2-8-6 — SFF Anti-Spoofer: IIIT-H Submission for Automatic Speaker Verification Spoofing and Countermeasures Challenge 2017]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SFF Anti-Spoofer: IIIT-H Submission for Automatic Speaker Verification Spoofing and Countermeasures Challenge 2017</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170256.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-1|PAPER Wed-O-8-1-1 — Detection of Replay Attacks Using Single Frequency Filtering Cepstral Coefficients]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Replay Attacks Using Single Frequency Filtering Cepstral Coefficients</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171027.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-11|PAPER Wed-P-7-2-11 — Locating Burst Onsets Using SFF Envelope and Phase Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Locating Burst Onsets Using SFF Envelope and Phase Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170428.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-3|PAPER Thu-P-9-4-3 — Unit Selection with Hierarchical Cascaded Long Short Term Memory Bidirectional Recurrent Neural Nets]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unit Selection with Hierarchical Cascaded Long Short Term Memory Bidirectional Recurrent Neural Nets</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170982.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-6|PAPER Mon-P-1-2-6 — Audio Classification Using Class-Specific Learned Descriptors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Classification Using Class-Specific Learned Descriptors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170057.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-6-1|PAPER Thu-O-9-6-1 — Deep Recurrent Neural Network Based Monaural Speech Separation Using Recurrent Temporal Restricted Boltzmann Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Recurrent Neural Network Based Monaural Speech Separation Using Recurrent Temporal Restricted Boltzmann Machines</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170841.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-5|PAPER Wed-P-8-4-5 — Speaker Dependent Approach for Enhancing a Glossectomy Patient’s Speech via GMM-Based Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Dependent Approach for Enhancing a Glossectomy Patient’s Speech via GMM-Based Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-5|PAPER Mon-SS-1-11-5 — Speech Synthesis for Mixed-Language Navigation Instructions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Synthesis for Mixed-Language Navigation Instructions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-12|PAPER Tue-P-3-1-12 — Joint Training of Expanded End-to-End DNN for Text-Dependent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Training of Expanded End-to-End DNN for Text-Dependent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170179.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-2|PAPER Mon-O-1-2-2 — Acoustic-Prosodic and Physiological Response to Stressful Interactions in Children with Autism Spectrum Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic-Prosodic and Physiological Response to Stressful Interactions in Children with Autism Spectrum Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170923.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-5|PAPER Wed-P-6-2-5 — Speaker Clustering by Iteratively Finding Discriminative Feature Space and Cluster Labels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Clustering by Iteratively Finding Discriminative Feature Space and Cluster Labels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172058.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-A-6|PAPER Mon-S&T-2-A-6 — MoPAReST — Mobile Phone Assisted Remote Speech Therapy Platform]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MoPAReST — Mobile Phone Assisted Remote Speech Therapy Platform</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171318.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-10|PAPER Tue-P-5-2-10 — Deep Autoencoder Based Speech Features for Improved Dysarthric Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Autoencoder Based Speech Features for Improved Dysarthric Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171374.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-15|PAPER Mon-P-2-2-15 — Accurate Synchronization of Speech and EGG Signal Using Phase Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Accurate Synchronization of Speech and EGG Signal Using Phase Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-6|PAPER Mon-SS-2-8-6 — SFF Anti-Spoofer: IIIT-H Submission for Automatic Speaker Verification Spoofing and Countermeasures Challenge 2017]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SFF Anti-Spoofer: IIIT-H Submission for Automatic Speaker Verification Spoofing and Countermeasures Challenge 2017</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170256.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-1|PAPER Wed-O-8-1-1 — Detection of Replay Attacks Using Single Frequency Filtering Cepstral Coefficients]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Replay Attacks Using Single Frequency Filtering Cepstral Coefficients</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171027.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-11|PAPER Wed-P-7-2-11 — Locating Burst Onsets Using SFF Envelope and Phase Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Locating Burst Onsets Using SFF Envelope and Phase Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170563.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-8-2|PAPER Wed-O-6-8-2 — Computing Multimodal Dyadic Behaviors During Spontaneous Diagnosis Interviews Toward Automatic Categorization of Autism Spectrum Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Computing Multimodal Dyadic Behaviors During Spontaneous Diagnosis Interviews Toward Automatic Categorization of Autism Spectrum Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171568.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-8|PAPER Tue-P-4-3-8 — Entrainment in Multi-Party Spoken Dialogues at Multiple Linguistic Levels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Entrainment in Multi-Party Spoken Dialogues at Multiple Linguistic Levels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171134.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-6-6|PAPER Tue-O-5-6-6 — The Formant Dynamics of Long Close Vowels in Three Varieties of Swedish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Formant Dynamics of Long Close Vowels in Three Varieties of Swedish</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170449.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-4|PAPER Thu-P-9-3-4 — Effect of Language, Speaking Style and Speaker on Long-Term F0 Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effect of Language, Speaking Style and Speaker on Long-Term F0 Estimation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170049.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-4-4|PAPER Tue-O-3-4-4 — Autoencoder Based Domain Adaptation for Speaker Recognition Under Insufficient Channel Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Autoencoder Based Domain Adaptation for Speaker Recognition Under Insufficient Channel Information</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170545.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-14|PAPER Wed-P-6-2-14 — Recursive Whitening Transformation for Speaker Recognition on Language Mismatched Condition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Recursive Whitening Transformation for Speaker Recognition on Language Mismatched Condition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171536.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-8|PAPER Thu-P-9-1-8 — End-to-End Speech Recognition with Auditory Attention for Multi-Microphone Distance Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Speech Recognition with Auditory Attention for Multi-Microphone Distance Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-5|PAPER Mon-O-2-2-5 — Earlier Identification of Children with Autism Spectrum Disorder: An Automatic Vocalisation-Based Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Earlier Identification of Children with Autism Spectrum Disorder: An Automatic Vocalisation-Based Approach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172048.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-A-2|PAPER Mon-S&T-2-A-2 — ChunkitApp: Investigating the Relevant Units of Online Speech Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ChunkitApp: Investigating the Relevant Units of Online Speech Processing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170537.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-2|PAPER Tue-O-5-2-2 — The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171413.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-15|PAPER Tue-P-4-3-15 — Cross-Subject Continuous Emotion Recognition Using Speech and Body Motion in Dyadic Interactions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Subject Continuous Emotion Recognition Using Speech and Body Motion in Dyadic Interactions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170270.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-2-3|PAPER Thu-O-9-2-3 — A Triplet Ranking-Based Neural Network for Speaker Diarization and Linking]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Triplet Ranking-Based Neural Network for Speaker Diarization and Linking</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-5|PAPER Tue-S&T-3-A-5 — Speech Recognition and Understanding on Hardware-Accelerated DSP]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Recognition and Understanding on Hardware-Accelerated DSP</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171154.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-3|PAPER Wed-P-7-2-3 — Vowel and Consonant Sequences in three Bavarian Dialects of Austria]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Vowel and Consonant Sequences in three Bavarian Dialects of Austria</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170268.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-6|PAPER Wed-SS-7-1-6 — Machine Assisted Analysis of Vowel Length Contrasts in Wolof]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Machine Assisted Analysis of Vowel Length Contrasts in Wolof</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170781.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-7|PAPER Mon-P-1-1-7 — Wavelet Speech Enhancement Based on Robust Principal Component Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Wavelet Speech Enhancement Based on Robust Principal Component Analysis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-1|PAPER Tue-P-5-4-1 — A Post-Filtering Approach Based on Locally Linear Embedding Difference Compensation for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Post-Filtering Approach Based on Locally Linear Embedding Difference Compensation for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172002.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-S&T-9-A-1|PAPER Thu-S&T-9-A-1 — Soundtracing for Realtime Speech Adjustment to Environmental Conditions in 3D Simulations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Soundtracing for Realtime Speech Adjustment to Environmental Conditions in 3D Simulations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170201.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-4|PAPER Mon-P-2-2-4 — An Objective Critical Distance Measure Based on the Relative Level of Spectral Valley]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Objective Critical Distance Measure Based on the Relative Level of Spectral Valley</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170466.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-3|PAPER Wed-P-8-2-3 — Computational Analysis of Acoustic Descriptors in Psychotic Patients]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Computational Analysis of Acoustic Descriptors in Psychotic Patients</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170923.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-5|PAPER Wed-P-6-2-5 — Speaker Clustering by Iteratively Finding Discriminative Feature Space and Cluster Labels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Clustering by Iteratively Finding Discriminative Feature Space and Cluster Labels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-6|PAPER Wed-O-6-10-6 — Dynamic Layer Normalization for Adaptive Neural Acoustic Modeling in Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Layer Normalization for Adaptive Neural Acoustic Modeling in Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170858.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-4-6|PAPER Mon-O-1-4-6 — Simulations of High-Frequency Vocoder on Mandarin Speech Recognition for Acoustic Hearing Preserved Cochlear Implant]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Simulations of High-Frequency Vocoder on Mandarin Speech Recognition for Acoustic Hearing Preserved Cochlear Implant</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170521.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-3|PAPER Mon-P-2-4-3 — Prosody Aware Word-Level Encoder Based on BLSTM-RNNs for DNN-Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosody Aware Word-Level Encoder Based on BLSTM-RNNs for DNN-Based Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170651.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-1|PAPER Tue-P-4-3-1 — Online End-of-Turn Detection from Speech Based on Stacked Time-Asynchronous Sequential Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online End-of-Turn Detection from Speech Based on Stacked Time-Asynchronous Sequential Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171296.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-3|PAPER Tue-O-3-1-3 — Advances in Joint CTC-Attention Based End-to-End Speech Recognition with a Deep CNN Encoder and RNN-LM]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Advances in Joint CTC-Attention Based End-to-End Speech Recognition with a Deep CNN Encoder and RNN-LM</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170731.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-2-3|PAPER Thu-O-10-2-3 — i-Vector Transformation Using a Novel Discriminative Denoising Autoencoder for Noise-Robust Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">i-Vector Transformation Using a Novel Discriminative Denoising Autoencoder for Noise-Robust Speaker Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170727.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-2-4|PAPER Thu-O-10-2-4 — Unsupervised Discriminative Training of PLDA for Domain Adaptation in Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Discriminative Training of PLDA for Domain Adaptation in Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171081.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-3|PAPER Wed-O-7-8-3 — Semi-Supervised Learning of a Pronunciation Dictionary from Disjoint Phonemic Transcripts and Text]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised Learning of a Pronunciation Dictionary from Disjoint Phonemic Transcripts and Text</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170061.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-4|PAPER Wed-O-7-2-4 — Coupled Initialization of Multi-Channel Non-Negative Matrix Factorization Based on Spatial and Spectral Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coupled Initialization of Multi-Channel Non-Negative Matrix Factorization Based on Spatial and Spectral Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170852.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-3|PAPER Mon-O-2-10-3 — Factorial Modeling for Effective Suppression of Directional Noise]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Factorial Modeling for Effective Suppression of Directional Noise</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170920.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-7|PAPER Tue-P-4-1-7 — Ensembles of Multi-Scale VGG Acoustic Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ensembles of Multi-Scale VGG Acoustic Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170614.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-4|PAPER Thu-O-10-1-4 — Efficient Knowledge Distillation from an Ensemble of Teachers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Efficient Knowledge Distillation from an Ensemble of Teachers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170631.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-4-11-3|PAPER Tue-SS-4-11-3 — Motion Analysis in Vocalized Surprise Expressions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Motion Analysis in Vocalized Surprise Expressions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170944.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-4-2|PAPER Wed-O-8-4-2 — Structured-Based Curriculum Learning for End-to-End English-Japanese Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Structured-Based Curriculum Learning for End-to-End English-Japanese Speech Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170617.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-2-3|PAPER Tue-O-3-2-3 — Integrated Mechanical Model of [r]-[l] and [b]-[m]-[w] Producing Consonant Cluster [br].]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Integrated Mechanical Model of [r]-[l] and [b]-[m]-[w] Producing Consonant Cluster [br].</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172027.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-S&T-9-A-2|PAPER Thu-S&T-9-A-2 — Vocal-Tract Model with Static Articulators: Lips, Teeth, Tongue, and More]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Vocal-Tract Model with Static Articulators: Lips, Teeth, Tongue, and More</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170139.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-10|PAPER Mon-P-2-2-10 — Event-Related Potentials Associated with Somatosensory Effect in Audio-Visual Speech Perception]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Event-Related Potentials Associated with Somatosensory Effect in Audio-Visual Speech Perception</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170837.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-5|PAPER Tue-P-4-3-5 — End-of-Utterance Prediction by Prosodic Features and Phrase-Dependency Structure in Spontaneous Japanese Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-of-Utterance Prediction by Prosodic Features and Phrase-Dependency Structure in Spontaneous Japanese Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170900.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-2|PAPER Mon-O-1-10-2 — Articulatory Text-to-Speech Synthesis Using the Digital Waveguide Mesh Driven by a Deep Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulatory Text-to-Speech Synthesis Using the Digital Waveguide Mesh Driven by a Deep Neural Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172035.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-S&T-2-B-1|PAPER Mon-S&T-2-B-1 — An Apparatus to Investigate Western Opera Singing Skill Learning Using Performance and Result Biofeedback, and Measuring its Neural Correlates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Apparatus to Investigate Western Opera Singing Skill Learning Using Performance and Result Biofeedback, and Measuring its Neural Correlates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170970.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-10-4|PAPER Tue-O-4-10-4 — Sequence-to-Sequence Voice Conversion with Similarity Metric Learned Using Generative Adversarial Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sequence-to-Sequence Voice Conversion with Similarity Metric Learned Using Generative Adversarial Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170962.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-6|PAPER Wed-P-8-4-6 — Generative Adversarial Network-Based Postfilter for STFT Spectrograms]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generative Adversarial Network-Based Postfilter for STFT Spectrograms</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170733.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-2|PAPER Mon-O-2-10-2 — Neural Network-Based Spectrum Estimation for Online WPE Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Network-Based Spectrum Estimation for Online WPE Dereverberation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170721.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-4-2|PAPER Tue-O-4-4-2 — Deep Clustering-Based Beamforming for Separation with Unknown Number of Sources]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Clustering-Based Beamforming for Separation with Unknown Number of Sources</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170667.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-6-2|PAPER Wed-O-8-6-2 — Speaker-Aware Neural Network Based Beamformer for Speaker Extraction in Speech Mixtures]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker-Aware Neural Network Based Beamformer for Speaker Extraction in Speech Mixtures</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170678.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-6|PAPER Mon-P-1-1-6 — Time-Domain Envelope Modulating the Noise Component of Excitation in a Continuous Residual-Based Vocoder for Statistical Parametric Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time-Domain Envelope Modulating the Noise Component of Excitation in a Continuous Residual-Based Vocoder for Statistical Parametric Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170939.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-8-5|PAPER Thu-O-9-8-5 — DNN-Based Ultrasound-to-Speech Conversion for a Silent Speech Interface]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DNN-Based Ultrasound-to-Speech Conversion for a Silent Speech Interface</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170338.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-8|PAPER Tue-P-4-1-8 — Training Context-Dependent DNN Acoustic Models Using Probabilistic Sampling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Training Context-Dependent DNN Acoustic Models Using Probabilistic Sampling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170899.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-9|PAPER Tue-P-4-1-9 — A Comparative Evaluation of GMM-Free State Tying Methods for ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparative Evaluation of GMM-Free State Tying Methods for ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170905.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-5|PAPER Thu-SS-10-10-5 — DNN-Based Feature Extraction and Classifier Combination for Child-Directed Speech, Cold and Snoring Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DNN-Based Feature Extraction and Classifier Combination for Child-Directed Speech, Cold and Snoring Identification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170939.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-8-5|PAPER Thu-O-9-8-5 — DNN-Based Ultrasound-to-Speech Conversion for a Silent Speech Interface]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DNN-Based Ultrasound-to-Speech Conversion for a Silent Speech Interface</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170300.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-4|PAPER Wed-SS-6-2-4 — On the Linguistic Relevance of Speech Units Learned by Unsupervised Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Linguistic Relevance of Speech Units Learned by Unsupervised Acoustic Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170368.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-3|PAPER Wed-O-6-10-3 — RNN-LDA Clustering for Feature Based DNN Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">RNN-LDA Clustering for Feature Based DNN Adaptation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-1|PAPER Wed-O-8-8-1 — Acoustic Assessment of Disordered Voice with Continuous Speech Based on Utterance-Level ASR Posterior Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Assessment of Disordered Voice with Continuous Speech Based on Utterance-Level ASR Posterior Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-12|PAPER Wed-P-7-3-12 — Music Tempo Estimation Using Sub-Band Synchrony]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Music Tempo Estimation Using Sub-Band Synchrony</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170928.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-12|PAPER Wed-SS-7-1-12 — Implementation of a Radiology Speech Recognition System for Estonian Using Open Source Software]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Implementation of a Radiology Speech Recognition System for Estonian Using Open Source Software</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170112.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-1|PAPER Wed-P-7-4-1 — Manual and Automatic Transcriptions in Dementia Detection from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Manual and Automatic Transcriptions in Dementia Detection from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171410.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-4|PAPER Mon-O-1-10-4 — VCV Synthesis Using Task Dynamics to Animate a Factor-Based Articulatory Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VCV Synthesis Using Task Dynamics to Animate a Factor-Based Articulatory Model</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170608.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-5|PAPER Mon-P-2-2-5 — Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171488.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-2-6|PAPER Tue-O-3-2-6 — Test-Retest Repeatability of Articulatory Strategies Using Real-Time Magnetic Resonance Imaging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Test-Retest Repeatability of Articulatory Strategies Using Real-Time Magnetic Resonance Imaging</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171374.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-15|PAPER Mon-P-2-2-15 — Accurate Synchronization of Speech and EGG Signal Using Phase Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Accurate Synchronization of Speech and EGG Signal Using Phase Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171362.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-3|PAPER Mon-SS-1-8-3 — Novel Variable Length Teager Energy Separation Based Instantaneous Frequency Features for Replay Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Novel Variable Length Teager Energy Separation Based Instantaneous Frequency Features for Replay Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171510.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-1|PAPER Mon-O-2-10-1 — Generation of Large-Scale Simulated Utterances in Virtual Rooms to Train Deep-Neural Networks for Far-Field Speech Recognition in Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generation of Large-Scale Simulated Utterances in Virtual Rooms to Train Deep-Neural Networks for Far-Field Speech Recognition in Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-5|PAPER Mon-O-2-10-5 — Acoustic Modeling for Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Modeling for Google Home</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170233.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-1|PAPER Tue-O-3-1-1 — A Comparison of Sequence-to-Sequence Models for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Sequence-to-Sequence Models for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171164.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-6|PAPER Tue-O-3-1-6 — Reducing the Computational Complexity of Two-Dimensional LSTMs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reducing the Computational Complexity of Two-Dimensional LSTMs</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170429.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-1-2|PAPER Tue-O-5-1-2 — Highway-LSTM and Recurrent Highway Networks for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Highway-LSTM and Recurrent Highway Networks for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170231.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-1-3|PAPER Thu-O-9-1-3 — Annealed f-Smoothing as a Mechanism to Speed up Neural Network Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Annealed f-Smoothing as a Mechanism to Speed up Neural Network Training</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170232.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-1-5|PAPER Thu-O-10-1-5 — An Analysis of “Attention” in Sequence-to-Sequence Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Analysis of “Attention” in Sequence-to-Sequence Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170284.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-3|PAPER Thu-O-10-11-3 — Endpoint Detection Using Grid Long Short-Term Memory Networks for Streaming Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Endpoint Detection Using Grid Long Short-Term Memory Networks for Streaming Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170855.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-4|PAPER Wed-SS-7-1-4 — Eliciting Meaningful Units from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Eliciting Meaningful Units from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170457.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-7|PAPER Tue-P-4-3-7 — Social Signal Detection in Spontaneous Dialogue Using Bidirectional LSTM-CTC]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Social Signal Detection in Spontaneous Dialogue Using Bidirectional LSTM-CTC</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-14|PAPER Tue-P-4-3-14 — Analysis of the Relationship Between Prosodic Features of Fillers and its Forms or Occurrence Positions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of the Relationship Between Prosodic Features of Fillers and its Forms or Occurrence Positions</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170642.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-2|PAPER Wed-O-7-2-2 — Combined Multi-Channel NMF-Based Robust Beamforming for Noisy Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Combined Multi-Channel NMF-Based Robust Beamforming for Noisy Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170107.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-1|PAPER Thu-SS-9-11-1 — Acoustic Analysis of Detailed Three-Dimensional Shape of the Human Nasal Cavity and Paranasal Sinuses]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Analysis of Detailed Three-Dimensional Shape of the Human Nasal Cavity and Paranasal Sinuses</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170431.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-8|PAPER Wed-P-7-3-8 — Acoustic Scene Classification Using a CNN-SuperVector System Trained with Auditory and Spectrogram Image Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Scene Classification Using a CNN-SuperVector System Trained with Auditory and Spectrogram Image Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171762.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-6|PAPER Mon-O-1-10-6 — Integrating Articulatory Information in Deep Learning-Based Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Integrating Articulatory Information in Deep Learning-Based Text-to-Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170952.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-7|PAPER Wed-P-6-1-7 — Multiview Representation Learning via Deep CCA for Silent Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multiview Representation Learning via Deep CCA for Silent Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171009.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-10|PAPER Wed-SS-7-1-10 — Transfer Learning and Distillation Techniques to Improve the Acoustic Modeling of Low Resource Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning and Distillation Techniques to Improve the Acoustic Modeling of Low Resource Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170762.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-8|PAPER Tue-P-5-2-8 — Dysprosody Differentiate Between Parkinson’s Disease, Progressive Supranuclear Palsy, and Multiple System Atrophy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dysprosody Differentiate Between Parkinson’s Disease, Progressive Supranuclear Palsy, and Multiple System Atrophy</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171503.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-5|PAPER Thu-P-9-3-5 — Stability of Prosodic Characteristics Across Age and Gender Groups]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stability of Prosodic Characteristics Across Age and Gender Groups</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-5|PAPER Mon-O-2-2-5 — Earlier Identification of Children with Autism Spectrum Disorder: An Automatic Vocalisation-Based Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Earlier Identification of Children with Autism Spectrum Disorder: An Automatic Vocalisation-Based Approach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170708.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-5|PAPER Mon-P-2-4-5 — Prosody Control of Utterance Sequence for Information Delivering]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosody Control of Utterance Sequence for Information Delivering</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170107.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-1|PAPER Thu-SS-9-11-1 — Acoustic Analysis of Detailed Three-Dimensional Shape of the Human Nasal Cavity and Paranasal Sinuses]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Analysis of Detailed Three-Dimensional Shape of the Human Nasal Cavity and Paranasal Sinuses</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170961.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-10-3|PAPER Tue-O-4-10-3 — Parallel-Data-Free Many-to-Many Voice Conversion Based on DNN Integrated with Eigenspace Using a Non-Parallel Speech Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parallel-Data-Free Many-to-Many Voice Conversion Based on DNN Integrated with Eigenspace Using a Non-Parallel Speech Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170664.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-3|PAPER Wed-P-8-4-3 — Phoneme-Discriminative Features for Dysarthric Speech Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phoneme-Discriminative Features for Dysarthric Speech Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170984.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-8|PAPER Wed-P-8-4-8 — Emotional Voice Conversion with Adaptive Scales F0 Based on Wavelet Transform Using Limited Amount of Emotional Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotional Voice Conversion with Adaptive Scales F0 Based on Wavelet Transform Using Limited Amount of Emotional Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171510.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-1|PAPER Mon-O-2-10-1 — Generation of Large-Scale Simulated Utterances in Virtual Rooms to Train Deep-Neural Networks for Far-Field Speech Recognition in Google Home]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generation of Large-Scale Simulated Utterances in Virtual Rooms to Train Deep-Neural Networks for Far-Field Speech Recognition in Google Home</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170085.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-8-1|PAPER Thu-O-9-8-1 — Combining Residual Networks with LSTMs for Lipreading]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Combining Residual Networks with LSTMs for Lipreading</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171066.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-7|PAPER Thu-SS-9-10-7 — Infected Phonemes: How a Cold Impairs Speech on a Phonetic Level]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Infected Phonemes: How a Cold Impairs Speech on a Phonetic Level</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171431.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-6|PAPER Tue-SS-3-11-6 — Studying the Link Between Inter-Speaker Coordination and Speech Imitation Through Human-Machine Interactions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Studying the Link Between Inter-Speaker Coordination and Speech Imitation Through Human-Machine Interactions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170426.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-10-5|PAPER Wed-O-8-10-5 — Multi-Scale Context Adaptation for Improving Child Automatic Speech Recognition in Child-Adult Spoken Interactions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Scale Context Adaptation for Improving Child Automatic Speech Recognition in Child-Adult Spoken Interactions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170419.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-2|PAPER Tue-O-3-8-2 — Phrase Break Prediction for Long-Form Reading TTS: Exploiting Text Structure Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phrase Break Prediction for Long-Form Reading TTS: Exploiting Text Structure Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170048.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-13|PAPER Mon-P-2-2-13 — Wireless Neck-Surface Accelerometer and Microphone on Flex Circuit with Application to Noise-Robust Monitoring of Lombard Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Wireless Neck-Surface Accelerometer and Microphone on Flex Circuit with Application to Noise-Robust Monitoring of Lombard Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171585.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-7|PAPER Wed-P-8-1-7 — Canonical Correlation Analysis and Prediction of Perceived Rhythmic Prominences and Pitch Tones in Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Canonical Correlation Analysis and Prediction of Perceived Rhythmic Prominences and Pitch Tones in Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170456.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-3|PAPER Mon-SS-2-8-3 — A Study on Replay Attack and Anti-Spoofing for Automatic Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Study on Replay Attack and Anti-Spoofing for Automatic Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171160.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-7|PAPER Mon-P-1-2-7 — Hidden Markov Model Variational Autoencoder for Acoustic Unit Discovery]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hidden Markov Model Variational Autoencoder for Acoustic Unit Discovery</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171262.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-7|PAPER Wed-SS-7-1-7 — Leveraging Text Data for Word Segmentation for Underresourced Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging Text Data for Word Segmentation for Underresourced Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170210.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-2|PAPER Mon-P-1-1-2 — Robust Source-Filter Separation of Speech Signal in the Phase Domain]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Source-Filter Separation of Speech Signal in the Phase Domain</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170211.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-5|PAPER Wed-O-7-2-5 — Channel Compensation in the Generalised Vector Taylor Series Approach to Robust ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Channel Compensation in the Generalised Vector Taylor Series Approach to Robust ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171598.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-10-2|PAPER Wed-O-8-10-2 — Semi-Supervised Adaptation of RNNLMs by Fine-Tuning with Domain-Specific Auxiliary Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised Adaptation of RNNLMs by Fine-Tuning with Domain-Specific Auxiliary Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170743.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-12|PAPER Tue-P-5-1-12 — Qualitative Differences in L3 Learners’ Neurophysiological Response to L1 versus L2 Transfer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Qualitative Differences in L3 Learners’ Neurophysiological Response to L1 versus L2 Transfer</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171052.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-13|PAPER Tue-P-5-1-13 — Articulation Rate in Swedish Child-Directed Speech Increases as a Function of the Age of the Child Even When Surprisal is Controlled for]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulation Rate in Swedish Child-Directed Speech Increases as a Function of the Age of the Child Even When Surprisal is Controlled for</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170416.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-7|PAPER Tue-P-5-2-7 — Apkinson — A Mobile Monitoring Solution for Parkinson’s Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Apkinson — A Mobile Monitoring Solution for Parkinson’s Disease</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-1|PAPER Wed-O-8-8-1 — Acoustic Assessment of Disordered Voice with Continuous Speech Based on Utterance-Level ASR Posterior Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Assessment of Disordered Voice with Continuous Speech Based on Utterance-Level ASR Posterior Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170419.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-2|PAPER Tue-O-3-8-2 — Phrase Break Prediction for Long-Form Reading TTS: Exploiting Text Structure Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phrase Break Prediction for Long-Form Reading TTS: Exploiting Text Structure Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171437.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-8|PAPER Mon-SS-1-11-8 — Synthesising isiZulu-English Code-Switch Bigrams Using Word Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Synthesising isiZulu-English Code-Switch Bigrams Using Word Embeddings</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170880.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-2|PAPER Wed-SS-7-1-2 — Very Low Resource Radio Browsing for Agile Developmental and Humanitarian Monitoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Very Low Resource Radio Browsing for Agile Developmental and Humanitarian Monitoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171306.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-13|PAPER Wed-P-7-2-13 — A Quantitative Measure of the Impact of Coarticulation on Phone Discriminability]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Quantitative Measure of the Impact of Coarticulation on Phone Discriminability</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171683.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-2|PAPER Mon-P-1-4-2 — Comparison of Decoding Strategies for CTC Acoustic Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Decoding Strategies for CTC Acoustic Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171144.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-8|PAPER Mon-P-2-4-8 — Discrete Duration Model for Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discrete Duration Model for Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170997.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-11|PAPER Wed-P-6-2-11 — The Opensesame NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Opensesame NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170875.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-4|PAPER Thu-SS-9-11-4 — Waveform Patterns in Pitch Glides Near a Vocal Tract Resonance]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Waveform Patterns in Pitch Glides Near a Vocal Tract Resonance</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-12|PAPER Thu-P-9-4-12 — Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171084.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-4-3|PAPER Mon-O-1-4-3 — A Delay-Flexible Stereo Acoustic Echo Cancellation for DFT-Based In-Car Communication (ICC) Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Delay-Flexible Stereo Acoustic Echo Cancellation for DFT-Based In-Car Communication (ICC) Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171513.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-1|PAPER Mon-O-1-1-1 — Improved Single System Conversational Telephone Speech Recognition with VGG Bottleneck Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Single System Conversational Telephone Speech Recognition with VGG Bottleneck Features</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171058.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-4|PAPER Mon-O-1-1-4 — Improving Deliverable Speech-to-Text Systems with Multilingual Knowledge Transfer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Deliverable Speech-to-Text Systems with Multilingual Knowledge Transfer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171150.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-6-4|PAPER Mon-O-2-6-4 — L1 Perceptions of L2 Prosody: The Interplay Between Intonation, Rhythm, and Speech Rate and Their Contribution to Accentedness and Comprehensibility]]</div>|^<div class="cpauthorindexpersoncardpapertitle">L1 Perceptions of L2 Prosody: The Interplay Between Intonation, Rhythm, and Speech Rate and Their Contribution to Accentedness and Comprehensibility</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171697.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-8-11-9|PAPER Wed-SS-8-11-9 — Large-Scale Speaker Ranking from Crowdsourced Pairwise Listener Ratings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Large-Scale Speaker Ranking from Crowdsourced Pairwise Listener Ratings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171243.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-7|PAPER Tue-P-5-4-7 — MixMax Approximation as a Super-Gaussian Log-Spectral Amplitude Estimator for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MixMax Approximation as a Super-Gaussian Log-Spectral Amplitude Estimator for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170458.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-6|PAPER Tue-O-5-2-6 — The 2016 NIST Speaker Recognition Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The 2016 NIST Speaker Recognition Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-3|PAPER Wed-O-6-1-3 — Comparison of Basic Beatboxing Articulations Between Expert and Novice Artists Using Real-Time Magnetic Resonance Imaging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Basic Beatboxing Articulations Between Expert and Novice Artists Using Real-Time Magnetic Resonance Imaging</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171631.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-5|PAPER Wed-O-6-1-5 — Sounds of the Human Vocal Tract]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sounds of the Human Vocal Tract</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-1|PAPER Tue-P-5-2-1 — Float Like a Butterfly Sting Like a Bee: Changes in Speech Preceded Parkinsonism Diagnosis for Muhammad Ali]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Float Like a Butterfly Sting Like a Bee: Changes in Speech Preceded Parkinsonism Diagnosis for Muhammad Ali</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171794.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-9|PAPER Thu-SS-9-10-9 — An Integrated Solution for Snoring Sound Classification Using Bhattacharyya Distance Based GMM Supervectors with SVM, Feature Selection with Random Forest and Spectrogram with CNN]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Integrated Solution for Snoring Sound Classification Using Bhattacharyya Distance Based GMM Supervectors with SVM, Feature Selection with Random Forest and Spectrogram with CNN</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172057.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-5|PAPER Wed-S&T-6-B-5 — Modifying Amazon’s Alexa ASR Grammar and Lexicon — A Case Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modifying Amazon’s Alexa ASR Grammar and Lexicon — A Case Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170512.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-3|PAPER Tue-O-4-8-3 — An Investigation of Emotion Prediction Uncertainty Using Gaussian Mixture Regression]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Investigation of Emotion Prediction Uncertainty Using Gaussian Mixture Regression</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171029.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-5|PAPER Wed-P-8-3-5 — Quaternion Denoising Encoder-Decoder for Theme Identification of Telephone Conversations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Quaternion Denoising Encoder-Decoder for Theme Identification of Telephone Conversations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-A-5|PAPER Tue-S&T-3-A-5 — Speech Recognition and Understanding on Hardware-Accelerated DSP]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Recognition and Understanding on Hardware-Accelerated DSP</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171254.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-4-5|PAPER Wed-O-6-4-5 — Low-Complexity Pitch Estimation Based on Phase Differences Between Low-Resolution Spectra]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low-Complexity Pitch Estimation Based on Phase Differences Between Low-Resolution Spectra</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171675.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-6|PAPER Wed-O-6-1-6 — A Simulation Study on the Effect of Glottal Boundary Conditions on Vocal Tract Formants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Simulation Study on the Effect of Glottal Boundary Conditions on Vocal Tract Formants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170389.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-4-6|PAPER Mon-O-2-4-6 — Estimation of the Probability Distribution of Spectral Fine Structure in the Speech Source]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Estimation of the Probability Distribution of Spectral Fine Structure in the Speech Source</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171284.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-3|PAPER Tue-P-4-2-3 — End-to-End Training of Acoustic Models for Large Vocabulary Continuous Speech Recognition with TensorFlow]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Training of Acoustic Models for Large Vocabulary Continuous Speech Recognition with TensorFlow</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-5|PAPER Mon-O-1-1-5 — English Conversational Telephone Speech Recognition by Humans and Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">English Conversational Telephone Speech Recognition by Humans and Machines</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170416.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-7|PAPER Tue-P-5-2-7 — Apkinson — A Mobile Monitoring Solution for Parkinson’s Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Apkinson — A Mobile Monitoring Solution for Parkinson’s Disease</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-6|PAPER Wed-P-7-2-6 — Effect of Formant and F0 Discontinuity on Perceived Vowel Duration: Impacts for Concatenative Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effect of Formant and F0 Discontinuity on Perceived Vowel Duration: Impacts for Concatenative Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171503.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-5|PAPER Thu-P-9-3-5 — Stability of Prosodic Characteristics Across Age and Gender Groups]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stability of Prosodic Characteristics Across Age and Gender Groups</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172002.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-S&T-9-A-1|PAPER Thu-S&T-9-A-1 — Soundtracing for Realtime Speech Adjustment to Environmental Conditions in 3D Simulations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Soundtracing for Realtime Speech Adjustment to Environmental Conditions in 3D Simulations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-1|PAPER Mon-SS-1-8-1 — The ASVspoof 2017 Challenge: Assessing the Limits of Replay Spoofing Attack Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The ASVspoof 2017 Challenge: Assessing the Limits of Replay Spoofing Attack Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170734.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-8|PAPER Tue-P-3-1-8 — Time-Varying Autoregressions for Speaker Verification in Reverberant Conditions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time-Varying Autoregressions for Speaker Verification in Reverberant Conditions</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171758.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-4|PAPER Wed-O-8-1-4 — Improving Speaker Verification Performance in Presence of Spoofing Attacks Using Out-of-Domain Spoofed Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Speaker Verification Performance in Presence of Spoofing Attacks Using Out-of-Domain Spoofed Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170733.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-2|PAPER Mon-O-2-10-2 — Neural Network-Based Spectrum Estimation for Online WPE Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Network-Based Spectrum Estimation for Online WPE Dereverberation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170721.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-4-2|PAPER Tue-O-4-4-2 — Deep Clustering-Based Beamforming for Separation with Unknown Number of Sources]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Clustering-Based Beamforming for Separation with Unknown Number of Sources</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170873.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-3|PAPER Tue-P-4-1-3 — Unfolded Deep Recurrent Convolutional Neural Network with Jump Ahead Connections for Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unfolded Deep Recurrent Convolutional Neural Network with Jump Ahead Connections for Acoustic Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170554.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-4|PAPER Tue-P-4-1-4 — Forward-Backward Convolutional LSTM for Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Forward-Backward Convolutional LSTM for Acoustic Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170543.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-3|PAPER Tue-P-5-4-3 — Improved Example-Based Speech Enhancement by Using Deep Neural Network Acoustic Model for Noise Robust Example Search]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Example-Based Speech Enhancement by Using Deep Neural Network Acoustic Model for Noise Robust Example Search</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170667.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-6-2|PAPER Wed-O-8-6-2 — Speaker-Aware Neural Network Based Beamformer for Speaker Extraction in Speech Mixtures]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker-Aware Neural Network Based Beamformer for Speaker Extraction in Speech Mixtures</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170170.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-3|PAPER Wed-P-6-4-3 — Predicting Speech Intelligibility Using a Gammachirp Envelope Distortion Index Based on the Signal-to-Distortion Ratio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Speech Intelligibility Using a Gammachirp Envelope Distortion Index Based on the Signal-to-Distortion Ratio</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170793.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-5|PAPER Thu-P-9-1-5 — Uncertainty Decoding with Adaptive Sampling for Noise Robust DNN-Based Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Uncertainty Decoding with Adaptive Sampling for Noise Robust DNN-Based Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170061.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-4|PAPER Wed-O-7-2-4 — Coupled Initialization of Multi-Channel Non-Negative Matrix Factorization Based on Spatial and Spectral Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coupled Initialization of Multi-Channel Non-Negative Matrix Factorization Based on Spatial and Spectral Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170314.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-1-1|PAPER Tue-O-4-1-1 — Speaker-Dependent WaveNet Vocoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker-Dependent WaveNet Vocoder</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170986.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-1-5|PAPER Tue-O-4-1-5 — Statistical Voice Conversion with WaveNet-Based Waveform Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Statistical Voice Conversion with WaveNet-Based Waveform Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170362.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-2|PAPER Thu-P-9-4-2 — Sampling-Based Speech Parameter Generation Using Moment-Matching Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sampling-Based Speech Parameter Generation Using Moment-Matching Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170436.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-4|PAPER Mon-P-1-1-4 — A Modulation Property of Time-Frequency Derivatives of Filtered Phase and its Application to Aperiodicity and f,,o,, Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Modulation Property of Time-Frequency Derivatives of Filtered Phase and its Application to Aperiodicity and f,,o,, Estimation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170688.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-3|PAPER Tue-O-3-8-3 — Physically Constrained Statistical F,,0,, Prediction for Electrolaryngeal Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Physically Constrained Statistical F,,0,, Prediction for Electrolaryngeal Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170314.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-1-1|PAPER Tue-O-4-1-1 — Speaker-Dependent WaveNet Vocoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker-Dependent WaveNet Vocoder</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170986.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-1-5|PAPER Tue-O-4-1-5 — Statistical Voice Conversion with WaveNet-Based Waveform Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Statistical Voice Conversion with WaveNet-Based Waveform Generation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170015.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-1|PAPER Tue-O-5-4-1 — A New Cosine Series Antialiasing Function and its Application to Aliasing-Free Glottal Source Models for Speech and Singing Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A New Cosine Series Antialiasing Function and its Application to Aliasing-Free Glottal Source Models for Speech and Singing Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171492.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-10|PAPER Tue-P-5-4-10 — Speech Enhancement Using Non-Negative Spectrogram Models with Mel-Generalized Cepstral Regularization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Enhancement Using Non-Negative Spectrogram Models with Mel-Generalized Cepstral Regularization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170893.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-5|PAPER Wed-P-6-3-5 — Incorporating Acoustic Features for Spontaneous Speech Driven Content Retrieval]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incorporating Acoustic Features for Spontaneous Speech Driven Content Retrieval</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170466.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-3|PAPER Wed-P-8-2-3 — Computational Analysis of Acoustic Descriptors in Psychotic Patients]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Computational Analysis of Acoustic Descriptors in Psychotic Patients</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170133.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-2|PAPER Wed-P-8-4-2 — CAB: An Energy-Based Speaker Clustering Model for Rapid Adaptation in Non-Parallel Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CAB: An Energy-Based Speaker Clustering Model for Rapid Adaptation in Non-Parallel Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170584.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-14|PAPER Thu-P-9-4-14 — Complex-Valued Restricted Boltzmann Machine for Direct Learning of Frequency Spectra]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Complex-Valued Restricted Boltzmann Machine for Direct Learning of Frequency Spectra</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170282.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-9|PAPER Mon-P-2-1-9 — The Effect of Spectral Tilt on Size Discrimination of Voiced Speech Sounds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Effect of Spectral Tilt on Size Discrimination of Voiced Speech Sounds</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170196.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-2-2|PAPER Tue-O-4-2-2 — An Auditory Model of Speaker Size Perception for Voiced Speech Sounds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Auditory Model of Speaker Size Perception for Voiced Speech Sounds</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170170.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-3|PAPER Wed-P-6-4-3 — Predicting Speech Intelligibility Using a Gammachirp Envelope Distortion Index Based on the Signal-to-Distortion Ratio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Speech Intelligibility Using a Gammachirp Envelope Distortion Index Based on the Signal-to-Distortion Ratio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170282.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-9|PAPER Mon-P-2-1-9 — The Effect of Spectral Tilt on Size Discrimination of Voiced Speech Sounds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Effect of Spectral Tilt on Size Discrimination of Voiced Speech Sounds</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170196.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-2-2|PAPER Tue-O-4-2-2 — An Auditory Model of Speaker Size Perception for Voiced Speech Sounds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Auditory Model of Speaker Size Perception for Voiced Speech Sounds</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170015.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-4-1|PAPER Tue-O-5-4-1 — A New Cosine Series Antialiasing Function and its Application to Aliasing-Free Glottal Source Models for Speech and Singing Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A New Cosine Series Antialiasing Function and its Application to Aliasing-Free Glottal Source Models for Speech and Singing Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170170.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-3|PAPER Wed-P-6-4-3 — Predicting Speech Intelligibility Using a Gammachirp Envelope Distortion Index Based on the Signal-to-Distortion Ratio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Speech Intelligibility Using a Gammachirp Envelope Distortion Index Based on the Signal-to-Distortion Ratio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171289.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-11-4|PAPER Wed-SS-7-11-4 — Computational Simulations of Temporal Vocalization Behavior in Adult-Child Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Computational Simulations of Temporal Vocalization Behavior in Adult-Child Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-7|PAPER Wed-P-7-4-7 — An Avatar-Based System for Identifying Individuals Likely to Develop Dementia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Avatar-Based System for Identifying Individuals Likely to Develop Dementia</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170009.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-2|PAPER Tue-P-5-1-2 — A Comparison of Danish Listeners’ Processing Cost in Judging the Truth Value of Norwegian, Swedish, and English Sentences]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Danish Listeners’ Processing Cost in Judging the Truth Value of Norwegian, Swedish, and English Sentences</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-4|PAPER Tue-P-5-4-4 — Subjective Intelligibility of Deep Neural Network-Based Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Subjective Intelligibility of Deep Neural Network-Based Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170858.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-4-6|PAPER Mon-O-1-4-6 — Simulations of High-Frequency Vocoder on Mandarin Speech Recognition for Acoustic Hearing Preserved Cochlear Implant]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Simulations of High-Frequency Vocoder on Mandarin Speech Recognition for Acoustic Hearing Preserved Cochlear Implant</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171032.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-13|PAPER Tue-P-4-3-13 — Domain-Independent User Satisfaction Reward Estimation for Dialogue Policy Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Independent User Satisfaction Reward Estimation for Dialogue Policy Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171225.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-15|PAPER Tue-P-5-4-15 — On the Quality and Intelligibility of Noisy Speech Processed for Near-End Listening Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Quality and Intelligibility of Noisy Speech Processed for Near-End Listening Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171391.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-6|PAPER Wed-O-7-10-6 — QMDIS: QCRI-MIT Advanced Dialect Identification System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">QMDIS: QCRI-MIT Advanced Dialect Identification System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170997.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-11|PAPER Wed-P-6-2-11 — The Opensesame NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Opensesame NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-12|PAPER Thu-P-9-4-12 — Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170982.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-6|PAPER Mon-P-1-2-6 — Audio Classification Using Class-Specific Learned Descriptors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Classification Using Class-Specific Learned Descriptors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-1|PAPER Wed-SS-6-2-1 — Team ELISA System for DARPA LORELEI Speech Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team ELISA System for DARPA LORELEI Speech Evaluation 2016</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-2|PAPER Wed-P-6-1-2 — Comparison of Non-Parametric Bayesian Mixture Models for Syllable Clustering and Zero-Resource Speech Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Non-Parametric Bayesian Mixture Models for Syllable Clustering and Zero-Resource Speech Processing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172032.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-S&T-3-B-2|PAPER Tue-S&T-3-B-2 — Combining Gaussian Mixture Models and Segmental Feature Models for Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Combining Gaussian Mixture Models and Segmental Feature Models for Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171232.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-9|PAPER Mon-P-1-1-9 — Analytic Filter Bank for Speech Analysis, Feature Extraction and Perceptual Studies]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analytic Filter Bank for Speech Analysis, Feature Extraction and Perceptual Studies</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171579.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-1|PAPER Thu-P-9-3-1 — The Effects of Real and Placebo Alcohol on Deaffrication]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Effects of Real and Placebo Alcohol on Deaffrication</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170430.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-2-1|PAPER Thu-O-10-2-1 — CNN-Based Joint Mapping of Short and Long Utterance i-Vectors for Speaker Verification Using Short Utterances]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CNN-Based Joint Mapping of Short and Long Utterance i-Vectors for Speaker Verification Using Short Utterances</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170360.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-1|PAPER Mon-SS-2-8-1 — Audio Replay Attack Detection with Deep Learning Frameworks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Replay Attack Detection with Deep Learning Frameworks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170460.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-3|PAPER Mon-O-1-1-3 — Embedding-Based Speaker Adaptive Training of Deep Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Embedding-Based Speaker Adaptive Training of Deep Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170188.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-4-5|PAPER Tue-O-4-4-5 — Detecting Overlapped Speech on Short Timeframes Using Deep Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting Overlapped Speech on Short Timeframes Using Deep Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171035.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-10-3|PAPER Tue-O-5-10-3 — Opinion Dynamics Modeling for Movie Review Transcripts Classification with Hidden Conditional Random Fields]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Opinion Dynamics Modeling for Movie Review Transcripts Classification with Hidden Conditional Random Fields</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171212.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-3|PAPER Thu-O-9-4-3 — The STC Keyword Search System for OpenKWS 2016 Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The STC Keyword Search System for OpenKWS 2016 Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170788.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-7|PAPER Mon-P-2-3-7 — Multi-Task Learning Using Mismatched Transcription for Under-Resourced Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Task Learning Using Mismatched Transcription for Under-Resourced Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170736.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-10-6|PAPER Tue-O-3-10-6 — Towards Speech Emotion Recognition “in the Wild” Using Aggregated Corpora and Deep Multi-Task Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Speech Emotion Recognition “in the Wild” Using Aggregated Corpora and Deep Multi-Task Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171247.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-9|PAPER Mon-P-1-4-9 — Joint Learning of Correlated Sequence Labeling Tasks Using Bidirectional Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Learning of Correlated Sequence Labeling Tasks Using Bidirectional Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170480.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-4|PAPER Thu-O-9-4-4 — Compressed Time Delay Neural Network for Small-Footprint Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Compressed Time Delay Neural Network for Small-Footprint Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171075.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-2|PAPER Wed-O-7-4-2 — ClockWork-RNN Based Architectures for Slot Filling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ClockWork-RNN Based Architectures for Slot Filling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171075.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-2|PAPER Wed-O-7-4-2 — ClockWork-RNN Based Architectures for Slot Filling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ClockWork-RNN Based Architectures for Slot Filling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171075.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-2|PAPER Wed-O-7-4-2 — ClockWork-RNN Based Architectures for Slot Filling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ClockWork-RNN Based Architectures for Slot Filling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171480.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-4|PAPER Wed-O-7-4-4 — Label-Dependency Coding in Simple Recurrent Networks for Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Label-Dependency Coding in Simple Recurrent Networks for Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171274.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-10|PAPER Wed-P-6-1-10 — A Mostly Data-Driven Approach to Inverse Text Normalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Mostly Data-Driven Approach to Inverse Text Normalization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171732.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-5-11-8|PAPER Tue-SS-5-11-8 — A Semi-Supervised Learning Approach for Acoustic-Prosodic Personality Perception in Under-Resourced Domains]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Semi-Supervised Learning Approach for Acoustic-Prosodic Personality Perception in Under-Resourced Domains</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171555.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-4-4|PAPER Thu-O-10-4-4 — The Extended SPaRKy Restaurant Corpus: Designing a Corpus with Variable Information Density]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Extended SPaRKy Restaurant Corpus: Designing a Corpus with Variable Information Density</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171080.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-2|PAPER Thu-O-10-8-2 — Voice Disguise vs. Impersonation: Acoustic and Perceptual Measurements of Vocal Flexibility in Non Experts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Disguise vs. Impersonation: Acoustic and Perceptual Measurements of Vocal Flexibility in Non Experts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-6|PAPER Wed-S&T-6-A-6 — SIAK — A Game for Foreign Language Pronunciation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SIAK — A Game for Foreign Language Pronunciation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170419.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-2|PAPER Tue-O-3-8-2 — Phrase Break Prediction for Long-Form Reading TTS: Exploiting Text Structure Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phrase Break Prediction for Long-Form Reading TTS: Exploiting Text Structure Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171392.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-6|PAPER Thu-P-9-3-6 — Electrophysiological Correlates of Familiar Voice Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Electrophysiological Correlates of Familiar Voice Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171663.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-9|PAPER Mon-SS-1-11-9 — Crowdsourcing Universal Part-of-Speech Tags for Code-Switching]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Crowdsourcing Universal Part-of-Speech Tags for Code-Switching</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170189.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-4|PAPER Tue-P-5-3-4 — A Contrast Function and Algorithm for Blind Separation of Audio Signals]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Contrast Function and Algorithm for Blind Separation of Audio Signals</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170844.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-11-3|PAPER Thu-SS-9-11-3 — A Fast Robust 1D Flow Model for a Self-Oscillating Coupled 2D FEM Vocal Fold Simulation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Fast Robust 1D Flow Model for a Self-Oscillating Coupled 2D FEM Vocal Fold Simulation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170512.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-3|PAPER Tue-O-4-8-3 — An Investigation of Emotion Prediction Uncertainty Using Gaussian Mixture Regression]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Investigation of Emotion Prediction Uncertainty Using Gaussian Mixture Regression</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170266.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-6|PAPER Tue-P-3-1-6 — Incorporating Local Acoustic Variability Information into Short Duration Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incorporating Local Acoustic Variability Information into Short Duration Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170596.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-4|PAPER Wed-O-7-10-4 — Investigating Scalability in Hierarchical Language Identification System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Scalability in Hierarchical Language Identification System</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170836.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-3|PAPER Wed-O-8-1-3 — Independent Modelling of High and Low Energy Speech Frames for Spoofing Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Independent Modelling of High and Low Energy Speech Frames for Spoofing Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170286.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-2|PAPER Wed-P-6-2-2 — Bidirectional Modelling for Short Duration Language Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bidirectional Modelling for Short Duration Language Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170129.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-1|PAPER Tue-P-4-1-1 — An Exploration of Dropout with LSTMs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Exploration of Dropout with LSTMs</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171323.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-1|PAPER Tue-P-4-2-1 — Backstitch: Counteracting Finite-Sample Bias via Negative Steps]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Backstitch: Counteracting Finite-Sample Bias via Negative Steps</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170231.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-1-3|PAPER Thu-O-9-1-3 — Annealed f-Smoothing as a Mechanism to Speed up Neural Network Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Annealed f-Smoothing as a Mechanism to Speed up Neural Network Training</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170601.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-2|PAPER Thu-O-9-4-2 — The Kaldi OpenKWS System: Improving Low Resource Keyword Search]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Kaldi OpenKWS System: Improving Low Resource Keyword Search</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171247.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-9|PAPER Mon-P-1-4-9 — Joint Learning of Correlated Sequence Labeling Tasks Using Bidirectional Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Learning of Correlated Sequence Labeling Tasks Using Bidirectional Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170589.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-4|PAPER Tue-P-5-2-4 — Zero Frequency Filter Based Analysis of Voice Disorders]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero Frequency Filter Based Analysis of Voice Disorders</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171198.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-11-3|PAPER Mon-SS-1-11-3 — Jee haan, I’d like both, por favor: Elicitation of a Code-Switched Corpus of Hindi–English and Spanish–English Human–Machine Dialog]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Jee haan, I’d like both, por favor: Elicitation of a Code-Switched Corpus of Hindi–English and Spanish–English Human–Machine Dialog</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170608.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-5|PAPER Mon-P-2-2-5 — Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171213.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-11|PAPER Tue-P-4-3-11 — Human and Automated Scoring of Fluency, Pronunciation and Intonation During Human–Machine Spoken Dialog Interactions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human and Automated Scoring of Fluency, Pronunciation and Intonation During Human–Machine Spoken Dialog Interactions</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171205.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-6-5|PAPER Wed-O-7-6-5 — Rushing to Judgement: How do Laypeople Rate Caller Engagement in Thin-Slice Videos of Human–Machine Dialog?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rushing to Judgement: How do Laypeople Rate Caller Engagement in Thin-Slice Videos of Human–Machine Dialog?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170734.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-8|PAPER Tue-P-3-1-8 — Time-Varying Autoregressions for Speaker Verification in Reverberant Conditions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time-Varying Autoregressions for Speaker Verification in Reverberant Conditions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170129.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-1|PAPER Tue-P-4-1-1 — An Exploration of Dropout with LSTMs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Exploration of Dropout with LSTMs</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170588.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-2|PAPER Wed-O-7-8-2 — Acoustic Data-Driven Lexicon Learning Based on a Greedy Pronunciation Selection Framework]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Data-Driven Lexicon Learning Based on a Greedy Pronunciation Selection Framework</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170601.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-2|PAPER Thu-O-9-4-2 — The Kaldi OpenKWS System: Improving Low Resource Keyword Search]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Kaldi OpenKWS System: Improving Low Resource Keyword Search</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171760.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-16|PAPER Tue-P-5-3-16 — Speech Detection and Enhancement Using Single Microphone for Distant Speech Applications in Reverberant Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Detection and Enhancement Using Single Microphone for Distant Speech Applications in Reverberant Environments</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171368.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-1|PAPER Thu-O-10-8-1 — What is the Relevant Population? Considerations for the Computation of Likelihood Ratios in Forensic Voice Comparison]]</div>|^<div class="cpauthorindexpersoncardpapertitle">What is the Relevant Population? Considerations for the Computation of Likelihood Ratios in Forensic Voice Comparison</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171508.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-3|PAPER Thu-P-9-3-3 — Mapping Across Feature Spaces in Forensic Voice Comparison: The Contribution of Auditory-Based Voice Quality to (Semi-)Automatic System Testing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mapping Across Feature Spaces in Forensic Voice Comparison: The Contribution of Auditory-Based Voice Quality to (Semi-)Automatic System Testing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170139.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-10|PAPER Mon-P-2-2-10 — Event-Related Potentials Associated with Somatosensory Effect in Audio-Visual Speech Perception]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Event-Related Potentials Associated with Somatosensory Effect in Audio-Visual Speech Perception</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171311.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-8|PAPER Wed-P-6-2-8 — Acoustic Pairing of Original and Dubbed Voices in the Context of Video Game Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Pairing of Original and Dubbed Voices in the Context of Video Game Localization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170428.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-3|PAPER Thu-P-9-4-3 — Unit Selection with Hierarchical Cascaded Long Short Term Memory Bidirectional Recurrent Neural Nets]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unit Selection with Hierarchical Cascaded Long Short Term Memory Bidirectional Recurrent Neural Nets</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171107.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-1-6|PAPER Tue-O-4-1-6 — Google’s Next-Generation Real-Time Unit-Selection Synthesizer Using Sequence-to-Sequence LSTM-Based Autoencoders]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Google’s Next-Generation Real-Time Unit-Selection Synthesizer Using Sequence-to-Sequence LSTM-Based Autoencoders</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171350.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-4|PAPER Tue-O-5-8-4 — Phonological Feature Based Mispronunciation Detection and Diagnosis Using Multi-Task DNNs and Active Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonological Feature Based Mispronunciation Detection and Diagnosis Using Multi-Task DNNs and Active Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-1|PAPER Tue-P-5-2-1 — Float Like a Butterfly Sting Like a Bee: Changes in Speech Preceded Parkinsonism Diagnosis for Muhammad Ali]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Float Like a Butterfly Sting Like a Bee: Changes in Speech Preceded Parkinsonism Diagnosis for Muhammad Ali</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171222.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-9|PAPER Tue-P-5-2-9 — Interpretable Objective Assessment of Dysarthric Speech Based on Deep Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Interpretable Objective Assessment of Dysarthric Speech Based on Deep Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170098.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-1|PAPER Mon-O-1-2-1 — Multimodal Markers of Persuasive Speech: Designing a Virtual Debate Coach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multimodal Markers of Persuasive Speech: Designing a Virtual Debate Coach</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170738.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-4-11-6|PAPER Tue-SS-4-11-6 — Incremental Dialogue Act Recognition: Token- vs Chunk-Based Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incremental Dialogue Act Recognition: Token- vs Chunk-Based Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170302.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-2|PAPER Wed-O-6-10-2 — Improving Children’s Speech Recognition Through Explicit Pitch Scaling Based on Iterative Spectrogram Inversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Children’s Speech Recognition Through Explicit Pitch Scaling Based on Iterative Spectrogram Inversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171363.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-6|PAPER Tue-P-5-2-6 — Automatic Prediction of Speech Evaluation Metrics for Dysarthric Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Prediction of Speech Evaluation Metrics for Dysarthric Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170152.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-9|PAPER Wed-P-6-2-9 — Homogeneity Measure Impact on Target and Non-Target Trials in Forensic Voice Comparison]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Homogeneity Measure Impact on Target and Non-Target Trials in Forensic Voice Comparison</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171421.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-2|PAPER Tue-O-4-8-2 — Adversarial Auto-Encoders for Speech Based Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Auto-Encoders for Speech Based Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171719.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-7|PAPER Mon-P-2-1-7 — Emotional Thin-Slicing: A Proposal for a Short- and Long-Term Division of Emotional Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotional Thin-Slicing: A Proposal for a Short- and Long-Term Division of Emotional Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171088.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-5|PAPER Mon-O-1-2-5 — Multimodal Prediction of Affective Dimensions via Fusing Multiple Regression Techniques]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multimodal Prediction of Affective Dimensions via Fusing Multiple Regression Techniques</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170189.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-4|PAPER Tue-P-5-3-4 — A Contrast Function and Algorithm for Blind Separation of Audio Signals]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Contrast Function and Algorithm for Blind Separation of Audio Signals</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171069.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-9|PAPER Mon-P-2-2-9 — Auditory-Visual Integration of Talker Gender in Cantonese Tone Perception]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auditory-Visual Integration of Talker Gender in Cantonese Tone Perception</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170464.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-5|PAPER Wed-P-6-1-5 — Improving Mispronunciation Detection for Non-Native Learners with Multisource Information and LSTM-Based Deep Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Mispronunciation Detection for Non-Native Learners with Multisource Information and LSTM-Based Deep Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170199.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-5|PAPER Tue-P-5-3-5 — Weighted Spatial Covariance Matrix Estimation for MUSIC Based TDOA Estimation of Speech Source]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weighted Spatial Covariance Matrix Estimation for MUSIC Based TDOA Estimation of Speech Source</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170074.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-1|PAPER Mon-P-1-2-1 — Multilingual i-Vector Based Statistical Modeling for Music Genre Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multilingual i-Vector Based Statistical Modeling for Music Genre Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171235.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-3|PAPER Wed-P-8-1-3 — Reanalyze Fundamental Frequency Peak Delay in Mandarin]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reanalyze Fundamental Frequency Peak Delay in Mandarin</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170349.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-10-2|PAPER Tue-O-4-10-2 — Learning Latent Representations for Speech Generation and Transformation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Latent Representations for Speech Generation and Transformation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171085.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-5|PAPER Mon-SS-2-8-5 — ResNet and Model Fusion for Automatic Spoofing Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ResNet and Model Fusion for Automatic Spoofing Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170906.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-4|PAPER Mon-SS-1-8-4 — Countermeasures for Automatic Speaker Verification Replay Spoofing Attack : On Data Augmentation, Feature Representation, Classification and Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Countermeasures for Automatic Speaker Verification Replay Spoofing Attack : On Data Augmentation, Feature Representation, Classification and Fusion</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171445.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-6|PAPER Thu-SS-9-10-6 — End-to-End Deep Learning Framework for Speech Paralinguistics Detection Based on Perception Aware Spectrum]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Deep Learning Framework for Speech Paralinguistics Detection Based on Perception Aware Spectrum</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171552.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-6-5|PAPER Mon-O-1-6-5 — Vowels in the Barunga Variety of North Australian Kriol]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Vowels in the Barunga Variety of North Australian Kriol</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-6|PAPER Tue-P-4-2-6 — Acoustic Feature Learning via Deep Variational Canonical Correlation Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Feature Learning via Deep Variational Canonical Correlation Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171790.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-10-6|PAPER Wed-O-8-10-6 — Using Knowledge Graph and Search Query Click Logs in Statistical Language Model for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Knowledge Graph and Search Query Click Logs in Statistical Language Model for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170329.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-12|PAPER Tue-P-5-2-12 — The Frequency Range of “The Ling Six Sounds” in Standard Chinese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Frequency Range of “The Ling Six Sounds” in Standard Chinese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171794.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-9|PAPER Thu-SS-9-10-9 — An Integrated Solution for Snoring Sound Classification Using Bhattacharyya Distance Based GMM Supervectors with SVM, Feature Selection with Random Forest and Spectrogram with CNN]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Integrated Solution for Snoring Sound Classification Using Bhattacharyya Distance Based GMM Supervectors with SVM, Feature Selection with Random Forest and Spectrogram with CNN</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170685.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-2|PAPER Thu-O-10-11-2 — Data Augmentation, Missing Feature Mask and Kernel Classification for Through-the-Wall Acoustic Surveillance]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Data Augmentation, Missing Feature Mask and Kernel Classification for Through-the-Wall Acoustic Surveillance</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170906.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-4|PAPER Mon-SS-1-8-4 — Countermeasures for Automatic Speaker Verification Replay Spoofing Attack : On Data Augmentation, Feature Representation, Classification and Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Countermeasures for Automatic Speaker Verification Replay Spoofing Attack : On Data Augmentation, Feature Representation, Classification and Fusion</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171445.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-6|PAPER Thu-SS-9-10-6 — End-to-End Deep Learning Framework for Speech Paralinguistics Detection Based on Perception Aware Spectrum]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Deep Learning Framework for Speech Paralinguistics Detection Based on Perception Aware Spectrum</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171567.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-11|PAPER Wed-P-6-1-11 — Mismatched Crowdsourcing from Multiple Annotator Languages for Recognizing Zero-Resourced Languages: A Nullspace Clustering Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mismatched Crowdsourcing from Multiple Annotator Languages for Recognizing Zero-Resourced Languages: A Nullspace Clustering Approach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170604.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-6-3|PAPER Tue-O-3-6-3 — Acoustic Characterization of Word-Final Glottal Stops in Mizo and Assam Sora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Characterization of Word-Final Glottal Stops in Mizo and Assam Sora</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170074.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-1|PAPER Mon-P-1-2-1 — Multilingual i-Vector Based Statistical Modeling for Music Genre Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multilingual i-Vector Based Statistical Modeling for Music Genre Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170547.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-5|PAPER Mon-P-1-4-5 — Empirical Evaluation of Parallel Training Algorithms on Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Empirical Evaluation of Parallel Training Algorithms on Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171036.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-7|PAPER Tue-P-3-1-7 — DNN i-Vector Speaker Verification with Short, Text-Constrained Test Utterances]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DNN i-Vector Speaker Verification with Short, Text-Constrained Test Utterances</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171698.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-13|PAPER Tue-P-5-2-13 — Production of Sustained Vowels and Categorical Perception of Tones in Mandarin Among Cochlear-Implanted Children]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Production of Sustained Vowels and Categorical Perception of Tones in Mandarin Among Cochlear-Implanted Children</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170850.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-12|PAPER Wed-P-7-4-12 — Rhythmic Characteristics of Parkinsonian Speech: A Study on Mandarin and Polish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rhythmic Characteristics of Parkinsonian Speech: A Study on Mandarin and Polish</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170395.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-7|PAPER Wed-P-7-3-7 — Matrix of Polynomials Model Based Polynomial Dictionary Learning Method for Acoustic Impulse Response Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Matrix of Polynomials Model Based Polynomial Dictionary Learning Method for Acoustic Impulse Response Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170486.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-10|PAPER Wed-P-7-3-10 — Attention and Localization Based on a Deep Convolutional Recurrent Model for Weakly Supervised Audio Tagging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention and Localization Based on a Deep Convolutional Recurrent Model for Weakly Supervised Audio Tagging</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-3|PAPER Wed-S&T-6-B-3 — Towards an Autarkic Embedded Cognitive User Interface]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards an Autarkic Embedded Cognitive User Interface</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172055.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-S&T-9-A-5|PAPER Thu-S&T-9-A-5 — Visible Vowels: A Tool for the Visualization of Vowel Variation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visible Vowels: A Tool for the Visualization of Vowel Variation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171747.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-5|PAPER Tue-P-4-2-5 — Parallel Neural Network Features for Improved Tandem Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parallel Neural Network Features for Improved Tandem Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170537.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-2|PAPER Tue-O-5-2-2 — The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171296.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-3|PAPER Tue-O-3-1-3 — Advances in Joint CTC-Attention Based End-to-End Speech Recognition with a Deep CNN Encoder and RNN-LM]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Advances in Joint CTC-Attention Based End-to-End Speech Recognition with a Deep CNN Encoder and RNN-LM</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171710.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-12|PAPER Wed-P-6-1-12 — Experiments in Character-Level Neural Network Models for Punctuation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Experiments in Character-Level Neural Network Models for Punctuation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171513.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-1|PAPER Mon-O-1-1-1 — Improved Single System Conversational Telephone Speech Recognition with VGG Bottleneck Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Single System Conversational Telephone Speech Recognition with VGG Bottleneck Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170500.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-5|PAPER Wed-P-6-4-5 — The Effect of Situation-Specific Non-Speech Acoustic Cues on the Intelligibility of Speech in Noise]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Effect of Situation-Specific Non-Speech Acoustic Cues on the Intelligibility of Speech in Noise</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170880.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-2|PAPER Wed-SS-7-1-2 — Very Low Resource Radio Browsing for Agile Developmental and Humanitarian Monitoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Very Low Resource Radio Browsing for Agile Developmental and Humanitarian Monitoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171236.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-12|PAPER Mon-P-2-2-12 — Cross-Modal Analysis Between Phonation Differences and Texture Images Based on Sentiment Correlations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Analysis Between Phonation Differences and Texture Images Based on Sentiment Correlations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170501.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-6-4|PAPER Wed-O-7-6-4 — Speech and Text Analysis for Multimodal Addressee Detection in Human-Human-Computer Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech and Text Analysis for Multimodal Addressee Detection in Human-Human-Computer Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170049.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-4-4|PAPER Tue-O-3-4-4 — Autoencoder Based Domain Adaptation for Speaker Recognition Under Insufficient Channel Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Autoencoder Based Domain Adaptation for Speaker Recognition Under Insufficient Channel Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170044.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-2|PAPER Wed-O-7-10-2 — End-to-End Language Identification Using High-Order Utterance Representation with Bilinear Pooling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Language Identification Using High-Order Utterance Representation with Bilinear Pooling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170513.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-1-3|PAPER Mon-O-2-1-3 — Investigating Bidirectional Recurrent Neural Network Language Models for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Bidirectional Recurrent Neural Network Language Models for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170513.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-1-3|PAPER Mon-O-2-1-3 — Investigating Bidirectional Recurrent Neural Network Language Models for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Bidirectional Recurrent Neural Network Language Models for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-10-10-4|PAPER Thu-SS-10-10-4 — Exploring Fusion Methods and Feature Space for the Classification of Paralinguistic Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Fusion Methods and Feature Space for the Classification of Paralinguistic Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170619.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-1|PAPER Tue-O-4-8-1 — Speech Emotion Recognition with Emotion-Pair Based Framework Considering Emotion Distribution Information in Dimensional Emotion Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Emotion Recognition with Emotion-Pair Based Framework Considering Emotion Distribution Information in Dimensional Emotion Space</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170519.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-1|PAPER Wed-O-6-10-1 — Large-Scale Domain Adaptation via Teacher-Student Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Large-Scale Domain Adaptation via Teacher-Student Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170563.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-8-2|PAPER Wed-O-6-8-2 — Computing Multimodal Dyadic Behaviors During Spontaneous Diagnosis Interviews Toward Automatic Categorization of Autism Spectrum Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Computing Multimodal Dyadic Behaviors During Spontaneous Diagnosis Interviews Toward Automatic Categorization of Autism Spectrum Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171085.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-5|PAPER Mon-SS-2-8-5 — ResNet and Model Fusion for Automatic Spoofing Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ResNet and Model Fusion for Automatic Spoofing Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170746.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-4|PAPER Mon-P-1-2-4 — Frame-Wise Dynamic Threshold Based Polyphonic Acoustic Event Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Frame-Wise Dynamic Threshold Based Polyphonic Acoustic Event Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170304.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-7|PAPER Mon-SS-1-8-7 — Feature Selection Based on CQCCs for Automatic Speaker Verification Spoofing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Feature Selection Based on CQCCs for Automatic Speaker Verification Spoofing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170271.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-7|PAPER Tue-P-5-3-7 — A Mask Estimation Method Integrating Data Field Model for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Mask Estimation Method Integrating Data Field Model for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170109.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-6-2|PAPER Thu-O-9-6-2 — Improved Codebook-Based Speech Enhancement Based on MBE Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Codebook-Based Speech Enhancement Based on MBE Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170658.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-2|PAPER Wed-O-7-1-2 — Context Regularity Indexed by Auditory N1 and P2 Event-Related Potentials]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Context Regularity Indexed by Auditory N1 and P2 Event-Related Potentials</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170229.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-6|PAPER Tue-P-5-3-6 — Speaker Direction-of-Arrival Estimation Based on Frequency-Independent Beampattern]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Direction-of-Arrival Estimation Based on Frequency-Independent Beampattern</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171129.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-7-1-8|PAPER Wed-SS-7-1-8 — Improving DNN Bluetooth Narrowband Acoustic Models by Cross-Bandwidth and Cross-Lingual Initialization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving DNN Bluetooth Narrowband Acoustic Models by Cross-Bandwidth and Cross-Lingual Initialization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170460.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-3|PAPER Mon-O-1-1-3 — Embedding-Based Speaker Adaptive Training of Deep Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Embedding-Based Speaker Adaptive Training of Deep Neural Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-1-5|PAPER Mon-O-1-1-5 — English Conversational Telephone Speech Recognition by Humans and Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">English Conversational Telephone Speech Recognition by Humans and Machines</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171323.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-1|PAPER Tue-P-4-2-1 — Backstitch: Counteracting Finite-Sample Bias via Negative Steps]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Backstitch: Counteracting Finite-Sample Bias via Negative Steps</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170588.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-2|PAPER Wed-O-7-8-2 — Acoustic Data-Driven Lexicon Learning Based on a Greedy Pronunciation Selection Framework]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Data-Driven Lexicon Learning Based on a Greedy Pronunciation Selection Framework</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170601.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-2|PAPER Thu-O-9-4-2 — The Kaldi OpenKWS System: Improving Low Resource Keyword Search]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Kaldi OpenKWS System: Improving Low Resource Keyword Search</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170222.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-5|PAPER Wed-P-7-3-5 — A Domain Knowledge-Assisted Nonlinear Model for Head-Related Transfer Functions Based on Bottleneck Deep Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Domain Knowledge-Assisted Nonlinear Model for Head-Related Transfer Functions Based on Bottleneck Deep Neural Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170866.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-11|PAPER Wed-P-7-3-11 — An Audio Based Piano Performance Evaluation Method Using Deep Neural Network Based Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Audio Based Piano Performance Evaluation Method Using Deep Neural Network Based Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-1|PAPER Wed-SS-6-2-1 — Team ELISA System for DARPA LORELEI Speech Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team ELISA System for DARPA LORELEI Speech Evaluation 2016</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170729.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-10|PAPER Mon-P-1-4-10 — Estimation of Gap Between Current Language Models and Human Performance]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Estimation of Gap Between Current Language Models and Human Performance</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171174.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-3|PAPER Tue-O-5-8-3 — Off-Topic Spoken Response Detection Using Siamese Convolutional Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Off-Topic Spoken Response Detection Using Siamese Convolutional Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170866.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-11|PAPER Wed-P-7-3-11 — An Audio Based Piano Performance Evaluation Method Using Deep Neural Network Based Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Audio Based Piano Performance Evaluation Method Using Deep Neural Network Based Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-1|PAPER Tue-O-3-8-1 — An RNN-Based Quantized F0 Model with Multi-Tier Feedback Links for Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An RNN-Based Quantized F0 Model with Multi-Tier Feedback Links for Text-to-Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170171.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-1|PAPER Thu-P-9-4-1 — Principles for Learning Controllable TTS from Annotated and Latent Variation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Principles for Learning Controllable TTS from Annotated and Latent Variation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171088.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-2-5|PAPER Mon-O-1-2-5 — Multimodal Prediction of Affective Dimensions via Fusing Multiple Regression Techniques]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multimodal Prediction of Affective Dimensions via Fusing Multiple Regression Techniques</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170250.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-1|PAPER Tue-O-5-8-1 — Bidirectional LSTM-RNN for Improving Automated Assessment of Non-Native Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bidirectional LSTM-RNN for Improving Automated Assessment of Non-Native Children’s Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-5|PAPER Wed-O-7-10-5 — Improving Sub-Phone Modeling for Better Native Language Identification with Non-Native English Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Sub-Phone Modeling for Better Native Language Identification with Non-Native English Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170388.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-4|PAPER Wed-P-6-1-4 — Off-Topic Spoken Response Detection with Word Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Off-Topic Spoken Response Detection with Word Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-4|PAPER Thu-P-9-4-4 — Utterance Selection for Optimizing Intelligibility of TTS Voices Trained on ASR Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Utterance Selection for Optimizing Intelligibility of TTS Voices Trained on ASR Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170199.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-5|PAPER Tue-P-5-3-5 — Weighted Spatial Covariance Matrix Estimation for MUSIC Based TDOA Estimation of Speech Source]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weighted Spatial Covariance Matrix Estimation for MUSIC Based TDOA Estimation of Speech Source</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170549.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-4-6|PAPER Tue-O-4-4-6 — Ideal Ratio Mask Estimation Using Deep Neural Networks for Monaural Speech Segregation in Noisy Reverberant Conditions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ideal Ratio Mask Estimation Using Deep Neural Networks for Monaural Speech Segregation in Noisy Reverberant Conditions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171343.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-6|PAPER Mon-P-1-4-6 — Binary Deep Neural Networks for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Binary Deep Neural Networks for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170395.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-7|PAPER Wed-P-7-3-7 — Matrix of Polynomials Model Based Polynomial Dictionary Learning Method for Acoustic Impulse Response Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Matrix of Polynomials Model Based Polynomial Dictionary Learning Method for Acoustic Impulse Response Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170304.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-7|PAPER Mon-SS-1-8-7 — Feature Selection Based on CQCCs for Automatic Speaker Verification Spoofing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Feature Selection Based on CQCCs for Automatic Speaker Verification Spoofing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170305.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-3|PAPER Wed-O-7-2-3 — Recognizing Multi-Talker Speech with Permutation Invariant Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Recognizing Multi-Talker Speech with Permutation Invariant Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170485.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-9|PAPER Wed-P-7-3-9 — An Environmental Feature Representation for Robust Speech Recognition and for Environment Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Environmental Feature Representation for Robust Speech Recognition and for Environment Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-2|PAPER Tue-P-5-4-2 — Multi-Target Ensemble Learning for Monaural Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Target Ensemble Learning for Monaural Speech Separation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170297.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-14|PAPER Tue-P-5-4-14 — Binaural Reverberant Speech Separation Based on Deep Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Binaural Reverberant Speech Separation Based on Deep Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171672.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-13|PAPER Tue-P-5-4-13 — Speech Enhancement Using Bayesian Wavenet]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Enhancement Using Bayesian Wavenet</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170553.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-3|PAPER Wed-P-6-2-3 — Conditional Generative Adversarial Nets Classifier for Spoken Language Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Conditional Generative Adversarial Nets Classifier for Spoken Language Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170368.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-3|PAPER Wed-O-6-10-3 — RNN-LDA Clustering for Feature Based DNN Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">RNN-LDA Clustering for Feature Based DNN Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170368.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-3|PAPER Wed-O-6-10-3 — RNN-LDA Clustering for Feature Based DNN Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">RNN-LDA Clustering for Feature Based DNN Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-8|PAPER Wed-P-6-1-8 — Use of Graphemic Lexicons for Spoken Language Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Use of Graphemic Lexicons for Spoken Language Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-7|PAPER Mon-P-2-4-7 — Investigating Efficient Feature Representation Methods and Training Objective for BLSTM-Based Phone Duration Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Efficient Feature Representation Methods and Training Objective for BLSTM-Based Phone Duration Prediction</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171079.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-9|PAPER Wed-P-6-1-9 — Distilling Knowledge from an Ensemble of Models for Punctuation Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Distilling Knowledge from an Ensemble of Models for Punctuation Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170562.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-4|PAPER Wed-P-8-2-4 — Modeling Perceivers Neural-Responses Using Lobe-Dependent Convolutional Neural Network to Improve Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Perceivers Neural-Responses Using Lobe-Dependent Convolutional Neural Network to Improve Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170281.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-4|PAPER Wed-P-6-4-4 — Intelligibilities of Mandarin Chinese Sentences with Spectral “Holes”]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Intelligibilities of Mandarin Chinese Sentences with Spectral “Holes”</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170515.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-6-3|PAPER Thu-O-9-6-3 — Improving Mask Learning Based Speech Enhancement System with Restoration Layers and Residual Connection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Mask Learning Based Speech Enhancement System with Restoration Layers and Residual Connection</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170164.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-13|PAPER Thu-P-9-3-13 — Don’t Count on ASR to Transcribe for You: Breaking Bias with Two Crowds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Don’t Count on ASR to Transcribe for You: Breaking Bias with Two Crowds</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170044.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-2|PAPER Wed-O-7-10-2 — End-to-End Language Identification Using High-Order Utterance Representation with Bilinear Pooling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Language Identification Using High-Order Utterance Representation with Bilinear Pooling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170500.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-5|PAPER Wed-P-6-4-5 — The Effect of Situation-Specific Non-Speech Acoustic Cues on the Intelligibility of Speech in Noise]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Effect of Situation-Specific Non-Speech Acoustic Cues on the Intelligibility of Speech in Noise</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170853.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-10-4|PAPER Mon-O-2-10-4 — On Design of Robust Deep Models for CHiME-4 Multi-Channel Speech Recognition with Multiple Configurations of Array Microphones]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Design of Robust Deep Models for CHiME-4 Multi-Channel Speech Recognition with Multiple Configurations of Array Microphones</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171371.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-14|PAPER Mon-P-2-2-14 — Video-Based Tracking of Jaw Movements During Speech: Preliminary Results and Future Directions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Video-Based Tracking of Jaw Movements During Speech: Preliminary Results and Future Directions</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170478.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-3|PAPER Tue-P-5-2-3 — Classification of Bulbar ALS from Kinematic Features of the Jaw and Lips: Towards Computer-Mediated Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Classification of Bulbar ALS from Kinematic Features of the Jaw and Lips: Towards Computer-Mediated Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-B-4|PAPER Wed-S&T-6-B-4 — Nora the Empathetic Psychologist]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nora the Empathetic Psychologist</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171672.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-13|PAPER Tue-P-5-4-13 — Speech Enhancement Using Bayesian Wavenet]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Enhancement Using Bayesian Wavenet</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171659.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-6-6|PAPER Wed-O-8-6-6 — Glottal Model Based Speech Beamforming for ad-hoc Microphone Arrays]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glottal Model Based Speech Beamforming for ad-hoc Microphone Arrays</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170304.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-1-8-7|PAPER Mon-SS-1-8-7 — Feature Selection Based on CQCCs for Automatic Speaker Verification Spoofing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Feature Selection Based on CQCCs for Automatic Speaker Verification Spoofing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170658.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-1-2|PAPER Wed-O-7-1-2 — Context Regularity Indexed by Auditory N1 and P2 Event-Related Potentials]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Context Regularity Indexed by Auditory N1 and P2 Event-Related Potentials</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170870.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-6-5|PAPER Tue-O-3-6-5 — Automatic Measurement of Pre-Aspiration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Measurement of Pre-Aspiration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171267.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-6|PAPER Mon-P-2-2-6 — The Influence on Realization and Perception of Lexical Tones from Affricate’s Aspiration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Influence on Realization and Perception of Lexical Tones from Affricate’s Aspiration</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171235.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-3|PAPER Wed-P-8-1-3 — Reanalyze Fundamental Frequency Peak Delay in Mandarin]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reanalyze Fundamental Frequency Peak Delay in Mandarin</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171343.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-6|PAPER Mon-P-1-4-6 — Binary Deep Neural Networks for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Binary Deep Neural Networks for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171125.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-5|PAPER Tue-P-3-1-5 — What Does the Speaker Embedding Encode?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">What Does the Speaker Embedding Encode?</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170305.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-3|PAPER Wed-O-7-2-3 — Recognizing Multi-Talker Speech with Permutation Invariant Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Recognizing Multi-Talker Speech with Permutation Invariant Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170830.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-4-1|PAPER Tue-O-4-4-1 — A Maximum Likelihood Approach to Deep Neural Network Based Nonlinear Spectral Mapping for Single-Channel Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Maximum Likelihood Approach to Deep Neural Network Based Nonlinear Spectral Mapping for Single-Channel Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171231.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-11|PAPER Wed-P-6-3-11 — Evaluating Automatic Topic Segmentation as a Segment Retrieval Task]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluating Automatic Topic Segmentation as a Segment Retrieval Task</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171178.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-6|PAPER Wed-P-8-3-6 — ASR Error Management for Improving Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ASR Error Management for Improving Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171107.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-1-6|PAPER Tue-O-4-1-6 — Google’s Next-Generation Real-Time Unit-Selection Synthesizer Using Sequence-to-Sequence LSTM-Based Autoencoders]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Google’s Next-Generation Real-Time Unit-Selection Synthesizer Using Sequence-to-Sequence LSTM-Based Autoencoders</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-11|PAPER Thu-P-9-4-11 — Tacotron: Towards End-to-End Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tacotron: Towards End-to-End Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171225.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-15|PAPER Tue-P-5-4-15 — On the Quality and Intelligibility of Noisy Speech Processed for Near-End Listening Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Quality and Intelligibility of Noisy Speech Processed for Near-End Listening Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171096.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-1|PAPER Thu-P-9-1-1 — Improved Automatic Speech Recognition Using Subband Temporal Envelope Features and Time-Delay Neural Network Denoising Autoencoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Automatic Speech Recognition Using Subband Temporal Envelope Features and Time-Delay Neural Network Denoising Autoencoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170250.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-1|PAPER Tue-O-5-8-1 — Bidirectional LSTM-RNN for Improving Automated Assessment of Non-Native Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bidirectional LSTM-RNN for Improving Automated Assessment of Non-Native Children’s Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-5|PAPER Wed-O-7-10-5 — Improving Sub-Phone Modeling for Better Native Language Identification with Non-Native English Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Sub-Phone Modeling for Better Native Language Identification with Non-Native English Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170470.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-8-3|PAPER Thu-O-10-8-3 — Schwa Realization in French: Using Automatic Speech Processing to Study Phonological and Socio-Linguistic Factors in Large Corpora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Schwa Realization in French: Using Automatic Speech Processing to Study Phonological and Socio-Linguistic Factors in Large Corpora</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170216.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-3|PAPER Wed-P-7-4-3 — Cross-Database Models for the Classification of Dysarthria Presence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Database Models for the Classification of Dysarthria Presence</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171675.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-1-6|PAPER Wed-O-6-1-6 — A Simulation Study on the Effect of Glottal Boundary Conditions on Vocal Tract Formants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Simulation Study on the Effect of Glottal Boundary Conditions on Vocal Tract Formants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170719.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-4|PAPER Tue-O-3-8-4 — DNN-SPACE: DNN-HMM-Based Generative Model of Voice F,,0,, Contours for Statistical Phrase/Accent Command Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DNN-SPACE: DNN-HMM-Based Generative Model of Voice F,,0,, Contours for Statistical Phrase/Accent Command Estimation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170130.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-8-11-1|PAPER Wed-SS-8-11-1 — Personalized Quantification of Voice Attractiveness in Multidimensional Merit Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Personalized Quantification of Voice Attractiveness in Multidimensional Merit Space</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170664.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-3|PAPER Wed-P-8-4-3 — Phoneme-Discriminative Features for Dysarthric Speech Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phoneme-Discriminative Features for Dysarthric Speech Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170984.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-8|PAPER Wed-P-8-4-8 — Emotional Voice Conversion with Adaptive Scales F0 Based on Wavelet Transform Using Limited Amount of Emotional Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotional Voice Conversion with Adaptive Scales F0 Based on Wavelet Transform Using Limited Amount of Emotional Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170685.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-2|PAPER Thu-O-10-11-2 — Data Augmentation, Missing Feature Mask and Kernel Classification for Through-the-Wall Acoustic Surveillance]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Data Augmentation, Missing Feature Mask and Kernel Classification for Through-the-Wall Acoustic Surveillance</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171691.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-8-11-8|PAPER Wed-SS-8-11-8 — Does Posh English Sound Attractive?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Does Posh English Sound Attractive?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-1|PAPER Tue-P-5-4-1 — A Post-Filtering Approach Based on Locally Linear Embedding Difference Compensation for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Post-Filtering Approach Based on Locally Linear Embedding Difference Compensation for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170063.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-1|PAPER Wed-P-8-4-1 — Voice Conversion from Unaligned Corpora Using Variational Autoencoding Wasserstein Generative Adversarial Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Conversion from Unaligned Corpora Using Variational Autoencoding Wasserstein Generative Adversarial Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171227.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-13|PAPER Tue-P-5-3-13 — Unmixing Convolutive Mixtures by Exploiting Amplitude Co-Modulation: Methods and Evaluation on Mandarin Speech Recordings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unmixing Convolutive Mixtures by Exploiting Amplitude Co-Modulation: Methods and Evaluation on Mandarin Speech Recordings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-7|PAPER Mon-P-2-4-7 — Investigating Efficient Feature Representation Methods and Training Objective for BLSTM-Based Phone Duration Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Efficient Feature Representation Methods and Training Objective for BLSTM-Based Phone Duration Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170519.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-1|PAPER Wed-O-6-10-1 — Large-Scale Domain Adaptation via Teacher-Student Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Large-Scale Domain Adaptation via Teacher-Student Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170515.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-6-3|PAPER Thu-O-9-6-3 — Improving Mask Learning Based Speech Enhancement System with Restoration Layers and Residual Connection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Mask Learning Based Speech Enhancement System with Restoration Layers and Residual Connection</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170164.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-13|PAPER Thu-P-9-3-13 — Don’t Count on ASR to Transcribe for You: Breaking Bias with Two Crowds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Don’t Count on ASR to Transcribe for You: Breaking Bias with Two Crowds</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170143.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-6|PAPER Tue-P-5-1-6 — Mechanisms of Tone Sandhi Rule Application by Non-Native Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mechanisms of Tone Sandhi Rule Application by Non-Native Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170633.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-1|PAPER Tue-P-3-2-1 — Speaker Verification via Estimating Total Variability Space Using Probabilistic Partial Least Squares]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Verification via Estimating Total Variability Space Using Probabilistic Partial Least Squares</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171323.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-1|PAPER Tue-P-4-2-1 — Backstitch: Counteracting Finite-Sample Bias via Negative Steps]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Backstitch: Counteracting Finite-Sample Bias via Negative Steps</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170601.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-2|PAPER Thu-O-9-4-2 — The Kaldi OpenKWS System: Improving Low Resource Keyword Search]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Kaldi OpenKWS System: Improving Low Resource Keyword Search</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171600.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-8|PAPER Tue-P-5-1-8 — Directing Attention During Perceptual Training: A Preliminary Study of Phonetic Learning in Southern Min by Mandarin Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Directing Attention During Perceptual Training: A Preliminary Study of Phonetic Learning in Southern Min by Mandarin Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-SS-6-2-1|PAPER Wed-SS-6-2-1 — Team ELISA System for DARPA LORELEI Speech Evaluation 2016]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team ELISA System for DARPA LORELEI Speech Evaluation 2016</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-2|PAPER Tue-P-3-2-2 — Deep Speaker Feature Learning for Text-Independent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Speaker Feature Learning for Text-Independent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-11|PAPER Thu-P-9-4-11 — Tacotron: Towards End-to-End Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tacotron: Towards End-to-End Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170781.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-7|PAPER Mon-P-1-1-7 — Wavelet Speech Enhancement Based on Robust Principal Component Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Wavelet Speech Enhancement Based on Robust Principal Component Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170612.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-4|PAPER Wed-P-6-3-4 — Exploring the Use of Significant Words Language Modeling for Spoken Document Retrieval]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring the Use of Significant Words Language Modeling for Spoken Document Retrieval</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170608.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-5|PAPER Mon-P-2-2-5 — Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170564.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-1-4|PAPER Mon-O-2-1-4 — Fast Neural Network Language Model Lookups at N-Gram Speeds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fast Neural Network Language Model Lookups at N-Gram Speeds</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170492.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-2-4|PAPER Thu-O-9-2-4 — Estimating Speaker Clustering Quality Using Logistic Regression]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Estimating Speaker Clustering Quality Using Logistic Regression</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-2-1|PAPER Tue-P-5-2-1 — Float Like a Butterfly Sting Like a Bee: Changes in Speech Preceded Parkinsonism Diagnosis for Muhammad Ali]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Float Like a Butterfly Sting Like a Bee: Changes in Speech Preceded Parkinsonism Diagnosis for Muhammad Ali</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171122.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-10|PAPER Wed-P-8-4-10 — Spectro-Temporal Modelling with Time-Frequency LSTM and Structured Output Layer for Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spectro-Temporal Modelling with Time-Frequency LSTM and Structured Output Layer for Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170456.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-3|PAPER Mon-SS-2-8-3 — A Study on Replay Attack and Anti-Spoofing for Automatic Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Study on Replay Attack and Anti-Spoofing for Automatic Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-2|PAPER Tue-P-3-2-2 — Deep Speaker Feature Learning for Text-Independent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Speaker Feature Learning for Text-Independent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170480.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-4|PAPER Thu-O-9-4-4 — Compressed Time Delay Neural Network for Small-Footprint Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Compressed Time Delay Neural Network for Small-Footprint Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-4|PAPER Thu-P-9-4-4 — Utterance Selection for Optimizing Intelligibility of TTS Voices Trained on ASR Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Utterance Selection for Optimizing Intelligibility of TTS Voices Trained on ASR Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171391.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-10-6|PAPER Wed-O-7-10-6 — QMDIS: QCRI-MIT Advanced Dialect Identification System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">QMDIS: QCRI-MIT Advanced Dialect Identification System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-6|PAPER Wed-O-7-2-6 — Robust Speech Recognition via Anchor Word Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Speech Recognition via Anchor Word Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170486.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-10|PAPER Wed-P-7-3-10 — Attention and Localization Based on a Deep Convolutional Recurrent Model for Weakly Supervised Audio Tagging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention and Localization Based on a Deep Convolutional Recurrent Model for Weakly Supervised Audio Tagging</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170281.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-4|PAPER Wed-P-6-4-4 — Intelligibilities of Mandarin Chinese Sentences with Spectral “Holes”]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Intelligibilities of Mandarin Chinese Sentences with Spectral “Holes”</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170549.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-4-6|PAPER Tue-O-4-4-6 — Ideal Ratio Mask Estimation Using Deep Neural Networks for Monaural Speech Segregation in Noisy Reverberant Conditions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ideal Ratio Mask Estimation Using Deep Neural Networks for Monaural Speech Segregation in Noisy Reverberant Conditions</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170129.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-1-1|PAPER Tue-P-4-1-1 — An Exploration of Dropout with LSTMs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Exploration of Dropout with LSTMs</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170055.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-2|PAPER Tue-P-5-3-2 — Time Delay Histogram Based Speech Source Separation Using a Planar Array]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time Delay Histogram Based Speech Source Separation Using a Planar Array</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170579.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-4|PAPER Thu-P-9-1-4 — Joint Training of Multi-Channel-Condition Dereverberation and Acoustic Modeling of Microphone Array Speech for Robust Distant Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Training of Multi-Channel-Condition Dereverberation and Acoustic Modeling of Microphone Array Speech for Robust Distant Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170805.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-6|PAPER Thu-P-9-1-6 — Attention-Based LSTM with Multi-Task Learning for Distant Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention-Based LSTM with Multi-Task Learning for Distant Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170503.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-4-1|PAPER Wed-O-8-4-1 — Sequence-to-Sequence Models Can Directly Translate Foreign Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sequence-to-Sequence Models Can Directly Translate Foreign Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-11|PAPER Thu-P-9-4-11 — Tacotron: Towards End-to-End Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tacotron: Towards End-to-End Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170608.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-5|PAPER Mon-P-2-2-5 — Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171023.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-10|PAPER Wed-P-6-2-10 — Null-Hypothesis LLR: A Proposal for Forensic Automatic Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Null-Hypothesis LLR: A Proposal for Forensic Automatic Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170642.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-2|PAPER Wed-O-7-2-2 — Combined Multi-Channel NMF-Based Robust Beamforming for Noisy Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Combined Multi-Channel NMF-Based Robust Beamforming for Noisy Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170634.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-2|PAPER Wed-P-6-3-2 — Constructing Acoustic Distances Between Subwords and States Obtained from a Deep Neural Network for Spoken Term Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Constructing Acoustic Distances Between Subwords and States Obtained from a Deep Neural Network for Spoken Term Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170900.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-10-2|PAPER Mon-O-1-10-2 — Articulatory Text-to-Speech Synthesis Using the Digital Waveguide Mesh Driven by a Deep Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulatory Text-to-Speech Synthesis Using the Digital Waveguide Mesh Driven by a Deep Neural Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170638.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-1|PAPER Wed-P-8-3-1 — Zero-Shot Learning for Natural Language Understanding Using Domain-Independent Sequential Structure and Question Types]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Learning for Natural Language Understanding Using Domain-Independent Sequential Structure and Question Types</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170994.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-7|PAPER Wed-P-8-2-7 — Emotion Category Mapping to Emotional Space by Cross-Corpus Emotion Labeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotion Category Mapping to Emotional Space by Cross-Corpus Emotion Labeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171236.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-12|PAPER Mon-P-2-2-12 — Cross-Modal Analysis Between Phonation Differences and Texture Images Based on Sentiment Correlations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Analysis Between Phonation Differences and Texture Images Based on Sentiment Correlations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170669.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-4|PAPER Mon-P-2-4-4 — Global Syllable Vectors for Building TTS Front-End with Deep Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Global Syllable Vectors for Building TTS Front-End with Deep Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-1-3|PAPER Tue-O-5-1-3 — Improving Speech Recognition by Revising Gated Recurrent Units]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Speech Recognition by Revising Gated Recurrent Units</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-6|PAPER Wed-O-6-10-6 — Dynamic Layer Normalization for Adaptive Neural Acoustic Modeling in Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Layer Normalization for Adaptive Neural Acoustic Modeling in Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170870.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-6-5|PAPER Tue-O-3-6-5 — Automatic Measurement of Pre-Aspiration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Measurement of Pre-Aspiration</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171117.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-8-6|PAPER Wed-O-7-8-6 — Learning Similarity Functions for Pronunciation Variations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Similarity Functions for Pronunciation Variations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171464.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-6-5|PAPER Wed-O-8-6-5 — Coherence-Based Dual-Channel Noise Reduction Algorithm in a Complex Noisy Environment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coherence-Based Dual-Channel Noise Reduction Algorithm in a Complex Noisy Environment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171464.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-6-5|PAPER Wed-O-8-6-5 — Coherence-Based Dual-Channel Noise Reduction Algorithm in a Complex Noisy Environment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coherence-Based Dual-Channel Noise Reduction Algorithm in a Complex Noisy Environment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170783.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-6|PAPER Mon-P-2-3-6 — Deep Least Squares Regression for Speaker Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Least Squares Regression for Speaker Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170537.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-2-2|PAPER Tue-O-5-2-2 — The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170792.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-2-5|PAPER Mon-P-1-2-5 — Enhanced Feature Extraction for Speech Detection in Media Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enhanced Feature Extraction for Speech Detection in Media Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170818.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-1-2|PAPER Mon-O-2-1-2 — A Batch Noise Contrastive Estimation Approach for Training Large Vocabulary Language Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Batch Noise Contrastive Estimation Approach for Training Large Vocabulary Language Models</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170729.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-4-10|PAPER Mon-P-1-4-10 — Estimation of Gap Between Current Language Models and Human Performance]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Estimation of Gap Between Current Language Models and Human Performance</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171446.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-10-5|PAPER Wed-O-6-10-5 — Semi-Supervised Learning with Semantic Knowledge Extraction for Improved Speech Recognition in Air Traffic Control]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised Learning with Semantic Knowledge Extraction for Improved Speech Recognition in Air Traffic Control</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170147.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-10-3|PAPER Wed-O-8-10-3 — Approximated and Domain-Adapted LSTM Language Models for First-Pass Decoding in Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Approximated and Domain-Adapted LSTM Language Models for First-Pass Decoding in Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170336.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-1-2|PAPER Tue-O-4-1-2 — Waveform Modeling Using Stacked Dilated Convolutional Neural Networks for Speech Bandwidth Extension]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Waveform Modeling Using Stacked Dilated Convolutional Neural Networks for Speech Bandwidth Extension</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170781.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-1-1-7|PAPER Mon-P-1-1-7 — Wavelet Speech Enhancement Based on Robust Principal Component Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Wavelet Speech Enhancement Based on Robust Principal Component Analysis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-1|PAPER Tue-P-5-4-1 — A Post-Filtering Approach Based on Locally Linear Embedding Difference Compensation for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Post-Filtering Approach Based on Locally Linear Embedding Difference Compensation for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170063.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-1|PAPER Wed-P-8-4-1 — Voice Conversion from Unaligned Corpora Using Variational Autoencoding Wasserstein Generative Adversarial Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Conversion from Unaligned Corpora Using Variational Autoencoding Wasserstein Generative Adversarial Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170221.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-1-6|PAPER Thu-O-9-1-6 — Discriminative Autoencoders for Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discriminative Autoencoders for Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170029.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-6-5|PAPER Tue-O-5-6-5 — On the Duration of Mandarin Tones]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Duration of Mandarin Tones</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170805.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-1-6|PAPER Thu-P-9-1-6 — Attention-Based LSTM with Multi-Task Learning for Distant Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention-Based LSTM with Multi-Task Learning for Distant Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171296.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-1-3|PAPER Tue-O-3-1-3 — Advances in Joint CTC-Attention Based End-to-End Speech Recognition with a Deep CNN Encoder and RNN-LM]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Advances in Joint CTC-Attention Based End-to-End Speech Recognition with a Deep CNN Encoder and RNN-LM</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170349.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-10-2|PAPER Tue-O-4-10-2 — Learning Latent Representations for Speech Generation and Transformation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Latent Representations for Speech Generation and Transformation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170221.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-1-6|PAPER Thu-O-9-1-6 — Discriminative Autoencoders for Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discriminative Autoencoders for Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170562.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-4|PAPER Wed-P-8-2-4 — Modeling Perceivers Neural-Responses Using Lobe-Dependent Convolutional Neural Network to Improve Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Perceivers Neural-Responses Using Lobe-Dependent Convolutional Neural Network to Improve Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170877.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-5|PAPER Thu-O-10-11-5 — Gate Activation Signal Analysis for Gated Recurrent Neural Networks and its Correlation with Phoneme Boundaries]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Gate Activation Signal Analysis for Gated Recurrent Neural Networks and its Correlation with Phoneme Boundaries</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170138.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-2-4|PAPER Mon-O-2-2-4 — Objective Severity Assessment from Disordered Voice Using Estimated Glottal Airflow]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Objective Severity Assessment from Disordered Voice Using Estimated Glottal Airflow</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171167.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-1-12|PAPER Wed-P-8-1-12 — Focus Acoustics in Mandarin Nominals]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Focus Acoustics in Mandarin Nominals</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-8-1|PAPER Wed-O-8-8-1 — Acoustic Assessment of Disordered Voice with Continuous Speech Based on Utterance-Level ASR Posterior Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Assessment of Disordered Voice with Continuous Speech Based on Utterance-Level ASR Posterior Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171210.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-6-6|PAPER Mon-O-2-6-6 — A Preliminary Study of Prosodic Disambiguation by Chinese EFL Learners]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Preliminary Study of Prosodic Disambiguation by Chinese EFL Learners</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170876.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-2-12|PAPER Wed-P-7-2-12 — A Preliminary Phonetic Investigation of Alphabetic Words in Mandarin Chinese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Preliminary Phonetic Investigation of Alphabetic Words in Mandarin Chinese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-3-1|PAPER Mon-P-2-3-1 — Multilingual Recurrent Neural Networks with Residual Learning for Low-Resource Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multilingual Recurrent Neural Networks with Residual Learning for Low-Resource Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171395.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-3|PAPER Tue-SS-3-11-3 — Analysis of Engagement and User Experience with a Laughter Responsive Social Robot]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of Engagement and User Experience with a Laughter Responsive Social Robot</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170949.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-6|PAPER Mon-P-2-4-6 — Multi-Task Learning for Prosodic Structure Generation Using BLSTM RNN with Structured Output Layer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Task Learning for Prosodic Structure Generation Using BLSTM RNN with Structured Output Layer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171015.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-4-8|PAPER Wed-P-7-4-8 — Cross-Domain Classification of Drowsiness in Speech: The Case of Alcohol Intoxication and Sleep Deprivation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Domain Classification of Drowsiness in Speech: The Case of Alcohol Intoxication and Sleep Deprivation</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170229.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-6|PAPER Tue-P-5-3-6 — Speaker Direction-of-Arrival Estimation Based on Frequency-Independent Beampattern]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Direction-of-Arrival Estimation Based on Frequency-Independent Beampattern</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170837.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-5|PAPER Tue-P-4-3-5 — End-of-Utterance Prediction by Prosodic Features and Phrase-Dependency Structure in Spontaneous Japanese Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-of-Utterance Prediction by Prosodic Features and Phrase-Dependency Structure in Spontaneous Japanese Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170064.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-5|PAPER Tue-P-5-1-5 — Proficiency Assessment of ESL Learner’s Sentence Prosody with TTS Synthesized Voice as Reference]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Proficiency Assessment of ESL Learner’s Sentence Prosody with TTS Synthesized Voice as Reference</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171735.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-1-8|PAPER Mon-P-2-1-8 — Predicting Epenthetic Vowel Quality from Acoustics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Epenthetic Vowel Quality from Acoustics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170247.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-10-1|PAPER Tue-O-4-10-1 — Voice Conversion Using Sequence-to-Sequence Learning of Context Posterior Probabilities]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Conversion Using Sequence-to-Sequence Learning of Context Posterior Probabilities</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170638.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-1|PAPER Wed-P-8-3-1 — Zero-Shot Learning for Natural Language Understanding Using Domain-Independent Sequential Structure and Question Types]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Learning for Natural Language Understanding Using Domain-Independent Sequential Structure and Question Types</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171469.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-13|PAPER Wed-P-7-3-13 — A Transfer Learning Based Feature Extractor for Polyphonic Sound Event Detection Using Connectionist Temporal Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Transfer Learning Based Feature Extractor for Polyphonic Sound Event Detection Using Connectionist Temporal Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170862.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-6|PAPER Wed-P-6-3-6 — Order-Preserving Abstractive Summarization for Spoken Content Based on Connectionist Temporal Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Order-Preserving Abstractive Summarization for Spoken Content Based on Connectionist Temporal Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170569.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-6-8-3|PAPER Wed-O-6-8-3 — Deriving Dyad-Level Interaction Representation Using Interlocutors Structural and Expressive Multimodal Behavior Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deriving Dyad-Level Interaction Representation Using Interlocutors Structural and Expressive Multimodal Behavior Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170143.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-1-6|PAPER Tue-P-5-1-6 — Mechanisms of Tone Sandhi Rule Application by Non-Native Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mechanisms of Tone Sandhi Rule Application by Non-Native Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171367.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-3-3|PAPER Wed-P-6-3-3 — Fast and Accurate OOV Decoder on High-Level Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fast and Accurate OOV Decoder on High-Level Features</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171212.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-3|PAPER Thu-O-9-4-3 — The STC Keyword Search System for OpenKWS 2016 Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The STC Keyword Search System for OpenKWS 2016 Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170713.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-10-2|PAPER Tue-O-3-10-2 — Interaction and Transition Model for Speech Emotion Recognition in Dialogue]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Interaction and Transition Model for Speech Emotion Recognition in Dialogue</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170725.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-12|PAPER Tue-P-4-3-12 — Hierarchical LSTMs with Joint Learning for Estimating Customer Satisfaction from Contact Center Calls]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hierarchical LSTMs with Joint Learning for Estimating Customer Satisfaction from Contact Center Calls</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170521.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-3|PAPER Mon-P-2-4-3 — Prosody Aware Word-Level Encoder Based on BLSTM-RNNs for DNN-Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosody Aware Word-Level Encoder Based on BLSTM-RNNs for DNN-Based Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170719.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-4|PAPER Tue-O-3-8-4 — DNN-SPACE: DNN-HMM-Based Generative Model of Voice F,,0,, Contours for Statistical Phrase/Accent Command Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DNN-SPACE: DNN-HMM-Based Generative Model of Voice F,,0,, Contours for Statistical Phrase/Accent Command Estimation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170728.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-5-8-2|PAPER Tue-O-5-8-2 — Automatic Scoring of Shadowing Speech Based on DNN Posteriors and Their DTW]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Scoring of Shadowing Speech Based on DNN Posteriors and Their DTW</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170061.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-4|PAPER Wed-O-7-2-4 — Coupled Initialization of Multi-Channel Non-Negative Matrix Factorization Based on Spatial and Spectral Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coupled Initialization of Multi-Channel Non-Negative Matrix Factorization Based on Spatial and Spectral Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-11|PAPER Thu-P-9-4-11 — Tacotron: Towards End-to-End Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tacotron: Towards End-to-End Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171496.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-12|PAPER Wed-P-8-3-12 — Speaker Dependency Analysis, Audiovisual Fusion Cues and a Multimodal BLSTM for Conversational Engagement Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Dependency Analysis, Audiovisual Fusion Cues and a Multimodal BLSTM for Conversational Engagement Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-2-6|PAPER Wed-O-7-2-6 — Robust Speech Recognition via Anchor Word Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Speech Recognition via Anchor Word Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171066.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-7|PAPER Thu-SS-9-10-7 — Infected Phonemes: How a Cold Impairs Speech on a Phonetic Level]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Infected Phonemes: How a Cold Impairs Speech on a Phonetic Level</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171039.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-6-2|PAPER Mon-O-1-6-2 — Glottal Opening and Strategies of Production of Fricatives]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glottal Opening and Strategies of Production of Fricatives</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171031.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-2-6-1|PAPER Mon-O-2-6-1 — End-to-End Acoustic Feedback in Language Learning for Correcting Devoiced French Final-Fricatives]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Acoustic Feedback in Language Learning for Correcting Devoiced French Final-Fricatives</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170422.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-4|PAPER Wed-P-8-3-4 — Character-Based Embedding Models and Reranking Strategies for Understanding Natural Language Meal Descriptions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Character-Based Embedding Models and Reranking Strategies for Understanding Natural Language Meal Descriptions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171568.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-3-8|PAPER Tue-P-4-3-8 — Entrainment in Multi-Party Spoken Dialogues at Multiple Linguistic Levels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Entrainment in Multi-Party Spoken Dialogues at Multiple Linguistic Levels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171601.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-6-1|PAPER Mon-O-1-6-1 — Phonetic Correlates of Pharyngeal and Pharyngealized Consonants in Saudi, Lebanese, and Jordanian Arabic: An rt-MRI Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetic Correlates of Pharyngeal and Pharyngealized Consonants in Saudi, Lebanese, and Jordanian Arabic: An rt-MRI Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171637.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-10-3|PAPER Tue-O-3-10-3 — Progressive Neural Networks for Transfer Learning in Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Progressive Neural Networks for Transfer Learning in Emotion Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170094.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-10-5|PAPER Tue-O-3-10-5 — Discretized Continuous Speech Emotion Recognition with Multi-Task Deep Recurrent Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discretized Continuous Speech Emotion Recognition with Multi-Task Deep Recurrent Neural Network</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170548.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-4|PAPER Tue-O-4-8-4 — Capturing Long-Term Temporal Dependencies with Convolutional Networks for Continuous Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Capturing Long-Term Temporal Dependencies with Convolutional Networks for Continuous Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171395.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-SS-3-11-3|PAPER Tue-SS-3-11-3 — Analysis of Engagement and User Experience with a Laughter Responsive Social Robot]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of Engagement and User Experience with a Laughter Responsive Social Robot</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170051.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-2-1|PAPER Thu-O-9-2-1 — Speaker Diarization Using Convolutional Neural Network for Statistics Accumulation Refinement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Diarization Using Convolutional Neural Network for Statistics Accumulation Refinement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172024.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-3|PAPER Wed-S&T-6-A-3 — WebSubDub — Experimental System for Creating High-Quality Alternative Audio Track for TV Broadcasting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WebSubDub — Experimental System for Creating High-Quality Alternative Audio Track for TV Broadcasting</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172026.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-S&T-6-A-4|PAPER Wed-S&T-6-A-4 — Voice Conservation and TTS System for People Facing Total Laryngectomy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Conservation and TTS System for People Facing Total Laryngectomy</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170866.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-11|PAPER Wed-P-7-3-11 — An Audio Based Piano Performance Evaluation Method Using Deep Neural Network Based Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Audio Based Piano Performance Evaluation Method Using Deep Neural Network Based Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170883.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-4|PAPER Tue-P-3-1-4 — Adversarial Network Bottleneck Features for Noise Robust Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Network Bottleneck Features for Noise Robust Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170055.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-2|PAPER Tue-P-5-3-2 — Time Delay Histogram Based Speech Source Separation Using a Planar Array]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time Delay Histogram Based Speech Source Separation Using a Planar Array</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171707.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-2-12|PAPER Wed-P-8-2-12 — An Investigation of Emotion Dynamics and Kalman Filtering for Speech-Based Emotion Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Investigation of Emotion Dynamics and Kalman Filtering for Speech-Based Emotion Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170984.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-8|PAPER Wed-P-8-4-8 — Emotional Voice Conversion with Adaptive Scales F0 Based on Wavelet Transform Using Limited Amount of Emotional Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotional Voice Conversion with Adaptive Scales F0 Based on Wavelet Transform Using Limited Amount of Emotional Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170055.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-2|PAPER Tue-P-5-3-2 — Time Delay Histogram Based Speech Source Separation Using a Planar Array]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time Delay Histogram Based Speech Source Separation Using a Planar Array</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170431.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-7-3-8|PAPER Wed-P-7-3-8 — Acoustic Scene Classification Using a CNN-SuperVector System Trained with Auditory and Spectrogram Image Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Scene Classification Using a CNN-SuperVector System Trained with Auditory and Spectrogram Image Features</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170524.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-10-11-1|PAPER Thu-O-10-11-1 — Occupancy Detection in Commercial and Residential Environments Using Audio Signal]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Occupancy Detection in Commercial and Residential Environments Using Audio Signal</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-2|PAPER Mon-SS-2-8-2 — Ensemble Learning for Countermeasure of Audio Replay Spoofing Attack in ASVspoof2017]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ensemble Learning for Countermeasure of Audio Replay Spoofing Attack in ASVspoof2017</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170336.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-1-2|PAPER Tue-O-4-1-2 — Waveform Modeling Using Stacked Dilated Convolutional Neural Networks for Speech Bandwidth Extension]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Waveform Modeling Using Stacked Dilated Convolutional Neural Networks for Speech Bandwidth Extension</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170229.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-6|PAPER Tue-P-5-3-6 — Speaker Direction-of-Arrival Estimation Based on Frequency-Independent Beampattern]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Direction-of-Arrival Estimation Based on Frequency-Independent Beampattern</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170883.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-1-4|PAPER Tue-P-3-1-4 — Adversarial Network Bottleneck Features for Noise Robust Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Network Bottleneck Features for Noise Robust Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171620.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-4-12|PAPER Tue-P-5-4-12 — Conditional Generative Adversarial Networks for Speech Enhancement and Noise-Robust Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Conditional Generative Adversarial Networks for Speech Enhancement and Noise-Robust Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171758.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-1-4|PAPER Wed-O-8-1-4 — Improving Speaker Verification Performance in Presence of Spoofing Attacks Using Out-of-Domain Spoofed Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Speaker Verification Performance in Presence of Spoofing Attacks Using Out-of-Domain Spoofed Data</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-4-6|PAPER Wed-P-6-4-6 — On the Use of Band Importance Weighting in the Short-Time Objective Intelligibility Measure]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Use of Band Importance Weighting in the Short-Time Objective Intelligibility Measure</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-7|PAPER Mon-P-2-4-7 — Investigating Efficient Feature Representation Methods and Training Objective for BLSTM-Based Phone Duration Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Efficient Feature Representation Methods and Training Objective for BLSTM-Based Phone Duration Prediction</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171079.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-1-9|PAPER Wed-P-6-1-9 — Distilling Knowledge from an Ensemble of Models for Punctuation Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Distilling Knowledge from an Ensemble of Models for Punctuation Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171601.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-O-1-6-1|PAPER Mon-O-1-6-1 — Phonetic Correlates of Pharyngeal and Pharyngealized Consonants in Saudi, Lebanese, and Jordanian Arabic: An rt-MRI Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetic Correlates of Pharyngeal and Pharyngealized Consonants in Saudi, Lebanese, and Jordanian Arabic: An rt-MRI Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-2|PAPER Mon-SS-2-8-2 — Ensemble Learning for Countermeasure of Audio Replay Spoofing Attack in ASVspoof2017]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ensemble Learning for Countermeasure of Audio Replay Spoofing Attack in ASVspoof2017</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170997.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-6-2-11|PAPER Wed-P-6-2-11 — The Opensesame NIST 2016 Speaker Recognition Evaluation System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Opensesame NIST 2016 Speaker Recognition Evaluation System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171445.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-SS-9-10-6|PAPER Thu-SS-9-10-6 — End-to-End Deep Learning Framework for Speech Paralinguistics Detection Based on Perception Aware Spectrum]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Deep Learning Framework for Speech Paralinguistics Detection Based on Perception Aware Spectrum</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170503.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-8-4-1|PAPER Wed-O-8-4-1 — Sequence-to-Sequence Models Can Directly Translate Foreign Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sequence-to-Sequence Models Can Directly Translate Foreign Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-11|PAPER Thu-P-9-4-11 — Tacotron: Towards End-to-End Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tacotron: Towards End-to-End Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171085.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-5|PAPER Mon-SS-2-8-5 — ResNet and Model Fusion for Automatic Spoofing Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ResNet and Model Fusion for Automatic Spoofing Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170656.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-6|PAPER Tue-P-3-2-6 — i-Vector DNN Scoring and Calibration for Noise Robust Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">i-Vector DNN Scoring and Calibration for Noise Robust Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171328.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-4-1|PAPER Thu-O-9-4-1 — A Rescoring Approach for Keyword Search Using Lattice Context Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Rescoring Approach for Keyword Search Using Lattice Context Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170949.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-6|PAPER Mon-P-2-4-6 — Multi-Task Learning for Prosodic Structure Generation Using BLSTM RNN with Structured Output Layer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Task Learning for Prosodic Structure Generation Using BLSTM RNN with Structured Output Layer</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170619.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-4-8-1|PAPER Tue-O-4-8-1 — Speech Emotion Recognition with Emotion-Pair Based Framework Considering Emotion Distribution Information in Dimensional Emotion Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Emotion Recognition with Emotion-Pair Based Framework Considering Emotion Distribution Information in Dimensional Emotion Space</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171122.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-4-10|PAPER Wed-P-8-4-10 — Spectro-Temporal Modelling with Time-Frequency LSTM and Structured Output Layer for Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spectro-Temporal Modelling with Time-Frequency LSTM and Structured Output Layer for Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-3-2-2|PAPER Tue-P-3-2-2 — Deep Speaker Feature Learning for Text-Independent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Speaker Feature Learning for Text-Independent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-12|PAPER Thu-P-9-4-12 — Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170590.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-O-7-4-5|PAPER Wed-O-7-4-5 — Minimum Semantic Error Cost Training of Deep Long Short-Term Memory Networks for Topic Spotting on Conversational Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Minimum Semantic Error Cost Training of Deep Long Short-Term Memory Networks for Topic Spotting on Conversational Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170583.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-1-4|PAPER Thu-O-9-1-4 — Non-Uniform MCE Training of Deep Long Short-Term Memory Recurrent Neural Networks for Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Uniform MCE Training of Deep Long Short-Term Memory Recurrent Neural Networks for Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170515.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-O-9-6-3|PAPER Thu-O-9-6-3 — Improving Mask Learning Based Speech Enhancement System with Restoration Layers and Residual Connection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Mask Learning Based Speech Enhancement System with Restoration Layers and Residual Connection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171085.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-SS-2-8-5|PAPER Mon-SS-2-8-5 — ResNet and Model Fusion for Automatic Spoofing Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ResNet and Model Fusion for Automatic Spoofing Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170608.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-2-5|PAPER Mon-P-2-2-5 — Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170406.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-3-15|PAPER Thu-P-9-3-15 — Towards Intelligent Crowdsourcing for Audio Data Annotation: Integrating Active Learning in the Real World]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Intelligent Crowdsourcing for Audio Data Annotation: Integrating Active Learning in the Real World</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171355.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-O-3-8-5|PAPER Tue-O-3-8-5 — Controlling Prominence Realisation in Parametric DNN-Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Controlling Prominence Realisation in Parametric DNN-Based Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171747.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-4-2-5|PAPER Tue-P-4-2-5 — Parallel Neural Network Features for Improved Tandem Acoustic Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parallel Neural Network Features for Improved Tandem Acoustic Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-P-9-4-11|PAPER Thu-P-9-4-11 — Tacotron: Towards End-to-End Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tacotron: Towards End-to-End Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170593.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-P-5-3-9|PAPER Tue-P-5-3-9 — Using Approximated Auditory Roughness as a Pre-Filtering Feature for Human Screaming and Affective Speech AED]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Approximated Auditory Roughness as a Pre-Filtering Feature for Human Screaming and Affective Speech AED</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170275.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-P-8-3-10|PAPER Wed-P-8-3-10 — Spanish Sign Language Recognition with Different Topology Hidden Markov Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spanish Sign Language Recognition with Different Topology Hidden Markov Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170487.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Mon-P-2-4-2|PAPER Mon-P-2-4-2 — Weakly-Supervised Phrase Assignment from Text in a Speech-Synthesis System Using Noisy Labels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weakly-Supervised Phrase Assignment from Text in a Speech-Synthesis System Using Noisy Labels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpconfinfotable|k
|^<a href="./IS2017/PDF/ABSBOOK.PDF#page1" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in confinfo view}}</a>|^Program Book |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}}
</p></div>
<div class="cpcopyrightpage">{{$:/causal/publication/Copyright Statement}}</div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Haizhou Li|AUTHOR Haizhou Li]]
</p><p class="cpabstractcardaffiliationlist">NUS, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1
</span></p></div>
<div class="cpabstractcardabstract"><p>The ISCA Medal for Scientific Achievement 2017 will be awarded to Professor Fumitada Itakura by the President of ISCA during the opening ceremony.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[William Hartmann|AUTHOR William Hartmann]], [[Roger Hsiao|AUTHOR Roger Hsiao]], [[Tim Ng|AUTHOR Tim Ng]], [[Jeff Ma|AUTHOR Jeff Ma]], [[Francis Keith|AUTHOR Francis Keith]], [[Man-Hung Siu|AUTHOR Man-Hung Siu]]
</p><p class="cpabstractcardaffiliationlist">Raytheon BBN Technologies, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 112–116
</span></p></div>
<div class="cpabstractcardabstract"><p>On small datasets, discriminatively trained bottleneck features from deep networks commonly outperform more traditional spectral or cepstral features. While these features are typically trained with small, fully-connected networks, recent studies have used more sophisticated networks with great success. We use the recent deep CNN (VGG) network for bottleneck feature extraction — previously used only for low-resource tasks — and apply it to the Switchboard English conversational telephone speech task. Unlike features derived from traditional MLP networks, the VGG features outperform cepstral features even when used with BLSTM acoustic models trained on large amounts of data. We achieve the best BBN single system performance when combining the VGG features with a BLSTM acoustic model. When decoding with an n-gram language model, which are used for deployable systems, we have a realistic production system with a WER of 7.4%. This result is competitive with the current state-of-the-art in the literature. While our focus is on realistic single system performance, we further reduce the WER to 6.1% through system combination and using expensive neural network language model rescoring.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jeremy H.M. Wong|AUTHOR Jeremy H.M. Wong]], [[Mark J.F. Gales|AUTHOR Mark J.F. Gales]]
</p><p class="cpabstractcardaffiliationlist">University of Cambridge, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 117–121
</span></p></div>
<div class="cpabstractcardabstract"><p>Student-teacher training allows a large teacher model or ensemble of teachers to be compressed into a single student model, for the purpose of efficient decoding. However, current approaches in automatic speech recognition assume that the state clusters, often defined by Phonetic Decision Trees (PDT), are the same across all models. This limits the diversity that can be captured within the ensemble, and also the flexibility when selecting the complexity of the student model output. This paper examines an extension to student-teacher training that allows for the possibility of having different PDTs between teachers, and also for the student to have a different PDT from the teacher. The proposal is to train the student to emulate the logical context dependent state posteriors of the teacher, instead of the frame posteriors. This leads to a method of mapping frame posteriors from one PDT to another. This approach is evaluated on three speech recognition tasks: the Tok Pisin and Javanese low resource conversational telephone speech tasks from the IARPA Babel programme, and the HUB4 English broadcast news task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xiaodong Cui|AUTHOR Xiaodong Cui]], [[Vaibhava Goel|AUTHOR Vaibhava Goel]], [[George Saon|AUTHOR George Saon]]
</p><p class="cpabstractcardaffiliationlist">IBM, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 122–126
</span></p></div>
<div class="cpabstractcardabstract"><p>An embedding-based speaker adaptive training (SAT) approach is proposed and investigated in this paper for deep neural network acoustic modeling. In this approach, speaker embedding vectors, which are a constant given a particular speaker, are mapped through a control network to layer-dependent element-wise affine transformations to canonicalize the internal feature representations at the output of hidden layers of a main network. The control network for generating the speaker-dependent mappings are jointly estimated with the main network for the overall speaker adaptive acoustic modeling. Experiments on large vocabulary continuous speech recognition (LVCSR) tasks show that the proposed SAT scheme can yield superior performance over the widely-used speaker-aware training using i-vectors with speaker-adapted input features.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jeff Ma|AUTHOR Jeff Ma]], [[Francis Keith|AUTHOR Francis Keith]], [[Tim Ng|AUTHOR Tim Ng]], [[Man-Hung Siu|AUTHOR Man-Hung Siu]], [[Owen Kimball|AUTHOR Owen Kimball]]
</p><p class="cpabstractcardaffiliationlist">Raytheon BBN Technologies, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 127–131
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper reports our recent progress on using multilingual data for improving speech-to-text (STT) systems that can be easily delivered. We continued the work BBN conducted on the use of multilingual data for improving Babel evaluation systems, but focused on training time-delay neural network (TDNN) based chain models. As done for the Babel evaluations, we used multilingual data in two ways: first, to train multilingual deep neural networks (DNN) for extracting bottle-neck (BN) features, and second, for initializing training on target languages.
Our results show that TDNN chain models trained on multilingual DNN bottleneck features yield significant gains over their counterparts trained on MFCC plus i-vector features. By initializing from models trained on multilingual data, TDNN chain models can achieve great improvements over random initializations of the network weights on target languages. Two other important findings are: 1) initialization with multilingual TDNN chain models produces larger gains on target languages that have less training data; 2) inclusion of target languages in multilingual training for either BN feature extraction or initialization have limited impact on performance measured on the target languages. Our results also reveal that for TDNN chain models, the combination of multilingual BN features and multilingual initialization achieves the best performance on all target languages.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[George Saon|AUTHOR George Saon]]^^1^^, [[Gakuto Kurata|AUTHOR Gakuto Kurata]]^^2^^, [[Tom Sercu|AUTHOR Tom Sercu]]^^1^^, [[Kartik Audhkhasi|AUTHOR Kartik Audhkhasi]]^^1^^, [[Samuel Thomas|AUTHOR Samuel Thomas]]^^1^^, [[Dimitrios Dimitriadis|AUTHOR Dimitrios Dimitriadis]]^^1^^, [[Xiaodong Cui|AUTHOR Xiaodong Cui]]^^1^^, [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]]^^1^^, [[Michael Picheny|AUTHOR Michael Picheny]]^^1^^, [[Lynn-Li Lim|AUTHOR Lynn-Li Lim]]^^3^^, [[Bergul Roomi|AUTHOR Bergul Roomi]]^^3^^, [[Phil Hall|AUTHOR Phil Hall]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IBM, USA; ^^2^^IBM, Japan; ^^3^^Appen, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 132–136
</span></p></div>
<div class="cpabstractcardabstract"><p>Word error rates on the Switchboard conversational corpus that just a few years ago were 14% have dropped to 8.0%, then 6.6% and most recently 5.8%, and are now believed to be within striking range of human performance. This then raises two issues: what is human performance, and how far down can we still drive speech recognition error rates? In trying to assess human performance, we performed an independent set of measurements on the Switchboard and CallHome subsets of the Hub5 2000 evaluation and found that human accuracy may be considerably better than what was earlier reported, giving the community a significantly harder goal to achieve. We also report on our own efforts in this area, presenting a set of acoustic and language modeling techniques that lowered the WER of our system to 5.5%/10.3% on these subsets, which is a new performance milestone (albeit not at what we measure to be human performance). On the acoustic side, we use a score fusion of one LSTM with multiple feature inputs, a second LSTM trained with speaker-adversarial multi-task learning and a third convolutional residual net (ResNet). On the language modeling side, we use word and character LSTMs and convolutional WaveNet-style language models.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Andreas Stolcke|AUTHOR Andreas Stolcke]], [[Jasha Droppo|AUTHOR Jasha Droppo]]
</p><p class="cpabstractcardaffiliationlist">Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 137–141
</span></p></div>
<div class="cpabstractcardabstract"><p>Recent work in automatic recognition of conversational telephone speech (CTS) has achieved accuracy levels comparable to human transcribers, although there is some debate how to precisely quantify human performance on this task, using the NIST 2000 CTS evaluation set. This raises the question what systematic differences, if any, may be found differentiating human from machine transcription errors. In this paper we approach this question by comparing the output of our most accurate CTS recognition system to that of a standard speech transcription vendor pipeline. We find that the most frequent substitution, deletion and insertion error types of both outputs show a high degree of overlap. The only notable exception is that the automatic recognizer tends to confuse filled pauses (“uh”) and backchannel acknowledgments (“uhhuh”). Human tend not to make this error, presumably due to the distinctive and opposing pragmatic functions attached to these words. Furthermore, we quantify the correlation between human and machine errors at the speaker level, and investigate the effect of speaker overlap between training and test data. Finally, we report on an informal “Turing test” asking humans to discriminate between automatic and human transcription error cases.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[João Paulo Cabral|AUTHOR João Paulo Cabral]]^^1^^, [[Benjamin R. Cowan|AUTHOR Benjamin R. Cowan]]^^2^^, [[Katja Zibrek|AUTHOR Katja Zibrek]]^^1^^, [[Rachel McDonnell|AUTHOR Rachel McDonnell]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Trinity College Dublin, Ireland; ^^2^^University College Dublin, Ireland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 229–233
</span></p></div>
<div class="cpabstractcardabstract"><p>Graphical realism and the naturalness of the voice used are important aspects to consider when designing a virtual agent or character. In this work, we evaluate how synthetic speech impacts people’s perceptions of a rendered virtual character. Using a controlled experiment, we focus on the role that speech, in particular voice expressiveness in the form of personality, has on the assessment of voice level and character level perceptions. We found that people rated a real human voice as more expressive, understandable and likeable than the expressive synthetic voice we developed. Contrary to our expectations, we found that the voices did not have a significant impact on the character level judgments; people in the voice conditions did not significantly vary on their ratings of appeal, credibility, human-likeness and voice matching the character. The implications this has for character design and how this compares with previous work are discussed.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Amelia J. Gully|AUTHOR Amelia J. Gully]]^^1^^, [[Takenori Yoshimura|AUTHOR Takenori Yoshimura]]^^2^^, [[Damian T. Murphy|AUTHOR Damian T. Murphy]]^^1^^, [[Kei Hashimoto|AUTHOR Kei Hashimoto]]^^2^^, [[Yoshihiko Nankaku|AUTHOR Yoshihiko Nankaku]]^^2^^, [[Keiichi Tokuda|AUTHOR Keiichi Tokuda]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of York, UK; ^^2^^Nagoya Institute of Technology, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 234–238
<a href="./IS2017/MEDIA/0900" class="externallinkbutton" target="_blank">{{$:/causal/Multimedia Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>Following recent advances in direct modeling of the speech waveform using a deep neural network, we propose a novel method that directly estimates a physical model of the vocal tract from the speech waveform, rather than magnetic resonance imaging data. This provides a clear relationship between the model and the size and shape of the vocal tract, offering considerable flexibility in terms of speech characteristics such as age and gender. Initial tests indicate that despite a highly simplified physical model, intelligible synthesized speech is obtained. This illustrates the potential of the combined technique for the control of physical models in general, and hence the generation of more natural-sounding synthetic speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sébastien Le Maguer|AUTHOR Sébastien Le Maguer]], [[Ingmar Steiner|AUTHOR Ingmar Steiner]], [[Alexander Hewer|AUTHOR Alexander Hewer]]
</p><p class="cpabstractcardaffiliationlist">Universität des Saarlandes, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 239–243
<a href="./IS2017/MEDIA/0936" class="externallinkbutton" target="_blank">{{$:/causal/Multimedia Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>We present an end-to-end text-to-speech (TTS) synthesis system that generates audio and synchronized tongue motion directly from text. This is achieved by adapting a statistical shape space model of the tongue surface to an articulatory speech corpus and training a speech synthesis system directly on the tongue model parameter weights. We focus our analysis on the application of two standard methodologies, based on Hidden Markov Models (HMMs) and Deep Neural Networks (DNNs), respectively, to train both acoustic models and the tongue model parameter weights. We evaluate both methodologies at every step by comparing the predicted articulatory movements against the reference data. The results show that even with less than 2h of data, DNNs already outperform HMMs.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rachel Alexander|AUTHOR Rachel Alexander]], [[Tanner Sorensen|AUTHOR Tanner Sorensen]], [[Asterios Toutios|AUTHOR Asterios Toutios]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]
</p><p class="cpabstractcardaffiliationlist">University of Southern California, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 244–248
<a href="./IS2017/MEDIA/1410" class="externallinkbutton" target="_blank">{{$:/causal/Multimedia Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents an initial architecture for articulatory synthesis which combines a dynamical system for the control of vocal tract shaping with a novel MATLAB implementation of an articulatory synthesizer. The dynamical system controls a speaker-specific vocal tract model derived by factor analysis of mid-sagittal real-time MRI data and provides input to the articulatory synthesizer, which simulates the propagation of sound waves in the vocal tract. First, parameters of the dynamical system are estimated from real-time MRI data of human speech production. Second, vocal-tract dynamics is simulated for vowel-consonant-vowel utterances using a sequence of two dynamical systems: the first one starts from a vowel vocal-tract configuration and achieves a vocal-tract closure; the second one starts from the closure and achieves the target configuration of the second vowel. Third, vocal-tract dynamics is converted to area function dynamics and is input to the synthesizer to generate the acoustic signal. Synthesized vowel-consonant-vowel examples demonstrate the feasibility of the method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Joseph Mendelson|AUTHOR Joseph Mendelson]]^^1^^, [[Matthew P. Aylett|AUTHOR Matthew P. Aylett]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^KTH, Sweden; ^^2^^CereProc, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 249–253
</span></p></div>
<div class="cpabstractcardabstract"><p>Traditionally, subjective text-to-speech (TTS) evaluation is performed through audio-only listening tests, where participants evaluate unrelated, context-free utterances. The ecological validity of these tests is questionable, as they do not represent real-world end-use scenarios. In this paper, we examine a novel approach to TTS evaluation in an imagined end-use, via a complex interaction with an avatar. 6 different voice conditions were tested: Natural speech, Unit Selection and Parametric Synthesis, in neutral and expressive realizations. Results were compared to a traditional audio-only evaluation baseline. Participants in both studies rated the voices for naturalness and expressivity. The baseline study showed canonical results for naturalness: Natural speech scored highest, followed by Unit Selection, then Parametric synthesis. Expressivity was clearly distinguishable in all conditions. In the avatar interaction study, participants rated naturalness in the same order as the baseline, though with smaller effect size; expressivity was not distinguishable. Further, no significant correlations were found between cognitive or affective responses and any voice conditions. This highlights 2 primary challenges in designing more valid TTS evaluations: in real-world use-cases involving interaction, listeners generally interact with a single voice, making comparative analysis unfeasible, and in complex interactions, the context and content may confound perception of voice quality.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Beiming Cao|AUTHOR Beiming Cao]]^^1^^, [[Myungjong Kim|AUTHOR Myungjong Kim]]^^1^^, [[Jan van Santen|AUTHOR Jan van Santen]]^^2^^, [[Ted Mau|AUTHOR Ted Mau]]^^3^^, [[Jun Wang|AUTHOR Jun Wang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Texas at Dallas, USA; ^^2^^Oregon Health & Science University, USA; ^^3^^UT Southwestern, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 254–258
</span></p></div>
<div class="cpabstractcardabstract"><p>Articulatory information has been shown to be effective in improving the performance of hidden Markov model (HMM)-based text-to-speech (TTS) synthesis. Recently, deep learning-based TTS has outperformed HMM-based approaches. However, articulatory information has rarely been integrated in deep learning-based TTS. This paper investigated the effectiveness of integrating articulatory movement data to deep learning-based TTS. The integration of articulatory information was achieved in two ways: (1) direct integration, where articulatory and acoustic features were the output of a deep neural network (DNN), and (2) direct integration plus forward-mapping, where the output articulatory features were mapped to acoustic features by an additional DNN; These forward-mapped acoustic features were then combined with the output acoustic features to produce the final acoustic features. Articulatory (tongue and lip) and acoustic data collected from male and female speakers were used in the experiment. Both objective measures and subjective judgment by human listeners showed the approaches integrated articulatory information outperformed the baseline approach (without using articulatory information) in terms of naturalness and speaker voice identity (voice similarity).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Volha Petukhova|AUTHOR Volha Petukhova]]^^1^^, [[Manoj Raju|AUTHOR Manoj Raju]]^^1^^, [[Harry Bunt|AUTHOR Harry Bunt]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität des Saarlandes, Germany; ^^2^^Tilburg University, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 142–146
</span></p></div>
<div class="cpabstractcardabstract"><p>The study presented in this paper is carried out to support debate performance assessment in the context of debate skills training. The perception of good performance as a debater is influenced by how believable and convincing the debater’s argumentation is. We identified a number of features that are useful for explaining perceived properties of persuasive speech and for defining rules and strategies to produce and assess debate performance. We collected and analysed multimodal and multisensory data of the trainees debate behaviour, and contrasted it with those of skilled professional debaters. Observational, correlation and machine learning studies were performed to identify multimodal markers of persuasive speech and link them to experts’ assessments. A combination of multimodal in- and out-of-domain debate data, and various non-verbal, prosodic, lexical, linguistic and structural features has been computed based on our analysis and assessed used to , and several classification procedures has been applied achieving an accuracy of 0.79 on spoken debate data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Daniel Bone|AUTHOR Daniel Bone]]^^1^^, [[Julia Mertens|AUTHOR Julia Mertens]]^^2^^, [[Emily Zane|AUTHOR Emily Zane]]^^2^^, [[Sungbok Lee|AUTHOR Sungbok Lee]]^^1^^, [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]^^1^^, [[Ruth Grossman|AUTHOR Ruth Grossman]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Southern California, USA; ^^2^^Emerson College, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 147–151
</span></p></div>
<div class="cpabstractcardabstract"><p>Social anxiety is a prevalent condition affecting individuals to varying degrees. Research on autism spectrum disorder (ASD), a group of neurodevelopmental disorders marked by impairments in social communication, has found that social anxiety occurs more frequently in this population. Our study aims to further understand the multimodal manifestation of social stress for adolescents with ASD versus neurotypically developing (TD) peers. We investigate this through objective measures of speech behavior and physiology (mean heart rate) acquired during three tasks: a low-stress conversation, a medium-stress interview, and a high-stress presentation. Measurable differences are found to exist for speech behavior and heart rate in relation to task-induced stress. Additionally, we find the acoustic measures are particularly effective for distinguishing between diagnostic groups. Individuals with ASD produced higher prosodic variability, agreeing with previous reports. Moreover, the most informative features captured an individual’s vocal changes between low and high social-stress, suggesting an interaction between vocal production and social stressors in ASD.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alec Burmania|AUTHOR Alec Burmania]], [[Carlos Busso|AUTHOR Carlos Busso]]
</p><p class="cpabstractcardaffiliationlist">University of Texas at Dallas, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 152–156
</span></p></div>
<div class="cpabstractcardabstract"><p>Affect recognition is a difficult problem that most often relies on human annotated data to train automated systems. As humans perceive emotion differently based on personality, cognitive state and past experiences, it is important to collect rankings from multiple individuals to assess the emotional content in corpora, which are later aggregated with rules such as majority vote. With the increased use of crowdsourcing services for perceptual evaluations, collecting large amount of data is now feasible. It becomes important to question the amount of data needed to create well-trained classifiers. How different are the aggregated labels collected from five raters compared to the ones obtained from twenty evaluators? Is it worthwhile to spend resources to increase the number of evaluators beyond those used in conventional/laboratory studies? This study evaluates the consensus labels obtained by incrementally adding new evaluators during perceptual evaluations. Using majority vote over categorical emotional labels, we compare the changes in the aggregated labels starting with one rater, and finishing with 20 raters. The large number of evaluators in a subset of the MSP-IMPROV database and the ability to filter annotators by quality allows us to better understand label aggregation as a function of the number of annotators.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gaurav Fotedar|AUTHOR Gaurav Fotedar]], [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]
</p><p class="cpabstractcardaffiliationlist">Indian Institute of Science, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 157–161
</span></p></div>
<div class="cpabstractcardabstract"><p>We analyze the temporal co-ordination between head gestures and prosodic patterns in spontaneous speech in a data-driven manner. For this study, we consider head motion and speech data from 24 subjects while they tell a fixed set of five stories. The head motion, captured using a motion capture system, is converted to Euler angles and translations in X, Y and Z-directions to represent head gestures. Pitch and short-time energy in voiced segments are used to represent the prosodic patterns. To capture the statistical relationship between head gestures and prosodic patterns, mutual information (MI) is computed at various delays between the two using data from 24 subjects in six native languages. The estimated MI, averaged across all subjects, is found to be maximum when the head gestures lag the prosodic patterns by 30msec. This is found to be true when subjects tell stories in English as well as in their native language. We observe a similar pattern in the root mean squared error of predicting head gestures from prosodic patterns using Gaussian mixture model. These results indicate that there could be an asynchrony between head gestures and prosody during spontaneous speech where head gestures follow the corresponding prosodic patterns.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[D.-Y. Huang|AUTHOR D.-Y. Huang]]^^1^^, [[Wan Ding|AUTHOR Wan Ding]]^^2^^, [[Mingyu Xu|AUTHOR Mingyu Xu]]^^1^^, [[Huaiping Ming|AUTHOR Huaiping Ming]]^^1^^, [[Minghui Dong|AUTHOR Minghui Dong]]^^1^^, [[Xinguo Yu|AUTHOR Xinguo Yu]]^^2^^, [[Haizhou Li|AUTHOR Haizhou Li]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^A*STAR, Singapore; ^^2^^Central China Normal University, China; ^^3^^NUS, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 162–165
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a multimodal approach to predict affective dimensions, that makes full use of features from audio, video, Electrodermal Activity (EDA) and Electrocardiogram (ECG) using three regression techniques such as support vector regression (SVR), partial least squares regression (PLS), and a deep bidirectional long short-term memory recurrent neural network (DBLSTM-RNN) regression. Each of the three regression techniques performs multimodal affective dimension prediction followed by a fusion of different models on features of four modalities using a support vector regression. A support vector regression is also applied for a final fusion of the three regression systems. Experiments show that our proposed approach obtains promising results on the AVEC 2015 benchmark dataset for prediction of multimodal affective dimensions. For the development set, the concordance correlation coefficient (CCC) achieves results of 0.856 for arousal and 0.720 for valence, which increases 3.88% and 4.66% of the top-performer of AVEC 2015 in arousal and valence, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Marion Dohen|AUTHOR Marion Dohen]]^^1^^, [[Benjamin Roustan|AUTHOR Benjamin Roustan]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^GIPSA, France; ^^2^^UroMems, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 166–170
</span></p></div>
<div class="cpabstractcardabstract"><p>Designation consists in attracting an interlocutor’s attention on a specific object and/or location. It is most often achieved using both speech (e.g., demonstratives) and gestures (e.g., manual pointing). This study aims at analyzing how speech and pointing gestures are co-produced in a semi-directed interactive task involving designation. 20 native speakers of French were involved in a cooperative task in which they provided instructions to a partner for her to reproduce a model she could not see on a grid both of them saw. They had to use only sentences of the form ‘The [target word] goes there.’. They did this in two conditions: silence and noise. Their speech and articulatory/hand movements (motion capture) were recorded. The analyses show that the participants’ speech features were modified in noise (Lombard effect). They also spoke slower and made more pauses and errors. Their pointing gestures lasted longer and started later showing an adaptation of gesture production to speech. The condition did not influence speech/gesture coordination. The apex (part of the gesture that shows) mainly occurred at the same time as the target word and not as the demonstrative showing that speakers group speech and gesture carrying complementary rather than redundant information.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Peter Guzewich|AUTHOR Peter Guzewich]], [[Stephen A. Zahorian|AUTHOR Stephen A. Zahorian]]
</p><p class="cpabstractcardaffiliationlist">Binghamton University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 171–175
</span></p></div>
<div class="cpabstractcardabstract"><p>We present an improved method for training Deep Neural Networks for dereverberation and show that it can improve performance for the speech processing tasks of speaker verification and speech enhancement. We replicate recently proposed methods for dereverberation using Deep Neural Networks and present our improved method, highlighting important aspects that influence performance. We then experimentally evaluate the capabilities and limitations of the method with respect to speech quality and speaker verification to show that ours achieves better performance than other proposed methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Philipp Bulling|AUTHOR Philipp Bulling]]^^1^^, [[Klaus Linhard|AUTHOR Klaus Linhard]]^^1^^, [[Arthur Wolf|AUTHOR Arthur Wolf]]^^1^^, [[Gerhard Schmidt|AUTHOR Gerhard Schmidt]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Daimler, Germany; ^^2^^Christian-Albrechts-Universität zu Kiel, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 176–180
</span></p></div>
<div class="cpabstractcardabstract"><p>A new approach for acoustic feedback cancellation is presented. The challenge in acoustic feedback cancellation is a strong correlation between the local speech and the loudspeaker signal. Due to this correlation, the convergence rate of adaptive algorithms is limited. Therefore, a novel stepsize control of the adaptive filter is presented. The stepsize control exploits reverberant signal periods to update the adaptive filter. As soon as local speech stops, the reverberation energy of the system decays exponentially. This means that during reverberation there is only excitation of the filter but no local speech. Thus, signals are not correlated and the filter can converge without correlation problems. Consequently, the stepsize control accelerates the adaption process during reverberation and slows it down at the beginning of speech activity. It is shown, that with a particular gain control, the reverb-based stepsize control can be interpreted as the theoretical optimum stepsize. However, for this purpose a precise estimation of the system distance is required. One estimation method is presented. The proposed estimator has a rescue mechanism to detect enclosure dislocations. Both, simulations and real world testing show that the acoustic feedback canceler is capable of improving stability and convergence rate, even at high system gains.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jan Franzen|AUTHOR Jan Franzen]], [[Tim Fingscheidt|AUTHOR Tim Fingscheidt]]
</p><p class="cpabstractcardaffiliationlist">Technische Universität Braunschweig, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 181–185
</span></p></div>
<div class="cpabstractcardabstract"><p>In-car communication (ICC) systems supporting speech communication in noise by reproducing amplified speech from the car cabin in the car cabin ask for low-delay acoustic echo cancellation (AEC). In this paper we propose a delay-flexible DFT-based stereo AEC capable of cancelling also the echoes stemming from the audio player or FM radio. For the price of a somewhat higher complexity we are able to reduce the 32 ms delay of the baseline down to 4 ms, losing only 1 dB in ERLE while even preserving system distance properties.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Dongmei Wang|AUTHOR Dongmei Wang]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]]
</p><p class="cpabstractcardaffiliationlist">University of Texas at Dallas, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 186–190
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, a speech enhancement algorithm is proposed to improve the speech intelligibility for cochlear implant recipients. Our method is based on combination of harmonic estimation and traditional statistical method. Traditional statistical based speech enhancement method is effective only for stationary noise suppression, but not non-stationary noise. To address more complex noise scenarios, we explore the harmonic structure of target speech to obtain a more accurate noise estimation. The estimated noise is then employed in the MMSE framework to obtain the gain function for recovering the target speech. Listening test experiments show a substantial speech intelligibility improvement for cochlear implant recipients in noisy environments.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[David Ayllón|AUTHOR David Ayllón]]^^1^^, [[Roberto Gil-Pita|AUTHOR Roberto Gil-Pita]]^^2^^, [[Manuel Rosa-Zurera|AUTHOR Manuel Rosa-Zurera]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Fonetic, Spain; ^^2^^Universidad de Alcalá, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 191–195
</span></p></div>
<div class="cpabstractcardabstract"><p>An efficient algorithm for speech enhancement in binaural hearing aids is proposed. The algorithm is based on the estimation of a time-frequency mask using supervised machine learning. The standard least-squares linear classifier is reformulated to optimize a metric related to speech/noise separation. The method is energy-efficient in two ways: the computational complexity is limited and the wireless data transmission optimized. The ability of the algorithm to enhance speech contaminated with different types of noise and low SNR has been evaluated. Objective measures of speech intelligibility and speech quality demonstrate that the algorithm increments both the hearing comfort and speech understanding of the user. These results are supported by subjective listening tests.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tsung-Chen Wu|AUTHOR Tsung-Chen Wu]]^^1^^, [[Tai-Shih Chi|AUTHOR Tai-Shih Chi]]^^1^^, [[Chia-Fone Lee|AUTHOR Chia-Fone Lee]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^National Chiao Tung University, Taiwan; ^^2^^Hualien Tzu Chi Hospital, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 196–200
</span></p></div>
<div class="cpabstractcardabstract"><p>Vocoder simulations are generally adopted to simulate the electrical hearing induced by the cochlear implant (CI). Our research group is developing a new four-electrode CI microsystem which induces high-frequency electrical hearing while preserving low-frequency acoustic hearing. To simulate the functionality of this CI, a previously developed hearing-impaired (HI) hearing model is combined with a 4-channel vocoder in this paper to respectively mimic the perceived acoustic hearing and electrical hearing. Psychoacoustic experiments are conducted on Mandarin speech recognition for determining parameters of electrodes for this CI. Simulation results show that initial consonants of Mandarin are more difficult to recognize than final vowels of Mandarin via acoustic hearing of HI patients. After electrical hearing being induced through logarithmic-frequency distributed electrodes, speech intelligibility of HI patients is boosted for all Mandarin phonemes, especially for initial consonants. Similar results are consistently observed in clean and noisy test conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zainab Hermes|AUTHOR Zainab Hermes]], [[Marissa Barlaz|AUTHOR Marissa Barlaz]], [[Ryan Shosted|AUTHOR Ryan Shosted]], [[Zhi-Pei Liang|AUTHOR Zhi-Pei Liang]], [[Brad Sutton|AUTHOR Brad Sutton]]
</p><p class="cpabstractcardaffiliationlist">University of Illinois at Urbana-Champaign, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 201–205
</span></p></div>
<div class="cpabstractcardabstract"><p>The phonemic inventory of Arabic includes sounds that involve a pharyngeal constriction. Sounds referred to as ‘pharyngeal’ (/ʕ/ and /ħ/) are reported to have a primary constriction in the pharynx, while sounds referred to as ‘pharyngealized’ (/s^^ʕ^^/, /t^^ʕ^^/, /d^^ʕ^^/, and /ð^^ʕ^^/ or /z^^ʕ^^/) are reported to have a secondary constriction in the pharynx. Some studies propose grouping both types of sounds together, citing phonetic and phonological evidence. Phonetically, pharyngeal consonants are argued to have a primary constriction below the pharynx, and are thus posited to be pharyngealized laryngeals. Under this view, the pharyngeal constriction is secondary, not primary. Phonologically, it has been established that pharyngealized sounds trigger pharyngealization spread, and proposals for grouping pharyngeal and pharyngealized consonants together cite similar, but not identical, spread patterns triggered by pharyngeals. In this study, Real-time Magnetic Resonance Imaging is employed to investigate the phonetic correlates of the pharyngeal constriction in both pharyngeal and pharyngealized sounds in Saudi, Lebanese, and Jordanian Arabic as exemplified by one speaker from each dialect. Our findings demonstrate a difference in the location of constriction among both types of sounds. These distinctions in place possibly account for the differences in the spread patterns triggered by each type of sound.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Benjamin Elie|AUTHOR Benjamin Elie]], [[Yves Laprie|AUTHOR Yves Laprie]]
</p><p class="cpabstractcardaffiliationlist">INRIA, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 206–209
</span></p></div>
<div class="cpabstractcardabstract"><p>This work investigates the influence of the gradual opening of the glottis along its length during the production of fricatives in intervocalic contexts. Acoustic simulations reveal the existence of a transient zone in the articulatory space where the frication noise level is very sensitive to small perturbations of the glottal opening. This corresponds to the configurations where both frication noise and voiced contributions are present in the speech signal. To avoid this instability, speakers may adopt different strategies to ensure the voiced/voiceless contrast of fricatives. This is evidenced by experimental data of simultaneous glottal opening measurements, performed with ePGG, and audio recordings of vowel-fricative-vowel pseudowords. Voiceless fricatives are usually longer, in order to maximize the number of voiceless time frames over voiced frames due to the crossing of the transient regime. For voiced fricatives, the speaker may avoid the unstable regime by keeping low frication noise level, and thus by favoring the voicing characteristic, or by doing very short crossings into the unstable regime. It is also shown that when speakers are asked to sustain voiced fricatives longer than in natural speech, they adopt the strategy of keeping low frication noise level to avoid the unstable regime.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mohamed Yassine Frej|AUTHOR Mohamed Yassine Frej]], [[Christopher Carignan|AUTHOR Christopher Carignan]], [[Catherine T. Best|AUTHOR Catherine T. Best]]
</p><p class="cpabstractcardaffiliationlist">Western Sydney University, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 210–214
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents results of a simultaneous acoustic and articulatory investigation of word-medial and word-final geminate/singleton coronal stop contrasts in Moroccan Arabic (MA). The acoustic analysis revealed that, only for the word-medial contrast, the two MA speakers adopted comparable strategies in contrasting geminates with singletons, mainly by significantly lengthening closure duration in geminates, relative to singletons. In word-final position, two speaker-specific contrasting patterns emerged. While one speaker also lengthened the closure duration for final geminates, the other speaker instead lengthened only the release duration for final geminates, relative to singletons. Consonant closure and preceding vowel were significantly longer for the geminate only in medial position, not in final position. These temporal differences were even more clearly delineated in the articulatory signal, captured via ultrasound, to which we applied the novel approach of using TRACTUS [Temporally Resolved Articulatory Configuration Tracking of UltraSound: 15] to index temporal properties of closure gestures for these geminate/singleton contrasts.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Giuseppina Turco|AUTHOR Giuseppina Turco]], [[Karim Shoul|AUTHOR Karim Shoul]], [[Rachid Ridouane|AUTHOR Rachid Ridouane]]
</p><p class="cpabstractcardaffiliationlist">LPP (UMR 7018), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 215–218
</span></p></div>
<div class="cpabstractcardabstract"><p>We investigate the durational properties of Moroccan Arabic identical consonant sequences contrasting singleton (S) and geminate (G) dental fricatives, in six combinations of four-level length contrasts across word boundaries (#) (one timing slot for #S, two for #G and S#S, three for S#G and G#S, and four for G#G). The aim is to determine the nature of the mapping between discrete phonological timing units and phonetic durations. Acoustic results show that the largest and most systematic jump in duration is displayed between the singleton fricative on the one hand and the other sequences on the other hand. Looking at these sequences, S#S is shown to have the same duration as #G. When a geminate is within the sequence, a temporal reorganization is observed: G#S is not significantly longer than S#S and #G; and G#G is only slightly longer than S#G. Instead of a four-way hierarchy, our data point towards a possible upper limit of three-way length contrasts for consonants: S < G=S#S=G#S < S#G=G#G. The interplay of a number of factors resulting in this mismatch between phonological length and phonetic duration are discussed, and a working hypothesis is provided for why duration contrasts are rarely ternary, and almost never quaternary.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Caroline Jones|AUTHOR Caroline Jones]]^^1^^, [[Katherine Demuth|AUTHOR Katherine Demuth]]^^2^^, [[Weicong Li|AUTHOR Weicong Li]]^^1^^, [[Andre Almeida|AUTHOR Andre Almeida]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Western Sydney University, Australia; ^^2^^Macquarie University, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 219–223
</span></p></div>
<div class="cpabstractcardabstract"><p>North Australian Kriol is an English based creole spoken widely by Indigenous people in northern Australia in areas where the traditional languages are endangered or no longer spoken. This paper offers the first acoustic description of the vowel phonology of Roper Kriol, within a variety spoken at Barunga Community, east of the town of Katherine in the Northern Territory.
Drawing on a new corpus for Barunga Kriol, the paper presents analyses of the short and long monophthongs, as well as the diphthongs in the spontaneous speech of young adults. The results show the durations and spectral characteristics of the vowels, including major patterns of allophony (i.e. coarticulation and context effects). This updates the phonology over the previous description from the 1970s, showing that there is an additional front low vowel phoneme in the speech of young people today, as well as a vowel length contrast. Interestingly there are points of similarity with the vowel acoustics for traditional Aboriginal languages of the region, for example in a relatively compact vowel space and in the modest trajectories of diphthongs.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Indranil Dutta|AUTHOR Indranil Dutta]]^^1^^, [[Irfan S.|AUTHOR Irfan S.]]^^2^^, [[Pamir Gogoi|AUTHOR Pamir Gogoi]]^^3^^, [[Priyankoo Sarmah|AUTHOR Priyankoo Sarmah]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^EFLU, India; ^^2^^University of Illinois at Urbana-Champaign, USA; ^^3^^University of Florida, USA; ^^4^^IIT Guwahati, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 224–228
</span></p></div>
<div class="cpabstractcardabstract"><p>Tonal coarticulation is universally found to be greater in extent in the carryover direction compared to the anticipatory direction ([1], [2], [3], [4], [5]) leading to assimilatory processes. In general, carryover coarticulation has been understood to be due to inertio-mechanical forces, and, anticipatory effects are seen to be a consequence of parallel activation of articulatory plans ([6]). In this paper, we report on results from a set of Artificial Neural Networks (ANN) trained to predict adjacent tones in disyllabic sequences. Our results confirm the universal pattern of greater carryover effects in Mizo leading to tonal assimilation. In addition, we report on results from single-layered ANN models and Support Vector Machines (SVM) that predict the identity of V₂ from V₁ (anticipatory) consistently better than V₁ from V₂ (carryover) in Assamese non-harmonic #…V₁CV₂…# sequences. The directionality in the performance of the V₁ and V₂ models, help us conclude that the directionality effect of coarticulation in Assamese non-harmonic sequences is greater in the anticipatory direction, which is the same direction as in the harmonic sequences. We argue that coarticulatory propensity exhibits a great deal of sensitivity to the nature of contrast in a language.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Min Ma|AUTHOR Min Ma]]^^1^^, [[Michael Nirschl|AUTHOR Michael Nirschl]]^^2^^, [[Fadi Biadsy|AUTHOR Fadi Biadsy]]^^2^^, [[Shankar Kumar|AUTHOR Shankar Kumar]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CUNY Graduate Center, USA; ^^2^^Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 259–263
</span></p></div>
<div class="cpabstractcardabstract"><p>Language Models (LMs) for Automatic Speech Recognition (ASR) are typically trained on large text corpora from news articles, books and web documents. These types of corpora, however, are unlikely to match the test distribution of ASR systems, which expect spoken utterances. Therefore, the LM is typically adapted to a smaller held-out in-domain dataset that is drawn from the test distribution. We propose three LM adaptation approaches for Deep NN and Long Short-Term Memory (LSTM): (1) Adapting the softmax layer in the Neural Network (NN); (2) Adding a non-linear adaptation layer before the softmax layer that is trained only in the adaptation phase; (3) Training the extra non-linear adaptation layer in pre-training and adaptation phases. Aiming to improve upon a hierarchical Maximum Entropy (MaxEnt) second-pass LM baseline, which factors the model into word-cluster and word models, we build an NN LM that predicts only word clusters. Adapting the LSTM LM by training the adaptation layer in both training and adaptation phases (Approach 3), we reduce the cluster perplexity by 30% on a held-out dataset compared to an unadapted LSTM LM. Initial experiments using a state-of-the-art ASR system show a 2.3% relative reduction in WER on top of an adapted MaxEnt LM.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Youssef Oualil|AUTHOR Youssef Oualil]], [[Dietrich Klakow|AUTHOR Dietrich Klakow]]
</p><p class="cpabstractcardaffiliationlist">Universität des Saarlandes, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 264–268
</span></p></div>
<div class="cpabstractcardabstract"><p>Training large vocabulary Neural Network Language Models (NNLMs) is a difficult task due to the explicit requirement of the output layer normalization, which typically involves the evaluation of the full softmax function over the complete vocabulary. This paper proposes a Batch Noise Contrastive Estimation (B-NCE) approach to alleviate this problem. This is achieved by reducing the vocabulary, at each time step, to the target words in the batch and then replacing the softmax by the noise contrastive estimation approach, where these words play the role of targets and noise samples at the same time. In doing so, the proposed approach can be fully formulated and implemented using optimal dense matrix operations. Applying B-NCE to train different NNLMs on the Large Text Compression Benchmark (LTCB) and the One Billion Word Benchmark (OBWB) shows a significant reduction of the training time with no noticeable degradation of the models' performance. This paper also presents a new baseline comparative study of different standard NNLMs on the large OBWB on a single Titan-X GPU.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[X. Chen|AUTHOR X. Chen]]^^1^^, [[A. Ragni|AUTHOR A. Ragni]]^^1^^, [[X. Liu|AUTHOR X. Liu]]^^2^^, [[Mark J.F. Gales|AUTHOR Mark J.F. Gales]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Cambridge, UK; ^^2^^Chinese University of Hong Kong, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 269–273
</span></p></div>
<div class="cpabstractcardabstract"><p>Recurrent neural network language models (RNNLMs) are powerful language modeling techniques. Significant performance improvements have been reported in a range of tasks including speech recognition compared to n-gram language models. Conventional n-gram and neural network language models are trained to predict the probability of the next word given its preceding context history. In contrast, bidirectional recurrent neural network based language models consider the context from future words as well. This complicates the inference process, but has theoretical benefits for tasks such as speech recognition as additional context information can be used. However to date, very limited or no gains in speech recognition performance have been reported with this form of model. This paper examines the issues of training bidirectional recurrent neural network language models (bi-RNNLMs) for speech recognition. A bi-RNNLM probability smoothing technique is proposed, that addresses the very sharp posteriors that are often observed in these models. The performance of the bi-RNNLMs is evaluated on three speech recognition tasks: broadcast news; meeting transcription (AMI); and low-resource systems (Babel data). On all tasks gains are observed by applying the smoothing technique to the bi-RNNLM. In addition consistent performance gains can be obtained by combining bi-RNNLMs with n-gram and uni-directional RNNLMs.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yinghui Huang|AUTHOR Yinghui Huang]], [[Abhinav Sethy|AUTHOR Abhinav Sethy]], [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]]
</p><p class="cpabstractcardaffiliationlist">IBM, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 274–278
</span></p></div>
<div class="cpabstractcardabstract"><p>Feed forward Neural Network Language Models (NNLM) have shown consistent gains over backoff word n-gram models in a variety of tasks. However, backoff n-gram models still remain dominant in applications with real time decoding requirements as word probabilities can be computed orders of magnitude faster than the NNLM. In this paper, we present a combination of techniques that allows us to speed up the probability computation from a neural net language model to make it comparable to the word n-gram model without any approximations. We present results on state of the art systems for Broadcast news transcription and conversational speech which demonstrate the speed improvements in real time factor and probability computation while retaining the WER gains from NNLM.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gakuto Kurata|AUTHOR Gakuto Kurata]]^^1^^, [[Abhinav Sethy|AUTHOR Abhinav Sethy]]^^2^^, [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]]^^2^^, [[George Saon|AUTHOR George Saon]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IBM, Japan; ^^2^^IBM, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 279–283
</span></p></div>
<div class="cpabstractcardabstract"><p>While recurrent neural network language models based on Long Short Term Memory (LSTM) have shown good gains in many automatic speech recognition tasks, Convolutional Neural Network (CNN) language models are relatively new and have not been studied in-depth. In this paper we present an empirical comparison of LSTM and CNN language models on English broadcast news and various conversational telephone speech transcription tasks. We also present a new type of CNN language model that leverages dilated causal convolution to efficiently exploit long range history. We propose a novel criterion for training language models that combines word and class prediction in a multi-task learning framework. We apply this criterion to train word and character based LSTM language models and CNN language models and show that it improves performance. Our results also show that CNN and LSTM language models are complementary and can be combined to obtain further gains.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Karel Beneš|AUTHOR Karel Beneš]], [[Murali Karthick Baskar|AUTHOR Murali Karthick Baskar]], [[Lukáš Burget|AUTHOR Lukáš Burget]]
</p><p class="cpabstractcardaffiliationlist">Brno University of Technology, Czech Republic</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 284–288
</span></p></div>
<div class="cpabstractcardabstract"><p>We introduce the Residual Memory Network (RMN) architecture to language modeling. RMN is an architecture of feed-forward neural networks that incorporates residual connections and time-delay connections that allow us to naturally incorporate information from a substantial time context. As this is the first time RMNs are applied for language modeling, we thoroughly investigate their behaviour on the well studied Penn Treebank corpus. We change the model slightly for the needs of language modeling, reducing both its time and memory consumption. Our results show that RMN is a suitable choice for small-sized neural language models: With test perplexity 112.7 and as few as 2.3M parameters, they out-perform both a much larger vanilla RNN (PPL 124, 8M parameters) and a similarly sized LSTM (PPL 115, 2.08M parameters), while being only by less than 3 perplexity points worse than twice as big LSTM.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chanwoo Kim|AUTHOR Chanwoo Kim]], [[Ananya Misra|AUTHOR Ananya Misra]], [[Kean Chin|AUTHOR Kean Chin]], [[Thad Hughes|AUTHOR Thad Hughes]], [[Arun Narayanan|AUTHOR Arun Narayanan]], [[Tara N. Sainath|AUTHOR Tara N. Sainath]], [[Michiel Bacchiani|AUTHOR Michiel Bacchiani]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 379–383
</span></p></div>
<div class="cpabstractcardabstract"><p>We describe the structure and application of an acoustic room simulator to generate large-scale simulated data for training deep neural networks for far-field speech recognition. The system simulates millions of different room dimensions, a wide distribution of reverberation time and signal-to-noise ratios, and a range of microphone and sound source locations. We start with a relatively clean training set as the source and artificially create simulated data by randomly sampling a noise configuration for every new training example. As a result, the acoustic model is trained using examples that are virtually never repeated. We evaluate performance of this approach based on room simulation using a factored complex Fast Fourier Transform (CFFT) acoustic model introduced in our earlier work, which uses CFFT layers and LSTM AMs for joint multichannel processing and acoustic modeling. Results show that the simulator-driven approach is quite effective in obtaining large improvements not only in simulated test conditions, but also in real / rerecorded conditions. This room simulation system has been employed in training acoustic models including the ones for the recently released Google Home.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]]^^1^^, [[Marc Delcroix|AUTHOR Marc Delcroix]]^^1^^, [[Haeyong Kwon|AUTHOR Haeyong Kwon]]^^2^^, [[Takuma Mori|AUTHOR Takuma Mori]]^^1^^, [[Tomohiro Nakatani|AUTHOR Tomohiro Nakatani]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NTT, Japan; ^^2^^Sogang University, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 384–388
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose a novel speech dereverberation framework that utilizes deep neural network (DNN)-based spectrum estimation to construct linear inverse filters. The proposed dereverberation framework is based on the state-of-the-art inverse filter estimation algorithm called weighted prediction error (WPE) algorithm, which is known to effectively reduce reverberation and greatly boost the ASR performance in various conditions. In WPE, the accuracy of the inverse filter estimation, and thus the dereverberation performance, is largely dependent on the estimation of the power spectral density (PSD) of the target signal. Therefore, the conventional WPE iteratively performs the inverse filter estimation, actual dereverberation and the PSD estimation to gradually improve the PSD estimate. However, while such iterative procedure works well when sufficiently long acoustically-stationary observed signals are available, WPE’s performance degrades when the duration of observed/accessible data is short, which typically is the case for real-time applications using online block-batch processing with small batches. To solve this problem, we incorporate the DNN-based spectrum estimator into the framework of WPE, because a DNN can estimate the PSD robustly even from very short observed data. We experimentally show that the proposed framework outperforms the conventional WPE, and improves the ASR performance in real noisy reverberant environments in both single-channel and multichannel cases.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Osamu Ichikawa|AUTHOR Osamu Ichikawa]]^^1^^, [[Takashi Fukuda|AUTHOR Takashi Fukuda]]^^1^^, [[Gakuto Kurata|AUTHOR Gakuto Kurata]]^^1^^, [[Steven J. Rennie|AUTHOR Steven J. Rennie]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IBM, Japan; ^^2^^IBM, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 389–393
</span></p></div>
<div class="cpabstractcardabstract"><p>The assumed scenario is transcription of a face-to-face conversation, such as in the financial industry when an agent and a customer talk over a desk with microphones placed between the speakers and then it is transcribed. From the automatic speech recognition (ASR) perspective, one of the speakers is the target speaker, and the other speaker is a directional noise source. When the number of microphones is small, we often accept microphone intervals that are larger than the spatial aliasing limit because the performance of the beamformer is better. Unfortunately, such a configuration results in significant leakage of directional noise in certain frequency bands because the spatial aliasing makes the beamformer and post-filter inaccurate there. Thus, we introduce a factorial model to compensate only the degraded bands with information from the reliable bands in a probabilistic framework integrating our proposed metrics and speech model. In our experiments, the proposed method reduced the errors from 29.8% to 24.9%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yan-Hui Tu|AUTHOR Yan-Hui Tu]]^^1^^, [[Jun Du|AUTHOR Jun Du]]^^1^^, [[Lei Sun|AUTHOR Lei Sun]]^^1^^, [[Feng Ma|AUTHOR Feng Ma]]^^2^^, [[Chin-Hui Lee|AUTHOR Chin-Hui Lee]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^USTC, China; ^^2^^iFLYTEK, China; ^^3^^Georgia Institute of Technology, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 394–398
</span></p></div>
<div class="cpabstractcardabstract"><p>We design a novel deep learning framework for multi-channel speech recognition in two aspects. First, for the front-end, an iterative mask estimation (IME) approach based on deep learning is presented to improve the beamforming approach based on the conventional complex Gaussian mixture model (CGMM). Second, for the back-end, deep convolutional neural networks (DCNNs), with augmentation of both noisy and beamformed training data, are adopted for acoustic modeling while the forward and backward long short-term memory recurrent neural networks (LSTM-RNNs) are used for language modeling. The proposed framework can be quite effective to multi-channel speech recognition with random combinations of fixed microphones. Testing on the CHiME-4 Challenge speech recognition task with a single set of acoustic and language models, our approach achieves the best performance of all three tracks (1-channel, 2-channel, and 6-channel) among submitted systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bo Li|AUTHOR Bo Li]], [[Tara N. Sainath|AUTHOR Tara N. Sainath]], [[Arun Narayanan|AUTHOR Arun Narayanan]], [[Joe Caroselli|AUTHOR Joe Caroselli]], [[Michiel Bacchiani|AUTHOR Michiel Bacchiani]], [[Ananya Misra|AUTHOR Ananya Misra]], [[Izhak Shafran|AUTHOR Izhak Shafran]], [[Haşim Sak|AUTHOR Haşim Sak]], [[Golan Pundak|AUTHOR Golan Pundak]], [[Kean Chin|AUTHOR Kean Chin]], [[Khe Chai Sim|AUTHOR Khe Chai Sim]], [[Ron J. Weiss|AUTHOR Ron J. Weiss]], [[Kevin W. Wilson|AUTHOR Kevin W. Wilson]], [[Ehsan Variani|AUTHOR Ehsan Variani]], [[Chanwoo Kim|AUTHOR Chanwoo Kim]], [[Olivier Siohan|AUTHOR Olivier Siohan]], [[Mitchel Weintraub|AUTHOR Mitchel Weintraub]], [[Erik McDermott|AUTHOR Erik McDermott]], [[Richard Rose|AUTHOR Richard Rose]], [[Matt Shannon|AUTHOR Matt Shannon]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 399–403
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes the technical and system building advances made to the Google Home multichannel speech recognition system, which was launched in November 2016. Technical advances include an adaptive dereverberation frontend, the use of neural network models that do multichannel processing jointly with acoustic modeling, and Grid-LSTMs to model frequency variations. On the system level, improvements include adapting the model using Google Home specific data. We present results on a variety of multichannel sets. The combination of technical and system advances results in a reduction of WER of 8–28% relative compared to the current production system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Seyedmahdad Mirsamadi|AUTHOR Seyedmahdad Mirsamadi]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]]
</p><p class="cpabstractcardaffiliationlist">University of Texas at Dallas, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 404–408
</span></p></div>
<div class="cpabstractcardabstract"><p>Recognition of distant (far-field) speech is a challenge for ASR due to mismatch in recording conditions resulting from room reverberation and environment noise. Given the remarkable learning capacity of deep neural networks, there is increasing interest to address this problem by using a large corpus of reverberant far-field speech to train robust models. In this study, we explore how an end-to-end RNN acoustic model trained on speech from different rooms and acoustic conditions (different domains) achieves robustness to environmental variations. It is shown that the first hidden layer acts as a domain separator, projecting the data from different domains into different subspaces. The subsequent layers then use this encoded domain knowledge to map these features to final representations that are invariant to domain change. This mechanism is closely related to noise-aware or room-aware approaches which append manually-extracted domain signatures to the input features. Additionally, we demonstrate how this understanding of the learning procedure provides useful guidance for model adaptation to new acoustic conditions. We present results based on AMI corpus to demonstrate the propagation of domain information in a deep RNN, and perform recognition experiments which indicate the role of encoded domain knowledge on training and adaptation of RNN acoustic models.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Amir Hossein Poorjam|AUTHOR Amir Hossein Poorjam]]^^1^^, [[Jesper Rindom Jensen|AUTHOR Jesper Rindom Jensen]]^^1^^, [[Max A. Little|AUTHOR Max A. Little]]^^2^^, [[Mads Græsbøll Christensen|AUTHOR Mads Græsbøll Christensen]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Aalborg University, Denmark; ^^2^^MIT, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 289–293
</span></p></div>
<div class="cpabstractcardabstract"><p>Advances in speech signal analysis facilitate the development of techniques for remote biomedical voice assessment. However, the performance of these techniques is affected by noise and distortion in signals. In this paper, we focus on the vowel /a/ as the most widely-used voice signal for pathological voice assessments and investigate the impact of four major types of distortion that are commonly present during recording or transmission in voice analysis, namely: background noise, reverberation, clipping and compression, on Mel-frequency cepstral coefficients (MFCCs) — the most widely-used features in biomedical voice analysis. Then, we propose a new distortion classification approach to detect the most dominant distortion in such voice signals. The proposed method involves MFCCs as frame-level features and a support vector machine as classifier to detect the presence and type of distortion in frames of a given voice signal. Experimental results obtained from the healthy and Parkinson’s voices show the effectiveness of the proposed approach in distortion detection and classification.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Duc Le|AUTHOR Duc Le]], [[Keli Licata|AUTHOR Keli Licata]], [[Emily Mower Provost|AUTHOR Emily Mower Provost]]
</p><p class="cpabstractcardaffiliationlist">University of Michigan, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 294–298
</span></p></div>
<div class="cpabstractcardabstract"><p>Aphasia is an acquired language disorder resulting from brain damage that can cause significant communication difficulties. Aphasic speech is often characterized by errors known as paraphasias, the analysis of which can be used to determine an appropriate course of treatment and to track an individual’s recovery progress. Being able to detect paraphasias automatically has many potential clinical benefits; however, this problem has not previously been investigated in the literature. In this paper, we perform the first study on detecting phonemic and neologistic paraphasias from scripted speech samples in AphasiaBank. We propose a speech recognition system with task-specific language models to transcribe aphasic speech automatically. We investigate features based on speech duration, Goodness of Pronunciation, phone edit distance, and Dynamic Time Warping on phoneme posteriorgrams. Our results demonstrate the feasibility of automatic paraphasia detection and outline the path toward enabling this system in real-world clinical applications.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[N. Garcia|AUTHOR N. Garcia]]^^1^^, [[Juan Rafael Orozco-Arroyave|AUTHOR Juan Rafael Orozco-Arroyave]]^^2^^, [[L.F. D’Haro|AUTHOR L.F. D’Haro]]^^3^^, [[Najim Dehak|AUTHOR Najim Dehak]]^^4^^, [[Elmar Nöth|AUTHOR Elmar Nöth]]^^5^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universidad de Antioquia, Colombia; ^^2^^Universidad de Antioquia, Colombia; ^^3^^A*STAR, Singapore; ^^4^^Johns Hopkins University, USA; ^^5^^FAU Erlangen-Nürnberg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 299–303
</span></p></div>
<div class="cpabstractcardabstract"><p>The i-vector approach is used to model the speech of PD patients with the aim of assessing their condition. Features related to the articulation, phonation, and prosody dimensions of speech were used to train different i-vector extractors. Each i-vector extractor is trained using utterances from both PD patients and healthy controls. The i-vectors of the healthy control (HC) speakers are averaged to form a single i-vector that represents the HC group, i.e., the reference i-vector. A similar process is done to create a reference of the group with PD patients. Then the i-vectors of test speakers are compared to these reference i-vectors using the cosine distance. Three analyses are performed using this distance: classification between PD patients and HC, prediction of the neurological state of PD patients according to the MDS-UPDRS-III scale, and prediction of a modified version of the Frenchay Dysarthria Assessment. The Spearman’s correlation between this cosine distance and the MDS-UPDRS-III scale was 0.63. These results show the suitability of this approach to monitor the neurological state of people with Parkinson’s Disease.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yu-Ren Chien|AUTHOR Yu-Ren Chien]], [[Michal Borský|AUTHOR Michal Borský]], [[Jón Guðnason|AUTHOR Jón Guðnason]]
</p><p class="cpabstractcardaffiliationlist">Reykjavik University, Iceland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 304–308
</span></p></div>
<div class="cpabstractcardabstract"><p>In clinical practice, the severity of disordered voice is typically rated by a professional with auditory-perceptual judgment. The present study aims to automate this assessment procedure, in an attempt to make the assessment objective and less labor-intensive. In the automated analysis, glottal airflow is estimated from the analyzed voice signal with an inverse filtering algorithm. Automatic assessment is realized by a regressor that predicts from temporal and spectral features of the glottal airflow. A regressor trained on overtone amplitudes and harmonic richness factors extracted from a set of continuous-speech utterances was applied to a set of sustained-vowel utterances, giving severity predictions (on a scale of ratings from 0 to 100) with an average error magnitude of 14.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Florian B. Pokorny|AUTHOR Florian B. Pokorny]]^^1^^, [[Björn Schuller|AUTHOR Björn Schuller]]^^2^^, [[Peter B. Marschik|AUTHOR Peter B. Marschik]]^^1^^, [[Raymond Brueckner|AUTHOR Raymond Brueckner]]^^3^^, [[Pär Nyström|AUTHOR Pär Nyström]]^^4^^, [[Nicholas Cummins|AUTHOR Nicholas Cummins]]^^2^^, [[Sven Bölte|AUTHOR Sven Bölte]]^^5^^, [[Christa Einspieler|AUTHOR Christa Einspieler]]^^1^^, [[Terje Falck-Ytter|AUTHOR Terje Falck-Ytter]]^^5^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Medizinische Universität Graz, Austria; ^^2^^Universität Passau, Germany; ^^3^^Technische Universität München, Germany; ^^4^^Uppsala University, Sweden; ^^5^^Karolinska Institute, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 309–313
</span></p></div>
<div class="cpabstractcardabstract"><p>Autism spectrum disorder (ASD) is a neurodevelopmental disorder usually diagnosed in or beyond toddlerhood. ASD is defined by repetitive and restricted behaviours, and deficits in social communication. The early speech-language development of individuals with ASD has been characterised as delayed. However, little is known about ASD-related characteristics of pre-linguistic vocalisations at the feature level. In this study, we examined pre-linguistic vocalisations of 10-month-old individuals later diagnosed with ASD and a matched control group of typically developing individuals (N = 20). We segmented 684 vocalisations from parent-child interaction recordings. All vocalisations were annotated and signal-analytically decomposed. We analysed ASD-related vocalisation specificities on the basis of a standardised set (eGeMAPS) of 88 acoustic features selected for clinical speech analysis applications. 54 features showed evidence for a differentiation between vocalisations of individuals later diagnosed with ASD and controls. In addition, we evaluated the feasibility of automated, vocalisation-based identification of individuals later diagnosed with ASD. We compared linear kernel support vector machines and a 1-layer bidirectional long short-term memory neural network. Both classification approaches achieved an accuracy of 75% for subject-wise identification in a subject-independent 3-fold cross-validation scheme. Our promising results may be an important contribution en-route to facilitate earlier identification of ASD.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[J.C. Vásquez-Correa|AUTHOR J.C. Vásquez-Correa]]^^1^^, [[Juan Rafael Orozco-Arroyave|AUTHOR Juan Rafael Orozco-Arroyave]]^^1^^, [[Elmar Nöth|AUTHOR Elmar Nöth]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universidad de Antioquia, Colombia; ^^2^^FAU Erlangen-Nürnberg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 314–318
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech impairments are one of the earliest manifestations in patients with Parkinson’s disease. Particularly, articulation deficits related to the capability of the speaker to start/stop the vibration of the vocal folds have been observed in the patients. Those difficulties can be assessed by modeling the transitions between voiced and unvoiced segments from speech. A robust strategy to model the articulatory deficits related to the starting or stopping vibration of the vocal folds is proposed in this study. The transitions between voiced and unvoiced segments are modeled by a convolutional neural network that extracts suitable information from two time-frequency representations: the short time Fourier transform and the continuous wavelet transform. The proposed approach improves the results previously reported in the literature. Accuracies of up to 89% are obtained for the classification of Parkinson’s patients vs. healthy speakers. This study is a step towards the robust modeling of the speech impairments in patients with neuro-degenerative disorders.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Linxue Bai|AUTHOR Linxue Bai]], [[Peter Jančovič|AUTHOR Peter Jančovič]], [[Martin Russell|AUTHOR Martin Russell]], [[Philip Weber|AUTHOR Philip Weber]], [[Steve Houghton|AUTHOR Steve Houghton]]
</p><p class="cpabstractcardaffiliationlist">University of Birmingham, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 319–323
</span></p></div>
<div class="cpabstractcardabstract"><p>Most state-of-the-art automatic speech recognition (ASR) systems use a single deep neural network (DNN) to map the acoustic space to the decision space. However, different phonetic classes employ different production mechanisms and are best described by different types of features. Hence it may be advantageous to replace this single DNN with several phone class dependent DNNs. The appropriate mathematical formalism for this is a manifold. This paper assesses the use of a non-linear manifold structure with multiple DNNs for phone classification. The system has two levels. The first comprises a set of broad phone class (BPC) dependent DNN-based mappings and the second level is a fusion network. Various ways of designing and training the networks in both levels are assessed, including varying the size of hidden layers, the use of the bottleneck or softmax outputs as input to the fusion network, and the use of different broad class definitions. Phone classification experiments are performed on TIMIT. The results show that using the BPC-dependent DNNs provides small but significant improvements in phone classification accuracy relative to a single global DNN. The paper concludes with visualisations of the structures learned by the local and global DNNs and discussion of their interpretations.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Siyuan Chen|AUTHOR Siyuan Chen]], [[Julien Epps|AUTHOR Julien Epps]], [[Eliathamby Ambikairajah|AUTHOR Eliathamby Ambikairajah]], [[Phu Ngoc Le|AUTHOR Phu Ngoc Le]]
</p><p class="cpabstractcardaffiliationlist">University of New South Wales, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 324–328
</span></p></div>
<div class="cpabstractcardabstract"><p>Room occupancy estimation technology has been shown to reduce building energy cost significantly. However speech-based occupancy estimation has not been well explored. In this paper, we investigate energy mode and babble speaker count methods for estimating both small and large crowds in a party-mode room setting. We also examine how distance between speakers and microphone affects their estimation accuracies. Then we propose a novel entropy-based method, which is invariant to different speakers and their different positions in a room. Evaluations on synthetic crowd speech generated using the TIMIT corpus show that acoustic volume features are less affected by distance, and our proposed method outperforms existing methods across a range of different conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Karthika Vijayan|AUTHOR Karthika Vijayan]]^^1^^, [[Jitendra Kumar Dhiman|AUTHOR Jitendra Kumar Dhiman]]^^2^^, [[Chandra Sekhar Seelamantula|AUTHOR Chandra Sekhar Seelamantula]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NUS, Singapore; ^^2^^Indian Institute of Science, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 329–333
</span></p></div>
<div class="cpabstractcardabstract"><p>Decomposing speech signals into periodic and aperiodic components is an important task, finding applications in speech synthesis, coding, denoising, etc. In this paper, we construct a time-frequency coherence function to analyze spectro-temporal signatures of speech signals for distinguishing between deterministic and stochastic components of speech. The narrowband speech spectrogram is segmented into patches, which are represented as 2-D cosine carriers modulated in amplitude and frequency. Separation of carrier and amplitude/frequency modulations is achieved by 2-D demodulation using Riesz transform, which is the 2-D extension of Hilbert transform. The demodulated AM component reflects contributions of the vocal tract to spectrogram. The frequency modulated carrier (FM-carrier) signal exhibits properties of the excitation. The time-frequency coherence is defined with respect to FM-carrier and a coherence map is constructed, in which highly coherent regions represent nearly periodic and deterministic components of speech, whereas the incoherent regions correspond to unstructured components. The coherence map shows a clear distinction between deterministic and stochastic components in speech characterized by jitter, shimmer, lip radiation, type of excitation, etc. Binary masks prepared from the time-frequency coherence function are used for periodic-aperiodic decomposition of speech. Experimental results are presented to validate the efficiency of the proposed method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alexsandro R. Meireles|AUTHOR Alexsandro R. Meireles]]^^1^^, [[Antônio R.M. Simões|AUTHOR Antônio R.M. Simões]]^^2^^, [[Antonio Celso Ribeiro|AUTHOR Antonio Celso Ribeiro]]^^1^^, [[Beatriz Raposo de Medeiros|AUTHOR Beatriz Raposo de Medeiros]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universidade Federal do Espírito Santo, Brazil; ^^2^^University of Kansas, USA; ^^3^^Universidade de São Paulo, Brazil</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 334–338
<a href="./IS2017/MEDIA/0316" class="externallinkbutton" target="_blank">{{$:/causal/Multimedia Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>Musical Speech is a new methodology for transcribing speech prosody using musical notation. The methodology presented in this paper is an updated version of our work [12]. Our work is situated in a historical context with a brief survey of the literature of speech melodies, in which we highlight the pioneering works of John Steele, Leoš Janáček, Engelbert Humperdinck, and Arnold Schoenberg, followed by a linguistic view of musical notation in the analysis of speech. Finally, we present the current state-of-the-art of our innovative methodology that uses a quarter-tone scale for transcribing speech, and shows some initial results of the application of this methodology to prosodic transcription.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[K.S. Nataraj|AUTHOR K.S. Nataraj]], [[Prem C. Pandey|AUTHOR Prem C. Pandey]], [[Hirak Dasgupta|AUTHOR Hirak Dasgupta]]
</p><p class="cpabstractcardaffiliationlist">IIT Bombay, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 339–343
</span></p></div>
<div class="cpabstractcardabstract"><p>A visual feedback of the place of articulation is considered to be useful for speech training aids for hearing-impaired children and for learners of second languages in helping them in improving pronunciation. For such applications, the relation between place of articulation of fricatives and their spectral characteristics is investigated using English fricatives available in the XRMB database, which provides simultaneously acquired speech signal and articulogram. Place of articulation is estimated from the articulogram as the position of maximum constriction in the oral cavity, using an automated graphical technique. The magnitude spectrum is smoothed by critical band based median and mean filters for improving the consistency of the spectral parameters. Out of several spectral parameters investigated, spectral moments and spectral slope appear to be related to the place of articulation of the fricative segment of the utterances as measured from articulogram. The data are used to train and test a Gaussian mixture model to estimate the place of articulation with spectral parameters as the inputs. The estimated values showed a good match with those obtained from the articulograms.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tom Bäckström|AUTHOR Tom Bäckström]]
</p><p class="cpabstractcardaffiliationlist">Aalto University, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 344–348
</span></p></div>
<div class="cpabstractcardabstract"><p>The efficiency of many speech processing methods rely on accurate modeling of the distribution of the signal spectrum and a majority of prior works suggest that the spectral components follow the Laplace distribution. To improve the probability distribution models based on our knowledge of speech source modeling, we argue that the model should in fact be a multiplicative mixture model, including terms for voiced and unvoiced utterances. While prior works have applied Gaussian mixture models, we demonstrate that a mixture of generalized Gaussian models more accurately follows the observations. The proposed estimation method is based on measuring the ratio of L,,p,,-norms between spectral bands. Such ratios follow the Beta-distribution when the input signal is generalized Gaussian, whereby the estimated parameters can be used to determine the underlying parameters of the mixture of generalized Gaussian distributions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sucheta Ghosh|AUTHOR Sucheta Ghosh]]^^1^^, [[Camille Fauth|AUTHOR Camille Fauth]]^^2^^, [[Yves Laprie|AUTHOR Yves Laprie]]^^3^^, [[Aghilas Sini|AUTHOR Aghilas Sini]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LORIA, France; ^^2^^LiLPa, France; ^^3^^LORIA, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 349–353
</span></p></div>
<div class="cpabstractcardabstract"><p>This work aims at providing an end-to-end acoustic feedback framework to help learners of French to pronounce voiced fricatives. A classifier ensemble detects voiced/unvoiced utterances, then a correction method is proposed to improve the perception and production of voiced fricatives in a word-final position. Realizations of voiced fricatives contained in French sentences uttered by French and German speakers were analyzed to find out the deviations between the acoustic cues realized by the two groups of speakers. The correction method consists in substituting the erroneous devoiced fricative by TD-PSOLA concatenative synthesis that uses exemplars of voiced fricatives chosen from a French speaker corpus. To achieve a seamless concatenation the energy of the replacement fricative was adjusted with respect to the energy levels of the learner’s and French speaker’s preceding vowels. Finally, a perception experiment with the corrected stimuli has been carried out with French native speakers to check the appropriateness of the fricative revoicing. The results showed that the proposed revoicing strategy proved to be very efficient and can be used as an acoustic feedback.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ewa Jacewicz|AUTHOR Ewa Jacewicz]], [[Robert A. Fox|AUTHOR Robert A. Fox]]
</p><p class="cpabstractcardaffiliationlist">Ohio State University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 354–358
</span></p></div>
<div class="cpabstractcardabstract"><p>The acquisition of regional dialect variation is an inherent part of the language learning process that takes place in the specific environments in which the child participates. This study examined dialect perception by 9–12-year-olds who grew up in two very diverse dialect regions in the United States, Western North Carolina (NC) and Southeastern Wisconsin (WI). In a dialect identification task, each group of children responded to 120 talkers from the same dialects representing three generations, ranging in age from old adults to children. There was a robust discrepancy in the children’s dialect identification performance: WI children were able to identify talker dialect quite well (although still not as well as the adults) whereas NC children were at chance level. WI children were also more sensitive to cross-generational changes in both dialects as a function of diachronic sound change. It is concluded that both groups of children demonstrated their sociolinguistic awareness in very different ways, corresponding to relatively stable (WI) and changing (NC) socio-cultural environments in their respective speech communities.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kiyoko Yoneyama|AUTHOR Kiyoko Yoneyama]]^^1^^, [[Mafuyu Kitahara|AUTHOR Mafuyu Kitahara]]^^2^^, [[Keiichi Tajima|AUTHOR Keiichi Tajima]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Daito Bunka University, Japan; ^^2^^Sophia University, Japan; ^^3^^Hosei University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 359–363
</span></p></div>
<div class="cpabstractcardabstract"><p>Alveolar flaps are non-contrastive allophonic variants of alveolar stops in American English. A lexical decision experiment was conducted with Japanese learners of English (JE) to investigate whether second-language (L2) learners are sensitive to such allophonic variations when recognizing words in L2. The stimuli consisted of 36 isolated bisyllabic English words containing word-medial /t/, half of which were flap-favored words, e.g. city, and the other half were [t]-favored words, e.g. faster. All stimuli were recorded with two surface forms: /t/ as a flap, e.g. city with a flap, or as [t], e.g. city with [t]. The stimuli were counterbalanced so that participants only heard one of the two surface forms of each word. The accuracy data indicated that flap-favored words pronounced with a flap, e.g. city with a flap, were recognized significantly less accurately than flap-favored words with [t], e.g. city with [t], and [t]-favored words with [t], e.g. faster with [t]. These results suggest that JE learners prefer canonical forms over frequent forms produced with context-dependent allophonic variations. These results are inconsistent with previous studies that found native speakers’ preference for frequent forms, and highlight differences in the effect of allophonic variations on the perception of native-language and L2 speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lieke van Maastricht|AUTHOR Lieke van Maastricht]]^^1^^, [[Tim Zee|AUTHOR Tim Zee]]^^2^^, [[Emiel Krahmer|AUTHOR Emiel Krahmer]]^^1^^, [[Marc Swerts|AUTHOR Marc Swerts]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tilburg University, The Netherlands; ^^2^^Radboud Universiteit Nijmegen, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 364–368
</span></p></div>
<div class="cpabstractcardabstract"><p>This study investigates the cumulative effect of (non-)native intonation, rhythm, and speech rate in utterances produced by Spanish learners of Dutch on Dutch native listeners’ perceptions. In order to assess the relative contribution of these language-specific properties to perceived accentedness and comprehensibility, speech produced by Spanish learners of Dutch was manipulated using transplantation and resynthesis techniques. Thus, eight manipulation conditions reflecting all possible combinations of L1 and L2 intonation, rhythm, and speech rate were created, resulting in 320 utterances that were rated by 50 Dutch natives on their degree of foreign accent and ease of comprehensibility.
Our analyses show that all manipulations result in lower accentedness and higher comprehensibility ratings. Moreover, both measures are not affected in the same way by different combinations of prosodic features: For accentedness, Dutch listeners appear most influenced by intonation, and intonation combined with speech rate. This holds for comprehensibility ratings as well, but here the combination of all three properties, including rhythm, also significantly affects ratings by native speakers. Thus, our study reaffirms the importance of differentiating between different aspects of perception and provides insight into those features that are most likely to affect how native speakers perceive second language learners.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Izumi Takiguchi|AUTHOR Izumi Takiguchi]]
</p><p class="cpabstractcardaffiliationlist">Bunkyo Gakuin University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 369–373
</span></p></div>
<div class="cpabstractcardabstract"><p>This study investigated whether and how the role of pitch fall in the first language (L1) interacts with its use as a cue for Japanese phonological vowel length in the second language (L2). Native listeners of Japanese (NJ) and L2 learners of Japanese with L1 backgrounds in Mandarin Chinese (NC), Seoul Korean (NK), American English (NE), and French (NFr) participated in a perception experiment. The results showed that the proportion of “long” responses increased as a function of vowel duration for all groups, giving s-shaped curves. Meanwhile, the presence or absence of a pitch fall within a syllable affected only NJ and NC’s perception. Their category boundary occurred at a shorter duration for vowels with a pitch fall than without a pitch fall. Among the four groups of L2 learners, only NC use pitch fall to distinguish words in the L1. Thus, it is possible to think that the role of pitch fall as an L1 cue relates to its use as a cue for L2 length identification. L2 learners tend to attend to an important phonetic feature as a cue for perceiving an L1 category differentiating L1 words even in the L2 as implied by the Feature Hypothesis.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuanyuan Zhang|AUTHOR Yuanyuan Zhang]], [[Hongwei Ding|AUTHOR Hongwei Ding]]
</p><p class="cpabstractcardaffiliationlist">Shanghai Jiao Tong University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 374–378
</span></p></div>
<div class="cpabstractcardabstract"><p>This study investigated whether Chinese learners of English as a foreign language (EFL learners hereafter) could use prosodic cues to resolve syntactically ambiguous sentences in English. 8 sentences with 3 types of syntactic ambiguity were adopted. They were far/near PP attachment, left/right word attachment and wide/narrow scope. In the production experiment, 15 Chinese college students who passed the annual national examination CET (College English Test) Band 4 and 5 native English speakers from America were recruited. They were asked to read the 8 target sentences after hearing the contexts spoken by a Native American speaker, which clarified the intended meaning of the ambiguous sentences. The preliminary results showed that, as the native speakers did, Chinese EFL learners employed different durational patterns to express the alternative meanings of the ambiguous sentences by altering prosodic phrasing. That is, the duration of the pre-boundary items were lengthened and pauses were inserted at the boundary. But the perception experiment showed that the utterances produced by Chinese EFL learners couldn’t be effectively perceived by the native speakers due to their different use of pre-boundary lengthening and pause. The conclusion is that Chinese EFL learners find prosodic disambiguation difficult.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Masanori Morise|AUTHOR Masanori Morise]], [[Genta Miyashita|AUTHOR Genta Miyashita]], [[Kenji Ozawa|AUTHOR Kenji Ozawa]]
</p><p class="cpabstractcardaffiliationlist">University of Yamanashi, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 409–413
</span></p></div>
<div class="cpabstractcardabstract"><p>A speech coding for a full-band speech analysis/synthesis system is described. In this work, full-band speech is defined as speech with a sampling frequency above 40 kHz, whose Nyquist frequency covers the audible frequency range. In prior works, speech coding has generally focused on the narrow-band speech with a sampling frequency below 16 kHz. On the other hand, statistical parametric speech synthesis currently uses the full-band speech, and low-dimensional representation of speech parameters is being used. The purpose of this study is to achieve speech coding without deterioration for full-band speech. We focus on a high-quality speech analysis/synthesis system and mel-cepstral analysis using frequency warping. In the frequency warping function, we directly use three auditory scales. We carried out a subjective evaluation using the WORLD vocoder and found that the optimum number of dimensions was around 50. The kind of frequency warping did not significantly affect the sound quality in the dimensions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Christian Kroos|AUTHOR Christian Kroos]], [[Mark D. Plumbley|AUTHOR Mark D. Plumbley]]
</p><p class="cpabstractcardaffiliationlist">University of Surrey, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 454–458
</span></p></div>
<div class="cpabstractcardabstract"><p>The first generation of three-dimensional Electromagnetic Articulography devices (Carstens AG500) suffered from occasional critical tracking failures. Although now superseded by new devices, the AG500 is still in use in many speech labs and many valuable data sets exist. In this study we investigate whether deep neural networks (DNNs) can learn the mapping function from raw voltage amplitudes to sensor positions based on a comprehensive movement data set. This is compared to arriving sample by sample at individual position values via direct optimisation as used in previous methods. We found that with appropriate hyperparameter settings a DNN was able to approximate the mapping function with good accuracy, leading to a smaller error than the previous methods, but that the DNN-based approach was not able to solve the tracking problem completely.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Erfan Loweimi|AUTHOR Erfan Loweimi]], [[Jon Barker|AUTHOR Jon Barker]], [[Oscar Saz Torralba|AUTHOR Oscar Saz Torralba]], [[Thomas Hain|AUTHOR Thomas Hain]]
</p><p class="cpabstractcardaffiliationlist">University of Sheffield, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 414–418
</span></p></div>
<div class="cpabstractcardabstract"><p>In earlier work we proposed a framework for speech source-filter separation that employs phase-based signal processing. This paper presents a further theoretical investigation of the model and optimisations that make the filter and source representations less sensitive to the effects of noise and better matched to downstream processing. To this end, first, in computing the Hilbert transform, the log function is replaced by the generalised logarithmic function. This introduces a tuning parameter that adjusts both the dynamic range and distribution of the phase-based representation. Second, when computing the group delay, a more robust estimate for the derivative is formed by applying a regression filter instead of using sample differences. The effectiveness of these modifications is evaluated in clean and noisy conditions by considering the accuracy of the fundamental frequency extracted from the estimated source, and the performance of speech recognition features extracted from the estimated filter. In particular, the proposed filter-based front-end reduces Aurora-2 WERs by 6.3% (average 0–20 dB) compared with previously reported results. Furthermore, when tested in a LVCSR task (Aurora-4) the new features resulted in 5.8% absolute WER reduction compared to MFCCs without performance loss in the clean/matched condition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Simon Stone|AUTHOR Simon Stone]], [[Peter Steiner|AUTHOR Peter Steiner]], [[Peter Birkholz|AUTHOR Peter Birkholz]]
</p><p class="cpabstractcardaffiliationlist">Technische Universität Dresden, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 419–423
</span></p></div>
<div class="cpabstractcardabstract"><p>Accurately tracking the fundamental frequency (f₀) or pitch in speech data is of great interest in numerous contexts. All currently available pitch tracking algorithms perform a short-term analysis of a speech signal to extract the f₀ under the assumption that the pitch does not change within a single analysis frame, a simplification that introduces errors when the f₀ changes rather quickly over time. This paper proposes a new algorithm that warps the time axis of an analysis frame to counteract intra-frame f₀ changes and thus to improve the total tracking results. The algorithm was evaluated on a set of 4718 sentences from 20 speakers (10 male, 10 female) and with added white and babble noise. It was comparable in performance to the state-of-the-art algorithms RAPT and PRAAT to Pitch (ac) under clean conditions and outperformed both of them under noisy conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hideki Kawahara|AUTHOR Hideki Kawahara]]^^1^^, [[Ken-Ichi Sakakibara|AUTHOR Ken-Ichi Sakakibara]]^^2^^, [[Masanori Morise|AUTHOR Masanori Morise]]^^3^^, [[Hideki Banno|AUTHOR Hideki Banno]]^^4^^, [[Tomoki Toda|AUTHOR Tomoki Toda]]^^5^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Wakayama University, Japan; ^^2^^Health Science University of Hokkaido, Japan; ^^3^^University of Yamanashi, Japan; ^^4^^Meijo University, Japan; ^^5^^Nagoya University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 424–428
</span></p></div>
<div class="cpabstractcardabstract"><p>We introduce a simple and linear SNR (strictly speaking, periodic to random power ratio) estimator (0 dB to 80 dB without additional calibration/linearization) for providing reliable descriptions of aperiodicity in speech corpus. The main idea of this method is to estimate the background random noise level without directly extracting the background noise. The proposed method is applicable to a wide variety of time windowing functions with very low sidelobe levels. The estimate combines the frequency derivative and the time-frequency derivative of the mapping from filter center frequency to the output instantaneous frequency. This procedure can replace the periodicity detection and aperiodicity estimation subsystems of recently introduced open source vocoder, YANG vocoder. Source code of MATLAB implementation of this method will also be open sourced.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Avinash Kumar|AUTHOR Avinash Kumar]], [[S. Shahnawazuddin|AUTHOR S. Shahnawazuddin]], [[Gayadhar Pradhan|AUTHOR Gayadhar Pradhan]]
</p><p class="cpabstractcardaffiliationlist">NIT Patna, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 429–433
</span></p></div>
<div class="cpabstractcardabstract"><p>Vowel onset point (VOP) is important information extensively employed in speech analysis and synthesis. Detecting the VOPs in a given speech sequence, independent of the text contexts and recording environments, is a challenging area of research. Performance of existing VOP detection methods has not yet been extensively studied in varied environmental conditions. In this paper, we have exploited the non-local means estimation to detect those regions in the speech sequence which are of high signal-to-noise ratio and exhibit periodicity. Mostly, those regions happen to be the vowel regions. This helps in overcoming the ill-effects of environmental degradations. Next, for each short-time frame of estimated speech sequence, we cumulatively sum the magnitude of the corresponding Fourier transform spectrum. The cumulative sum is then used as the feature to detect the VOPs. The experiments conducted on TIMIT database show that the proposed approach provides better results in terms of detection and spurious rate when compared to a few existing methods under clean and noisy test conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mohammed Salah Al-Radhi|AUTHOR Mohammed Salah Al-Radhi]], [[Tamás Gábor Csapó|AUTHOR Tamás Gábor Csapó]], [[Géza Németh|AUTHOR Géza Németh]]
</p><p class="cpabstractcardaffiliationlist">BME, Hungary</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 434–438
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we present an extension of a novel continuous residual-based vocoder for statistical parametric speech synthesis. Previous work has shown the advantages of adding envelope modulated noise to the voiced excitation, but this has not been investigated yet in the context of continuous vocoders, i.e. of which all parameters are continuous. The noise component is often not accurately modeled in modern vocoders (e.g. STRAIGHT). For more natural sounding speech synthesis, four time-domain envelopes (Amplitude, Hilbert, Triangular and True) are investigated and enhanced, and then applied to the noise component of the excitation in our continuous vocoder. The performance evaluation is based on the study of time envelopes. In an objective experiment, we investigated the Phase Distortion Deviation of vocoded samples. A MUSHRA type subjective listening test was also conducted comparing natural and vocoded speech samples. Both experiments have shown that the proposed framework using Hilbert and True envelopes provides high-quality vocoding while outperforming the two other envelopes.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chia-Lung Wu|AUTHOR Chia-Lung Wu]]^^1^^, [[Hsiang-Ping Hsu|AUTHOR Hsiang-Ping Hsu]]^^1^^, [[Syu-Siang Wang|AUTHOR Syu-Siang Wang]]^^2^^, [[Jeih-Weih Hung|AUTHOR Jeih-Weih Hung]]^^3^^, [[Ying-Hui Lai|AUTHOR Ying-Hui Lai]]^^4^^, [[Hsin-Min Wang|AUTHOR Hsin-Min Wang]]^^2^^, [[Yu Tsao|AUTHOR Yu Tsao]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Ministry of Justice, Taiwan; ^^2^^Academia Sinica, Taiwan; ^^3^^National Chi Nan University, Taiwan; ^^4^^Yuan Ze University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 439–443
</span></p></div>
<div class="cpabstractcardabstract"><p>Most state-of-the-art speech enhancement (SE) techniques prefer to enhance utterances in the frequency domain rather than in the time domain. However, the overlap-add (OLA) operation in the short-time Fourier transform (STFT) for speech signal processing possibly distorts the signal and limits the performance of the SE techniques. In this study, a novel SE method that integrates the discrete wavelet packet transform (DWPT) and a novel subspace-based method, robust principal component analysis (RPCA), is proposed to enhance noise-corrupted signals directly in the time domain. We evaluate the proposed SE method on the Mandarin hearing in noise test (MHINT) sentences. The experimental results show that the new method reduces the signal distortions dramatically, thereby improving speech quality and intelligibility significantly. In addition, the newly proposed method outperforms the STFT-RPCA-based speech enhancement system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bidisha Sharma|AUTHOR Bidisha Sharma]], [[S.R. Mahadeva Prasanna|AUTHOR S.R. Mahadeva Prasanna]]
</p><p class="cpabstractcardaffiliationlist">IIT Guwahati, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 444–448
</span></p></div>
<div class="cpabstractcardabstract"><p>Vowel onset point (VOP) refers to the starting event of a vowel, that may be reflected in different aspects of the speech signal. The major issue in VOP detection using existing methods is the confusion among the vowels and other categories of sounds preceding them. This work explores the usefulness of sonority information to reduce this confusion and improve VOP detection. Vowels are the most sonorant sounds followed by semivowels, nasals, voiced fricatives, voiced stops. The sonority feature is derived from the vocal-tract system, excitation source and suprasegmental aspects. As this feature has the capability to discriminate among different sonorant sound units, it reduces the confusion among onset of vowels with that of other sonorant sounds. This results in improved detection and resolution of VOP detection for continuous speech. The performance of proposed sonority information based VOP detection is found to be 92.4%, compared to 85.2% by the existing method. Also the resolution of localizing VOP within 10 ms is significantly enhanced and a performance of 73.0% is achieved as opposed to 60.2% by the existing method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Unto K. Laine|AUTHOR Unto K. Laine]]
</p><p class="cpabstractcardaffiliationlist">Aalto University, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 449–453
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech signal consists of events in time and frequency, and therefore its analysis with high-resolution time-frequency tools is often of importance. Analytic filter bank provides a simple, fast, and flexible method to construct time-frequency representations of signals. Its parameters can be easily adapted to different situations from uniform to any auditory frequency scale, or even to a focused resolution. Since the Hilbert magnitude values of the channels are obtained at every sample, it provides a practical tool for a high-resolution time-frequency analysis.
The present study describes the basic theory of analytic filters and tests their main properties. Applications of analytic filter bank to different speech analysis tasks including pitch period estimation and pitch synchronous analysis of formant frequencies and bandwidths are demonstrated. In addition, a new feature vector called group delay vector is introduced. It is shown that this representation provides comparable, or even better results, than those obtained by spectral magnitude feature vectors in the analysis and classification of vowels. The implications of this observation are discussed also from the speech perception point of view.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jia Dai|AUTHOR Jia Dai]], [[Wei Xue|AUTHOR Wei Xue]], [[Wenju Liu|AUTHOR Wenju Liu]]
</p><p class="cpabstractcardaffiliationlist">Chinese Academy of Sciences, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 459–463
</span></p></div>
<div class="cpabstractcardabstract"><p>For music signal processing, compared with the strategy which models each short-time frame independently, when the long-time features are considered, the time-series characteristics of the music signal can be better presented. As a typical kind of long-time modeling strategy, the identification vector (i-vector) uses statistical modeling to model the audio signal in the segment level. It can better capture the important elements of the music signal, and these important elements may benefit the classification of music signal. In this paper, the i-vector based statistical feature for music genre classification is explored. In addition, to learn enough important elements for the music signal, a new multilingual i-vector feature is proposed based on the multilingual model. The experimental results show that the multilingual i-vector based models can achieve better classification performances than conventional short-time modeling based methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[G. Nisha Meenakshi|AUTHOR G. Nisha Meenakshi]], [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]
</p><p class="cpabstractcardaffiliationlist">Indian Institute of Science, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 503–507
</span></p></div>
<div class="cpabstractcardabstract"><p>In this work, we propose a robust method to perform frame-level classification of voiced (V) and unvoiced (UV) phonemes from whispered speech, a challenging task due to its voiceless and noise-like nature. We hypothesize that a whispered speech spectrum can be represented as a linear combination of a set of colored noise spectra. A five-dimensional (5D) feature is computed by employing non-negative matrix factorization with a fixed basis dictionary, constructed using spectra of five colored noises. Deep Neural Network (DNN) is used as the classifier. We consider two baseline features: 1) Mel Frequency Cepstral Coefficients (MFCC), 2) features computed from a data driven dictionary. Experiments reveal that the features from the colored noise dictionary perform better (on average) than that using the data driven dictionary, with a relative improvement in the average V/UV accuracy of 10.30%, within, and 10.41%, across, data from seven subjects. We also find that the MFCCs and 5D features carry complementary information regarding the nature of voicing decisions in whispered speech. Hence, across all subjects, we obtain a balanced frame-level V/UV classification performance, when MFCC and 5D features are combined, compared to a skewed performance when they are considered separately.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Banriskhem K. Khonglah|AUTHOR Banriskhem K. Khonglah]]^^1^^, [[K.T. Deepak|AUTHOR K.T. Deepak]]^^2^^, [[S.R. Mahadeva Prasanna|AUTHOR S.R. Mahadeva Prasanna]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IIT Guwahati, India; ^^2^^IIIT Dharwad, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 464–468
</span></p></div>
<div class="cpabstractcardabstract"><p>The task of indoor/outdoor audio classification using foreground speech segmentation is attempted in this work. Foreground speech segmentation is the use of features to segment between foreground speech and background interfering sources like noise. Initially, the foreground and background segments are obtained from foreground speech segmentation by using the normalized autocorrelation peak strength (NAPS) of the zero frequency filtered signal (ZFFS) as a feature. The background segments are then considered for determining whether a particular segment is an indoor or outdoor audio sample. The mel frequency cepstral coefficients are obtained from the background segments of both the indoor and outdoor audio samples and are used to train the Support Vector Machine (SVM) classifier. The use of foreground speech segmentation gives a promising performance for the indoor/outdoor audio classification task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jinxi Guo|AUTHOR Jinxi Guo]]^^1^^, [[Ning Xu|AUTHOR Ning Xu]]^^2^^, [[Li-Jia Li|AUTHOR Li-Jia Li]]^^3^^, [[Abeer Alwan|AUTHOR Abeer Alwan]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of California at Los Angeles, USA; ^^2^^Snap, USA; ^^3^^Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 469–473
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, neural networks with deep architecture have been widely applied to acoustic scene classification. Both Convolutional Neural Networks (CNNs) and Long Short-Term Memory Networks (LSTMs) have shown improvements over fully connected Deep Neural Networks (DNNs). Motivated by the fact that CNNs, LSTMs and DNNs are complementary in their modeling capability, we apply the CLDNNs (Convolutional, Long Short-Term Memory, Deep Neural Networks) framework to short-duration acoustic scene classification in a unified architecture. The CLDNNs take advantage of frequency modeling with CNNs, temporal modeling with LSTM, and discriminative training with DNNs. Based on the CLDNN architecture, several novel attention-based mechanisms are proposed and applied on the LSTM layer to predict the importance of each time step. We evaluate the proposed method on the truncated version of the 2016 TUT acoustic scenes dataset which consists of recordings from 15 different scenes. By using CLDNNs with bidirectional LSTM, we achieve higher performance compared to the conventional neural network architectures. Moreover, by combining the attention-weighted output with LSTM final time step output, significant improvement can be further achieved.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xianjun Xia|AUTHOR Xianjun Xia]]^^1^^, [[Roberto Togneri|AUTHOR Roberto Togneri]]^^1^^, [[Ferdous Sohel|AUTHOR Ferdous Sohel]]^^2^^, [[David Huang|AUTHOR David Huang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Western Australia, Australia; ^^2^^Murdoch University, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 474–478
</span></p></div>
<div class="cpabstractcardabstract"><p>Acoustic event detection, the determination of the acoustic event type and the localisation of the event, has been widely applied in many real-world applications. Many works adopt multi-label classification techniques to perform the polyphonic acoustic event detection with a global threshold to detect the active acoustic events. However, the global threshold has to be set manually and is highly dependent on the database being tested. To deal with this, we replaced the fixed threshold method with a frame-wise dynamic threshold approach in this paper. Two novel approaches, namely contour and regressor based dynamic threshold approaches are proposed in this work. Experimental results on the popular TUT Acoustic Scenes 2016 database of polyphonic events demonstrated the superior performance of the proposed approaches.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Inseon Jang|AUTHOR Inseon Jang]]^^1^^, [[ChungHyun Ahn|AUTHOR ChungHyun Ahn]]^^1^^, [[Jeongil Seo|AUTHOR Jeongil Seo]]^^1^^, [[Younseon Jang|AUTHOR Younseon Jang]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^ETRI, Korea; ^^2^^Chungnam National University, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 479–483
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech detection is an important first step for audio analysis on media contents, whose goal is to discriminate the presence of speech from non-speech. It remains a challenge owing to various sound sources included in media audio. In this work, we present a novel audio feature extraction method to reflect the acoustic characteristic of the media audio in the time-frequency domain. Since the degree of combination of harmonic and percussive components varies depending on the type of sound source, the audio features which further distinguish between speech and non-speech can be obtained by decomposing the signal into both components. For the evaluation, we use over 20 hours of drama which was manually annotated for speech detection as well as 4 full-length movies with annotations released for a research community, whose total length is over 8 hours. Experimental results with deep neural network show superior performance of the proposed method in the media audio condition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sukanya Sonowal|AUTHOR Sukanya Sonowal]]^^1^^, [[Tushar Sandhan|AUTHOR Tushar Sandhan]]^^2^^, [[Inkyu Choi|AUTHOR Inkyu Choi]]^^2^^, [[Nam Soo Kim|AUTHOR Nam Soo Kim]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Samsung Electronics, Korea; ^^2^^Seoul National University, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 484–487
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a classification scheme for audio signals using high-level feature descriptors. The descriptor is designed to capture the relevance of each acoustic feature group (or feature set like mel-frequency cepstral coefficients, perceptual features etc.) in recognizing an audio class. For this, a bank of RVM classifiers are modeled for each ‘audio class’–‘feature group’ pair. The response of an input signal to this bank of RVM classifiers forms the entries of the descriptor. Each entry of the descriptor thus measures the proximity of the input signal to an audio class based on a single feature group. This form of signal representation offers two-fold advantages. First, it helps to determine the effectiveness of each feature group in classifying a specific audio class. Second, the descriptor offers higher discriminability than the low-level feature groups and a simple SVM classifier trained on the descriptor produces better performance than several state-of-the-art methods. </p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Janek Ebbers|AUTHOR Janek Ebbers]]^^1^^, [[Jahn Heymann|AUTHOR Jahn Heymann]]^^1^^, [[Lukas Drude|AUTHOR Lukas Drude]]^^1^^, [[Thomas Glarner|AUTHOR Thomas Glarner]]^^1^^, [[Reinhold Haeb-Umbach|AUTHOR Reinhold Haeb-Umbach]]^^1^^, [[Bhiksha Raj|AUTHOR Bhiksha Raj]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität Paderborn, Germany; ^^2^^Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 488–492
</span></p></div>
<div class="cpabstractcardabstract"><p>Variational Autoencoders (VAEs) have been shown to provide efficient neural-network-based approximate Bayesian inference for observation models for which exact inference is intractable. Its extension, the so-called Structured VAE (SVAE) allows inference in the presence of both discrete and continuous latent variables. Inspired by this extension, we developed a VAE with Hidden Markov Models (HMMs) as latent models. We applied the resulting HMM-VAE to the task of acoustic unit discovery in a zero resource scenario. Starting from an initial model based on variational inference in an HMM with Gaussian Mixture Model (GMM) emission probabilities, the accuracy of the acoustic unit discovery could be significantly improved by the HMM-VAE. In doing so we were able to demonstrate for an unsupervised learning task what is well-known in the supervised learning case: Neural networks provide superior modeling power compared to GMMs.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Matthias Zöhrer|AUTHOR Matthias Zöhrer]], [[Franz Pernkopf|AUTHOR Franz Pernkopf]]
</p><p class="cpabstractcardaffiliationlist">Technische Universität Graz, Austria</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 493–497
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we use gated recurrent neural networks (GRNNs) for efficiently detecting environmental events of the IEEE Detection and Classification of Acoustic Scenes and Events challenge (DCASE2016). For this acoustic event detection task data is limited. Therefore, we propose data augmentation such as on-the-fly shuffling and virtual adversarial training for regularization of the GRNNs. Both improve the performance using GRNNs. We obtain a segment-based error rate of 0.59 and an F-score of 58.6%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Michael McAuliffe|AUTHOR Michael McAuliffe]]^^1^^, [[Michaela Socolof|AUTHOR Michaela Socolof]]^^2^^, [[Sarah Mihuc|AUTHOR Sarah Mihuc]]^^1^^, [[Michael Wagner|AUTHOR Michael Wagner]]^^1^^, [[Morgan Sonderegger|AUTHOR Morgan Sonderegger]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^McGill University, Canada; ^^2^^University of Maryland, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 498–502
</span></p></div>
<div class="cpabstractcardabstract"><p>We present the Montreal Forced Aligner (MFA), a new open-source system for speech-text alignment. MFA is an update to the Prosodylab-Aligner, and maintains its key functionality of trainability on new data, as well as incorporating improved architecture (triphone acoustic models and speaker adaptation), and other features. MFA uses Kaldi instead of HTK, allowing MFA to be distributed as a stand-alone package, and to exploit parallel processing for computationally-intensive training and scaling to larger datasets. We evaluate MFA’s performance on aligning word and phone boundaries in English conversational and laboratory speech, relative to human-annotated boundaries, focusing on the effects of aligner architecture and training on the data to be aligned. MFA performs well relative to two existing open-source aligners with simpler architecture (Prosodylab-Aligner and FAVE), and both its improved architecture and training on data to be aligned generally result in more accurate boundaries.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ian Williams|AUTHOR Ian Williams]], [[Petar Aleksic|AUTHOR Petar Aleksic]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 508–512
</span></p></div>
<div class="cpabstractcardabstract"><p>Using context in automatic speech recognition allows the recognition system to dynamically task-adapt and bring gains to a broad variety of use-cases. An important mechanism of context-inclusion is on-the-fly rescoring of hypotheses with contextual language model content available only in real-time.
In systems where rescoring occurs on the lattice during its construction as part of beam search decoding, hypotheses eligible for rescoring may be missed due to pruning. This can happen for many reasons: the language model and rescoring model may assign significantly different scores, there may be a lot of noise in the utterance, or word prefixes with a high out-degree may necessitate aggressive pruning to keep the search tractable. This results in misrecognitions when contextually-relevant hypotheses are pruned before rescoring, even if a contextual rescoring model favors those hypotheses by a large margin.
We present a technique to adapt the beam search algorithm to preserve hypotheses when they may benefit from rescoring. We show that this technique significantly reduces the number of search pruning errors on rescorable hypotheses, without a significant increase in the search space size. This technique makes it feasible to use one base language model, but still achieve high-accuracy speech recognition results in all contexts.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xiaoyu Shen|AUTHOR Xiaoyu Shen]], [[Youssef Oualil|AUTHOR Youssef Oualil]], [[Clayton Greenberg|AUTHOR Clayton Greenberg]], [[Mittul Singh|AUTHOR Mittul Singh]], [[Dietrich Klakow|AUTHOR Dietrich Klakow]]
</p><p class="cpabstractcardaffiliationlist">Universität des Saarlandes, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 553–557
</span></p></div>
<div class="cpabstractcardabstract"><p>Language models (LMs) have gained dramatic improvement in the past years due to the wide application of neural networks. This raises the question of how far we are away from the perfect language model and how much more research is needed in language modelling. As for perplexity giving a value for human perplexity (as an upper bound of what is reasonably expected from an LM) is difficult. Word error rate (WER) has the disadvantage that it also measures the quality of other components of a speech recognizer like the acoustic model and the feature extraction. We therefore suggest evaluating LMs in a generative setting (which has been done before on selected hand-picked examples) and running a human evaluation on the generated sentences. The results imply that LMs need about 10 to 20 more years of research before human performance is reached. Moreover, we show that the human judgement scores on the generated sentences and perplexity are closely correlated. This leads to an estimated perplexity of 12 for an LM that would be able to pass the human judgement test in the setting we suggested.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Anna Moró|AUTHOR Anna Moró]], [[György Szaszák|AUTHOR György Szaszák]]
</p><p class="cpabstractcardaffiliationlist">BME, Hungary</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 558–562
</span></p></div>
<div class="cpabstractcardabstract"><p>For the automatic punctuation of Automatic Speech Recognition (ASR) output, both prosodic and text based features are used, often in combination. Pure prosody based approaches usually have low computation needs, introduce little latency (delay) and they are also more robust to ASR errors. Text based approaches usually yield better performance, they are however resource demanding (both regarding their training and computational needs), often introduce high time latency and are more sensitive to ASR errors. The present paper proposes a lightweight prosody based punctuation approach following a new paradigm: we argue in favour of an all-inclusive modelling of speech prosody instead of just relying on distinct acoustic markers: first, the entire phonological phrase structure is reconstructed, then its close correlation with punctuations is exploited in a sequence modelling approach with recurrent neural networks. With this tiny and easy to implement model we reach performance in Hungarian punctuation comparable to large, text based models for other languages by keeping resource requirements minimal and suitable for real-time operation with low latency.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Thomas Zenkel|AUTHOR Thomas Zenkel]]^^1^^, [[Ramon Sanabria|AUTHOR Ramon Sanabria]]^^1^^, [[Florian Metze|AUTHOR Florian Metze]]^^1^^, [[Jan Niehues|AUTHOR Jan Niehues]]^^2^^, [[Matthias Sperber|AUTHOR Matthias Sperber]]^^2^^, [[Sebastian Stüker|AUTHOR Sebastian Stüker]]^^2^^, [[Alex Waibel|AUTHOR Alex Waibel]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Carnegie Mellon University, USA; ^^2^^KIT, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 513–517
</span></p></div>
<div class="cpabstractcardabstract"><p>Connectionist Temporal Classification has recently attracted a lot of interest as it offers an elegant approach to building acoustic models (AMs) for speech recognition. The CTC loss function maps an input sequence of observable feature vectors to an output sequence of symbols. Output symbols are conditionally independent of each other under CTC loss, so a language model (LM) can be incorporated conveniently during decoding, retaining the traditional separation of acoustic and linguistic components in ASR.
For fixed vocabularies, Weighted Finite State Transducers provide a strong baseline for efficient integration of CTC AMs with n-gram LMs. Character-based neural LMs provide a straightforward solution for open vocabulary speech recognition and all-neural models, and can be decoded with beam search. Finally, sequence-to-sequence models can be used to translate a sequence of individual sounds into a word string.
We compare the performance of these three approaches, and analyze their error patterns, which provides insightful guidance for future research and development in this important area.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hossein Hadian|AUTHOR Hossein Hadian]]^^1^^, [[Daniel Povey|AUTHOR Daniel Povey]]^^2^^, [[Hossein Sameti|AUTHOR Hossein Sameti]]^^1^^, [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Sharif University of Technology, Iran; ^^2^^Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 518–522
</span></p></div>
<div class="cpabstractcardabstract"><p>We describe our work on incorporating probabilities of phone durations, learned by a neural net, into an ASR system. Phone durations are incorporated via lattice rescoring. The input features are derived from the phone identities of a context window of phones, plus the durations of preceding phones within that window. Unlike some previous work, our network outputs the probability of different durations (in frames) directly, up to a fixed limit. We evaluate this method on several large vocabulary tasks, and while we consistently see improvements in Word Error Rates, the improvements are smaller when the lattices are generated with neural net based acoustic models.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jan Chorowski|AUTHOR Jan Chorowski]]^^1^^, [[Navdeep Jaitly|AUTHOR Navdeep Jaitly]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Google, USA; ^^2^^NVIDIA, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 523–527
</span></p></div>
<div class="cpabstractcardabstract"><p>The recently proposed Sequence-to-Sequence (seq2seq) framework advocates replacing complex data processing pipelines, such as an entire automatic speech recognition system, with a single neural network trained in an end-to-end fashion. In this contribution, we analyse an attention-based seq2seq speech recognition system that directly transcribes recordings into characters. We observe two shortcomings: overconfidence in its predictions and a tendency to produce incomplete transcriptions when language models are used. We propose practical solutions to both problems achieving competitive speaker independent word error rates on the Wall Street Journal dataset: without separate language models we reach 10.6% WER, while together with a trigram language model, we reach 6.7% WER, a state-of-the-art result for HMM-free methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wenpeng Li|AUTHOR Wenpeng Li]]^^1^^, [[Binbin Zhang|AUTHOR Binbin Zhang]]^^1^^, [[Lei Xie|AUTHOR Lei Xie]]^^1^^, [[Dong Yu|AUTHOR Dong Yu]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Northwestern Polytechnical University, China; ^^2^^Tencent AI Lab, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 528–532
</span></p></div>
<div class="cpabstractcardabstract"><p>Deep learning models (DLMs) are state-of-the-art techniques in speech recognition. However, training good DLMs can be time consuming especially for production-size models and corpora. Although several parallel training algorithms have been proposed to improve training efficiency, there is no clear guidance on which one to choose for the task in hand due to lack of systematic and fair comparison among them. In this paper we aim at filling this gap by comparing four popular parallel training algorithms in speech recognition, namely asynchronous stochastic gradient descent (ASGD), blockwise model-update filtering (BMUF), bulk synchronous parallel (BSP) and elastic averaging stochastic gradient descent (EASGD), on 1000-hour LibriSpeech corpora using feed-forward deep neural networks (DNNs) and convolutional, long short-term memory, DNNs (CLDNNs). Based on our experiments, we recommend using BMUF as the top choice to train acoustic models since it is most stable, scales well with number of GPUs, can achieve reproducible results, and in many cases even outperforms single-GPU SGD. ASGD can be used as a substitute in some cases.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xu Xiang|AUTHOR Xu Xiang]], [[Yanmin Qian|AUTHOR Yanmin Qian]], [[Kai Yu|AUTHOR Kai Yu]]
</p><p class="cpabstractcardaffiliationlist">Shanghai Jiao Tong University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 533–537
</span></p></div>
<div class="cpabstractcardabstract"><p>Deep neural networks (DNNs) are widely used in most current automatic speech recognition (ASR) systems. To guarantee good recognition performance, DNNs usually require significant computational resources, which limits their application to low-power devices. Thus, it is appealing to reduce the computational cost while keeping the accuracy. In this work, in light of the success in image recognition, binary DNNs are utilized in speech recognition, which can achieve competitive performance and substantial speed up. To our knowledge, this is the first time that binary DNNs have been used in speech recognition. For binary DNNs, network weights and activations are constrained to be binary values, which enables faster matrix multiplication based on bit operations. By exploiting the hardware population count instructions, the proposed binary matrix multiplication can achieve 5~7 times speed up compared with highly optimized floating-point matrix multiplication. This results in much faster DNN inference since matrix multiplication is the most computationally expensive operation. Experiments on both TIMIT phone recognition and a 50-hour Switchboard speech recognition show that, binary DNNs can run about 4 times faster than standard DNNs during inference, with roughly 10.0% relative accuracy reduction.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Akshay Chandrashekaran|AUTHOR Akshay Chandrashekaran]]^^1^^, [[Ian Lane|AUTHOR Ian Lane]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Carnegie Mellon University, USA; ^^2^^Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 538–542
</span></p></div>
<div class="cpabstractcardabstract"><p>We describe the implementation of a hierarchical constrained Bayesian Optimization algorithm and its application to joint optimization of features, acoustic model structure and decoding parameters for deep neural network (DNN)-based large vocabulary continuous speech recognition (LVCSR) systems. Within our hierarchical optimization method we perform constrained Bayesian optimization jointly of feature hyper-parameters and acoustic model structure in the first-level, and then perform an iteration of constrained Bayesian optimization for the decoder hyper-parameters in the second. We show that the proposed hierarchical optimization method can generate a model with higher performance than a manually optimized system on a server platform. Furthermore, we demonstrate that the proposed framework can be used to automatically build real-time speech recognition systems for graphics processing unit (GPU)-enabled embedded platforms that retain similar accuracy to a server platform, while running with constrained computing resources.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shohei Toyama|AUTHOR Shohei Toyama]], [[Daisuke Saito|AUTHOR Daisuke Saito]], [[Nobuaki Minematsu|AUTHOR Nobuaki Minematsu]]
</p><p class="cpabstractcardaffiliationlist">University of Tokyo, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 543–547
</span></p></div>
<div class="cpabstractcardabstract"><p>In this study, we propose a new method of adapting language models for speech recognition using para-linguistic and extra-linguistic features in speech. When we talk with others, we often change the way of lexical choice and speaking style according to various contextual factors. This fact indicates that the performance of automatic speech recognition can be improved by taking the contextual factors into account, which can be estimated from speech acoustics. In this study, we attempt to find global and acoustic features that are associated with those contextual factors, then integrate those features into Recurrent Neural Network (RNN) language models for speech recognition. In experiments, using Japanese spontaneous speech corpora, we examine how i-vector and openSMILE are associated with contextual factors. Then, we use those features in the reranking process of RNN-based language models. Results show that perplexity is reduced by 16% relative and word error rate is reduced by 2.1% relative for highly emotional speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Vardaan Pahuja|AUTHOR Vardaan Pahuja]]^^1^^, [[Anirban Laha|AUTHOR Anirban Laha]]^^1^^, [[Shachar Mirkin|AUTHOR Shachar Mirkin]]^^2^^, [[Vikas Raykar|AUTHOR Vikas Raykar]]^^1^^, [[Lili Kotlerman|AUTHOR Lili Kotlerman]]^^2^^, [[Guy Lev|AUTHOR Guy Lev]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IBM, India; ^^2^^IBM, Israel</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 548–552
</span></p></div>
<div class="cpabstractcardabstract"><p>The stream of words produced by Automatic Speech Recognition (ASR) systems is typically devoid of punctuations and formatting. Most natural language processing applications expect segmented and well-formatted texts as input, which is not available in ASR output. This paper proposes a novel technique of jointly modeling multiple correlated tasks such as punctuation and capitalization using bidirectional recurrent neural networks, which leads to improved performance for each of these tasks. This method could be extended for joint modeling of any other correlated sequence labeling tasks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lei Wang|AUTHOR Lei Wang]], [[Fei Chen|AUTHOR Fei Chen]]
</p><p class="cpabstractcardaffiliationlist">SUSTech, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 563–566
</span></p></div>
<div class="cpabstractcardabstract"><p>Frequency compression is an effective alternative to conventional hearing aids amplification for patients with severe-to-profound middle- and high-frequency hearing loss and with some low-frequency residual hearing. In order to develop novel frequency compression strategy, it is important to first understand the mechanism for recognizing low-pass filtered speech, which simulates high-frequency hearing loss. The present work investigated three factors affecting the intelligibility of low-pass filtered speech, i.e., vowels, temporal fine-structure, and fundamental frequency (F0) contour. Mandarin sentences were processed to generate three types (i.e., vowel-only, fine-structure-only, and F0-contour-flattened) of low-pass filtered stimuli. Listening experiments with normal-hearing listeners showed that among the three factors assessed, the vowel-only low-pass filtered speech was the most intelligible, which was followed by the fine-structure-based low-pass filtered speech. Flattening F0-contour significantly deteriorated the intelligibility of low-pass filtered speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jaime Lorenzo-Trueba|AUTHOR Jaime Lorenzo-Trueba]]^^1^^, [[Cassia Valentini Botinhao|AUTHOR Cassia Valentini Botinhao]]^^2^^, [[Gustav Eje Henter|AUTHOR Gustav Eje Henter]]^^1^^, [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NII, Japan; ^^2^^University of Edinburgh, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 606–610
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper analyzes a) how often listeners interpret the emotional content of an utterance incorrectly when listening to vocoded or natural speech in adverse conditions; b) which noise conditions cause the most misperceptions; and c) which group of listeners misinterpret emotions the most. The long-term goal is to construct new emotional speech synthesizers that adapt to the environment and to the listener. We performed a large-scale listening test where over 400 listeners between the ages of 21 and 72 assessed natural and vocoded acted emotional speech stimuli. The stimuli had been artificially degraded using a room impulse response recorded in a car and various in-car noise types recorded in a real car. Experimental results show that the recognition rates for emotions and perceived emotional strength degrade as signal-to-noise ratio decreases. Interestingly, misperceptions seem to be more pronounced for negative and low-arousal emotions such as calmness or anger, while positive emotions such as happiness appear to be more robust to noise. An ANOVA analysis of listener meta-data further revealed that gender and age also influenced results, with elderly male listeners most likely to incorrectly identify emotions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Oliver Niebuhr|AUTHOR Oliver Niebuhr]]^^1^^, [[Jana Winkler|AUTHOR Jana Winkler]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Southern Denmark, Denmark; ^^2^^Christian-Albrechts-Universität zu Kiel, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 611–615
</span></p></div>
<div class="cpabstractcardabstract"><p>Previous studies showed for German and other (West) Germanic languages, including English, that perceived syllable prominence is primarily controlled by changes in duration and F0, with the latter cue being more powerful than the former. Our study is an initial approach to develop this prominence hierarchy further by putting numbers on the interplay of duration and F0. German listeners indirectly judged through lexical identification the relative prominence levels of two neighboring syllables. Results show that an increase in F0 of between 0.49 and 0.76 st is required to outweigh the prominence effect of a 30% increase in duration of a neighboring syllable. These numbers are fairly stable across a large range of absolute F0 and duration levels and hence useful in speech technology.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Luciana Marques|AUTHOR Luciana Marques]], [[Rebecca Scarborough|AUTHOR Rebecca Scarborough]]
</p><p class="cpabstractcardaffiliationlist">University of Colorado at Boulder, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 616–620
</span></p></div>
<div class="cpabstractcardabstract"><p>This study explores the relationship between identification, degree of nasality and vowel quality in oral, nasal and nasalized vowels in Brazilian Portuguese. Despite common belief that the language possesses contrastive nasal vowels, literature examination shows that nasal vowels may be followed by a nasal resonance, while nasalized vowels must be followed by a nasal consonant. It is argued that the nasal resonance may be the remains of a consonant that nasalizes the vowel, making nasal vowels simply coarticulatorily nasalized (e.g. [1]). If so, vowel nasality should not be more informative for the perception of a word containing a nasal vowel than for a word containing a nasalized vowel, as nasality is attributed to coarticulation. To test this hypothesis, randomized stimuli containing the first syllable of words with oral, nasal and nasalized vowels were presented to BP listeners who had to identify the stimuli original word. Preliminary results demonstrate that accuracy decreased for nasal and nasalized stimuli. A comparison between patterns of response to measured degrees of vowel acoustic nasality and formant values demonstrate that vowel quality differences may play a more relevant role in word identification than type of nasality in a vowel.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jonny Kim|AUTHOR Jonny Kim]], [[Katie Drager|AUTHOR Katie Drager]]
</p><p class="cpabstractcardaffiliationlist">University of Hawai‘i at Mānoa, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 621–625
</span></p></div>
<div class="cpabstractcardabstract"><p>Previous studies on spoken word recognition suggest that lexical access is facilitated when social information attributed to the voice is congruent with the social characteristics associated with the word. This paper builds on this work, presenting results from a lexical decision task in which target words associated with different age groups were preceded by sociophonetic primes. No age-related phonetic cues were provided within the target words; instead, the non-related prime words contained a sociophonetic variable involved in ongoing change. We found that age-associated words are recognized faster when preceded by an age-congruent phonetic variant in the prime word. The results demonstrate that lexical access is influenced by sociophonetic variation, a result which we argue arises from experience-based probabilities of covariation between sounds and words.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shi-yu Wang|AUTHOR Shi-yu Wang]], [[Fei Chen|AUTHOR Fei Chen]]
</p><p class="cpabstractcardaffiliationlist">SUSTech, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 567–570
</span></p></div>
<div class="cpabstractcardabstract"><p>An early study showed that temporally reversed speech may still be very intelligible. The present work further assessed the role of acoustic cues accounting for the intelligibility of temporally reversed speech. Mandarin sentences were edited to be temporally reversed. Experiment 1 preserved the original consonant segments, and experiment 2 only preserved the temporally reversed fine-structure waveform. Experimental results with normal-hearing listeners showed that for Mandarin speech, listeners could still perfectly understand the temporally reversed speech with a reversion duration up to 50 ms. Preserving original consonant segments did not significantly improve the intelligibility of the temporally reversed speech, suggesting that the reversion processing applied to vowels largely affected the intelligibility of temporally reversed speech. When the local short-time envelope waveform was removed, listeners could still understand stimuli with primarily temporally reversed fine-structure waveform, suggesting the perceptual role of temporally reversed fine-structure to the intelligibility of temporally reversed speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mako Ishida|AUTHOR Mako Ishida]]
</p><p class="cpabstractcardaffiliationlist">Sophia University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 571–575
</span></p></div>
<div class="cpabstractcardabstract"><p>The current study explores how native and non-native speakers cope with simultaneous articulatory and acoustic distortion in speech perception. The articulatory distortion was generated by asking a speaker to articulate target speech as fast as possible (fast speech). The acoustic distortion was created by dividing speech signals into small segments with equal time duration (e.g., 50 ms) from the onset of speech, and flipping every segment on a temporal axis, and putting them back together (locally time-reversed speech). This study explored how “locally time-reversed fast speech” was intelligible as compared to “locally time-reversed normal speech” measured in Ishida, Samuel, and Arai (2016). Participants were native English speakers and native Japanese speakers who spoke English as a second language. They listened to English words and pseudowords that contained a lot of stop consonants. These items were spoken fast and locally time-reversed at every 10, 20, 30, 40, 50, or 60 ms. In general, “locally time-reversed fast speech” became gradually unintelligible as the length of reversed segments increased. Native speakers generally understood locally time-reversed fast spoken words well but not pseudowords, while non-native speakers hardly understood either words or pseudowords. Language proficiency strongly supported the perceptual restoration of locally time-reversed fast speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[L. Ann Burchfield|AUTHOR L. Ann Burchfield]]^^1^^, [[San-hei Kenny Luk|AUTHOR San-hei Kenny Luk]]^^2^^, [[Mark Antoniou|AUTHOR Mark Antoniou]]^^1^^, [[Anne Cutler|AUTHOR Anne Cutler]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Western Sydney University, Australia; ^^2^^McMaster University, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 576–580
</span></p></div>
<div class="cpabstractcardabstract"><p>Lexically guided perceptual learning refers to the use of lexical knowledge to retune speech categories and thereby adapt to a novel talker’s pronunciation. This adaptation has been extensively documented, but primarily for segmental-based learning in English and Dutch. In languages with lexical tone, such as Mandarin Chinese, tonal categories can also be retuned in this way, but segmental category retuning had not been studied. We report two experiments in which Mandarin Chinese listeners were exposed to an ambiguous mixture of [f] and [s] in lexical contexts favoring an interpretation as either [f] or [s]. Listeners were subsequently more likely to identify sounds along a continuum between [f] and [s], and to interpret minimal word pairs, in a manner consistent with this exposure. Thus lexically guided perceptual learning of segmental categories had indeed taken place, consistent with suggestions that such learning may be a universally available adaptation process.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chris Davis|AUTHOR Chris Davis]], [[Chee Seng Chong|AUTHOR Chee Seng Chong]], [[Jeesun Kim|AUTHOR Jeesun Kim]]
</p><p class="cpabstractcardaffiliationlist">Western Sydney University, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 581–585
</span></p></div>
<div class="cpabstractcardabstract"><p>The current study investigated why the intelligibility of expressive speech in noise varies as a function of the emotion expressed (e.g., happiness being more intelligible than sadness), even though the signal-to-noise ratio is the same. We tested the straightforward proposal that the expression of some emotions affects speech intelligibility by shifting spectral energy above the energy profile of the noise masker. This was done by determining how the spectral profile of speech is affected by different emotional expressions using three different expressive speech databases. We then examined if these changes were correlated with scores produced by an objective intelligibility metric. We found a relatively consistent shift in spectral energy for different emotions across the databases and a high correlation between the extent of these changes and the objective intelligibility scores. Moreover, the pattern of intelligibility scores is consistent with human perception studies (although there was considerable individual variation). We suggest that the intelligibility of emotion speech in noise is simply related to its audibility as conditioned by the effect that the expression of emotion has on its spectral profile.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Merel Maslowski|AUTHOR Merel Maslowski]], [[Antje S. Meyer|AUTHOR Antje S. Meyer]], [[Hans Rutger Bosker|AUTHOR Hans Rutger Bosker]]
</p><p class="cpabstractcardaffiliationlist">MPI for Psycholinguistics, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 586–590
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech rate is known to modulate perception of temporally ambiguous speech sounds. For instance, a vowel may be perceived as short when the immediate speech context is slow, but as long when the context is fast. Yet, effects of long-term tracking of speech rate are largely unexplored. Two experiments tested whether long-term tracking of rate influences perception of the temporal Dutch vowel contrast /ɑ/-/a:/. In Experiment 1, one low-rate group listened to ‘neutral’ rate speech from talker A and to slow speech from talker B. Another high-rate group was exposed to the same neutral speech from A, but to fast speech from B. Between-group comparison of the ‘neutral’ trials revealed that the low-rate group reported a higher proportion of /a:/ in A’s ‘neutral’ speech, indicating that A sounded faster when B was slow. Experiment 2 tested whether one’s own speech rate also contributes to effects of long-term tracking of rate. Here, talker B’s speech was replaced by playback of participants’ own fast or slow speech. No evidence was found that one’s own voice affected perception of talker A in larger speech contexts. These results carry implications for our understanding of the mechanisms involved in rate-dependent speech perception and of dialogue.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Daniel Oliveira Peres|AUTHOR Daniel Oliveira Peres]]^^1^^, [[Dominic Watt|AUTHOR Dominic Watt]]^^2^^, [[Waldemar Ferreira Netto|AUTHOR Waldemar Ferreira Netto]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universidade de São Paulo, Brazil; ^^2^^University of York, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 591–595
</span></p></div>
<div class="cpabstractcardabstract"><p>Human listeners are adept at successfully recovering linguistically- and socially-relevant information from very brief utterances. Studies using the ‘thin-slicing’ approach show that accurate judgments of the speaker’s emotional state can be made from minimal quantities of speech. The present experiment tested the performance of listeners exposed to thin-sliced samples of spoken Brazilian Portuguese selected to exemplify four emotions (anger, fear, sadness, happiness). Rather than attaching verbal labels to the audio samples, participants were asked to pair the excerpts with averaged facial images illustrating the four emotion categories. Half of the listeners were native speakers of Brazilian Portuguese, while the others were native English speakers who knew no Portuguese. Both groups of participants were found to be accurate and consistent in assigning the audio samples to the expected emotion category, but some emotions were more reliably identified than others. Fear was misidentified most frequently. We conclude that the phonetic cues to speakers’ emotional states are sufficiently salient and differentiated that listeners need only a few syllables upon which to base judgments, and that as a species we owe our perceptual sensitivity in this area to the survival value of being able to make rapid decisions concerning the psychological states of others.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Adriana Guevara-Rukoz|AUTHOR Adriana Guevara-Rukoz]]^^1^^, [[Erika Parlato-Oliveira|AUTHOR Erika Parlato-Oliveira]]^^2^^, [[Shi Yu|AUTHOR Shi Yu]]^^1^^, [[Yuki Hirose|AUTHOR Yuki Hirose]]^^3^^, [[Sharon Peperkamp|AUTHOR Sharon Peperkamp]]^^1^^, [[Emmanuel Dupoux|AUTHOR Emmanuel Dupoux]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^ENS, France; ^^2^^Universidade Federal de Minas Gerais, Brazil; ^^3^^University of Tokyo, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 596–600
</span></p></div>
<div class="cpabstractcardabstract"><p>Past research has shown that sound sequences not permitted in our native language may be distorted by our perceptual system. A well-documented example is vowel epenthesis, a phenomenon by which listeners hallucinate non-existent vowels within illegal consonantal sequences. As reported in previous work, this occurs for instance in Japanese (JP) and Brazilian Portuguese (BP), languages for which the ‘default’ epenthetic vowels are /u/ and /i/, respectively. In a perceptual experiment, we corroborate the finding that the quality of this illusory vowel is language-dependent, but also that this default choice can be overridden by coarticulatory information present on the consonant cluster. In a second step, we analyse recordings of JP and BP speakers producing ‘epenthesized’ versions of stimuli from the perceptual task. Results reveal that the default vowel corresponds to the vowel with the most reduced acoustic characteristics and whose formants are acoustically closest to formant transitions present in consonantal clusters. Lastly, we model behavioural responses from the perceptual experiment with an exemplar model using dynamic time warping (DTW)-based similarity measures on MFCCs.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Toshie Matsui|AUTHOR Toshie Matsui]]^^1^^, [[Toshio Irino|AUTHOR Toshio Irino]]^^1^^, [[Kodai Yamamoto|AUTHOR Kodai Yamamoto]]^^1^^, [[Hideki Kawahara|AUTHOR Hideki Kawahara]]^^1^^, [[Roy D. Patterson|AUTHOR Roy D. Patterson]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Wakayama University, Japan; ^^2^^University of Cambridge, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 601–605
</span></p></div>
<div class="cpabstractcardabstract"><p>A number of studies, with either voiced or unvoiced speech, have demonstrated that a speaker’s geometric mean formant frequency (MFF) has a large effect on the perception of the speaker’s size, as would be expected. One study with unvoiced speech showed that lifting the slope of the speech spectrum by 6 dB/octave also led to a reduction in the perceived size of the speaker. This paper reports an analogous experiment to determine whether lifting the slope of the speech spectrum by 6 dB/octave affects the perception of speaker size with voiced speech (words). The results showed that voiced speech with high-frequency enhancement was perceived to arise from smaller speakers. On average, the point of subjective equality in MFF discrimination was reduced by about 5%. However, there were large individual differences; some listeners were effectively insensitive to spectral enhancement of 6 dB/octave; others showed a consistent effect of the same enhancement. The results suggest that models of speaker size perception will need to include a listener specific parameter for the effect of spectral slope.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Samuel Silva|AUTHOR Samuel Silva]], [[António Teixeira|AUTHOR António Teixeira]]
</p><p class="cpabstractcardaffiliationlist">Universidade de Aveiro, Portugal</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 626–630
</span></p></div>
<div class="cpabstractcardabstract"><p>Several technologies, such as electromagnetic midsagittal articulography (EMA) or real-time magnetic resonance (RT-MRI), enable studying the static and dynamic aspects of speech production. The resulting knowledge can, in turn, inform the improvement of speech production models, e.g., for articulatory speech synthesis, by enabling the identification of which articulators and gestures are involved in producing specific sounds.
The amount of data available from these technologies, and the need for a systematic quantitative assessment, advise tackling these matters through data-driven approaches, preferably unsupervised, since annotated data is scarce. In this context, a method for statistical identification of critical articulators has been proposed, in the literature, and successfully applied to EMA data. However, the many differences regarding the data available from other technologies, such as RT-MRI, and language-specific aspects create a challenging setting for its direct and wider applicability.
In this article, we address the steps needed to extend the applicability of the proposed statistical analyses, initially applied to EMA, to an existing RT-MRI corpus and test it for a different language, European Portuguese. The obtained results, for three speakers, and considering 33 phonemes, provide phonologically meaningful critical articulator outcomes and show evidence of the applicability of the method to RT-MRI.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Takayuki Ito|AUTHOR Takayuki Ito]]^^1^^, [[Hiroki Ohashi|AUTHOR Hiroki Ohashi]]^^2^^, [[Eva Montas|AUTHOR Eva Montas]]^^2^^, [[Vincent L. Gracco|AUTHOR Vincent L. Gracco]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^GIPSA, France; ^^2^^Haskins Laboratories, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 669–673
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech perception often involves multisensory processing. Although previous studies have demonstrated visual [1, 2] and somatosensory interactions [3, 4] with auditory processing, it is not clear whether somatosensory information can contribute to the processing of audio-visual speech perception. This study explored the neural consequence of somatosensory interactions in audio-visual speech processing. We assessed whether somatosensory orofacial stimulation influenced event-related potentials (ERPs) in response to an audio-visual speech illusion (the McGurk Effect [1]). 64 scalp sites of ERPs were recorded in response to audio-visual speech stimulation and somatosensory stimulation. In the audio-visual condition, an auditory stimulus /ba/ was synchronized with the video of congruent facial motion (the production of /ba/) or incongruent facial motion (the production of the /da/: McGurk condition). These two audio-visual stimulations were randomly presented with and without somatosensory stimulation associated with facial skin deformation. We found ERPs differences associated with the McGurk effect in the presence of the somatosensory conditions. ERPs for the McGurk effect reliably diverge around 280 ms after auditory onset. The results demonstrate a change of cortical potential of audio-visual processing due to somatosensory inputs and suggest that somatosensory information encoding facial motion also influences speech processing.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lena F. Renner|AUTHOR Lena F. Renner]], [[Marcin Włodarczak|AUTHOR Marcin Włodarczak]]
</p><p class="cpabstractcardaffiliationlist">Stockholm University, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 674–678
</span></p></div>
<div class="cpabstractcardabstract"><p>In the present study, we investigate pupil dilation as a measure of lexical retrieval. We captured pupil size changes in reaction to a match or a mismatch between a picture and an auditorily presented word in 120 trials presented to ten native speakers of Swedish. In each trial a picture was displayed for six seconds, and 2.5 seconds into the trial the word was played through loudspeakers. The picture and the word were matching in half of the trials, and all stimuli were common high-frequency monosyllabic Swedish words. The difference in pupil diameter trajectories across the two conditions was analyzed with Functional Data Analysis. In line with the expectations, the results indicate greater dilation in the mismatch condition starting from around 800 ms after the stimulus onset. Given that similar processes were observed in brain imaging studies, pupil dilation measurements seem to provide an appropriate tool to reveal lexical retrieval. The results suggest that pupillometry could be a viable alternative to existing methods in the field of speech and language processing, for instance across different ages and clinical groups.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Win Thuzar Kyaw|AUTHOR Win Thuzar Kyaw]], [[Yoshinori Sagisaka|AUTHOR Yoshinori Sagisaka]]
</p><p class="cpabstractcardaffiliationlist">Waseda University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 679–683
</span></p></div>
<div class="cpabstractcardabstract"><p>Motivated by the success of speech characteristics representation by color attributes, we analyzed the cross-modal sentiment correlations between voice source characteristics and textural image characteristics. For the analysis, we employed vowel sounds with representative three phonation differences (modal, creaky and breathy) and 36 texture images with 36 semantic attributes (e.g., banded, cracked and scaly) annotated one semantic attribute for each texture. By asking 40 subjects to select the most fitted textures from 36 figures with different textures after listening to 30 speech samples with different phonations, we measured the correlations between acoustic parameters showing voice source variations and the parameters of selected textural image differences showing coarseness, contrast, directionality, busyness, complexity and strength. From the texture classifications, voice characteristics can be roughly characterized by textural differences: modal — gauzy, banded and smeared, creaky — porous, crystalline, cracked and scaly, breathy — smeared, freckled and stained. We have also found significant correlations between voice source acoustic parameters and textural parameters. These correlations suggest the possibility of cross-modal mapping between voice source characteristics and textural parameters, which enables visualization of speech information with source variations reflecting human sentiment perception.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Daryush D. Mehta|AUTHOR Daryush D. Mehta]]^^1^^, [[Patrick C. Chwalek|AUTHOR Patrick C. Chwalek]]^^2^^, [[Thomas F. Quatieri|AUTHOR Thomas F. Quatieri]]^^2^^, [[Laura J. Brattain|AUTHOR Laura J. Brattain]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Massachusetts General Hospital, USA; ^^2^^MIT Lincoln Laboratory, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 684–688
</span></p></div>
<div class="cpabstractcardabstract"><p>Ambulatory monitoring of real-world voice characteristics and behavior has the potential to provide important assessment of voice and speech disorders and psychological and emotional state. In this paper, we report on the novel development of a lightweight, wireless voice monitor that synchronously records dual-channel data from an acoustic microphone and a neck-surface accelerometer embedded on a flex circuit. In this paper, Lombard speech effects were investigated in pilot data from four adult speakers with normal vocal function who read a phonetically balanced paragraph in the presence of different ambient acoustic noise levels. Whereas the signal-to-noise ratio (SNR) of the microphone signal decreased in the presence of increasing ambient noise level, the SNR of the accelerometer sensor remained high. Lombard speech properties were thus robustly computed from the accelerometer signal and observed in all four speakers who exhibited increases in average estimates of sound pressure level (+2.3 dB), fundamental frequency (+21.4 Hz), and cepstral peak prominence (+1.3 dB) from quiet to loud ambient conditions. Future work calls for ambulatory data collection in naturalistic environments, where the microphone acts as a sound level meter and the accelerometer functions as a noise-robust voicing sensor to assess voice disorders, neurological conditions, and cognitive load.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Andrea Bandini|AUTHOR Andrea Bandini]], [[Aravind Namasivayam|AUTHOR Aravind Namasivayam]], [[Yana Yunusova|AUTHOR Yana Yunusova]]
</p><p class="cpabstractcardaffiliationlist">University Health Network, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 689–693
</span></p></div>
<div class="cpabstractcardabstract"><p>Facial (e.g., lips and jaw) movements can provide important information for the assessment, diagnosis and treatment of motor speech disorders. However, due to the high costs of the instrumentation used to record speech movements, such information is typically limited to research studies. With the recent development of depth sensors and efficient algorithms for facial tracking, clinical applications of this technology may be possible. Although lip tracking methods have been validated in the past, jaw tracking remains a challenge. In this study, we assessed the accuracy of tracking jaw movements with a video-based system composed of a face tracker and a depth sensor, specifically developed for short range applications (Intel RealSense SR300). The assessment was performed on healthy subjects during speech and non-speech tasks. Preliminary results showed that jaw movements can be tracked with reasonable accuracy (RMSE~2mm), with better performance for slow movements. Further tests are needed in order to improve the performance of these systems and develop accurate methodologies that can reveal subtle changes in jaw movements for the assessment and treatment of motor speech disorders.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sunil Kumar S.B.|AUTHOR Sunil Kumar S.B.]], [[K. Sreenivasa Rao|AUTHOR K. Sreenivasa Rao]], [[Tanumay Mandal|AUTHOR Tanumay Mandal]]
</p><p class="cpabstractcardaffiliationlist">IIT Kharagpur, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 694–698
</span></p></div>
<div class="cpabstractcardabstract"><p>Synchronization of speech and corresponding Electroglottographic (EGG) signal is very helpful for speech processing research and development. During simultaneous recording of speech and EGG signals, the speech signal will be delayed by the duration corresponding to the speech wave propagation from the glottis to the microphone relative to the EGG signal. Even in the same session of recording, the delay between the speech and the EGG signals is varying due to the natural movement of the speaker’s head and movement of the microphone in case the microphone is held by hand. To study and model the information within glottal cycles, precise synchronization of speech and EGG signals is of utmost necessity. In this work, we propose a method for synchronization of speech and EGG signals based on the glottal activity information present in the signals. The performance of the proposed method is demonstrated by estimation of delay between the two signals (speech signals and corresponding EGG signals) and synchronizing these signals by compensating the estimated delay. The CMU-Arctic database, consisting of simultaneous recordings of the speech and the EGG signals, is used for the evaluation of the proposed method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Anna Sara H. Romøren|AUTHOR Anna Sara H. Romøren]]^^1^^, [[Aoju Chen|AUTHOR Aoju Chen]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^HiOA, Norway; ^^2^^Universiteit Utrecht, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 699–703
</span></p></div>
<div class="cpabstractcardabstract"><p>In order to be efficient communicators, children need to adapt their utterances to the common ground shared between themselves and their conversational partners. One way of doing this is by prosodically highlighting focal information. In this paper we look at one specific prosodic manipulation, namely word duration, asking whether Swedish-speaking children lengthen words to mark focus, as compared to adult controls. To the best of our knowledge, this is the first study on the relationship between focus and word duration in Swedish-speaking children.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Krishna Somandepalli|AUTHOR Krishna Somandepalli]], [[Asterios Toutios|AUTHOR Asterios Toutios]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]
</p><p class="cpabstractcardaffiliationlist">University of Southern California, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 631–635
</span></p></div>
<div class="cpabstractcardabstract"><p>Recent developments in real-time magnetic resonance imaging (rtMRI) have enabled the study of vocal tract dynamics during production of running speech at high frame rates (e.g., 83 frames per second). Such large amounts of acquired data require scalable automated methods to identify different articulators (e.g., tongue, velum) for further analysis. In this paper, we propose a convolutional neural network with an encoder-decoder architecture to jointly detect the relevant air-tissue boundaries as well as to label them, which we refer to as ‘semantic edge detection’. We pose this as a pixel labeling problem, with the outline contour of each articulator of interest as positive class and the remaining tissue and airway as negative classes. We introduce a loss function modified with additional penalty for misclassification at air-tissue boundaries to account for class imbalance and improve edge localization. We then use a greedy search algorithm to draw contours from the probability maps of the positive classes predicted by the network. The articulator contours obtained by our method are comparable to the true labels generated by iteratively fitting a manually created subject-specific template. Our results generalize well across subjects and different vocal tract postures, demonstrating a significant improvement over the structured regression baseline.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sasan Asadiabadi|AUTHOR Sasan Asadiabadi]], [[Engin Erzin|AUTHOR Engin Erzin]]
</p><p class="cpabstractcardaffiliationlist">Koç Üniversitesi, Turkey</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 636–640
</span></p></div>
<div class="cpabstractcardabstract"><p>Knowledge about the dynamic shape of the vocal tract is the basis of many speech production applications such as, articulatory analysis, modeling and synthesis. Vocal tract airway tissue boundary segmentation in the mid-sagittal plane is necessary as an initial step for extraction of the cross-sectional area function. This segmentation problem is however challenging due to poor resolution of real-time speech MRI, grainy noise and the rapidly varying vocal tract shape. We present a novel approach to vocal tract airway tissue boundary tracking by training a statistical shape and appearance model for human vocal tract. We manually segment a set of vocal tract profiles and utilize a statistical approach to train a shape and appearance model for the tract. An active contour approach is employed to segment the airway tissue boundaries of the vocal tract while restricting the curve movement to the trained shape and appearance model. Then the contours in subsequent frames are tracked using dense motion estimation methods. Experimental evaluations over the mean square error metric indicate significant improvements compared to the state-of-the-art.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[T.V. Ananthapadmanabha|AUTHOR T.V. Ananthapadmanabha]]^^1^^, [[A.G. Ramakrishnan|AUTHOR A.G. Ramakrishnan]]^^2^^, [[Shubham Sharma|AUTHOR Shubham Sharma]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^VSS, India; ^^2^^Indian Institute of Science, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 641–644
</span></p></div>
<div class="cpabstractcardabstract"><p>Spectral integration is a subjective phenomenon in which a vowel with two formants, spaced below a critical distance, is perceived to be of the same phonetic quality as that of a vowel with a single formant. It is tedious to conduct perceptual tests to determine the critical distance for various experimental conditions. To alleviate this difficulty, we propose an objective critical distance (OCD) that can be determined from the spectral envelope of a speech signal. OCD is defined as that spacing between the adjacent formants when the level of the spectral valley between them reaches the mean spectral value. The measured OCD lies in the same range of 3 to 3.5 Bark as the subjective critical distance for similar experimental conditions giving credibility to the definition. However, it is noted that OCD for front vowels is significantly different from that for the back vowels.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tanner Sorensen|AUTHOR Tanner Sorensen]]^^1^^, [[Zisis Skordilis|AUTHOR Zisis Skordilis]]^^1^^, [[Asterios Toutios|AUTHOR Asterios Toutios]]^^1^^, [[Yoon-Chul Kim|AUTHOR Yoon-Chul Kim]]^^2^^, [[Yinghua Zhu|AUTHOR Yinghua Zhu]]^^3^^, [[Jangwon Kim|AUTHOR Jangwon Kim]]^^4^^, [[Adam Lammert|AUTHOR Adam Lammert]]^^5^^, [[Vikram Ramanarayanan|AUTHOR Vikram Ramanarayanan]]^^6^^, [[Louis Goldstein|AUTHOR Louis Goldstein]]^^1^^, [[Dani Byrd|AUTHOR Dani Byrd]]^^1^^, [[Krishna Nayak|AUTHOR Krishna Nayak]]^^1^^, [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Southern California, USA; ^^2^^Samsung Medical Center, Korea; ^^3^^Google, USA; ^^4^^Canary Speech, USA; ^^5^^MIT Lincoln Laboratory, USA; ^^6^^Educational Testing Service, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 645–649
</span></p></div>
<div class="cpabstractcardabstract"><p>We present the USC Speech and Vocal Tract Morphology MRI Database, a 17-speaker magnetic resonance imaging database for speech research. The database consists of real-time magnetic resonance images (rtMRI) of dynamic vocal tract shaping, denoised audio recorded simultaneously with rtMRI, and 3D volumetric MRI of vocal tract shapes during sustained speech sounds. We acquired 2D real-time MRI of vocal tract shaping during consonant-vowel-consonant sequences, vowel-consonant-vowel sequences, read passages, and spontaneous speech. We acquired 3D volumetric MRI of the full set of vowels and continuant consonants of American English. Each 3D volumetric MRI was acquired in one 7-second scan in which the participant sustained the sound. This is the first database to combine rtMRI of dynamic vocal tract shaping and 3D volumetric MRI of the entire vocal tract. The database provides a unique resource with which to examine the relationship between vocal tract morphology and vocal tract function. The USC Speech and Vocal Tract Morphology MRI Database is provided free for research use at ‘http://sail.usc.edu/span/morphdb’.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chong Cao|AUTHOR Chong Cao]], [[Yanlu Xie|AUTHOR Yanlu Xie]], [[Qi Zhang|AUTHOR Qi Zhang]], [[Jinsong Zhang|AUTHOR Jinsong Zhang]]
</p><p class="cpabstractcardaffiliationlist">BLCU, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 650–654
</span></p></div>
<div class="cpabstractcardabstract"><p>Consonants in /CV/ syllables usually have potential influence on onset fundamental frequency (i.e., onset f0) of succeeding vowels. Previous studies showed such an effect with respect to the aspiration of stops with evidence from Mandarin, a tonal language. However, few studies have investigated the effect on onset f0 from the aspiration of affricates. The differences between stops and affricates in aspiration leave space for further investigations. We examined the effect of affricate’s aspiration on the realization of onset f0 of following vowels in the form of isolated syllables and continuous speech by reference to a minimal pair of syllables which differ only in aspiration. Besides, we conducted tone identification tests using two sets of tone continua based on the same minimal pair of syllables. Experimental results showed that the aspirated syllables increased the onset f0 of following vowels compared with unaspirated counterparts in both kinds of contexts. However, the magnitude of the differences varied with tones. Moreover, the perception results showed that aspirated syllables tended to be perceived as tones that have relatively lower onset f0, which in turn supported the production result. The present study may have applications for speech identification and speech synthesis.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Matthias K. Franken|AUTHOR Matthias K. Franken]], [[Frank Eisner|AUTHOR Frank Eisner]], [[Jan-Mathijs Schoffelen|AUTHOR Jan-Mathijs Schoffelen]], [[Daniel J. Acheson|AUTHOR Daniel J. Acheson]], [[Peter Hagoort|AUTHOR Peter Hagoort]], [[James M. McQueen|AUTHOR James M. McQueen]]
</p><p class="cpabstractcardaffiliationlist">Radboud Universiteit Nijmegen, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 655–658
</span></p></div>
<div class="cpabstractcardabstract"><p>One of the most daunting tasks of a listener is to map a continuous auditory stream onto known speech sound categories and lexical items. A major issue with this mapping problem is the variability in the acoustic realizations of sound categories, both within and across speakers. Past research has suggested listeners may use visual information (e.g., lip-reading) to calibrate these speech categories to the current speaker. Previous studies have focused on audiovisual recalibration of consonant categories. The present study explores whether vowel categorization, which is known to show less sharply defined category boundaries, also benefits from visual cues.
Participants were exposed to videos of a speaker pronouncing one out of two vowels, paired with audio that was ambiguous between the two vowels. After exposure, it was found that participants had recalibrated their vowel categories. In addition, individual variability in audiovisual recalibration is discussed. It is suggested that listeners’ category sharpness may be related to the weight they assign to visual information in audiovisual speech perception. Specifically, listeners with less sharp categories assign more weight to visual information during audiovisual speech recognition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Judith Peters|AUTHOR Judith Peters]], [[Marieke Hoetjes|AUTHOR Marieke Hoetjes]]
</p><p class="cpabstractcardaffiliationlist">Radboud Universiteit Nijmegen, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 659–663
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech perception is multimodal, with not only speech, but also gesture presumably playing a role in how a message is perceived. However, there have not been many studies on the effect that hand gestures may have on speech perception in general, and on persuasive speech in particular. Moreover, we do not yet know whether an effect of gestures may be larger when addressees are not involved in the topic of the discourse, and are therefore more focused on peripheral cues, rather than the content of the message. In the current study participants were shown a speech with or without gestures. Some participants were involved in the topic of the speech, others were not. We studied five measures of persuasiveness. Results showed that for all but one measure, viewing the video with accompanying gestures made the speech more persuasive. In addition, there were several interactions, showing that the performance of the speaker and the factual accuracy of the speech scored high especially for those participants who not only saw gestures but were also not involved in the topic of the speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wei Lai|AUTHOR Wei Lai]]
</p><p class="cpabstractcardaffiliationlist">University of Pennsylvania, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 664–668
</span></p></div>
<div class="cpabstractcardabstract"><p>This study investigated the auditory-visual integration of talker gender in the perception of tone variances. Two experiments were conducted to evaluate how listeners use the information of talker gender to adjust their expectation towards speakers’ pitch range and uncover intended tonal targets in Cantonese tone perception. Results from an audio-only tone identification task showed that tone categorization along the same pitch continuum shifted under different conditions of voice gender. Listeners generally heard a tone of lower pitch when the word was produced by a female voice, while they heard a tone of higher pitch when the word was produced at the same pitch level by a male voice. Results from an audio-visual tone identification task showed that tone categorization along the same pitch continuum shifted under different conditions of face gender, despite the fact that the photos of different genders were disguised for the same set of stimuli in identical voices with identical pitch heights. These findings show that gender normalization plays a role in uncovering linguistic pitch targets, and lend support to a hypothesis according to which listeners make use of socially constructed stereotypes to facilitate their basic phonological categorization in speech perception and processing.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shiyu Zhou|AUTHOR Shiyu Zhou]], [[Yuanyuan Zhao|AUTHOR Yuanyuan Zhao]], [[Shuang Xu|AUTHOR Shuang Xu]], [[Bo Xu|AUTHOR Bo Xu]]
</p><p class="cpabstractcardaffiliationlist">Chinese Academy of Sciences, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 704–708
</span></p></div>
<div class="cpabstractcardabstract"><p>The shared-hidden-layer multilingual deep neural network (SHL-MDNN), in which the hidden layers of feed-forward deep neural network (DNN) are shared across multiple languages while the softmax layers are language dependent, has been shown to be effective on acoustic modeling of multilingual low-resource speech recognition. In this paper, we propose that the shared-hidden-layer with Long Short-Term Memory (LSTM) recurrent neural networks can achieve further performance improvement considering LSTM has outperformed DNN as the acoustic model of automatic speech recognition (ASR). Moreover, we reveal that shared-hidden-layer multilingual LSTM (SHL-MLSTM) with residual learning can yield additional moderate but consistent gain from multilingual tasks given the fact that residual learning can alleviate the degradation problem of deep LSTMs. Experimental results demonstrate that SHL-MLSTM can relatively reduce word error rate (WER) by 2.1–6.8% over SHL-MDNN trained using six languages and 2.6–7.3% over monolingual LSTM trained using the language specific data on CALLHOME datasets. Additional WER reduction, about relatively 2% over SHL-MLSTM, can be obtained through residual learning on CALLHOME datasets, which demonstrates residual learning is useful for SHL-MLSTM on multilingual low-resource ASR.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Joachim Fainberg|AUTHOR Joachim Fainberg]], [[Steve Renals|AUTHOR Steve Renals]], [[Peter Bell|AUTHOR Peter Bell]]
</p><p class="cpabstractcardaffiliationlist">University of Edinburgh, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 749–753
</span></p></div>
<div class="cpabstractcardabstract"><p>Adapting acoustic models jointly to both speaker and environment has been shown to be effective. In many realistic scenarios, however, either the speaker or environment at test time might be unknown, or there may be insufficient data to learn a joint transform. Generating independent speaker and environment transforms improves the match of an acoustic model to unseen combinations. Using i-vectors, we demonstrate that it is possible to factorise speaker or environment information using multi-condition training with neural networks. Specifically, we extract bottleneck features from networks trained to classify either speakers or environments. We perform experiments on the Wall Street Journal corpus combined with environment noise from the Diverse Environments Multichannel Acoustic Noise Database. Using the factorised i-vectors we show improvements in word error rates on perturbed versions of the eval92 and dev93 test sets, both when one factor is missing and when the factors are seen but not in the desired combination.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Olivier Siohan|AUTHOR Olivier Siohan]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 709–713
</span></p></div>
<div class="cpabstractcardabstract"><p>Phone-sized acoustic units such as triphones cannot properly capture the long-term co-articulation effects that occur in spontaneous speech. For that reason, it is interesting to construct acoustic units covering a longer time-span such as syllables or words. Unfortunately, the frequency distribution of those units is such that a few high frequency units account for most of the tokens, while many units rarely occur. As a result, those units suffer from data sparsity and can be difficult to train. In this paper we propose a scalable data-driven approach to construct a set of salient units made of sequences of phones called M-phones. We illustrate that since the decomposition of a word sequence into a sequence of M-phones is ambiguous, those units are well suited to be used with a connectionist temporal classification (CTC) approach which does not rely on an explicit frame-level segmentation of the word sequence into a sequence of acoustic units. Experiments are presented on a Voice Search task using 12,500 hours of training data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sibo Tong|AUTHOR Sibo Tong]], [[Philip N. Garner|AUTHOR Philip N. Garner]], [[Hervé Bourlard|AUTHOR Hervé Bourlard]]
</p><p class="cpabstractcardaffiliationlist">Idiap Research Institute, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 714–718
</span></p></div>
<div class="cpabstractcardabstract"><p>Different training and adaptation techniques for multilingual Automatic Speech Recognition (ASR) are explored in the context of hybrid systems, exploiting Deep Neural Networks (DNN) and Hidden Markov Models (HMM). In multilingual DNN training, the hidden layers (possibly extracting bottleneck features) are usually shared across languages, and the output layer can either model multiple sets of language-specific senones or one single universal IPA-based multilingual senone set. Both architectures are investigated, exploiting and comparing different language adaptive training (LAT) techniques originating from successful DNN-based speaker-adaptation. More specifically, speaker adaptive training methods such as Cluster Adaptive Training (CAT) and Learning Hidden Unit Contribution (LHUC) are considered. In addition, a language adaptive output architecture for IPA-based universal DNN is also studied and tested.
Experiments show that LAT improves the performance and adaptation on the top layer further improves the accuracy. By combining state-level minimum Bayes risk (sMBR) sequence training with LAT, we show that a language adaptively trained IPA-based universal DNN outperforms a monolingually sequence trained model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Martin Karafiát|AUTHOR Martin Karafiát]], [[Murali Karthick Baskar|AUTHOR Murali Karthick Baskar]], [[Pavel Matějka|AUTHOR Pavel Matějka]], [[Karel Veselý|AUTHOR Karel Veselý]], [[František Grézl|AUTHOR František Grézl]], [[Lukáš Burget|AUTHOR Lukáš Burget]], [[Jan Černocký|AUTHOR Jan Černocký]]
</p><p class="cpabstractcardaffiliationlist">Brno University of Technology, Czech Republic</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 719–723
</span></p></div>
<div class="cpabstractcardabstract"><p>The paper provides an analysis of BUT automatic speech recognition systems (ASR) built for the 2016 IARPA Babel evaluation. The IARPA Babel program concentrates on building ASR system for many low resource languages, where only a limited amount of transcribed speech is available for each language. In such scenario, we found essential to train the ASR systems in a multilingual fashion. In this work, we report superior results obtained with pre-trained multilingual BLSTM acoustic models, where we used multi-task training with separate classification layer for each language. The results reported on three Babel Year 4 languages show over 3% absolute WER reductions obtained from such multilingual pre-training. Experiments with different input features show that the multilingual BLSTM performs the best with simple log-Mel-filter-bank outputs, which makes our previously successful multilingual stack bottleneck features with CMLLR adaptation obsolete. Finally, we experiment with different configurations of i-vector based speaker adaptation in the mono- and multi-lingual BLSTM architectures. This results in additional WER reductions over 1% absolute.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Marco Matassoni|AUTHOR Marco Matassoni]], [[Alessio Brutti|AUTHOR Alessio Brutti]], [[Daniele Falavigna|AUTHOR Daniele Falavigna]]
</p><p class="cpabstractcardaffiliationlist">FBK, Italy</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 724–728
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech enhancement directly using deep neural network (DNN) is of major interest due to the capability of DNN to tangibly reduce the impact of noisy conditions in speech recognition tasks. Similarly, DNN based acoustic model adaptation to new environmental conditions is another challenging topic. In this paper we present an analysis of acoustic model adaptation in presence of a disjoint speech enhancement component, identifying an optimal setting for improving the speech recognition performance. Adaptation is derived from a consolidated technique that introduces in the training process a regularization term to prevent overfitting. We propose to optimize the adaptation of the clean acoustic models towards the enhanced speech by tuning the regularization term based on the degree of enhancement. Experiments on a popular noisy dataset (e.g., AURORA-4) demonstrate the validity of the proposed approach.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Younggwan Kim|AUTHOR Younggwan Kim]], [[Hyungjun Lim|AUTHOR Hyungjun Lim]], [[Jahyun Goo|AUTHOR Jahyun Goo]], [[Hoirin Kim|AUTHOR Hoirin Kim]]
</p><p class="cpabstractcardaffiliationlist">KAIST, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 729–733
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, speaker adaptation methods in deep neural networks (DNNs) have been widely studied for automatic speech recognition. However, almost all adaptation methods for DNNs have to consider various heuristic conditions such as mini-batch sizes, learning rate scheduling, stopping criteria, and initialization conditions because of the inherent property of a stochastic gradient descent (SGD)-based training process. Unfortunately, those heuristic conditions are hard to be properly tuned. To alleviate those difficulties, in this paper, we propose a least squares regression-based speaker adaptation method in a DNN framework utilizing posterior mean of each class. Also, we show how the proposed method can provide a unique solution which is quite easy and fast to calculate without SGD. The proposed method was evaluated in the TED-LIUM corpus. Experimental results showed that the proposed method achieved up to a 4.6% relative improvement against a speaker independent DNN. In addition, we report further performance improvement of the proposed method with speaker-adapted features.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Van Hai Do|AUTHOR Van Hai Do]]^^1^^, [[Nancy F. Chen|AUTHOR Nancy F. Chen]]^^2^^, [[Boon Pang Lim|AUTHOR Boon Pang Lim]]^^2^^, [[Mark Hasegawa-Johnson|AUTHOR Mark Hasegawa-Johnson]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Viettel Group, Vietnam; ^^2^^A*STAR, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 734–738
</span></p></div>
<div class="cpabstractcardabstract"><p>It is challenging to obtain large amounts of native (matched) labels for audio in under-resourced languages. This could be due to a lack of literate speakers of the language or a lack of universally acknowledged orthography. One solution is to increase the amount of labeled data by using mismatched transcription, which employs transcribers who do not speak the language (in place of native speakers), to transcribe what they hear as nonsense speech in their own language (e.g., Mandarin). This paper presents a multi-task learning framework where the DNN acoustic model is simultaneously trained using both a limited amount of native (matched) transcription and a larger set of mismatched transcription. We find that by using a multi-task learning framework, we achieve improvements over monolingual baselines and previously proposed mismatched transcription adaptation techniques. In addition, we show that using alignments provided by a GMM adapted by mismatched transcription further improves acoustic modeling performance. Our experiments on Georgian data from the IARPA Babel program show the effectiveness of the proposed method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Neethu Mariam Joy|AUTHOR Neethu Mariam Joy]], [[Sandeep Reddy Kothinti|AUTHOR Sandeep Reddy Kothinti]], [[S. Umesh|AUTHOR S. Umesh]], [[Basil Abraham|AUTHOR Basil Abraham]]
</p><p class="cpabstractcardaffiliationlist">IIT Madras, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 739–743
</span></p></div>
<div class="cpabstractcardabstract"><p>Generalized distillation framework has been shown to be effective in speech enhancement in the past. We extend this idea to speaker normalization without any explicit adaptation data in this paper. In the generalized distillation framework, we assume the presence of some “privileged” information to guide the training process in addition to the training data. In the proposed approach, the privileged information is obtained from a “teacher” model, trained on speaker-normalized FMLLR features. The “student” model is trained on un-normalized filterbank features and uses teacher’s supervision for cross-entropy training. The proposed distillation method does not need first pass decode information during testing and imposes no constraints on the duration of the test data for computing speaker-specific transforms unlike in FMLLR or i-vector. Experiments done on Switchboard and AMI corpus show that the generalized distillation framework shows improvement over un-normalized features with or without i-vectors.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lahiru Samarakoon|AUTHOR Lahiru Samarakoon]]^^1^^, [[Brian Mak|AUTHOR Brian Mak]]^^1^^, [[Khe Chai Sim|AUTHOR Khe Chai Sim]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^HKUST, China; ^^2^^Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 744–748
</span></p></div>
<div class="cpabstractcardabstract"><p>Factorized Hidden Layer (FHL) adaptation has been proposed for speaker adaptation of deep neural network (DNN) based acoustic models. In FHL adaptation, a speaker-dependent (SD) transformation matrix and an SD bias are included in addition to the standard affine transformation. The SD transformation is a linear combination of rank-1 matrices whereas the SD bias is a linear combination of vectors. Recently, the Long Short-Term Memory (LSTM) Recurrent Neural Networks (RNNs) have shown to outperform DNN acoustic models in many Automatic Speech Recognition (ASR) tasks. In this work, we investigate the effectiveness of SD transformations for LSTM-RNN acoustic models. Experimental results show that when combined with scaling of LSTM cell states’ outputs, SD transformations achieve 2.3% and 2.1% absolute improvements over the baseline LSTM systems for the AMI IHM and AMI SDM tasks respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Richard Sproat|AUTHOR Richard Sproat]]^^1^^, [[Navdeep Jaitly|AUTHOR Navdeep Jaitly]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Google, USA; ^^2^^NVIDIA, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 754–758
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a recurrent neural net (RNN) model of text normalization — defined as the mapping of written text to its spoken form, and a description of the open-source dataset that we used in our experiments. We show that while the RNN model achieves very high overall accuracies, there remain errors that would be unacceptable in a speech application like TTS. We then show that a simple FST-based filter can help mitigate those errors. Even with that mitigation challenges remain, and we end the paper outlining some possible solutions. In releasing our data we are thereby inviting others to help solve this problem.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[M. Sam Ribeiro|AUTHOR M. Sam Ribeiro]], [[Oliver Watts|AUTHOR Oliver Watts]], [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]
</p><p class="cpabstractcardaffiliationlist">University of Edinburgh, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 799–803
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a simple count-based approach to learning word vector representations by leveraging statistics of co-occurrences between text and speech. This type of representation requires two discrete sequences of units defined across modalities. Two possible methods for the discretization of an acoustic signal are presented, which are then applied to fundamental frequency and energy contours of a transcribed corpus of speech, yielding a sequence of textual objects (e.g. words, syllables) aligned with a sequence of discrete acoustic events. Constructing a matrix recording the co-occurrence of textual objects with acoustic events and reducing its dimensionality with matrix decomposition results in a set of context-independent representations of word types. These are applied to the task of acoustic modelling for speech synthesis; objective and subjective results indicate that these representations are useful for the generation of acoustic parameters in a text-to-speech (TTS) system. In general, we observe that the more discretization approaches, acoustic signals, and levels of linguistic analysis are incorporated into a TTS system via these count-based representations, the better that TTS system performs.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Éva Székely|AUTHOR Éva Székely]], [[Joseph Mendelson|AUTHOR Joseph Mendelson]], [[Joakim Gustafson|AUTHOR Joakim Gustafson]]
</p><p class="cpabstractcardaffiliationlist">KTH, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 804–808
</span></p></div>
<div class="cpabstractcardabstract"><p>As synthetic voices become more flexible, and conversational systems gain more potential to adapt to the environmental and social situation, the question needs to be examined, how different modifications to the synthetic speech interact with each other and how their specific combinations influence perception. This work investigates how the vocal effort of the synthetic speech together with added disfluencies affect listeners’ perception of the degree of uncertainty in an utterance. We introduce a DNN voice built entirely from spontaneous conversational speech data and capable of producing a continuum of vocal efforts, prolongations and filled pauses with a corpus-based method. Results of a listener evaluation indicate that decreased vocal effort, filled pauses and prolongation of function words increase the degree of perceived uncertainty of conversational utterances expressing the speaker’s beliefs. We demonstrate that the effect of these three cues are not merely additive, but that interaction effects, in particular between the two types of disfluencies and between vocal effort and prolongations need to be considered when aiming to communicate a specific level of uncertainty. The implications of these findings are relevant for adaptive and incremental conversational systems using expressive speech synthesis and aspiring to communicate the attitude of uncertainty.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Asaf Rendel|AUTHOR Asaf Rendel]]^^1^^, [[Raul Fernandez|AUTHOR Raul Fernandez]]^^2^^, [[Zvi Kons|AUTHOR Zvi Kons]]^^1^^, [[Andrew Rosenberg|AUTHOR Andrew Rosenberg]]^^2^^, [[Ron Hoory|AUTHOR Ron Hoory]]^^1^^, [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IBM, Israel; ^^2^^IBM, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 759–763
</span></p></div>
<div class="cpabstractcardabstract"><p>The proper segmentation of an input text string into meaningful intonational phrase units is a fundamental task in the text-processing component of a text-to-speech (TTS) system that generates intelligible and natural synthesis. In this work we look at the creation of a symbolic, phrase-assignment model within the front end (FE) of a North American English TTS system when high-quality labels for supervised learning are unavailable and/or potentially mismatched to the target corpus and domain. We explore a labeling scheme that merges heuristics derived from (i) automatic high-quality phonetic alignments, (ii) linguistic rules, and (iii) a legacy acoustic phrase-labeling system to arrive at a ground truth that can be used to train a bidirectional recurrent neural network model. We evaluate the performance of this model in terms of objective metrics describing categorical phrase assignment within the FE proper, as well as on the effect that these intermediate labels carry onto the TTS back end for the task of continuous prosody prediction (i.e., intonation and duration contours, and pausing). For this second task, we rely on subjective listening tests and demonstrate that the proposed system significantly outperforms a linguistic rules-based baseline for two different synthetic voices.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yusuke Ijima|AUTHOR Yusuke Ijima]], [[Nobukatsu Hojo|AUTHOR Nobukatsu Hojo]], [[Ryo Masumura|AUTHOR Ryo Masumura]], [[Taichi Asami|AUTHOR Taichi Asami]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 764–768
</span></p></div>
<div class="cpabstractcardabstract"><p>Recent studies have shown the effectiveness of the use of word vectors in DNN-based speech synthesis. However, these word vectors trained from a large amount of text generally carry not prosodic information, which is important information for speech synthesis, but semantic information. Therefore, if word vectors that take prosodic information into account can be obtained, it would be expected to improve the quality of synthesized speech. In this paper, to obtain word-level vectors that take prosodic information into account, we propose a novel prosody aware word-level encoder. A novel point of the proposed technique is to train a word-level encoder by using a large speech corpus constructed for automatic speech recognition. A word-level encoder that estimates the F0 contour for each word from the input word sequence is trained. The outputs of the bottleneck layer in the trained encoder are used as the word-level vector. By training the relationship between words and their prosodic information by using large speech corpus, the outputs of the bottleneck layer would be expected to contain prosodic information. The results of objective and subjective experiments indicate the proposed technique can synthesize speech with improved naturalness.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jinfu Ni|AUTHOR Jinfu Ni]], [[Yoshinori Shiga|AUTHOR Yoshinori Shiga]], [[Hisashi Kawai|AUTHOR Hisashi Kawai]]
</p><p class="cpabstractcardaffiliationlist">NICT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 769–773
</span></p></div>
<div class="cpabstractcardabstract"><p>Recent vector space representations of words have succeeded in capturing syntactic and semantic regularities. In the context of text-to-speech (TTS) synthesis, a front-end is a key component for extracting multi-level linguistic features from text, where syllable acts as a link between low- and high-level features. This paper describes the use of global syllable vectors as features to build a front-end, particularly evaluated in Chinese. The global syllable vectors directly capture global statistics of syllable-syllable co-occurrences in a large-scale text corpus. They are learned by a global log-bilinear regression model in an unsupervised manner, whilst the front-end is built using deep bidirectional recurrent neural networks in a supervised fashion. Experiments are conducted on large-scale Chinese speech and treebank text corpora, evaluating grapheme to phoneme (G2P) conversion, word segmentation, part of speech (POS) tagging, phrasal chunking, and pause break prediction. Results show that the proposed method is efficient for building a compact and robust front-end with high performance. The global syllable vectors can be acquired relatively cheaply from plain text resources, therefore, they are vital to develop multilingual speech synthesis, especially for under-resourced language modeling.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ishin Fukuoka|AUTHOR Ishin Fukuoka]], [[Kazuhiko Iwata|AUTHOR Kazuhiko Iwata]], [[Tetsunori Kobayashi|AUTHOR Tetsunori Kobayashi]]
</p><p class="cpabstractcardaffiliationlist">Waseda University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 774–778
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a conversational speech synthesis system in which the prosodic features of each utterance are controlled throughout the entire input text. We have developed a “news-telling system,” which delivered news articles through spoken language. The speech synthesis system for the news-telling should be able to highlight utterances containing noteworthy information in the article with a particular way of speaking so as to impress them on the users. To achieve this, we introduced role and position features of the individual utterances in the article into the control parameters for prosody generation throughout the text. We defined three categories for the role feature: a nucleus (which is assigned to the utterance including the noteworthy information), a front satellite (which precedes the nucleus) and a rear satellite (which follows the nucleus). We investigated how the prosodic features differed depending on the role and position features through an analysis of news-telling speech data uttered by a voice actress. We designed the speech synthesis system on the basis of a deep neural network having the role and position features added to its input layer. Objective and subjective evaluation results showed that introducing those features was effective in the speech synthesis for the information delivering.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuchen Huang|AUTHOR Yuchen Huang]], [[Zhiyong Wu|AUTHOR Zhiyong Wu]], [[Runnan Li|AUTHOR Runnan Li]], [[Helen Meng|AUTHOR Helen Meng]], [[Lianhong Cai|AUTHOR Lianhong Cai]]
</p><p class="cpabstractcardaffiliationlist">Tsinghua University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 779–783
</span></p></div>
<div class="cpabstractcardabstract"><p>Prosodic structure generation from text plays an important role in Chinese text-to-speech (TTS) synthesis, which greatly influences the naturalness and intelligibility of the synthesized speech. This paper proposes a multi-task learning method for prosodic structure generation using bidirectional long short-term memory (BLSTM) recurrent neural network (RNN) and structured output layer (SOL). Unlike traditional methods where prerequisites such as lexicon word or even syntactic tree are usually required as the input, the proposed method predicts prosodic boundary labels directly from Chinese characters. BLSTM RNN is used to capture the bidirectional contextual dependencies of prosodic boundary labels. SOL further models correlations between prosodic structures, lexicon words as well as part-of-speech (POS), where the prediction of prosodic boundary labels are conditioned upon word tokenization and POS tagging results. Experimental results demonstrate the effectiveness of the proposed method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yibin Zheng|AUTHOR Yibin Zheng]], [[Jianhua Tao|AUTHOR Jianhua Tao]], [[Zhengqi Wen|AUTHOR Zhengqi Wen]], [[Ya Li|AUTHOR Ya Li]], [[Bin Liu|AUTHOR Bin Liu]]
</p><p class="cpabstractcardaffiliationlist">Chinese Academy of Sciences, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 784–788
</span></p></div>
<div class="cpabstractcardabstract"><p>Accurate modeling and prediction of speech-sound durations are important in generating natural synthetic speech. This paper focuses on both feature and training objective aspects to improve the performance of the phone duration model for speech synthesis system. In feature aspect, we combine the feature representation from gradient boosting decision tree (GBDT) and phoneme identity embedding model (which is realized by the joint training of phoneme embedded vector (PEV) and word embedded vector (WEV)) for BLSTM to predict the phone duration. The PEV is used to replace the one-hot phoneme identity, and GBDT is utilized to transform the traditional contextual features. In the training objective aspect, a new training objective function which takes into account the correlation and consistency between the predicted utterance and the natural utterance is proposed. Perceptual tests indicate the proposed methods could improve the naturalness of the synthetic speech, which benefits from the proposed feature representation methods, which could capture more precise contextual features, and the proposed training objective function, which could tackle the over-averaged problem for the generated phone durations.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bo Chen|AUTHOR Bo Chen]], [[Tianling Bian|AUTHOR Tianling Bian]], [[Kai Yu|AUTHOR Kai Yu]]
</p><p class="cpabstractcardaffiliationlist">Shanghai Jiao Tong University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 789–793
</span></p></div>
<div class="cpabstractcardabstract"><p>The acoustic model and the duration model are the two major components in statistical parametric speech synthesis (SPSS) systems. The neural network based acoustic model makes it possible to model phoneme duration at phone-level instead of state-level in conventional hidden Markov model (HMM) based SPSS systems. Since the duration of phonemes is countable value, the distribution of the phone-level duration is discrete given the linguistic features, which means the Gaussian hypothesis is no longer necessary. This paper provides an investigation on the performance of LSTM-RNN duration model that directly models the probability of the countable duration values given linguistic features using cross entropy as criteria. The multi-task learning is also experimented at the same time, with a comparison to the standard LSTM-RNN duration model in objective and subjective measures. The result shows that directly modeling the discrete distribution has its benefit and multi-task model achieves better performance in phone-level duration modeling.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bo Chen|AUTHOR Bo Chen]], [[Jiahao Lai|AUTHOR Jiahao Lai]], [[Kai Yu|AUTHOR Kai Yu]]
</p><p class="cpabstractcardaffiliationlist">Shanghai Jiao Tong University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 794–798
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech duration is an important component in statistical parametric speech synthesis (SPSS). In LSTM-RNN based SPSS system, the speech duration affects the quality of synthesized speech in two aspects, the prosody of speech and the position features in acoustic model. This paper investigated the effects of duration in LSTM-RNN based SPSS system. The performance of the acoustic models with position features at different levels are compared. Also, duration models with different network architectures are presented. A method to utilize the priori knowledge that the sum of state duration of a phoneme should be equal to the phone duration is proposed and proved to have better performance in both state duration and phone duration modeling. The result shows that acoustic model with state-level position features has better performance in acoustic modeling (especially in voice/unvoice classification), which means state-level duration model still has its advantage and the duration models with the priori knowledge can result in better speech quality.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alp Öktem|AUTHOR Alp Öktem]], [[Mireia Farrús|AUTHOR Mireia Farrús]], [[Leo Wanner|AUTHOR Leo Wanner]]
</p><p class="cpabstractcardaffiliationlist">Universitat Pompeu Fabra, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 809–810
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents an open-source tool that has been developed to visualize a speech corpus with its transcript and prosodic features aligned at word level. In particular, the tool is aimed at providing a simple and clear way to visualize prosodic patterns along large segments of speech corpora, and can be applied in any research that involves prosody analysis.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Svetlana Vetchinnikova|AUTHOR Svetlana Vetchinnikova]], [[Anna Mauranen|AUTHOR Anna Mauranen]], [[Nina Mikušová|AUTHOR Nina Mikušová]]
</p><p class="cpabstractcardaffiliationlist">University of Helsinki, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 811–812
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a web-based application for tablets ‘ChunkitApp’ developed to investigate chunking in online speech processing. The design of the app is based on recent theoretical developments in linguistics and cognitive science, and in particular on the suggestions of Linear Unit Grammar [1]. The data collected using the app provides evidence for the reality of online chunking in language processing and the validity of the construct. In addition to experimental uses, the app has potential applications in language education and speech recognition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Markus Jochim|AUTHOR Markus Jochim]]
</p><p class="cpabstractcardaffiliationlist">LMU München, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 813–814
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we introduce a new component of the EMU Speech Database Management System [1, 2] to improve the team workflow of handling production data (both acoustic and physiological) in phonetics and the speech sciences. It is named emuDB Manager, and it facilitates the coordination of team efforts, possibly distributed over several nations, by introducing automatic revision control (based on Git), cloud hosting (in private clouds provided by the researchers themselves or a third party), by keeping track of which parts of the database have already been edited (and by whom), and by centrally collecting and making searchable the notes made during the edit process.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Anne S. Warlaumont|AUTHOR Anne S. Warlaumont]]^^1^^, [[Mark VanDam|AUTHOR Mark VanDam]]^^2^^, [[Elika Bergelson|AUTHOR Elika Bergelson]]^^3^^, [[Alejandrina Cristia|AUTHOR Alejandrina Cristia]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of California at Merced, USA; ^^2^^Washington State University, USA; ^^3^^Duke University, USA; ^^4^^LSCP (UMR 8554), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 815–816
</span></p></div>
<div class="cpabstractcardabstract"><p>HomeBank is a new component of the TalkBank system, focused on long-form (i.e., multi-hour, typically daylong) real-world recordings of children’s language experiences, and it is linked to a GitHub repository in which tools for analyzing those recordings can be shared. HomeBank constitutes not only a rich resource for researchers interested in early language acquisition specifically, but also for those seeking to study spontaneous speech, media exposure, and audio environments more generally. This Show and Tell describes the procedures for accessing and contributing HomeBank data and code. It also overviews the current contents of the repositories, and provides some examples of audio recordings, available transcriptions, and currently available analysis tools.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Peter Bell|AUTHOR Peter Bell]], [[Joachim Fainberg|AUTHOR Joachim Fainberg]], [[Catherine Lai|AUTHOR Catherine Lai]], [[Mark Sinclair|AUTHOR Mark Sinclair]]
</p><p class="cpabstractcardaffiliationlist">University of Edinburgh, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 817–818
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a system to enable efficient, collaborative human correction of ASR transcripts, designed to operate in real-time situations, for example, when post-editing live captions generated for news broadcasts. In the system, confusion networks derived from ASR lattices are used to highlight low-confident words and present alternatives to the user for quick correction. The system uses a client-server architecture, whereby information about each manual edit is posted to the server. Such information can be used to dynamically update the one-best ASR output for all utterances currently in the editing pipeline. We propose to make updates in three different ways; by finding a new one-best path through an existing ASR lattice consistent with the correction received; by identifying further instances of out-of-vocabulary terms entered by the user; and by adapting the language model on the fly. Updates are received asynchronously by the client.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chitralekha Bhat|AUTHOR Chitralekha Bhat]]^^1^^, [[Anjali Kant|AUTHOR Anjali Kant]]^^2^^, [[Bhavik Vachhani|AUTHOR Bhavik Vachhani]]^^1^^, [[Sarita Rautara|AUTHOR Sarita Rautara]]^^2^^, [[Ashok Kumar Sinha|AUTHOR Ashok Kumar Sinha]]^^2^^, [[Sunil Kumar Kopparapu|AUTHOR Sunil Kumar Kopparapu]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^TCS Innovation Labs Mumbai, India; ^^2^^AYJNISHD, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 819–820
</span></p></div>
<div class="cpabstractcardabstract"><p>Through this paper, we present the Mobile Phone Assisted Remote Speech Therapy Platform for individuals with speech disabilities to avail the benefits of therapy remotely with minimal face-to-face sessions with the Speech Language Pathologist (SLP). The objective is to address the skewed ratio of SLP to patients as well as increase the efficacy of the therapy by keeping the patient engaged more frequently albeit asynchronously and remotely. The platform comprises (1) A web-interface to be used by the SLP to monitor the progress of their patients at a time convenient to them and (2) A mobile application along with speech processing algorithms to provide instant feedback to the patient. We envision this platform to cut down the therapy time, especially for rural Indian patients. Evaluation of this platform is being done for five patients with mis-articulation in Marathi language.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Aurore Jaumard-Hakoun|AUTHOR Aurore Jaumard-Hakoun]]^^1^^, [[Samy Chikhi|AUTHOR Samy Chikhi]]^^1^^, [[Takfarinas Medani|AUTHOR Takfarinas Medani]]^^1^^, [[Angelika Nair|AUTHOR Angelika Nair]]^^2^^, [[Gérard Dreyfus|AUTHOR Gérard Dreyfus]]^^1^^, [[François-Benoît Vialatte|AUTHOR François-Beno^ıt Vialatte]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^ESPCI Paris, France; ^^2^^Drew University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 821–822
</span></p></div>
<div class="cpabstractcardabstract"><p>We present our preliminary developments on a biofeedback interface for Western operatic style training, combining performance and result biofeedback. Electromyographic performance feedbacks, as well as formant-tuning result feedbacks are displayed visually, using continuously scrolling displays, or discrete post-trial evaluations. Our final aim is to investigate electroencephalographic (EEG) measurements in order to identify neural correlates of feedback-based skill learning. </p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Christoph Draxler|AUTHOR Christoph Draxler]]
</p><p class="cpabstractcardaffiliationlist">LMU München, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 823–824
</span></p></div>
<div class="cpabstractcardabstract"><p>PercyConfigurator is an experiment editor that eliminates the need for programming; the experiment definition and content are simply dropped onto the PercyConfigurator web page for interactive editing and testing. When the editing is done, the experiment definition and content are uploaded to the server. The server returns a link to the experiment which is then distributed to potential participants.
The Bavarian Archive for Speech Signals (BAS) hosts PercyConfigurator as a free service to the academic community.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Askars Salimbajevs|AUTHOR Askars Salimbajevs]], [[Indra Ikauniece|AUTHOR Indra Ikauniece]]
</p><p class="cpabstractcardaffiliationlist">Tilde, Latvia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 825–826
</span></p></div>
<div class="cpabstractcardabstract"><p>In this demonstration paper, we introduce a transcription service that can be used for transcription of different meetings, sessions etc. The service performs speaker diarization, automatic speech recognition, punctuation restoration and produces human-readable transcripts as special Microsoft Word documents that have audio and word alignments embedded. Thereby, a widely-used word processor is transformed into a transcription post-editing tool. Currently, Latvian and Lithuanian languages are supported, but other languages can be easily added.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ji Ho Park|AUTHOR Ji Ho Park]], [[Nayeon Lee|AUTHOR Nayeon Lee]], [[Dario Bertero|AUTHOR Dario Bertero]], [[Anik Dey|AUTHOR Anik Dey]], [[Pascale Fung|AUTHOR Pascale Fung]]
</p><p class="cpabstractcardaffiliationlist">HKUST, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 827–828
</span></p></div>
<div class="cpabstractcardabstract"><p>We developed Emojive!, a mobile game app to make emotion recognition from audio and image interactive and fun, motivating the users to play with the app. The game is to act out a specific emotion, among six emotion labels (happy, sad, anger, anxiety, loneliness, criticism), given by the system. Double player mode lets two people compete with their acting skills. The more users play the game, the more emotion-labelled data will be acquired. We are using deep Convolutional Neural Network (CNN) models to recognize emotion from audio and facial image in real-time with a mobile front-end client including intuitive user interface and simple data visualization.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mietta Lennes|AUTHOR Mietta Lennes]]^^1^^, [[Jussi Piitulainen|AUTHOR Jussi Piitulainen]]^^1^^, [[Martin Matthiesen|AUTHOR Martin Matthiesen]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Helsinki, Finland; ^^2^^CSC, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 829–830
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech and language researchers need to manage and analyze increasing quantities of material. Various tools are available for various stages of the work, but they often require the researcher to use different interfaces and to convert the output from each tool into suitable input for the next one.
The Language Bank of Finland (Kielipankki) is developing an on-line platform called Mylly for processing speech and language data in a graphical user interface that integrates different tools into a single workflow. Mylly provides tools and computational resources for processing material and for inspecting the results. The tools plugged into Mylly include a parser, morphological analyzers, generic finite-state technology, and a speech recognizer. Users can upload data and download any intermediate results in the tool chain. Mylly runs on CSC’s Taito cluster and is an instance of the Chipster platform. Access rights to Mylly are given for academic use.
The Language Bank of Finland is a collection of corpora, tools and other services maintained by FIN-CLARIN, a consortium of Finnish universities and research organizations coordinated by the University of Helsinki. The technological infrastructure for the Language Bank of Finland is provided by CSC – IT Center for Science.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kyori Suzuki|AUTHOR Kyori Suzuki]], [[Ian Wilson|AUTHOR Ian Wilson]], [[Hayato Watanabe|AUTHOR Hayato Watanabe]]
</p><p class="cpabstractcardaffiliationlist">University of Aizu, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 831–832
</span></p></div>
<div class="cpabstractcardabstract"><p>We demonstrate Visual Learning 2, an English pronunciation app for second-language (L2) learners and phonetics students. This iOS app links together audio, front and side video, MRI and ultrasound movies of a native speaker reading a phonetically balanced text. Users can watch and shadow front and side video overlaid with an ultrasound tongue movie. They are able to play the video at three speeds and start the video from any word by tapping on it, with a choice of display in either English or IPA. Users can record their own audio/video and play it back in sync with the model for comparison.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Emre Yılmaz|AUTHOR Emre Yılmaz]]^^1^^, [[Jelske Dijkstra|AUTHOR Jelske Dijkstra]]^^2^^, [[Hans Van de Velde|AUTHOR Hans Van de Velde]]^^2^^, [[Frederik Kampstra|AUTHOR Frederik Kampstra]]^^3^^, [[Jouke Algra|AUTHOR Jouke Algra]]^^3^^, [[Henk van den Heuvel|AUTHOR Henk van den Heuvel]]^^1^^, [[David Van Leeuwen|AUTHOR David Van Leeuwen]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Radboud Universiteit Nijmegen, The Netherlands; ^^2^^Fryske Akademy, The Netherlands; ^^3^^Omrop Fryslân, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 37–41
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we present a new longitudinal and bilingual broadcast database designed for speaker clustering and text-independent verification research. The broadcast data is extracted from the archives of Omrop Fryslân which is the regional broadcaster in the province of Fryslân, located in the north of the Netherlands. Two speaker verification tasks are provided in a standard enrollment-test setting with language consistent trials. The first task contains target trials from all speakers available appearing in at least two different programs, while the second task contains target trials from a subgroup of speakers appearing in programs recorded in multiple years. The second task is designed to investigate the effects of ageing on the accuracy of speaker verification systems. This database also contains unlabeled spoken segments from different radio programs for speaker clustering research. We provide the output of an existing speaker diarization system for baseline verification experiments. Finally, we present the baseline speaker verification results using the Kaldi GMM- and DNN-UBM speaker verification system. This database will be an extension to the recently presented open source Frisian data collection and it is publicly available for research purposes.</p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} </span></p></div>
<div class="cpabstractcardabstract"><p>(No abstract available at the time of publication)</p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} </span></p></div>
<div class="cpabstractcardabstract"><p>(No abstract available at the time of publication)</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Emre Yılmaz|AUTHOR Emre Yılmaz]], [[Henk van den Heuvel|AUTHOR Henk van den Heuvel]], [[David Van Leeuwen|AUTHOR David Van Leeuwen]]
</p><p class="cpabstractcardaffiliationlist">Radboud Universiteit Nijmegen, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 42–46
</span></p></div>
<div class="cpabstractcardabstract"><p>We have recently presented an automatic speech recognition (ASR) system operating on Frisian-Dutch code-switched speech. This type of speech requires careful handling of unexpected language switches that may occur in a single utterance. In this paper, we extend this work by using some raw broadcast data to improve multilingually trained deep neural networks (DNN) that have been trained on 11.5 hours of manually annotated bilingual speech. For this purpose, we apply the initial ASR to the untranscribed broadcast data and automatically create transcriptions based on the recognizer output using different language models for rescoring. Then, we train new acoustic models on the combined data, i.e., the manually and automatically transcribed bilingual broadcast data, and investigate the automatic transcription quality based on the recognition accuracies on a separate set of development and test data. Finally, we report code-switching detection performance elaborating on the correlation between the ASR and the code-switching detection performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Vikram Ramanarayanan|AUTHOR Vikram Ramanarayanan]], [[David Suendermann-Oeft|AUTHOR David Suendermann-Oeft]]
</p><p class="cpabstractcardaffiliationlist">Educational Testing Service, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 47–51
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a database of code-switched conversational human–machine dialog in English–Hindi and English–Spanish. We leveraged HALEF, an open-source standards-compliant cloud-based dialog system to capture audio and video of bilingual crowd workers as they interacted with the system. We designed conversational items with intra-sentential code-switched machine prompts, and examine its efficacy in eliciting code-switched speech in a total of over 700 dialogs. We analyze various characteristics of the code-switched corpus and discuss some considerations that should be taken into account while collecting and processing such data. Such a database can be leveraged for a wide range of potential applications, including automated processing, recognition and understanding of code-switched speech and language learning applications for new language learners.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[SaiKrishna Rallabandi|AUTHOR SaiKrishna Rallabandi]], [[Alan W. Black|AUTHOR Alan W. Black]]
</p><p class="cpabstractcardaffiliationlist">Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 52–56
</span></p></div>
<div class="cpabstractcardabstract"><p>Codemixing — phenomenon where lexical items from one language are embedded in the utterance of another — is relatively frequent in multilingual communities. However, TTS systems today are not fully capable of effectively handling such mixed content despite achieving high quality in the monolingual case. In this paper, we investigate various mechanisms for building mixed lingual systems which are built using a mixture of monolingual corpora and are capable of synthesizing such content. First, we explore the possibility of manipulating the phoneme representation: using target word to source phone mapping with the aim of emulating the native speaker intuition. We then present experiments at the acoustic stage investigating training techniques at both spectral and prosodic levels. Subjective evaluation shows that our systems are capable of generating high quality synthesis in codemixed scenarios.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Khyathi Raghavi Chandu|AUTHOR Khyathi Raghavi Chandu]]^^1^^, [[SaiKrishna Rallabandi|AUTHOR SaiKrishna Rallabandi]]^^1^^, [[Sunayana Sitaram|AUTHOR Sunayana Sitaram]]^^2^^, [[Alan W. Black|AUTHOR Alan W. Black]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Carnegie Mellon University, USA; ^^2^^Microsoft, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 57–61
</span></p></div>
<div class="cpabstractcardabstract"><p>Text-to-Speech (TTS) systems that can read navigation instructions are one of the most widely used speech interfaces today. Text in the navigation domain may contain named entities such as location names that are not in the language that the TTS database is recorded in. Moreover, named entities can be compound words where individual lexical items belong to different languages. These named entities may be transliterated into the script that the TTS system is trained on. This may result in incorrect pronunciation rules being used for such words. We describe experiments to extend our previous work in generating code-mixed speech to synthesize navigation instructions, with a mixed-lingual TTS system. We conduct subjective listening tests with two sets of users, one being students who are native speakers of an Indian language and very proficient in English, and the other being drivers with low English literacy, but familiarity with location names. We find that in both sets of users, there is a significant preference for our proposed system over a baseline system that synthesizes instructions in English.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Djegdjiga Amazouz|AUTHOR Djegdjiga Amazouz]]^^1^^, [[Martine Adda-Decker|AUTHOR Martine Adda-Decker]]^^1^^, [[Lori Lamel|AUTHOR Lori Lamel]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LPP (UMR 7018), France; ^^2^^LIMSI, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 62–66
</span></p></div>
<div class="cpabstractcardabstract"><p>This study focuses on code-switching (CS) in French/Algerian Arabic bilingual communities and investigates how speech technologies, such as automatic data partitioning, language identification and automatic speech recognition (ASR) can serve to analyze and classify this type of bilingual speech. A preliminary study carried out using a corpus of Maghrebian broadcast data revealed a relatively high presence of CS Algerian Arabic as compared to the neighboring countries Morocco and Tunisia. Therefore this study focuses on code switching produced by bilingual Algerian speakers who can be considered native speakers of both Algerian Arabic and French. A specific corpus of four hours of speech from 8 bilingual French Algerian speakers was collected. This corpus contains read speech and conversational speech in both languages and includes stretches of code-switching. We provide a linguistic description of the code-switching stretches in terms of intra-sentential and inter-sentential switches, the speech duration in each language. We report on some initial studies to locate French, Arabic and the code-switched stretches, using ASR system word posteriors for this pair of languages.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gualberto Guzmán|AUTHOR Gualberto Guzmán]], [[Joseph Ricard|AUTHOR Joseph Ricard]], [[Jacqueline Serigos|AUTHOR Jacqueline Serigos]], [[Barbara E. Bullock|AUTHOR Barbara E. Bullock]], [[Almeida Jacqueline Toribio|AUTHOR Almeida Jacqueline Toribio]]
</p><p class="cpabstractcardaffiliationlist">University of Texas at Austin, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 67–71
</span></p></div>
<div class="cpabstractcardabstract"><p>In developing technologies for code-switched speech, it would be desirable to be able to predict how much language mixing might be expected in the signal and the regularity with which it might occur. In this work, we offer various metrics that allow for the classification and visualization of multilingual corpora according to the ratio of languages represented, the probability of switching between them, and the time-course of switching. Applying these metrics to corpora of different languages and genres, we find that they display distinct probabilities and periodicities of switching, information useful for speech processing of mixed-language data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ewald van der Westhuizen|AUTHOR Ewald van der Westhuizen]], [[Thomas Niesler|AUTHOR Thomas Niesler]]
</p><p class="cpabstractcardaffiliationlist">Stellenbosch University, South Africa</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 72–76
</span></p></div>
<div class="cpabstractcardabstract"><p>Code-switching is prevalent among South African speakers, and presents a challenge to automatic speech recognition systems. It is predominantly a spoken phenomenon, and generally does not occur in textual form. Therefore a particularly serious challenge is the extreme lack of training material for language modelling. We investigate the use of word embeddings to synthesise isiZulu-to-English code-switch bigrams with which to augment such sparse language model training data. A variety of word embeddings are trained on a monolingual English web text corpus, and subsequently queried to synthesise code-switch bigrams. Our evaluation is performed on language models trained on a new, although small, English-isiZulu code-switch corpus compiled from South African soap operas. This data is characterised by fast, spontaneously spoken speech containing frequent code-switching. We show that the augmentation of the training data with code-switched bigrams synthesised in this way leads to a reduction in perplexity.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Victor Soto|AUTHOR Victor Soto]], [[Julia Hirschberg|AUTHOR Julia Hirschberg]]
</p><p class="cpabstractcardaffiliationlist">Columbia University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 77–81
</span></p></div>
<div class="cpabstractcardabstract"><p>Code-switching is the phenomenon by which bilingual speakers switch between multiple languages during communication. The importance of developing language technologies for code-switching data is immense, given the large populations that routinely code-switch. High-quality linguistic annotations are extremely valuable for any NLP task, and performance is often limited by the amount of high-quality labeled data. However, little such data exists for code-switching. In this paper, we describe crowd-sourcing universal part-of-speech tags for the Miami Bangor Corpus of Spanish-English code-switched speech. We split the annotation task into three subtasks: one in which a subset of tokens are labeled automatically, one in which questions are specifically designed to disambiguate a subset of high frequency words, and a more general cascaded approach for the remaining data in which questions are displayed to the worker following a decision tree structure. Each subtask is extended and adapted for a multilingual setting and the universal tagset. The quality of the annotation process is measured using hidden check questions annotated with gold labels. The overall agreement between gold standard labels and the majority vote is between 0.95 and 0.96 for just three labels and the average recall across part-of-speech tags is between 0.87 and 0.99, depending on the task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tomi Kinnunen|AUTHOR Tomi Kinnunen]]^^1^^, [[Md. Sahidullah|AUTHOR Md. Sahidullah]]^^1^^, [[Héctor Delgado|AUTHOR Héctor Delgado]]^^2^^, [[Massimiliano Todisco|AUTHOR Massimiliano Todisco]]^^2^^, [[Nicholas Evans|AUTHOR Nicholas Evans]]^^2^^, [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]^^3^^, [[Kong Aik Lee|AUTHOR Kong Aik Lee]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Eastern Finland, Finland; ^^2^^EURECOM, France; ^^3^^NII, Japan; ^^4^^A*STAR, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2–6
</span></p></div>
<div class="cpabstractcardabstract"><p>The ASVspoof initiative was created to promote the development of countermeasures which aim to protect automatic speaker verification (ASV) from spoofing attacks. The first community-led, common evaluation held in 2015 focused on countermeasures for speech synthesis and voice conversion spoofing attacks. Arguably, however, it is replay attacks which pose the greatest threat. Such attacks involve the replay of recordings collected from enrolled speakers in order to provoke false alarms and can be mounted with greater ease using everyday consumer devices. ASVspoof 2017, the second in the series, hence focused on the development of replay attack countermeasures. This paper describes the database, protocols and initial findings. The evaluation entailed highly heterogeneous acoustic recording and replay conditions which increased the equal error rate (EER) of a baseline ASV system from 1.76% to 31.46%. Submissions were received from 49 research teams, 20 of which improved upon a baseline replay spoofing detector EER of 24.77%, in terms of replay/non-replay discrimination. While largely successful, the evaluation indicates that the quest for countermeasures which are resilient in the face of variable replay attacks remains very much alive.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Roberto Font|AUTHOR Roberto Font]], [[Juan M. Espín|AUTHOR Juan M. Espín]], [[María José Cano|AUTHOR María José Cano]]
</p><p class="cpabstractcardaffiliationlist">Biometric Vox, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 7–11
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents an experimental comparison of different features for the detection of replay spoofing attacks in Automatic Speaker Verification systems. We evaluate the proposed countermeasures using two recently introduced databases, including the dataset provided for the ASVspoof 2017 challenge. This challenge provides researchers with a common framework for the evaluation of replay attack detection systems, with a particular focus on the generalization to new, unknown conditions (for instance, replay devices different from those used during system training). Our cross-database experiments show that, although achieving this level of generalization is indeed a challenging task, it is possible to train classifiers that exhibit stable and consistent results across different experiments. The proposed approach for the ASVspoof 2017 challenge consists in the score-level fusion of several base classifiers using logistic regression. These base classifiers are 2-class Gaussian Mixture Models (GMMs) representing genuine and spoofed speech respectively. Our best system achieves an Equal Error Rate of 10.52% on the challenge evaluation set. As a result of this set of experiments, we provide some general conclusions regarding feature extraction for replay attack detection and identify which features show the most promising results.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hemant A. Patil|AUTHOR Hemant A. Patil]], [[Madhu R. Kamble|AUTHOR Madhu R. Kamble]], [[Tanvina B. Patel|AUTHOR Tanvina B. Patel]], [[Meet H. Soni|AUTHOR Meet H. Soni]]
</p><p class="cpabstractcardaffiliationlist">DA-IICT, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 12–16
</span></p></div>
<div class="cpabstractcardabstract"><p>Replay attacks present a great risk for Automatic Speaker Verification (ASV) systems. In this paper, we propose a novel replay detector based on Variable length Teager Energy Operator-Energy Separation Algorithm-Instantaneous Frequency Cosine Coefficients (VESA-IFCC) for the ASV spoof 2017 challenge. The key idea here is to exploit the contribution of IF in each subband energy via ESA to capture possible changes in spectral envelope (due to transmission and channel characteristics of replay device) of replayed speech. The IF is computed from narrowband components of speech signal, and DCT is applied in IF to get proposed feature set. We compare the performance of the proposed VESA-IFCC feature set with the features developed for detecting synthetic and voice converted speech. This includes the CQCC, CFCCIF and prosody-based features. On the development set, the proposed VESA-IFCC features when fused at score-level with a variant of CFCCIF and prosody-based features gave the least EER of 0.12%. On the evaluation set, this combination gave an EER of 18.33%. However, post-evaluation results of challenge indicate that VESA-IFCC features alone gave the relatively least EER of 14.06% (i.e., relatively 16.11% less compared to baseline CQCC) and hence, is a very useful countermeasure to detect replay attacks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Weicheng Cai|AUTHOR Weicheng Cai]]^^1^^, [[Danwei Cai|AUTHOR Danwei Cai]]^^1^^, [[Wenbo Liu|AUTHOR Wenbo Liu]]^^1^^, [[Gang Li|AUTHOR Gang Li]]^^2^^, [[Ming Li|AUTHOR Ming Li]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Sun Yat-sen University, China; ^^2^^JSC, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 17–21
</span></p></div>
<div class="cpabstractcardabstract"><p>The ongoing ASVspoof 2017 challenge aims to detect replay attacks for text dependent speaker verification. In this paper, we propose multiple replay spoofing countermeasure systems, with some of them boosting the CQCC-GMM baseline system after score level fusion. We investigate different steps in the system building pipeline, including data augmentation, feature representation, classification and fusion. First, in order to augment training data and simulate the unseen replay conditions, we converted the raw genuine training data into replay spoofing data with parametric sound reverberator and phase shifter. Second, we employed the original spectrogram rather than CQCC as input to explore the end-to-end feature representation learning methods. The spectrogram is randomly cropped into fixed size segments, and then fed into a deep residual network (ResNet). Third, upon the CQCC features, we replaced the subsequent GMM classifier with deep neural networks including fully-connected deep neural network (FDNN) and Bidirectional Long Short Term Memory neural network (BLSTM). Experiments showed that data augmentation strategy can significantly improve the system performance. The final fused system achieves 16.39% EER on the test set of ASVspoof 2017 for the common task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sarfaraz Jelil|AUTHOR Sarfaraz Jelil]], [[Rohan Kumar Das|AUTHOR Rohan Kumar Das]], [[S.R. Mahadeva Prasanna|AUTHOR S.R. Mahadeva Prasanna]], [[Rohit Sinha|AUTHOR Rohit Sinha]]
</p><p class="cpabstractcardaffiliationlist">IIT Guwahati, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 22–26
</span></p></div>
<div class="cpabstractcardabstract"><p>This work describes the techniques used for spoofed speech detection for the ASVspoof 2017 challenge. The main focus of this work is on exploiting the differences in the speech-specific nature of genuine speech signals and spoofed speech signals generated by replay attacks. This is achieved using glottal closure instants, epoch strength, and the peak to side lobe ratio of the Hilbert envelope of linear prediction residual. Apart from these source features, the instantaneous frequency cosine coefficient feature, and two cepstral features namely, constant Q cepstral coefficients and mel frequency cepstral coefficients are used. A combination of all these features is performed to obtain a high degree of accuracy for spoof detection. Initially, the efficacy of these features is tested on the development set of the ASVspoof 2017 database with Gaussian mixture model based systems. The systems are then fused at score level which acts as the final combined system for the challenge. The combined system is able to outperform the individual systems by a significant margin. Finally, the experiments are repeated on the evaluation set of the database and the combined system results in an equal error rate of 13.95%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Marcin Witkowski|AUTHOR Marcin Witkowski]], [[Stanisław Kacprzak|AUTHOR Stanisław Kacprzak]], [[Piotr Żelasko|AUTHOR Piotr Żelasko]], [[Konrad Kowalczyk|AUTHOR Konrad Kowalczyk]], [[Jakub Gałka|AUTHOR Jakub Gałka]]
</p><p class="cpabstractcardaffiliationlist">AGH UST, Poland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 27–31
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents our contribution to the ASVspoof 2017 Challenge. It addresses a replay spoofing attack against a speaker recognition system by detecting that the analysed signal has passed through multiple analogue-to-digital (AD) conversions. Specifically, we show that most of the cues that enable to detect the replay attacks can be found in the high-frequency band of the replayed recordings. The described anti-spoofing countermeasures are based on (1) modelling the subband spectrum and (2) using the proposed features derived from the linear prediction (LP) analysis. The results of the investigated methods show a significant improvement in comparison to the baseline system of the ASVspoof 2017 Challenge. A relative equal error rate (EER) reduction by 70% was achieved for the development set and a reduction by 30% was obtained for the evaluation set.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xianliang Wang|AUTHOR Xianliang Wang]]^^1^^, [[Yanhong Xiao|AUTHOR Yanhong Xiao]]^^2^^, [[Xuan Zhu|AUTHOR Xuan Zhu]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Beijing Samsung Telecom R&D Center, China; ^^2^^Beijing Institute of Technology, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 32–36
</span></p></div>
<div class="cpabstractcardabstract"><p>The ASVspoof 2017 challenge aims to assess spoofing and countermeasures attack detection accuracy for automatic speaker verification. It has been proven that constant Q cepstral coefficients (CQCCs) process speech in different frequencies with variable resolution and perform much better than traditional features. When coupled with a Gaussian mixture model (GMM), it is an excellently effective spoofing countermeasure. The baseline CQCC+GMM system considers short-term impacts while ignoring the whole influence of channel. In the meantime, dimension of the feature is relatively higher than the traditional feature and usually with a higher variance. This paper explores different features for ASVspoof 2017 challenge. The mean and variance of the CQCC features of an utterance is used as the representation of the whole utterance. Feature selection method is introduced to avoid high variance and overfitting for spoofing detection. Experimental results on ASVspoof 2017 dataset show that feature selection followed by Support Vector Machine (SVM) gets an improvement compared to the baseline. It is also shown that pitch feature contributes to the performance improvement, and it obtains a relative improvement of 37.39% over the baseline CQCC+GMM system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Galina Lavrentyeva|AUTHOR Galina Lavrentyeva]]^^1^^, [[Sergey Novoselov|AUTHOR Sergey Novoselov]]^^1^^, [[Egor Malykh|AUTHOR Egor Malykh]]^^1^^, [[Alexander Kozlov|AUTHOR Alexander Kozlov]]^^2^^, [[Oleg Kudashev|AUTHOR Oleg Kudashev]]^^1^^, [[Vadim Shchemelinin|AUTHOR Vadim Shchemelinin]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^ITMO University, Russia; ^^2^^STC-innovations, Russia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 82–86
</span></p></div>
<div class="cpabstractcardabstract"><p>Nowadays spoofing detection is one of the priority research areas in the field of automatic speaker verification. The success of Automatic Speaker Verification Spoofing and Countermeasures (ASVspoof) Challenge 2015 confirmed the impressive perspective in detection of unforeseen spoofing trials based on speech synthesis and voice conversion techniques. However, there is only a small number of studies addressing replay spoofing attacks, which are more likely to be used by non-professional impersonators. This paper describes the Speech Technology Center (STC) anti-spoofing system submitted for ASVspoof 2017 which is focused on replay attacks detection. Here we investigate the efficiency of a deep learning approach for solution of the above-mentioned task. Experimental results obtained on the Challenge corpora demonstrate that the selected approach outperforms current state-of-the-art baseline systems in terms of spoofing detection quality. Our primary system produced an EER of 6.73% on the evaluation part of the corpora which is 72% relative improvement over the ASVspoof 2017 baseline system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhe Ji|AUTHOR Zhe Ji]]^^1^^, [[Zhi-Yi Li|AUTHOR Zhi-Yi Li]]^^2^^, [[Peng Li|AUTHOR Peng Li]]^^1^^, [[Maobo An|AUTHOR Maobo An]]^^1^^, [[Shengxiang Gao|AUTHOR Shengxiang Gao]]^^1^^, [[Dan Wu|AUTHOR Dan Wu]]^^1^^, [[Faru Zhao|AUTHOR Faru Zhao]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CNCERT, China; ^^2^^CreditEase, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 87–91
</span></p></div>
<div class="cpabstractcardabstract"><p>To enhance the security and reliability of automatic speaker verification (ASV) systems, ASVspoof 2017 challenge focuses on the detection problem of known and unknown audio replay attacks. We proposed an ensemble learning classifier for CNCB team’s submitted system scores, which uses a variety of acoustic features and classifiers. An effective post-processing method is studied to improve the performance of Constant Q cepstral coefficients (CQCC) and to form a base feature set with some other classical acoustic features. We also proposed using an ensemble classifier set, which includes multiple Gaussian Mixture Model (GMM) based classifiers and two novel GMM mean supervector-Gradient Boosting Decision Tree (GSV-GBDT) and GSV-Random Forest (GSV-RF) classifiers. Experimental results have shown that the proposed ensemble learning system can provide substantially better performance than baseline. On common training condition of the challenge, Equal Error Rate (EER) of primary system on development set is 1.5%, compared to baseline 10.4%. EER of primary system (S02 in ASVspoof 2017 board) on evaluation data set are 12.3% (with only train dataset) and 10.8% (with train+dev dataset), which are also much better than baseline 30.6% and 24.8%, given by ASVSpoof 2017 organizer, with 59.7% and 56.4% relative performance improvement.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lantian Li|AUTHOR Lantian Li]], [[Yixiang Chen|AUTHOR Yixiang Chen]], [[Dong Wang|AUTHOR Dong Wang]], [[Thomas Fang Zheng|AUTHOR Thomas Fang Zheng]]
</p><p class="cpabstractcardaffiliationlist">Tsinghua University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 92–96
</span></p></div>
<div class="cpabstractcardabstract"><p>For practical automatic speaker verification (ASV) systems, replay attack poses a true risk. By replaying a pre-recorded speech signal of the genuine speaker, ASV systems tend to be easily fooled. An effective replay detection method is therefore highly desirable. In this study, we investigate a major difficulty in replay detection: the over-fitting problem caused by variability factors in speech signal. An F-ratio probing tool is proposed and three variability factors are investigated using this tool: speaker identity, speech content and playback & recording device. The analysis shows that device is the most influential factor that contributes the highest over-fitting risk. A frequency warping approach is studied to alleviate the over-fitting problem, as verified on the ASV-spoof 2017 database.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Parav Nagarsheth|AUTHOR Parav Nagarsheth]], [[Elie Khoury|AUTHOR Elie Khoury]], [[Kailash Patil|AUTHOR Kailash Patil]], [[Matt Garland|AUTHOR Matt Garland]]
</p><p class="cpabstractcardaffiliationlist">Pindrop, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 97–101
</span></p></div>
<div class="cpabstractcardabstract"><p>Voice is projected to be the next input interface for portable devices. The increased use of audio interfaces can be mainly attributed to the success of speech and speaker recognition technologies. With these advances comes the risk of criminal threats where attackers are reportedly trying to access sensitive information using diverse voice spoofing techniques. Among them, replay attacks pose a real challenge to voice biometrics. This paper addresses the problem by proposing a deep learning architecture in tandem with low-level cepstral features. We investigate the use of a deep neural network (DNN) to discriminate between the different channel conditions available in the ASVSpoof 2017 dataset, namely recording, playback and session conditions. The high-level feature vectors derived from this network are used to discriminate between genuine and spoofed audio. Two kinds of low-level features are utilized: state-of-the-art constant-Q cepstral coefficients (CQCC), and our proposed high-frequency cepstral coefficients (HFCC) that derive from the high-frequency spectrum of the audio. The fusion of both features proved to be effective in generalizing well across diverse replay attacks seen in the evaluation of the ASVSpoof 2017 challenge, with an equal error rate of 11.5%, that is 53% better than the baseline Gaussian Mixture Model (GMM) applied on CQCC.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhuxin Chen|AUTHOR Zhuxin Chen]], [[Zhifeng Xie|AUTHOR Zhifeng Xie]], [[Weibin Zhang|AUTHOR Weibin Zhang]], [[Xiangmin Xu|AUTHOR Xiangmin Xu]]
</p><p class="cpabstractcardaffiliationlist">SCUT, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 102–106
</span></p></div>
<div class="cpabstractcardabstract"><p>Speaker verification systems have achieved great progress in recent years. Unfortunately, they are still highly prone to different kinds of spoofing attacks such as speech synthesis, voice conversion, and fake audio recordings etc. Inspired by the success of ResNet in image recognition, we investigated the effectiveness of using ResNet for automatic spoofing detection. Experimental results on the ASVspoof2017 data set show that ResNet performs the best among all the single-model systems. Model fusion is a good way to further improve the system performance. Nevertheless, we found that if the same feature is used for different fused models, the resulting system can hardly be improved. By using different features and models, our best fused model further reduced the Equal Error Rate (EER) by 18% relatively, compared with the best single-model system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[K.N.R.K. Raju Alluri|AUTHOR K.N.R.K. Raju Alluri]], [[Sivanand Achanta|AUTHOR Sivanand Achanta]], [[Sudarsana Reddy Kadiri|AUTHOR Sudarsana Reddy Kadiri]], [[Suryakanth V. Gangashetty|AUTHOR Suryakanth V. Gangashetty]], [[Anil Kumar Vuppala|AUTHOR Anil Kumar Vuppala]]
</p><p class="cpabstractcardaffiliationlist">IIIT Hyderabad, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 107–111
</span></p></div>
<div class="cpabstractcardabstract"><p>The ASVspoof 2017 challenge is about the detection of replayed speech from human speech. The proposed system makes use of the fact that when the speech signals are replayed, they pass through multiple channels as opposed to original recordings. This channel information is typically embedded in low signal to noise ratio regions. A speech signal processing method with high spectro-temporal resolution is required to extract robust features from such regions. The single frequency filtering (SFF) is one such technique, which we propose to use for replay attack detection. While SFF based feature representation was used at front-end, Gaussian mixture model and bi-directional long short-term memory models are investigated at the backend as classifiers. The experimental results on ASVspoof 2017 dataset reveal that SFF based representation is very effective in detecting replay attacks. The score level fusion of back end classifiers further improved the performance of the system which indicates that both classifiers capture complementary information.</p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} </span></p></div>
<div class="cpabstractcardabstract"><p>(No abstract available at the time of publication)</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Björn Lindblom|AUTHOR Björn Lindblom]]
</p><p class="cpabstractcardaffiliationlist">University of Stockholm, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3441
</span></p></div>
<div class="cpabstractcardabstract"><p>The mapping of the Speech Chain has so far been focused on the experimentally more accessible links — e.g., acoustics — whereas the brain’s activity during speaking and listening has understandably received less attention. That state of affairs is about to change now thanks to the new sophisticated tools offered by brain imaging technology.
At present many key questions concerning human speech processes remain incompletely understood despite the significant research efforts of the past half century. As speech research goes neuro, we could do with some better answers.
In this paper I will attempt to shed some light on some of the issues. I will do so by heeding the advice that Tinbergenⁱ once gave his fellow biologists on explaining behavior. I paraphrase: Nothing in biology makes sense unless you simultaneously look at it with the following questions at the back of your mind: How did it evolve? How is it acquired? How does it work here and now?
Applying the Tinbergen strategy to speech I will, in broad strokes, trace a path from the small and fixed innate repertoires of non-human primates to the open-ended vocal systems that humans learn today.
Such an agenda will admittedly identify serious gaps in our present knowledge but, importantly, it will also bring an overarching possibility:
It will strongly suggest the feasibility of bypassing the traditional linguistic operational approach to speech units and replacing it by a first-principles account anchored in biology.
I will argue that this is the road-map we need for a more profound understanding of the fundamental nature of spoken language and for educational, medical and technological applications.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jen-Tzung Chien|AUTHOR Jen-Tzung Chien]], [[Chen Shen|AUTHOR Chen Shen]]
</p><p class="cpabstractcardaffiliationlist">National Chiao Tung University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3682–3686
</span></p></div>
<div class="cpabstractcardabstract"><p>Conventional speech recognition system is constructed by unfolding the spectral-temporal input matrices into one-way vectors and using these vectors to estimate the affine parameters of neural network according to the vector-based error back-propagation algorithm. System performance is constrained because the contextual correlations in frequency and time horizons are disregarded and the spectral and temporal factors are excluded. This paper proposes a spectral-temporal factorized neural network (STFNN) to tackle this weakness. The spectral-temporal structure is preserved and factorized in hidden layers through two ways of factor matrices which are trained by using the factorized error backpropagation. Affine transformation in standard neural network is generalized to the spectro-temporal factorization in STFNN. The structural features or patterns are extracted and forwarded towards the softmax outputs. A deep neural factorization is built by cascading a number of factorization layers with fully-connected layers for speech recognition. An orthogonal constraint is imposed in factor matrices for redundancy reduction. Experimental results show the merit of integrating the factorized features in deep feedforward and recurrent neural networks for speech recognition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Karel Veselý|AUTHOR Karel Veselý]], [[Lukáš Burget|AUTHOR Lukáš Burget]], [[Jan Černocký|AUTHOR Jan Černocký]]
</p><p class="cpabstractcardaffiliationlist">Brno University of Technology, Czech Republic</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3687–3691
</span></p></div>
<div class="cpabstractcardabstract"><p>Not all the questions related to the semi-supervised training of hybrid ASR system with DNN acoustic model were already deeply investigated. In this paper, we focus on the question of the granularity of confidences (per-sentence, per-word, per-frame), the question of how the data should be used (data-selection by masks, or in mini-batch SGD with confidences as weights). Then, we propose to re-tune the system with the manually transcribed data, both with the ‘frame CE’ training and ‘sMBR’ training.
Our preferred semi-supervised recipe which is both simple and efficient is the following: we select words according to the word accuracy we obtain on the development set. Such recipe, which does not rely on a grid-search of the training hyper-parameter, generalized well for: Babel Vietnamese (transcribed 11h, untranscribed 74h), Babel Bengali (transcribed 11h, untranscribed 58h) and our custom Switchboard setup (transcribed 14h, untranscribed 95h). We obtained the absolute WER improvements 2.5% for Vietnamese, 2.3% for Bengali and 3.2% for Switchboard.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Junfeng Hou|AUTHOR Junfeng Hou]], [[Shiliang Zhang|AUTHOR Shiliang Zhang]], [[Li-Rong Dai|AUTHOR Li-Rong Dai]]
</p><p class="cpabstractcardaffiliationlist">USTC, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3692–3696
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently end-to-end speech recognition has obtained much attention. One of the popular models to achieve end-to-end speech recognition is the attention based encoder-decoder model, which usually generates output sequences iteratively by attending the whole representations of the input sequences. However, predicting outputs until receiving the whole input sequence is not practical for online or low time latency speech recognition. In this paper, we present a simple but effective attention mechanism which can make the encoder-decoder model generate outputs without attending the entire input sequence and can apply to online speech recognition. At each prediction step, the attention is assumed to be a time-moving gaussian window with variable size and can be predicted by using previous input and output information instead of the content based computation on the whole input sequence. To further improve the online performance of the model, we employ deep convolutional neural networks as encoder. Experiments show that the gaussian prediction based attention works well and under the help of deep convolutional neural networks the online model achieves 19.5% phoneme error rate in TIMIT ASR task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Takashi Fukuda|AUTHOR Takashi Fukuda]]^^1^^, [[Masayuki Suzuki|AUTHOR Masayuki Suzuki]]^^1^^, [[Gakuto Kurata|AUTHOR Gakuto Kurata]]^^1^^, [[Samuel Thomas|AUTHOR Samuel Thomas]]^^2^^, [[Jia Cui|AUTHOR Jia Cui]]^^2^^, [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IBM, Japan; ^^2^^IBM, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3697–3701
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes the effectiveness of knowledge distillation using teacher student training for building accurate and compact neural networks. We show that with knowledge distillation, information from multiple acoustic models like very deep VGG networks and Long Short-Term Memory (LSTM) models can be used to train standard convolutional neural network (CNN) acoustic models for a variety of systems requiring a quick turnaround. We examine two strategies to leverage multiple teacher labels for training student models. In the first technique, the weights of the student model are updated by switching teacher labels at the minibatch level. In the second method, student models are trained on multiple streams of information from various teacher distributions via data augmentation. We show that standard CNN acoustic models can achieve comparable recognition accuracy with a much smaller number of model parameters compared to teacher VGG and LSTM acoustic models. Additionally we also investigate the effectiveness of using broadband teacher labels as privileged knowledge for training better narrowband acoustic models within this framework. We show the benefit of this simple technique by training narrowband student models with broadband teacher soft labels on the Aurora 4 task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rohit Prabhavalkar|AUTHOR Rohit Prabhavalkar]]^^1^^, [[Tara N. Sainath|AUTHOR Tara N. Sainath]]^^1^^, [[Bo Li|AUTHOR Bo Li]]^^1^^, [[Kanishka Rao|AUTHOR Kanishka Rao]]^^1^^, [[Navdeep Jaitly|AUTHOR Navdeep Jaitly]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Google, USA; ^^2^^NVIDIA, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3702–3706
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we conduct a detailed investigation of attention-based models for automatic speech recognition (ASR). First, we explore different types of attention, including “online” and “full-sequence” attention. Second, we explore different subword units to see how much of the end-to-end ASR process can reasonably be captured by an attention model. In experimental evaluations, we find that although attention is typically focused over a small region of the acoustics during each step of next label prediction, “full-sequence” attention outperforms “online” attention, although this gap can be significantly reduced by increasing the length of the segments over which attention is computed. Furthermore, we find that context-independent phonemes are a reasonable sub-word unit for attention models. When used in the second-pass to rescore N-best hypotheses, these models provide over a 10% relative improvement in word error rate.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hagen Soltau|AUTHOR Hagen Soltau]], [[Hank Liao|AUTHOR Hank Liao]], [[Haşim Sak|AUTHOR Haşim Sak]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3707–3711
</span></p></div>
<div class="cpabstractcardabstract"><p>We present results that show it is possible to build a competitive, greatly simplified, large vocabulary continuous speech recognition system with whole words as acoustic units. We model the output vocabulary of about 100,000 words directly using deep bi-directional LSTM RNNs with CTC loss. The model is trained on 125,000 hours of semi-supervised acoustic training data, which enables us to alleviate the data sparsity problem for word models. We show that the CTC word models work very well as an end-to-end all-neural speech recognition model without the use of traditional context-dependent sub-word phone units that require a pronunciation lexicon, and without any language model removing the need to decode. We demonstrate that the CTC word models perform better than a strong, more complex, state-of-the-art baseline with sub-word units.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shabnam Ghaffarzadegan|AUTHOR Shabnam Ghaffarzadegan]]^^1^^, [[Attila Reiss|AUTHOR Attila Reiss]]^^2^^, [[Mirko Ruhs|AUTHOR Mirko Ruhs]]^^2^^, [[Robert Duerichen|AUTHOR Robert Duerichen]]^^2^^, [[Zhe Feng|AUTHOR Zhe Feng]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Robert Bosch, USA; ^^2^^Robert Bosch, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3802–3806
</span></p></div>
<div class="cpabstractcardabstract"><p>Occupancy detection, including presence detection and head count, as one of the fast growing areas plays an important role in providing safety, comfort and reducing energy consumption both in residential and commercial setups. The focus of this study is proposing affordable strategies to increase occupancy detection performance in realistic scenarios using only audio signal collected from the environment. We use approximately 100-hour of audio data in residential and commercial environments to analyze and evaluate our setup. In this study, we take advantage of developments in feature selection methods to choose the most relevant audio features for the task. Attribute and error vs. human activity analysis are also performed to gain a better understanding of the environmental sounds and possible solutions to enhance the performance. Experimental results confirm the effectiveness of audio sensor for occupancy detection using a cost effective system with presence detection accuracy of 96% and 99%, and the head count accuracy of 70% and 95% for the residential and commercial setups, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Huy Dat Tran|AUTHOR Huy Dat Tran]], [[Wen Zheng Terence Ng|AUTHOR Wen Zheng Terence Ng]], [[Yi Ren Leng|AUTHOR Yi Ren Leng]]
</p><p class="cpabstractcardaffiliationlist">A*STAR, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3807–3811
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper deals with sound event classification from poor quality signals in the context of “through-the-wall” (TTW) surveillance. The task is extremely challenging due to the high level of distortion and attenuation caused by complex sound propagation and modulation effect from signal acquisition. Another problem faced in TTW surveillance is the lack of comprehensive training data as the recording is much more complicated than conventional approaches using audio microphones. To address that challenge, we employ a recurrent neural network, particularly the Long Short-Term Memory (LSTM) encoder, to transform conventional clean and noisy audio signals into TTW signals to augment additional training data. Furthermore, a novel missing feature mask kernel classification is developed to optimize the classification accuracy of TTW sound event classification. Particularly, Wasserstein distance is calculated from reliable intersection regions between pair-wise sound image representations and embedded into a probabilistic distance Support Vector Machine (SVM) kernel to optimize the TTW data separation. The proposed missing feature mask kernel allows effective training with inhomogeneously distorted data and the experimental results show promising results on TTW audio recordings, outperforming several state-of-the-art methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shuo-Yiin Chang|AUTHOR Shuo-Yiin Chang]], [[Bo Li|AUTHOR Bo Li]], [[Tara N. Sainath|AUTHOR Tara N. Sainath]], [[Gabor Simko|AUTHOR Gabor Simko]], [[Carolina Parada|AUTHOR Carolina Parada]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3812–3816
</span></p></div>
<div class="cpabstractcardabstract"><p>The task of endpointing is to determine when the user has finished speaking. This is important for interactive speech applications such as voice search and Google Home. In this paper, we propose a GLDNN-based (grid long short-term memory deep neural network) endpointer model and show that it provides significant improvements over a state-of-the-art CLDNN (convolutional, long short-term memory, deep neural network) model. Specifically, we replace the convolution layer in the CLDNN with a grid LSTM layer that models both spectral and temporal variations through recurrent connections. Results show that the GLDNN achieves 32% relative improvement in false alarm rate at a fixed false reject rate of 2%, and reduces median latency by 11%. We also include detailed experiments investigating why grid LSTMs offer better performance than convolution layers. Analysis reveals that the recurrent connection along the frequency axis is an important factor that greatly contributes to the performance of grid LSTMs, especially in the presence of background noise. Finally, we also show that multichannel input further increases robustness to background speech. Overall, we achieve 16% (100 ms) endpointer latency improvement relative to our previous best model on a Voice Search Task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Arun Baby|AUTHOR Arun Baby]], [[Jeena J. Prakash|AUTHOR Jeena J. Prakash]], [[Rupak Vignesh|AUTHOR Rupak Vignesh]], [[Hema A. Murthy|AUTHOR Hema A. Murthy]]
</p><p class="cpabstractcardaffiliationlist">IIT Madras, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3817–3821
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic detection of phoneme boundaries is an important sub-task in building speech processing applications, especially text-to-speech synthesis (TTS) systems. The main drawback of the Gaussian mixture model - hidden Markov model (GMM-HMM) based forced-alignment is that the phoneme boundaries are not explicitly modeled. In an earlier work, we had proposed the use of signal processing cues in tandem with GMM-HMM based forced alignment for boundary correction for building Indian language TTS systems. In this paper, we capitalise on the ability of robust acoustic modeling techniques such as deep neural networks (DNN) and convolutional deep neural networks (CNN) for acoustic modeling. The GMM-HMM based forced alignment is replaced by DNN-HMM/CNN-HMM based forced alignment. Signal processing cues are used to correct the segment boundaries obtained using DNN-HMM/CNN-HMM segmentation. TTS systems built using these boundaries show a relative improvement in synthesis quality.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yu-Hsuan Wang|AUTHOR Yu-Hsuan Wang]], [[Cheng-Tao Chung|AUTHOR Cheng-Tao Chung]], [[Hung-Yi Lee|AUTHOR Hung-Yi Lee]]
</p><p class="cpabstractcardaffiliationlist">National Taiwan University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3822–3826
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper we analyze the gate activation signals inside the gated recurrent neural networks, and find the temporal structure of such signals is highly correlated with the phoneme boundaries. This correlation is further verified by a set of experiments for phoneme segmentation, in which better results compared to standard approaches were obtained.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ruiqing Yin|AUTHOR Ruiqing Yin]], [[Hervé Bredin|AUTHOR Hervé Bredin]], [[Claude Barras|AUTHOR Claude Barras]]
</p><p class="cpabstractcardaffiliationlist">LIMSI, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3827–3831
</span></p></div>
<div class="cpabstractcardabstract"><p>Speaker change detection is an important step in a speaker diarization system. It aims at finding speaker change points in the audio stream. In this paper, it is treated as a sequence labeling task and addressed by Bidirectional long short term memory networks (Bi-LSTM). The system is trained and evaluated on the Broadcast TV subset from ETAPE database. The result shows that the proposed model brings good improvement over conventional methods based on BIC and Gaussian Divergence. For instance, in comparison to Gaussian divergence, it produces speech turns that are 19.5% longer on average, with the same level of purity.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jinxi Guo|AUTHOR Jinxi Guo]], [[Usha Amrutha Nookala|AUTHOR Usha Amrutha Nookala]], [[Abeer Alwan|AUTHOR Abeer Alwan]]
</p><p class="cpabstractcardaffiliationlist">University of California at Los Angeles, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3712–3716
</span></p></div>
<div class="cpabstractcardabstract"><p>Text-independent speaker recognition using short utterances is a highly challenging task due to the large variation and content mismatch between short utterances. I-vector and probabilistic linear discriminant analysis (PLDA) based systems have become the standard in speaker verification applications, but they are less effective with short utterances. To address this issue, we propose a novel method, which trains a convolutional neural network (CNN) model to map the i-vectors extracted from short utterances to the corresponding long-utterance i-vectors. In order to simultaneously learn the representation of the original short-utterance i-vectors and fit the target long-version i-vectors, we jointly train a supervised-regression model with an autoencoder using CNNs. The trained CNN model is then used to generate the mapped version of short-utterance i-vectors in the evaluation stage. We compare our proposed CNN-based joint mapping method with a GMM-based joint modeling method under matched and mismatched PLDA training conditions. Experimental results using the NIST SRE 2008 dataset show that the proposed technique achieves up to 30% relative improvement under duration mismatched PLDA-training conditions and outperforms the GMM-based method. The improved systems also perform better compared with the matched-length PLDA training condition using short utterances.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shivesh Ranjan|AUTHOR Shivesh Ranjan]], [[Abhinav Misra|AUTHOR Abhinav Misra]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]]
</p><p class="cpabstractcardaffiliationlist">University of Texas at Dallas, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3717–3721
</span></p></div>
<div class="cpabstractcardabstract"><p>This study introduces a novel Curriculum Learning based Probabilistic Linear Discriminant Analysis (CL-PLDA) algorithm for improving speaker recognition in noisy conditions. CL-PLDA operates by initializing the training EM algorithm with cleaner data (easy examples), and successively adds noisier data (difficult examples) as the training progresses. This curriculum learning based approach guides the parameters of CL-PLDA to better local minima compared to regular PLDA. We test CL-PLDA on the speaker verification task of the severely noisy and degraded DARPA RATS data, and show it to significantly outperform regular PLDA across test-sets of varying duration.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shivangi Mahto|AUTHOR Shivangi Mahto]], [[Hitoshi Yamamoto|AUTHOR Hitoshi Yamamoto]], [[Takafumi Koshinaka|AUTHOR Takafumi Koshinaka]]
</p><p class="cpabstractcardaffiliationlist">NEC, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3722–3726
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes i-vector transformations using neural networks for achieving noise-robust speaker recognition. A novel discriminative denoising autoencoder (DDAE) is employed on i-vectors to remove additive noise effects. The DDAE is trained to denoise and classify noisy i-vectors simultaneously, making it possible to add discriminability to the denoised i-vectors. Speaker recognition experiments on the NIST SRE 2012 task show 32% better error performance as compared to a baseline system. Also, our proposed method outperforms such conventional methods as multi-condition training and a basic denoising autoencoder.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Qiongqiong Wang|AUTHOR Qiongqiong Wang]], [[Takafumi Koshinaka|AUTHOR Takafumi Koshinaka]]
</p><p class="cpabstractcardaffiliationlist">NEC, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3727–3731
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents, for the first time, unsupervised discriminative training of probabilistic linear discriminant analysis (unsupervised DT-PLDA). While discriminative training avoids the problem of generative training based on probabilistic model assumptions that often do not agree with actual data, it has been difficult to apply it to unsupervised scenarios because it can fit data with almost any labels. This paper focuses on unsupervised training of DT-PLDA in the application of domain adaptation in i-vector based speaker verification systems, using unlabeled in-domain data. The proposed method makes it possible to conduct discriminative training, i.e., estimation of model parameters and unknown labels, by employing data statistics as a regularization term in addition to the original objective function in DT-PLDA. An experiment on a NIST Speaker Recognition Evaluation task shows that the proposed method outperforms a conventional method using speaker clustering and performs almost as well as supervised DT-PLDA.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jahangir Alam|AUTHOR Jahangir Alam]]^^1^^, [[Patrick Kenny|AUTHOR Patrick Kenny]]^^1^^, [[Gautam Bhattacharya|AUTHOR Gautam Bhattacharya]]^^1^^, [[Marcel Kockmann|AUTHOR Marcel Kockmann]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CRIM, Canada; ^^2^^VoiceTrust, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3732–3736
</span></p></div>
<div class="cpabstractcardabstract"><p>The main challenges introduced in the 2016 NIST speaker recognition evaluation (SRE16) are domain mismatch between training and evaluation data, duration variability in test recordings and unlabeled in-domain training data. This paper outlines the systems developed at CRIM for SRE16. To tackle the domain mismatch problem, we apply minimum divergence training to adapt a conventional i-vector extractor to the task domain. Specifically, we take an out-of-domain trained i-vector extractor as an initialization and perform few iterations of minimum divergence training on the unlabeled data provided. Next, we non-linearly transform the adapted i-vectors by learning a speaker classifier neural network. Speaker features extracted from this network have been shown to be more robust than i-vectors under domain mismatch conditions with a reduction in equal error rates of 2–3% absolute. Finally, we propose a new Beta-Bernoulli backend that models the features supplied by the speaker classifier network. Our best single system is the speaker classifier network - Beta-Bernoulli backend combination. Overall system performance was very satisfactory for the fixed condition task. With our submitted fused system we achieve an equal error rate of 9.89%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Diego Castan|AUTHOR Diego Castan]]^^1^^, [[Mitchell McLaren|AUTHOR Mitchell McLaren]]^^1^^, [[Luciana Ferrer|AUTHOR Luciana Ferrer]]^^2^^, [[Aaron Lawson|AUTHOR Aaron Lawson]]^^1^^, [[Alicia Lozano-Diez|AUTHOR Alicia Lozano-Diez]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^SRI International, USA; ^^2^^Universidad de Buenos Aires, Argentina; ^^3^^Universidad Autónoma de Madrid, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3737–3741
</span></p></div>
<div class="cpabstractcardabstract"><p>Unsupervised techniques for the adaptation of speaker recognition are important due to the problem of condition mismatch that is prevalent when applying speaker recognition technology to new conditions and the general scarcity of labeled ‘in-domain’ data. In the recent NIST 2016 Speaker Recognition Evaluation (SRE), symmetric score normalization (S-norm) and calibration using unlabeled in-domain data were shown to be beneficial. Because calibration requires speaker labels for training, speaker-clustering techniques were used to generate pseudo-speakers for learning calibration parameters in those cases where only unlabeled in-domain data was available. These methods performed well in the SRE16. It is unclear, however, whether those techniques generalize well to other data sources. In this work, we benchmark these approaches on several distinctly different databases, after we describe our SRI-CON-UAM team system submission for the NIST 2016 SRE. Our analysis shows that while the benefit of S-norm is also observed across other datasets, applying speaker-clustered calibration provides considerably greater benefit to the system in the context of new acoustic conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[K. Abidi|AUTHOR K. Abidi]]^^1^^, [[M.A. Menacer|AUTHOR M.A. Menacer]]^^2^^, [[Kamel Smaïli|AUTHOR Kamel Smaïli]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^ESI, Algeria; ^^2^^LORIA, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3742–3746
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper addresses the issue of comparability of comments extracted from Youtube. The comments concern spoken Algerian that could be either local Arabic, Modern Standard Arabic or French. This diversity of expression gives rise to a huge number of problems concerning the data processing. In this article, several methods of alignment will be proposed and tested. The method which permits the best alignment is a Word2Vec-based approach that will be used iteratively. This recurrent call of Word2Vec allows us to significantly improve the results of comparability. In fact, a dictionary-based approach leads to a Recall of 4, while our approach allows one to get a Recall of 33 at rank 1. Thanks to this approach, we built from Youtube CALYOU, a Comparable Corpus of the spoken Algerian.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Abhishek Narwekar|AUTHOR Abhishek Narwekar]]^^1^^, [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Illinois at Urbana-Champaign, USA; ^^2^^Indian Institute of Science, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3747–3751
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes the acquisition of PRAV, a phonetically rich audio-visual Corpus. The PRAV Corpus contains audio as well as visual recordings of 2368 sentences from the TIMIT corpus each spoken by four subjects, making it the largest audio-visual corpus in the literature in terms of the number of sentences per subject. Visual features, comprising the coordinates of points along the contour of the subjects’ lips, have been extracted for the entire PRAV Corpus using the Active Appearance Models (AAM) algorithm and have been made available along with the audio and video recordings. The subjects being Indian makes PRAV an ideal resource for audio-visual speech study with non-native English speakers. Moreover, this paper describes how the large number of sentences per subject makes the PRAV Corpus a significant dataset by highlighting its utility in exploring a number of potential research problems including visual speech synthesis and perception studies.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ahmed Hussen Abdelaziz|AUTHOR Ahmed Hussen Abdelaziz]]
</p><p class="cpabstractcardaffiliationlist">ICSI, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3752–3756
</span></p></div>
<div class="cpabstractcardabstract"><p>Although audio-visual speech is well known to improve the robustness properties of automatic speech recognition (ASR) systems against noise, the realm of audio-visual ASR (AV-ASR) has not gathered the research momentum it deserves. This is mainly due to the lack of audio-visual corpora and the need to combine two fields of knowledge: ASR and computer vision. This paper describes the NTCD-TIMIT database and baseline that can overcome these two barriers and attract more research interest to AV-ASR. The NTCD-TIMIT corpus has been created by adding six noise types at a range of signal-to-noise ratios to the speech material of the recently published TCD-TIMIT corpus. NTCD-TIMIT comprises visual features that have been extracted from the TCD-TIMIT video recordings using the visual front-end presented in this paper. The database contains also Kaldi scripts for training and decoding audio-only, video-only, and audio-visual ASR models. The baseline experiments and results obtained using these scripts are detailed in this paper.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[David M. Howcroft|AUTHOR David M. Howcroft]], [[Dietrich Klakow|AUTHOR Dietrich Klakow]], [[Vera Demberg|AUTHOR Vera Demberg]]
</p><p class="cpabstractcardaffiliationlist">Universität des Saarlandes, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3757–3761
</span></p></div>
<div class="cpabstractcardabstract"><p>Natural language generation (NLG) systems rely on corpora for both hand-crafted approaches in a traditional NLG architecture and for statistical end-to-end (learned) generation systems. Limitations in existing resources, however, make it difficult to develop systems which can vary the linguistic properties of an utterance as needed. For example, when users’ attention is split between a linguistic and a secondary task such as driving, a generation system may need to reduce the information density of an utterance to compensate for the reduction in user attention.
We introduce a new corpus in the restaurant recommendation and comparison domain, collected in a paraphrasing paradigm, where subjects wrote texts targeting either a general audience or an elderly family member. This design resulted in a corpus of more than 5000 texts which exhibit a variety of lexical and syntactic choices and differ with respect to average word & sentence length and surprisal. The corpus includes two levels of meaning representation: flat ‘semantic stacks’ for propositional content and Rhetorical Structure Theory (RST) relations between these propositions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[André Mansikkaniemi|AUTHOR André Mansikkaniemi]], [[Peter Smit|AUTHOR Peter Smit]], [[Mikko Kurimo|AUTHOR Mikko Kurimo]]
</p><p class="cpabstractcardaffiliationlist">Aalto University, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3762–3766
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic speech recognition (ASR) systems require large amounts of transcribed speech data, for training state-of-the-art deep neural network (DNN) acoustic models. Transcribed speech is a scarce and expensive resource, and ASR systems are prone to underperform in domains where there is not a lot of training data available. In this work, we open up a vast and previously unused resource of transcribed speech for Finnish, by retrieving and aligning all the recordings and meeting transcripts from the web portal of the Parliament of Finland. Short speech-text segment pairs are retrieved from the audio and text material, by using the Levenshtein algorithm to align the first-pass ASR hypotheses with the corresponding meeting transcripts. DNN acoustic models are trained on the automatically constructed corpus, and performance is compared to other models trained on a commercially available speech corpus. Model performance is evaluated on Finnish parliament speech, by dividing the testing set into seen and unseen speakers. Performance is also evaluated on broadcast speech to test the general applicability of the parliament speech corpus. We also study the use of meeting transcripts in language model adaptation, to achieve additional gains in speech recognition accuracy of Finnish parliament speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Omnia Abdo|AUTHOR Omnia Abdo]]^^1^^, [[Sherif Abdou|AUTHOR Sherif Abdou]]^^2^^, [[Mervat Fashal|AUTHOR Mervat Fashal]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Alexandria University, Egypt; ^^2^^Cairo University, Egypt</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3767–3771
</span></p></div>
<div class="cpabstractcardabstract"><p>The present research aims to build an MSA audio-visual corpus. The corpus is annotated both phonetically and visually and dedicated to emotional speech processing studies. The building of the corpus consists of 5 main stages: speaker selection, sentences selection, recording, annotation and evaluation. 500 sentences were critically selected based on their phonemic distribution. The speaker was instructed to read the same 500 sentences with 6 emotions (Happiness – Sadness – Fear – Anger – Inquiry – Neutral). A sample of 50 sentences was selected for annotation. The corpus evaluation modules were: audio, visual and audio-visual subjective evaluation.
The corpus evaluation process showed that happy, anger and inquiry emotions were better recognized visually (94%, 96% and 96%) than audibly (63.6%, 74% and 74%) and the audio visual evaluation scores (96%, 89.6% and 80.8%). Sadness and fear emotion on the other hand were better recognized audibly (76.8% and 97.6%) than visually (58% and 78.8%) and the audio visual evaluation scores were (65.6% and 90%).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Vincent Hughes|AUTHOR Vincent Hughes]], [[Paul Foulkes|AUTHOR Paul Foulkes]]
</p><p class="cpabstractcardaffiliationlist">University of York, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3772–3776
</span></p></div>
<div class="cpabstractcardabstract"><p>In forensic voice comparison, it is essential to consider not only the similarity between samples, but also the typicality of the evidence in the relevant population. This is explicit within the likelihood ratio (LR) framework. A significant issue, however, is the definition of the relevant population. This paper explores the complexity of population selection for voice evidence. We evaluate the effects of population specificity in terms of regional background on LR output using combinations of the F1, F2, and F3 trajectories of the diphthong /aɪ/. LRs were computed using development and reference data which were regionally matched (Standard Southern British English) and mixed (general British English) relative to the test data. These conditions reflect the paradox that without knowing who the offender is, it is not possible to know the population of which he is a member. Results show that the more specific population produced stronger evidence and better system validity than the more general definition. However, as region-specific voice features (lower formants) were removed, the difference in the output from the matched and mixed systems was reduced. This shows that the effects of population selection are dependent on the sociolinguistic constraints on the feature analysed.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Véronique Delvaux|AUTHOR Véronique Delvaux]], [[Lise Caucheteux|AUTHOR Lise Caucheteux]], [[Kathy Huet|AUTHOR Kathy Huet]], [[Myriam Piccaluga|AUTHOR Myriam Piccaluga]], [[Bernard Harmegnies|AUTHOR Bernard Harmegnies]]
</p><p class="cpabstractcardaffiliationlist">Université de Mons, Belgium</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3777–3781
</span></p></div>
<div class="cpabstractcardabstract"><p>The aim of this study is to assess the potential for deliberately changing one’s voice as a means to conceal or falsify identity, comparing acoustic and perceptual measurements of carefully controlled speech productions.
Twenty-two non-expert speakers read a phonetically-balanced text 5 times in various conditions including natural speech, free vocal disguise (2 disguises per speaker), impersonation of a common target for all speakers, impersonation of one specific target per speaker. Long-term average spectra (LTAS) were computed for each reading and multiple pairwise comparisons were performed using the SDDD dissimilarity index.
The acoustic analysis showed that all speakers were able to deliberately change their voice beyond self-typical natural variation, whether in attempting to simply disguise their identity or to impersonate a specific target. Although the magnitude of the acoustic changes was comparable in disguise vs. impersonation, overall it was limited in that it did not achieve between-speaker variation levels. Perceptual judgements performed on the same material revealed that naive listeners were better at discriminating between impersonators and targets than at simply detecting voice disguise.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yaru Wu|AUTHOR Yaru Wu]]^^1^^, [[Martine Adda-Decker|AUTHOR Martine Adda-Decker]]^^2^^, [[Cécile Fougeron|AUTHOR Cécile Fougeron]]^^1^^, [[Lori Lamel|AUTHOR Lori Lamel]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LPP (UMR 7018), France; ^^2^^LPP (UMR 7018), France; ^^3^^LIMSI, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3782–3786
</span></p></div>
<div class="cpabstractcardabstract"><p>The study investigates different factors influencing schwa realization in French: phonological factors, speech style, gender, and socio-professional status. Three large corpora, two of public journalistic speech (ESTER and ETAPE) and one of casual speech (NCCFr) are used. The absence/presence of schwa is automatically decided via forced alignment, which has a successful performance rate of 95%. Only polysyllabic words including a potential schwa in the word-initial syllable are studied in order to control for variability in word structure and position. The effect of the left context, grouped into classes of a word final vowel or final consonant or a pause, is studied. Words preceded by a vowel (V#) tend to favor schwa deletion. Interestingly, words preceded by a consonant or a pause have similar behaviors: speakers tend to maintain schwa in both contexts. As can be expected, the more casual the speech, the more frequently schwa is dropped. Males tend to delete more schwas than females, and journalists are more likely to delete schwa than politicians. These results suggest that beyond phonology, other factors such as gender, style and socio-professional status influence the realization of schwa.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Daniel Duran|AUTHOR Daniel Duran]]^^1^^, [[Jagoda Bruni|AUTHOR Jagoda Bruni]]^^1^^, [[Grzegorz Dogil|AUTHOR Grzegorz Dogil]]^^1^^, [[Justus Roux|AUTHOR Justus Roux]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität Stuttgart, Germany; ^^2^^SADiLaR, South Africa</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3787–3791
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a first phonetic analysis of voiced, devoiced and ejectivized stop sounds in Setswana taken from two different speech databases. It is observed that rules governing the voicing/devoicing processes depend on sociophonetic and ethnolinguistic factors. Speakers, especially women, from the rural North West area of South Africa tend to preserve the phonologically stronger devoiced (or even ejectivized) forms, both in single standing plosives as well as in the post-nasal context (NC̥). On the other hand, in the more industrialized area of Gauteng, voiced forms of plosives prevail. The empirically observed data is modelled with KaMoso, a computational multi-agent simulation framework. So far, this framework focused on open social structures (whole world networks) that facilitate language modernization through exchange between different phonetic forms. The updated model has been enriched with social/phonetic simulation scenarios in which speech agents interact with each other in a so-called parochial setting, reflecting smaller, closed communities. Both configurations correspond to the sociopolitical changes that have been taking place in South Africa over the last decades, showing the differences in speech between women and men from rural and industrialized areas of the country.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lea S. Kohtz|AUTHOR Lea S. Kohtz]]^^1^^, [[Oliver Niebuhr|AUTHOR Oliver Niebuhr]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Christian-Albrechts-Universität zu Kiel, Germany; ^^2^^University of Southern Denmark, Denmark</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3792–3796
</span></p></div>
<div class="cpabstractcardabstract"><p>A perception experiment involving 28 German listeners is presented. It investigates — for sequences of request, pause, and affirmative answer — the effect of pause duration on the answerer’s perceived willingness to comply with the request. Replicating earlier results on American English, perceived willingness was found to decrease with increasing pause duration, particularly above a “tolerance threshold” of 600 ms. Refining and qualifying this replicated result, the perception experiment showed additional effects of speaking-rate context and pause quality (silence vs. breathing vs. café noise) on perceived willingness judgments. The overall results picture is discussed with respect to the origin of the “tolerance threshold”, the status of breathing in speech, and the function of pauses in communication.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Iona Gessinger|AUTHOR Iona Gessinger]], [[Eran Raveh|AUTHOR Eran Raveh]], [[Sébastien Le Maguer|AUTHOR Sébastien Le Maguer]], [[Bernd Möbius|AUTHOR Bernd Möbius]], [[Ingmar Steiner|AUTHOR Ingmar Steiner]]
</p><p class="cpabstractcardaffiliationlist">Universität des Saarlandes, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3797–3801
</span></p></div>
<div class="cpabstractcardabstract"><p>To shed light on the question whether humans converge phonetically to synthesized speech, a shadowing experiment was conducted using three different types of stimuli — natural speaker, diphone synthesis, and HMM synthesis. Three segment-level phonetic features of German that are well-known to vary across native speakers were examined. The first feature triggered convergence in roughly one third of the cases for all stimulus types. The second feature showed generally a small amount of convergence, which may be due to the nature of the feature itself. Still the effect was strongest for the natural stimuli, followed by the HMM stimuli and weakest for the diphone stimuli. The effect of the third feature was clearly observable for the natural stimuli and less pronounced in the synthetic stimuli. This is presumably a result of the partly insufficient perceptibility of this target feature in the synthetic stimuli and demonstrates the necessity of gaining fine-grained control over the synthesis output, should it be intended to implement capabilities of phonetic convergence on the segmental level in spoken dialogue systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shubham Toshniwal|AUTHOR Shubham Toshniwal]], [[Hao Tang|AUTHOR Hao Tang]], [[Liang Lu|AUTHOR Liang Lu]], [[Karen Livescu|AUTHOR Karen Livescu]]
</p><p class="cpabstractcardaffiliationlist">TTIC, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3532–3536
</span></p></div>
<div class="cpabstractcardabstract"><p>End-to-end training of deep learning-based models allows for implicit learning of intermediate representations based on the final task loss. However, the end-to-end approach ignores the useful domain knowledge encoded in explicit intermediate-level supervision. We hypothesize that using intermediate representations as auxiliary supervision at lower levels of deep networks may be a good way of combining the advantages of end-to-end training and more traditional pipeline approaches. We present experiments on conversational speech recognition where we use lower-level tasks, such as phoneme recognition, in a multitask training approach with an encoder-decoder model for direct character transcription. We compare multiple types of lower-level tasks and analyze the effects of the auxiliary tasks. Our results on the Switchboard corpus show that this approach improves recognition accuracy over a standard encoder-decoder model on the Eval2000 test set.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Matt Shannon|AUTHOR Matt Shannon]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3537–3541
</span></p></div>
<div class="cpabstractcardabstract"><p>State-level minimum Bayes risk (sMBR) training has become the de facto standard for sequence-level training of speech recognition acoustic models. It has an elegant formulation using the expectation semiring, and gives large improvements in word error rate (WER) over models trained solely using cross-entropy (CE) or connectionist temporal classification (CTC). sMBR training optimizes the expected number of frames at which the reference and hypothesized acoustic states differ. It may be preferable to optimize the expected WER, but WER does not interact well with the expectation semiring, and previous approaches based on computing expected WER exactly involve expanding the lattices used during training. In this paper we show how to perform optimization of the expected WER by sampling paths from the lattices used during conventional sMBR training. The gradient of the expected WER is itself an expectation, and so may be approximated using Monte Carlo sampling. We show experimentally that optimizing WER during acoustic model training gives 5% relative improvement in WER over a well-tuned sMBR baseline on a 2-channel query recognition task (Google Home).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tara N. Sainath|AUTHOR Tara N. Sainath]]^^1^^, [[Vijayaditya Peddinti|AUTHOR Vijayaditya Peddinti]]^^2^^, [[Olivier Siohan|AUTHOR Olivier Siohan]]^^1^^, [[Arun Narayanan|AUTHOR Arun Narayanan]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Google, USA; ^^2^^Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3542–3546
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we describe a method to reduce the overall number of neural network training steps, during both cross-entropy and sequence training stages. This is achieved through the interpolation of frame-level CE and sequence level SMBR criteria, during the sequence training stage. This interpolation is known as f-smoothing and has previously been just used to prevent overfitting during sequence training. However, in this paper, we investigate its application to reduce the training time. We explore different interpolation strategies to reduce the overall training steps; and achieve a reduction of up to 25% with almost no degradation in word error rate (WER). Finally, we explore the generalization of f-smoothing to other tasks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhong Meng|AUTHOR Zhong Meng]], [[Biing-Hwang Juang|AUTHOR Biing-Hwang Juang]]
</p><p class="cpabstractcardaffiliationlist">Georgia Institute of Technology, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3547–3551
</span></p></div>
<div class="cpabstractcardabstract"><p>It has been shown in [1, 2] that improved performance can be achieved by formulating the keyword spotting as a non-uniform error automatic speech recognition problem. In this work, we discriminatively train a deep bidirectional long short-term memory (BLSTM) — hidden Markov model (HMM) based acoustic model with non-uniform boosted minimum classification error (BMCE) criterion which imposes more significant error cost on the keywords than those on the non-keywords. By introducing the BLSTM, the context information in both the past and the future are stored and updated to predict the desired output and the long-term dependencies within the speech signal are well captured. With non-uniform BMCE objective, the BLSTM is trained so that the recognition errors related to the keywords are remarkably reduced. The BLSTM is optimized using back-propagation through time and stochastic gradient descent. The keyword spotting system is implemented within weighted finite state transducer framework. The proposed method achieves 5.49% and 7.37% absolute figure-of-merit improvements respectively over the BLSTM and the feedforward deep neural network baseline systems trained with cross-entropy criterion for the keyword spotting task on Switchboard-1 Release 2 dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Pranay Dighe|AUTHOR Pranay Dighe]], [[Afsaneh Asaei|AUTHOR Afsaneh Asaei]], [[Hervé Bourlard|AUTHOR Hervé Bourlard]]
</p><p class="cpabstractcardaffiliationlist">Idiap Research Institute, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3552–3556
</span></p></div>
<div class="cpabstractcardabstract"><p>Deep neural network (DNN) acoustic models yield posterior probabilities of senone classes. Recent studies support the existence of low-dimensional subspaces underlying senone posteriors. Principal component analysis (PCA) is applied to identify eigenposteriors and perform low-dimensional projection of the training data posteriors. The resulting enhanced posteriors are applied as soft targets for training a better DNN acoustic model under the student-teacher framework. The present work advances this approach by studying incorporation of sequence discriminative training. We demonstrate how to combine the gains from eigenposterior based enhancement with sequence discrimination to improve ASR using semi-supervised training. Evaluation on AMI meeting corpus yields nearly 4% absolute reduction in word error rate (WER) compared to the baseline DNN trained with cross entropy objective. In this context, eigenposterior enhancement of the soft targets is crucial to enable additive improvement using out-of-domain untranscribed data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ming-Han Yang|AUTHOR Ming-Han Yang]]^^1^^, [[Hung-Shin Lee|AUTHOR Hung-Shin Lee]]^^1^^, [[Yu-Ding Lu|AUTHOR Yu-Ding Lu]]^^1^^, [[Kuan-Yu Chen|AUTHOR Kuan-Yu Chen]]^^1^^, [[Yu Tsao|AUTHOR Yu Tsao]]^^1^^, [[Berlin Chen|AUTHOR Berlin Chen]]^^2^^, [[Hsin-Min Wang|AUTHOR Hsin-Min Wang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Academia Sinica, Taiwan; ^^2^^National Taiwan Normal University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3557–3561
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech data typically contain information irrelevant to automatic speech recognition (ASR), such as speaker variability and channel/environmental noise, lurking deep within acoustic features. Such unwanted information is always mixed together to stunt the development of an ASR system. In this paper, we propose a new framework based on autoencoders for acoustic modeling in ASR. Unlike other variants of autoencoder neural networks, our framework is able to isolate phonetic components from a speech utterance by simultaneously taking two kinds of objectives into consideration. The first one relates to the minimization of reconstruction errors and benefits to learn most salient and useful properties of the data. The second one functions in the middlemost code layer, where the categorical distribution of the context-dependent phone states is estimated for phoneme discrimination and the derivation of acoustic scores, the proximity relationship among utterances spoken by the same speaker are preserved, and the intra-utterance noise is modeled and abstracted away. We describe the implementation of the discriminative autoencoders for training tri-phone acoustic models and present TIMIT phone recognition results, which demonstrate that our proposed method outperforms the conventional DNN-based approach.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zbyněk Zajíc|AUTHOR Zbyněk Zajíc]], [[Marek Hrúz|AUTHOR Marek Hrúz]], [[Luděk Müller|AUTHOR Luděk Müller]]
</p><p class="cpabstractcardaffiliationlist">University of West Bohemia, Czech Republic</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3562–3566
</span></p></div>
<div class="cpabstractcardabstract"><p>The aim of this paper is to investigate the benefit of information from a speaker change detection system based on Convolutional Neural Network (CNN) when applied to the process of accumulation of statistics for an i-vector generation. The investigation is carried out on the problem of diarization. In our system, the output of the CNN is a probability value of a speaker change in a conversation for a given time segment. According to this probability, we cut the conversation into short segments that are then represented by the i-vector (to describe a speaker in it). We propose a technique to utilize the information from the CNN for the weighting of the acoustic data in a segment to refine the statistics accumulation process. This technique enables us to represent the speaker better in the final i-vector. The experiments on the English part of the CallHome corpus show that our proposed refinement of the statistics accumulation is beneficial with the relative improvement of Diarization Error Rate by almost 16% when compared to the speaker diarization system without statistics refinement.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Arindam Jati|AUTHOR Arindam Jati]], [[Panayiotis Georgiou|AUTHOR Panayiotis Georgiou]]
</p><p class="cpabstractcardaffiliationlist">University of Southern California, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3567–3571
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a novel approach, we term Speaker2Vec, to derive a speaker-characteristics manifold learned in an unsupervised manner. The proposed representation can be employed in different applications such as diarization, speaker identification or, as in our evaluation test case, speaker segmentation. Speaker2Vec exploits large amounts of unlabeled training data and the assumption of short-term active-speaker stationarity to derive a speaker embedding using Deep Neural Networks (DNN). We assume that temporally-near speech segments belong to the same speaker, and as such a joint representation connecting these nearby segments can encode their common information. Thus, this bottleneck representation will be capturing mainly speaker-specific information. Such training can take place in a completely unsupervised manner. For testing, our trained model generates the embeddings for the test audio, and applies a simple distance metric to detect speaker-change points. The paper also proposes a strategy for unsupervised adaptation of the DNN models to the application domain. The proposed method outperforms the state-of-the-art speaker segmentation algorithms and MFCC based baseline methods on four evaluation datasets, while it allows for further improvements by employing this embedding into supervised training methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gaël Le Lan|AUTHOR Gaël Le Lan]]^^1^^, [[Delphine Charlet|AUTHOR Delphine Charlet]]^^1^^, [[Anthony Larcher|AUTHOR Anthony Larcher]]^^2^^, [[Sylvain Meignier|AUTHOR Sylvain Meignier]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Orange Labs, France; ^^2^^LIUM (EA 4023), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3572–3576
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper investigates a novel neural scoring method, based on conventional i-vectors, to perform speaker diarization and linking of large collections of recordings. Using triplet loss for training, the network projects i-vectors in a space that better separates speakers in terms of cosine similarity. Experiments are run on two French TV collections built from REPERE [1] and ETAPE [2] campaigns corpora, the system being trained on French Radio data. Results indicate that the proposed approach outperforms conventional cosine and Probabilistic Linear Discriminant Analysis scoring methods on both within- and cross-recording diarization tasks, with a Diarization Error Rate reduction of 14% in average.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yishai Cohen|AUTHOR Yishai Cohen]], [[Itshak Lapidot|AUTHOR Itshak Lapidot]]
</p><p class="cpabstractcardaffiliationlist">Afeka Tel Aviv Academic College of Engineering, Israel</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3577–3581
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper focuses on estimating clustering validity by using logistic regression. For many applications it might be important to estimate the quality of the clustering, e.g. in case of speech segments’ clustering, make a decision whether to use the clustered data for speaker verification. In the case of short segments speakers clustering, the common criteria for cluster validity are average cluster purity (ACP), average speaker purity (ASP) and K — the geometric mean between the two measures. In practice, true labels are not available for evaluation; hence, they have to be estimated from the clustering itself. In this paper, mean-shift clustering with PLDA score is applied in order to cluster short speaker segments represented as i-vectors. Different statistical parameters are then estimated on the clustered data and are used to train logistic regression to estimate ACP, ASP and K. It was found that logistic regression can be a good predictor of the actual ACP, ASP and K, and yields reasonable information regarding the clustering quality.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Guillaume Wisniewski|AUTHOR Guillaume Wisniewksi]], [[Hervé Bredin|AUTHOR Hervé Bredin]], [[G. Gelly|AUTHOR G. Gelly]], [[Claude Barras|AUTHOR Claude Barras]]
</p><p class="cpabstractcardaffiliationlist">LIMSI, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3582–3586
</span></p></div>
<div class="cpabstractcardabstract"><p>Real-time speaker diarization has many potential applications, including public security, biometrics or forensics. It can also significantly speed up the indexing of increasingly large multimedia archives. In this paper, we address the issue of low-latency speaker diarization that consists in continuously detecting new or reoccurring speakers within an audio stream, and determining when each speaker is active with a low latency (e.g. every second). This is in contrast with most existing approaches in speaker diarization that rely on multiple passes over the complete audio recording. The proposed approach combines speaker turn neural embeddings with an incremental structure prediction approach inspired by state-of-the-art Natural Language Processing models for Part-of-Speech tagging and dependency parsing. It can therefore leverage both information describing the utterance and the inherent temporal structure of interactions between speakers to learn, in a supervised framework, to identify speakers. Experiments on the Etape broadcast news benchmark validate the approach.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hervé Bredin|AUTHOR Hervé Bredin]]
</p><p class="cpabstractcardaffiliationlist">LIMSI, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3587–3591
</span></p></div>
<div class="cpabstractcardabstract"><p>‘pyannote.metrics‘ is an open-source Python library aimed at researchers working in the wide area of speaker diarization. It provides a command line interface (CLI) to improve reproducibility and comparison of speaker diarization research results. Through its application programming interface (API), a large set of evaluation metrics is available for diagnostic purposes of all modules of typical speaker diarization pipelines (speech activity detection, speaker change detection, clustering, and identification). Finally, thanks to visualization capabilities, we show that it can also be used for detailed error analysis purposes. ‘pyannote.metrics‘ can be downloaded from ‘http://pyannote.github.io‘.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhipeng Chen|AUTHOR Zhipeng Chen]], [[Ji Wu|AUTHOR Ji Wu]]
</p><p class="cpabstractcardaffiliationlist">Tsinghua University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3592–3596
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper we present a rescoring approach for keyword search (KWS) based on neural networks (NN). This approach exploits only the lattice context in a detected time interval instead of its corresponding audio. The most informative arcs in lattice context are selected and represented as a matrix, where words on arcs are represented in an embedding space with respect to their pronunciations. Then convolutional neural networks (CNNs) are employed to capture distinctive features from this matrix. A rescoring model is trained to minimize term-weighted sigmoid cross entropy so as to match the evaluation metric. Experiments on single-word queries show that lattice context brings complementary gains over normalized posterior scores. Performance on both in-vocabulary (IV) and out-of-vocabulary (OOV) queries is improved by combining NN-based scores with standard posterior scores.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jan Trmal|AUTHOR Jan Trmal]], [[Matthew Wiesner|AUTHOR Matthew Wiesner]], [[Vijayaditya Peddinti|AUTHOR Vijayaditya Peddinti]], [[Xiaohui Zhang|AUTHOR Xiaohui Zhang]], [[Pegah Ghahremani|AUTHOR Pegah Ghahremani]], [[Yiming Wang|AUTHOR Yiming Wang]], [[Vimal Manohar|AUTHOR Vimal Manohar]], [[Hainan Xu|AUTHOR Hainan Xu]], [[Daniel Povey|AUTHOR Daniel Povey]], [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]]
</p><p class="cpabstractcardaffiliationlist">Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3597–3601
</span></p></div>
<div class="cpabstractcardabstract"><p>The IARPA BABEL program has stimulated worldwide research in keyword search technology for low resource languages, and the NIST OpenKWS evaluations are the de facto benchmark test for such capabilities. The 2016 OpenKWS evaluation featured Georgian speech, and had 10 participants from across the world. This paper describes the Kaldi system developed to assist IARPA in creating a competitive baseline against which participants were evaluated, and to provide a truly open source system to all participants to support their research. This system handily met the BABEL program goals of 0.60 ATWV and 50% WER, achieving 0.70 ATWV and 38% WER with a single ASR system, i.e. without ASR system combination. All except one OpenKWS participant used Kaldi components in their submissions, typically in conjunction with system combination. This paper therefore complements all other OpenKWS-based papers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuri Khokhlov|AUTHOR Yuri Khokhlov]]^^1^^, [[Ivan Medennikov|AUTHOR Ivan Medennikov]]^^1^^, [[Aleksei Romanenko|AUTHOR Aleksei Romanenko]]^^1^^, [[Valentin Mendelev|AUTHOR Valentin Mendelev]]^^1^^, [[Maxim Korenevsky|AUTHOR Maxim Korenevsky]]^^1^^, [[Alexey Prudnikov|AUTHOR Alexey Prudnikov]]^^2^^, [[Natalia Tomashenko|AUTHOR Natalia Tomashenko]]^^3^^, [[Alexander Zatvornitsky|AUTHOR Alexander Zatvornitsky]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^STC-innovations, Russia; ^^2^^Mail.Ru Group, Russia; ^^3^^LIUM (EA 4023), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3602–3606
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes the keyword search system developed by the STC team in the framework of OpenKWS 2016 evaluation. The acoustic modeling techniques included i-vectors based speaker adaptation, multilingual speaker-dependent bottleneck features, and a combination of feedforward and recurrent neural networks. To improve the language model, we augmented the training data provided by the organizers with texts generated by the character-level recurrent neural networks trained on different data sets. This led to substantial reductions in the out-of-vocabulary (OOV) and word error rates. The OOV search problem was solved with the help of a novel approach based on lattice generated phone posteriors and a highly optimized decoder. This approach outperformed familiar OOV search implementations in terms of speed and demonstrated comparable or better search quality.
The system was among the top three systems in the evaluation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ming Sun|AUTHOR Ming Sun]]^^1^^, [[David Snyder|AUTHOR David Snyder]]^^2^^, [[Yixin Gao|AUTHOR Yixin Gao]]^^1^^, [[Varun Nagaraja|AUTHOR Varun Nagaraja]]^^1^^, [[Mike Rodehorst|AUTHOR Mike Rodehorst]]^^1^^, [[Sankaran Panchapagesan|AUTHOR Sankaran Panchapagesan]]^^1^^, [[Nikko Strom|AUTHOR Nikko Strom]]^^1^^, [[Spyros Matsoukas|AUTHOR Spyros Matsoukas]]^^1^^, [[Shiv Vitaladevuni|AUTHOR Shiv Vitaladevuni]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Amazon.com, USA; ^^2^^Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3607–3611
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper we investigate a time delay neural network (TDNN) for a keyword spotting task that requires low CPU, memory and latency. The TDNN is trained with transfer learning and multi-task learning. Temporal subsampling enabled by the time delay architecture reduces computational complexity. We propose to apply singular value decomposition (SVD) to further reduce TDNN complexity. This allows us to first train a larger full-rank TDNN model which is not limited by CPU/memory constraints. The larger TDNN usually achieves better performance. Afterwards, its size can be compressed by SVD to meet the budget requirements. Hidden Markov models (HMM) are used in conjunction with the networks to perform keyword detection and performance is measured in terms of area under the curve (AUC) for detection error tradeoff (DET) curves. Our experimental results on a large in-house far-field corpus show that the full-rank TDNN achieves a 19.7% DET AUC reduction compared to a similar-size deep neural network (DNN) baseline. If we train a larger size full-rank TDNN first and then reduce it via SVD to the comparable size of the DNN, we obtain a 37.6% reduction in DET AUC compared to the DNN baseline.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Masayuki Suzuki|AUTHOR Masayuki Suzuki]]^^1^^, [[Gakuto Kurata|AUTHOR Gakuto Kurata]]^^1^^, [[Abhinav Sethy|AUTHOR Abhinav Sethy]]^^2^^, [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]]^^2^^, [[Kenneth W. Church|AUTHOR Kenneth W. Church]]^^2^^, [[Mark Drake|AUTHOR Mark Drake]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IBM, Japan; ^^2^^IBM, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3612–3616
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a method for searching for symbol sequences in conversations. Symbol sequences can include phone numbers, credit card numbers, and any kind of ticket (identification) numbers and are often communicated in call center conversations. Automatic extraction of these from speech is a key to many automatic speech recognition (ASR) applications such as question answering and summarization. Compared with spoken term detection (STD), symbol sequence searches have two additional problems. First, the entire symbol sequence is typically not observed continuously but in subsequences, where customers or agents speak these sequences in fragments, while the recipient repeats them to ensure they have the correct sequence. Second, we have to distinguish between different symbol sequences, for example, phone numbers versus ticket numbers or customer identification numbers. To deal with these problems, we propose to apply STD to symbol-sequence fragments and subsequently use confidence scoring to obtain the entire symbol sequence. For the confidence scoring, we propose a long short-term memory (LSTM) based approach that inputs words before and after fragments. We also propose to detect repetitions of fragments and use it for confidence scoring. Our proposed method achieves a 0.87 F-measure, in an eight-digit customer identification number search task, when operating at 20.3% WER.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Batuhan Gundogdu|AUTHOR Batuhan Gundogdu]], [[Murat Saraclar|AUTHOR Murat Saraclar]]
</p><p class="cpabstractcardaffiliationlist">Boğaziçi Üniversitesi, Turkey</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3617–3621
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose a novel approach for query modeling using neural networks for posteriorgram based keyword search (KWS). We aim to help the conventional large vocabulary continuous speech recognition (LVCSR) based KWS systems, especially on out-of-vocabulary (OOV) terms by converting the task into a template matching problem, just like the query-by-example retrieval tasks. For this, we use a dynamic time warping (DTW) based similarity search on the speaker independent posteriorgram space. In order to model the text queries as posteriorgrams, we propose a non-symmetric Siamese neural network structure which both learns a distance measure to be used in DTW and the frame representations for this specific measure. We compare this new technique with similar DTW based systems using other distance measures and query modeling techniques. We also apply system fusion of the proposed system with the LVCSR based baseline KWS system. We show that the proposed system works significantly better than other similar systems. Furthermore, when combined with the LVCSR based baseline, the proposed system provides up to 37.9% improvement on OOV terms and 9.8% improvement on all terms.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Suman Samui|AUTHOR Suman Samui]], [[Indrajit Chakrabarti|AUTHOR Indrajit Chakrabarti]], [[Soumya K. Ghosh|AUTHOR Soumya K. Ghosh]]
</p><p class="cpabstractcardaffiliationlist">IIT Kharagpur, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3622–3626
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a single-channel speech separation method implemented with a deep recurrent neural network (DRNN) using recurrent temporal restricted Boltzmann machines (RTRBM). Although deep neural network (DNN) based speech separation (denoising task) methods perform quite well compared to the conventional statistical model based speech enhancement techniques, in DNN-based methods, the temporal correlations across speech frames are often ignored, resulting in loss of spectral detail in the reconstructed output speech. In order to alleviate this issue, one RTRBM is employed for modelling the acoustic features of input (mixture) signal and two RTRBMs are trained for the two training targets (source signals). Each RTRBM attempts to model the abstractions present in the training data at each time step as well as the temporal dependencies in the training data. The entire network (consisting of three RTRBMs and one recurrent neural network) can be fine-tuned by the joint optimization of the DRNN with an extra masking layer which enforces a reconstruction constraint. The proposed method has been evaluated on the IEEE corpus and TIMIT dataset for speech denoising task. Experimental results have established that the proposed approach outperforms NMF and conventional DNN and DRNN-based speech enhancement methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Qizheng Huang|AUTHOR Qizheng Huang]], [[Changchun Bao|AUTHOR Changchun Bao]], [[Xianyun Wang|AUTHOR Xianyun Wang]]
</p><p class="cpabstractcardaffiliationlist">Beijing University of Technology, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3627–3631
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper provides an improved codebook-based speech enhancement method using multi-band excitation (MBE) model. It aims to remove the noise between the harmonics, which may exist in codebook-based enhanced speech. In general, the proposed system is based on analysis-with-synthesis (AwS) framework. During the analysis stage, acoustic features are extracted including pitch, harmonic magnitude and voicing from noisy speech. These parameters are obtained on the basis of the spectral magnitudes obtained by codebook-based method. During the synthesis stage, different synthesis strategies for voiced and unvoiced speech are employed. Besides, this paper introduces speech presence probability to modify the codebook-based Wiener filter so that more accurate acoustic parameters can be obtained. The proposed system can eliminate noise not only between the harmonics, but also in the silent segments, especially in low SNR noise environment. Experiments show that the performance of the proposed method is better than traditional codebook-based method for different types of noise.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhuo Chen|AUTHOR Zhuo Chen]], [[Yan Huang|AUTHOR Yan Huang]], [[Jinyu Li|AUTHOR Jinyu Li]], [[Yifan Gong|AUTHOR Yifan Gong]]
</p><p class="cpabstractcardaffiliationlist">Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3632–3636
</span></p></div>
<div class="cpabstractcardabstract"><p>For single-channel speech enhancement, mask learning based approach through neural network has been shown to outperform the feature mapping approach, and to be effective as a pre-processor for automatic speech recognition. However, its assumption that the mixture and clean reference must have the correspondent scale doesn’t hold in data collected from real world, and thus leads to significant performance degradation on parallel recorded data. In this paper, we first extend the mask learning based speech enhancement by integrating two types of restoration layer to address the scale mismatch problem. We further propose a novel residual learning based speech enhancement model via adding different shortcut connections to a feature mapping network. We show such a structure can benefit from both the mask learning and the feature mapping. We evaluate the proposed speech enhancement models on CHiME 3 data. Without retraining the acoustic model, the best bi-direction LSTM with residue connections yields 24.90% relative WER reduction on real data and 34.57% relative WER reduction on simulated data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bi-Cheng Yan|AUTHOR Bi-Cheng Yan]]^^1^^, [[Chin-Hong Shih|AUTHOR Chin-Hong Shih]]^^1^^, [[Shih-Hung Liu|AUTHOR Shih-Hung Liu]]^^2^^, [[Berlin Chen|AUTHOR Berlin Chen]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^National Taiwan Normal University, Taiwan; ^^2^^Delta Research Center, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3637–3641
</span></p></div>
<div class="cpabstractcardabstract"><p>Developments of noise robustness techniques are vital to the success of automatic speech recognition (ASR) systems in face of varying sources of environmental interference. Recent studies have shown that exploring low-dimensional structures of speech features can yield good robustness. Along this vein, research on low-rank representation (LRR), which considers the intrinsic structures of speech features lying on some low dimensional subspaces, has gained considerable interest from the ASR community. When speech features are contaminated with various types of environmental noise, its corresponding modulation spectra can be regarded as superpositions of unstructured sparse noise over the inherent linguistic information. As such, we in this paper endeavor to explore the low dimensional structures of modulation spectra, in the hope to obtain more noise-robust speech features. The main contribution is that we propose a novel use of the LRR-based method to discover the subspace structures of modulation spectra, thereby alleviating the negative effects of noise interference. Furthermore, we also extensively compare our approach with several well-practiced feature-based normalization methods. All experiments were conducted and verified on the Aurora-4 database and task. The empirical results show that the proposed LRR-based method can provide significant word error reductions for a typical DNN-HMM hybrid ASR system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Santiago Pascual|AUTHOR Santiago Pascual]]^^1^^, [[Antonio Bonafonte|AUTHOR Antonio Bonafonte]]^^1^^, [[Joan Serrà|AUTHOR Joan Serrà]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universitat Politècnica de Catalunya, Spain; ^^2^^Telefónica I+D, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3642–3646
</span></p></div>
<div class="cpabstractcardabstract"><p>Current speech enhancement techniques operate on the spectral domain and/or exploit some higher-level feature. The majority of them tackle a limited number of noise conditions and rely on first-order statistics. To circumvent these issues, deep networks are being increasingly used, thanks to their ability to learn complex functions from large example sets. In this work, we propose the use of generative adversarial networks for speech enhancement. In contrast to current techniques, we operate at the waveform level, training the model end-to-end, and incorporate 28 speakers and 40 different noise conditions into the same model, such that model parameters are shared across them. We evaluate the proposed model using an independent, unseen test set with two speakers and 20 alternative noise conditions. The enhanced samples confirm the viability of the proposed model, and both objective and subjective evaluations confirm the effectiveness of it. With that, we open the exploration of generative architectures for speech enhancement, which may progressively incorporate further speech-centric design choices to improve their performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Soumi Maiti|AUTHOR Soumi Maiti]], [[Michael I. Mandel|AUTHOR Michael I. Mandel]]
</p><p class="cpabstractcardaffiliationlist">CUNY Graduate Center, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3647–3651
</span></p></div>
<div class="cpabstractcardabstract"><p>Traditional noise reduction systems modify a noisy signal to make it more like the original clean signal. For speech, these methods suffer from two main problems: under-suppression of noise and over-suppression of target speech. Instead, synthesizing clean speech based on the noisy signal could produce outputs that are both noise-free and high quality. Our previous work introduced such a system using concatenative synthesis, but it required processing the clean speech at run time, which was slow and not scalable. In order to make such a system scalable, we propose here learning a similarity metric using two separate networks, one network processing the clean segments offline and another processing the noisy segments at run time. This system incorporates a ranking loss to optimize for the retrieval of appropriate clean speech segments. This model is compared against our original on the CHiME2-GRID corpus, measuring ranking performance and subjective listening tests of resyntheses.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Themos Stafylakis|AUTHOR Themos Stafylakis]], [[Georgios Tzimiropoulos|AUTHOR Georgios Tzimiropoulos]]
</p><p class="cpabstractcardaffiliationlist">University of Nottingham, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3652–3656
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose an end-to-end deep learning architecture for word-level visual speech recognition. The system is a combination of spatiotemporal convolutional, residual and bidirectional Long Short-Term Memory networks. We train and evaluate it on the Lipreading In-The-Wild benchmark, a challenging database of 500-size target-words consisting of 1.28sec video excerpts from BBC TV broadcasts. The proposed network attains word accuracy equal to 83.0%, yielding 6.8% absolute improvement over the current state-of-the-art, without using information about word boundaries during training or testing.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kwanchiva Thangthai|AUTHOR Kwanchiva Thangthai]], [[Richard Harvey|AUTHOR Richard Harvey]]
</p><p class="cpabstractcardaffiliationlist">University of East Anglia, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3657–3661
</span></p></div>
<div class="cpabstractcardabstract"><p>Although there have been some promising results in computer lipreading, there has been a paucity of data on which to train automatic systems. However the recent emergence of the TCD-TIMIT corpus, with around 6000 words, 59 speakers and seven hours of recorded audio-visual speech, allows the deployment of more recent techniques in audio-speech such as Deep Neural Networks (DNNs) and sequence discriminative training.
In this paper we combine the DNN with a Hidden Markov Model (HMM) to the, so called, hybrid DNN-HMM configuration which we train using a variety of sequence discriminative training methods. This is then followed with a weighted finite state transducer. The conclusion is that the DNN offers very substantial improvement over a conventional classifier which uses a Gaussian Mixture Model (GMM) to model the densities even when optimised with Speaker Adaptive Training. Sequence adaptive training offers further improvements depending on the precise variety employed but those improvements are of the order of 10% in word accuracy. Putting these two results together implies that lipreading is moving from something of rather esoteric interest to becoming a practical reality in the foreseeable future.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Michael Wand|AUTHOR Michael Wand]], [[Jürgen Schmidhuber|AUTHOR Jürgen Schmidhuber]]
</p><p class="cpabstractcardaffiliationlist">IDSIA, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3662–3666
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a Lipreading system, i.e. a speech recognition system using only visual features, which uses domain-adversarial training for speaker independence. Domain-adversarial training is integrated into the optimization of a lipreader based on a stack of feedforward and LSTM (Long Short-Term Memory) recurrent neural networks, yielding an end-to-end trainable system which only requires a very small number of frames of untranscribed target data to substantially improve the recognition accuracy on the target speaker. On pairs of different source and target speakers, we achieve a relative accuracy improvement of around 40% with only 15 to 20 seconds of untranscribed target speech data. On multi-speaker training setups, the accuracy improvements are smaller but still substantial.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ahmed Hussen Abdelaziz|AUTHOR Ahmed Hussen Abdelaziz]]
</p><p class="cpabstractcardaffiliationlist">ICSI, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3667–3671
</span></p></div>
<div class="cpabstractcardabstract"><p>Visual speech, i.e., video recordings of speakers’ mouths, plays an important role in improving the robustness properties of automatic speech recognition (ASR) against noise. Optimal fusion of audio and video modalities is still one of the major challenges that attracts significant interest in the realm of audio-visual ASR. Recently, turbo decoders (TDs) have been successful in addressing the audio-visual fusion problem. The idea of the TD framework is to iteratively exchange some kind of soft information between the audio and video decoders until convergence. The forward-backward algorithm (FBA) is mostly applied to the decoding graphs to estimate this soft information. Applying the FBA to the complex graphs that are usually used in large vocabulary tasks may be computationally expensive. In this paper, I propose to apply the forward-backward algorithm to a lattice of most likely state sequences instead of using the entire decoding graph. Using lattices allows for TD to be easily applied to large vocabulary tasks. The proposed approach is evaluated using the newly released TCD-TIMIT corpus, where a standard recipe for large vocabulary ASR is employed. The modified TD performs significantly better than the feature and decision fusion models in all clean and noisy test conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tamás Gábor Csapó|AUTHOR Tamás Gábor Csapó]]^^1^^, [[Tamás Grósz|AUTHOR Tamás Grósz]]^^2^^, [[Gábor Gosztolya|AUTHOR Gábor Gosztolya]]^^2^^, [[László Tóth|AUTHOR László Tóth]]^^3^^, [[Alexandra Markó|AUTHOR Alexandra Markó]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^BME, Hungary; ^^2^^University of Szeged, Hungary; ^^3^^MTA-SZTE RGAI, Hungary; ^^4^^MTA-ELTE LingArt, Hungary</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3672–3676
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper we present our initial results in articulatory-to-acoustic conversion based on tongue movement recordings using Deep Neural Networks (DNNs). Despite the fact that deep learning has revolutionized several fields, so far only a few researchers have applied DNNs for this task. Here, we compare various possible feature representation approaches combined with DNN-based regression. As the input, we recorded synchronized 2D ultrasound images and speech signals. The task of the DNN was to estimate Mel-Generalized Cepstrum-based Line Spectral Pair (MGC-LSP) coefficients, which then served as input to a standard pulse-noise vocoder for speech synthesis. As the raw ultrasound images have a relatively high resolution, we experimented with various feature selection and transformation approaches to reduce the size of the feature vectors. The synthetic speech signals resulting from the various DNN configurations were evaluated both using objective measures and a subjective listening test. We found that the representation that used several neighboring image frames in combination with a feature selection method was preferred both by the subjects taking part in the listening experiments, and in terms of the Normalized Mean Squared Error. Our results may be useful for creating Silent Speech Interface applications in the future. </p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Herman Kamper|AUTHOR Herman Kamper]], [[Shane Settle|AUTHOR Shane Settle]], [[Gregory Shakhnarovich|AUTHOR Gregory Shakhnarovich]], [[Karen Livescu|AUTHOR Karen Livescu]]
</p><p class="cpabstractcardaffiliationlist">TTIC, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3677–3681
</span></p></div>
<div class="cpabstractcardabstract"><p>During language acquisition, infants have the benefit of visual cues to ground spoken language. Robots similarly have access to audio and visual sensors. Recent work has shown that images and spoken captions can be mapped into a meaningful common space, allowing images to be retrieved using speech and vice versa. In this setting of images paired with untranscribed spoken captions, we consider whether computer vision systems can be used to obtain textual labels for the speech. Concretely, we use an image-to-words multi-label visual classifier to tag images with soft textual labels, and then train a neural network to map from the speech to these soft targets. We show that the resulting speech system is able to predict which words occur in an utterance — acting as a spoken bag-of-words classifier — without seeing any parallel speech and text. We find that the model often confuses semantically related words, e.g. “man” and “person”, making it even more effective as a semantic keyword spotter.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Cong-Thanh Do|AUTHOR Cong-Thanh Do]], [[Yannis Stylianou|AUTHOR Yannis Stylianou]]
</p><p class="cpabstractcardaffiliationlist">Toshiba Research Europe, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3832–3836
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper investigates the use of perceptually-motivated subband temporal envelope (STE) features and time-delay neural network (TDNN) denoising autoencoder (DAE) to improve deep neural network (DNN)-based automatic speech recognition (ASR). STEs are estimated by full-wave rectification and low-pass filtering of band-passed speech using a Gammatone filter-bank. TDNNs are used either as DAE or acoustic models. ASR experiments are performed on Aurora-4 corpus. STE features provide 2.2% and 3.7% relative word error rate (WER) reduction compared to conventional log-mel filter-bank (FBANK) features when used in ASR systems using DNN and TDNN as acoustic models, respectively. Features enhanced by TDNN DAE are better recognized with ASR system using DNN acoustic models than using TDNN acoustic models. Improved ASR performance is obtained when features enhanced by TDNN DAE are used in ASR system using DNN acoustic models. In this scenario, using STE features provides 9.8% relative WER reduction compared to when using FBANK features. </p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Joe Caroselli|AUTHOR Joe Caroselli]], [[Izhak Shafran|AUTHOR Izhak Shafran]], [[Arun Narayanan|AUTHOR Arun Narayanan]], [[Richard Rose|AUTHOR Richard Rose]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3877–3881
</span></p></div>
<div class="cpabstractcardabstract"><p>Reverberation is known to degrade the performance of automatic speech recognition (ASR) systems dramatically in far-field conditions. Adopting the weighted prediction error (WPE) approach, we formulate an online dereverberation algorithm for a multi-microphone array. The key contributions of this paper are: (a) we demonstrate that dereverberation using WPE improves performance even when the acoustic models are trained using multi-style training (MTR) with noisy, reverberated speech; (b) we show that the gains from WPE are preserved even in large and diverse real-world data sets; (c) we propose an adaptive version for online multichannel ASR tasks which gives similar gains as the non-causal version; and (d) while the algorithm can just be applied for evaluation, we show that also including dereverberation during training gives increased performance gains. We also report how different parameter settings of the dereverberation algorithm impacts the ASR performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Masakiyo Fujimoto|AUTHOR Masakiyo Fujimoto]]
</p><p class="cpabstractcardaffiliationlist">NICT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3837–3841
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we present a framework of a factored deep convolutional neural network (CNN) learning for noise robust automatic speech recognition (ASR). Deep CNN architecture, which has attracted great attention in various research areas, has also been successfully applied to ASR. However, to ensure noise robustness, since merely introducing deep CNN architecture into the acoustic modeling of ASR is insufficient, we introduce factored network architecture into deep CNN-based acoustic modeling. The proposed factored deep CNN framework factors out feature enhancement, delta parameter learning, and hidden Markov model state classification into three specific network blocks. By assigning specific roles to each block, the noise robustness of deep CNN-based acoustic models can be improved. With various comparative evaluations, we reveal that the proposed method successfully improves ASR accuracies in noise environments.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Pavlos Papadopoulos|AUTHOR Pavlos Papadopoulos]], [[Ruchir Travadi|AUTHOR Ruchir Travadi]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]
</p><p class="cpabstractcardaffiliationlist">University of Southern California, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3842–3846
</span></p></div>
<div class="cpabstractcardabstract"><p>The performance of speech technologies deteriorates in the presence of noise. Additionally, we need these technologies to be able to operate across a variety of noise levels and conditions. SNR estimation can guide the design and operation of such technologies or can be used as a pre-processing tool in database creation (e.g. identify/discard noisy signals). We propose a new method to estimate the global SNR of a speech signal when prior information about the noise that corrupts the signal, and speech boundaries within the signal, are not available. To achieve this goal, we train a neural network that performs non-linear regression to estimate the SNR. We use energy ratios as features, as well as ivectors to provide information about the noise that corrupts the signal. We compare our method against others in the literature, using the Mean Absolute Error (MAE) metric, and show that our method outperforms them consistently.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Fengpei Ge|AUTHOR Fengpei Ge]]^^1^^, [[Kehuang Li|AUTHOR Kehuang Li]]^^2^^, [[Bo Wu|AUTHOR Bo Wu]]^^3^^, [[Sabato Marco Siniscalchi|AUTHOR Sabato Marco Siniscalchi]]^^4^^, [[Yonghong Yan|AUTHOR Yonghong Yan]]^^1^^, [[Chin-Hui Lee|AUTHOR Chin-Hui Lee]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Chinese Academy of Sciences, China; ^^2^^Georgia Institute of Technology, USA; ^^3^^Xidian University, China; ^^4^^Università di Enna Kore, Italy</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3847–3851
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a novel data utilization strategy, called multi-channel-condition learning, leveraging upon complementary information captured in microphone array speech to jointly train dereverberation and acoustic deep neural network (DNN) models for robust distant speech recognition. Experimental results, with a single automatic speech recognition (ASR) system, on the REVERB2014 simulated evaluation data show that, on 1-channel testing, the baseline joint training scheme attains a word error rate (WER) of 7.47%, reduced from 8.72% for separate training. The proposed multi-channel-condition learning scheme has been experimented on different channel data combinations and usage showing many interesting implications. Finally, training on all 8-channel data and with DNN-based language model rescoring, a state-of-the-art WER of 4.05% is achieved. We anticipate an even lower WER when combining more top ASR systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Dung T. Tran|AUTHOR Dung T. Tran]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Atsunori Ogawa|AUTHOR Atsunori Ogawa]], [[Tomohiro Nakatani|AUTHOR Tomohiro Nakatani]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3852–3856
</span></p></div>
<div class="cpabstractcardabstract"><p>Although deep neural network (DNN) based acoustic models have obtained remarkable results, the automatic speech recognition (ASR) performance still remains low in noise and reverberant conditions. To address this issue, a speech enhancement front-end is often used before recognition to reduce noise. However, the front-end cannot fully suppress noise and often introduces artifacts that are limiting the ASR performance improvement. Uncertainty decoding has been proposed to better interconnect the speech enhancement front-end and ASR back-end and mitigate the mismatch caused by residual noise and artifacts. By considering features as distributions instead of point estimates, the uncertainty decoding approach modifies the conventional decoding rules to account for the uncertainty emanating from the speech enhancement. Although the concept of uncertainty decoding has been investigated for DNN acoustic models recently, finding efficient ways to incorporate distribution of the enhanced features within a DNN acoustic model still requires further investigations. In this paper, we propose to parameterize the distribution of the enhanced feature and estimate the parameters by backpropagation using an unsupervised adaptation scheme. We demonstrate the effectiveness of the proposed approach on real audio data of the CHiME3 dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yu Zhang|AUTHOR Yu Zhang]], [[Pengyuan Zhang|AUTHOR Pengyuan Zhang]], [[Yonghong Yan|AUTHOR Yonghong Yan]]
</p><p class="cpabstractcardaffiliationlist">Chinese Academy of Sciences, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3857–3861
</span></p></div>
<div class="cpabstractcardabstract"><p>Distant speech recognition is a highly challenging task due to background noise, reverberation, and speech overlap. Recently, there has been an increasing focus on attention mechanism. In this paper, we explore the attention mechanism embedded within the long short-term memory (LSTM) based acoustic model for large vocabulary distant speech recognition, trained using speech recorded from a single distant microphone (SDM) and multiple distant microphones (MDM). Furthermore, multi-task learning architecture is incorporated to improve robustness in which the network is trained to perform both a primary senone classification task and a secondary feature enhancement task. Experiments were conducted on the AMI meeting corpus. On average our model achieved 3.3% and 5.0% relative improvements in word error rate (WER) over the LSTM baseline model in the SDM and MDM cases, respectively. In addition, the model provided a 2–4% absolute WER reduction compared to a conventional pipeline of independent processing stage on the MDM task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hengguan Huang|AUTHOR Hengguan Huang]], [[Brian Mak|AUTHOR Brian Mak]]
</p><p class="cpabstractcardaffiliationlist">HKUST, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3862–3866
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper investigates a novel multiple-history long short-term memory (MH-LSTM) RNN acoustic model to mitigate the robustness problem of noisy outputs in the form of mis-labeled data and/or mis-alignments. Conceptually, after an RNN is unfolded in time, the hidden units in each layer are re-arranged into ordered sub-layers with a master sub-layer on top and a set of auxiliary sub-layers below it. Only the master sub-layer generates outputs for the next layer whereas the auxiliary sub-layers run in parallel with the master sub-layer but with increasing time lags. Each sub-layer also receives higher-order feedback from a fixed number of sub-layers below it. As a result, each sub-layer maintains a different history of the input speech, and the ensemble of all the different histories lends itself to the model’s robustness. The higher-order connections not only provide shorter feedback paths for error signals to propagate to the farther preceding hidden states to better model the long-term memory, but also more feedback paths to each model parameter and smooth its update during training. Phoneme recognition results on both real TIMIT data as well as synthetic TIMIT data with noisy labels or alignments show that the new model outperforms the conventional LSTM RNN model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Suyoun Kim|AUTHOR Suyoun Kim]], [[Ian Lane|AUTHOR Ian Lane]]
</p><p class="cpabstractcardaffiliationlist">Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3867–3871
</span></p></div>
<div class="cpabstractcardabstract"><p>End-to-End speech recognition is a recently proposed approach that directly transcribes input speech to text using a single model. End-to-End speech recognition methods including Connectionist Temporal Classification and Attention-based Encoder Decoder Networks have been shown to obtain state-of-the-art performance on a number of tasks and significantly simplify the modeling, training and decoding procedures for speech recognition. In this paper, we extend our prior work on End-to-End speech recognition focusing on the effectiveness of these models in far-field environments. Specifically, we propose introducing Auditory Attention to integrate input from multiple microphones directly within an End-to-End speech recognition model, leveraging the attention mechanism to dynamically tune the model’s attention to the most reliable input sources. We evaluate our proposed model on the CHiME-4 task, and show substantial improvement compared to a model optimized for a single microphone input.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Anjali Menon|AUTHOR Anjali Menon]]^^1^^, [[Chanwoo Kim|AUTHOR Chanwoo Kim]]^^2^^, [[Richard M. Stern|AUTHOR Richard M. Stern]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Carnegie Mellon University, USA; ^^2^^Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3872–3876
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper discusses a combination of techniques for improving speech recognition accuracy in the presence of reverberation and spatially-separated interfering sound sources. Interaural Time Delay (ITD), observed as a consequence of the difference in arrival times of a sound to the two ears, is an important feature used by the human auditory system to reliably localize and separate sound sources. In addition, the “precedence effect” helps the auditory system differentiate between the direct sound and its subsequent reflections in reverberant environments. This paper uses a cross-correlation-based measure across the two channels of a binaural signal to isolate the target source by rejecting portions of the signal corresponding to larger ITDs. To overcome the effects of reverberation, the steady-state components of speech are suppressed, effectively boosting the onsets, so as to retain the direct sound and suppress the reflections. Experimental results show a significant improvement in recognition accuracy using both these techniques. Cross-correlation-based processing and steady-state suppression are carried out separately, and the order in which these techniques are applied produces differences in the resulting recognition accuracy.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Urban Zihlmann|AUTHOR Urban Zihlmann]]
</p><p class="cpabstractcardaffiliationlist">Universität Zürich, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3882–3886
</span></p></div>
<div class="cpabstractcardabstract"><p>The more alcohol a person has consumed, the more mispronunciations occur. This study investigates how deaffrication surfaces in Bernese Swiss German when speakers are moderately intoxicated (0.05–0.08% Vol.), whether these effects can be hidden, and whether a placebo effect interacting with mispronunciation occurs. Five participants reading a text were recorded as follows. In stage I, they read the text before and after drinking placebo alcohol, and finally again after being told to enunciate very clearly. 3–7 days later, the same experiment was repeated with real alcohol. The recordings were then analysed with Praat. Despite interspeaker variation, the following generalisations can be made. The most deaffrication occurs in the C_C context both when speakers are sober and inebriated; affricates in _#, V_C, and V_V position encounter more deaffrication in the alcohol stage; and /͡tʃ/ and /͡kx/ are deaffricated more when the speaker is intoxicated, with /͡tʃ/ being the most susceptible to mispronunciation. Moreover, when alcohol is consumed, more deaffrication occurs, which cannot consciously be controlled. Furthermore, a statistically significant difference between the pre- and the post-placebo-drinking experiment could be found, which implies that a placebo effect takes place. Nevertheless, the effects of real alcohol are considerably stronger.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Cedric Gendrot|AUTHOR Cedric Gendrot]]
</p><p class="cpabstractcardaffiliationlist">LPP (UMR 7018), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3926–3930
</span></p></div>
<div class="cpabstractcardabstract"><p>Variability of (French) /ʁ/ is a frequently studied phenomenon showing that /ʁ/ can have multiple realizations. In French, all these studies were undertaken using small read corpora and we have reason to believe that these corpora don’t allow to look at the full picture. Indeed factors such as local word frequency, as well as speech rate can have almost as much influence as phonemic context in the realization of /ʁ/.
According to Ohala’s Aerodynamic Voicing principle, /ʁ/ would tend to be either an unvoiced fricative or a voiced approximant. We chose to analyze word final /ʁ/s as they tend to embrace the largest spectrum of variation. The study realized here is two-fold: a perception study in a specific phonemic context, between /a/ and /l/, where /ʁ/ is realized as an approximant, so as to better understand the parameters and their thresholds necessary for /ʁ/ identification, and provide a measure of rhoticity.
In a second step, keeping the rhoticity measurement in mind, we analyzed the realizations of word final /ʁ/s in two continuous speech corpora and modelled the realization of /ʁ/ using predictors such as diphone and digram frequency, phonemic context and speech rate.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[N.P. Narendra|AUTHOR N.P. Narendra]], [[Manu Airaksinen|AUTHOR Manu Airaksinen]], [[Paavo Alku|AUTHOR Paavo Alku]]
</p><p class="cpabstractcardaffiliationlist">Aalto University, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3931–3935
</span></p></div>
<div class="cpabstractcardabstract"><p>In speech analysis, the information about the glottal source is obtained from speech by using glottal inverse filtering (GIF). The accuracy of state-of-the-art GIF methods is sufficiently high when the input speech signal is of high-quality (i.e., with little noise or reverberation). However, in realistic conditions, particularly when GIF is computed from coded telephone speech, the accuracy of GIF methods deteriorates severely. To robustly estimate the glottal source under coded condition, a deep neural network (DNN)-based method is proposed. The proposed method utilizes a DNN to map the speech features extracted from the coded speech to the glottal flow waveform estimated from the corresponding clean speech. To generate the coded telephone speech, adaptive multi-rate (AMR) codec is utilized which is a widely used speech compression method. The proposed glottal source estimation method is compared with two existing GIF methods, closed phase covariance analysis (CP) and iterative adaptive inverse filtering (IAIF). The results indicate that the proposed DNN-based method is capable of estimating glottal flow waveforms from coded telephone speech with a considerably better accuracy in comparison to CP and IAIF.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[George Christodoulides|AUTHOR George Christodoulides]], [[Mathieu Avanzi|AUTHOR Mathieu Avanzi]], [[Anne Catherine Simon|AUTHOR Anne Catherine Simon]]
</p><p class="cpabstractcardaffiliationlist">Université catholique de Louvain, Belgium</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3936–3940
</span></p></div>
<div class="cpabstractcardabstract"><p>We explore the use of machine learning techniques (notably SVM classifiers and Conditional Random Fields) to automate the prosodic labelling of French speech, based on modelling and simulating the perception of prosodic events by naïve and expert listeners. The models are based on previous work on the perception of syllabic prominence and hesitation-related disfluencies, and on an experiment on the real-time perception of prosodic boundaries. Expert and non-expert listeners annotated samples from three multi-genre corpora (CPROM, CPROM-PFC, LOCAS-F). Automatic prosodic annotation is approached as a sequence labelling problem, drawing on multiple information sources (acoustic features, lexical and shallow syntactic features) in accordance with the experimental findings showing that listeners integrate all such information in their perception of prosodic segmentation and events. We test combinations of features and machine learning methods, and we compare the automatic labelling with expert annotation. The result of this study is a tool that automatically annotates prosodic events by simulating the perception of expert and naïve listeners.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Michael Levit|AUTHOR Michael Levit]], [[Yan Huang|AUTHOR Yan Huang]], [[Shuangyu Chang|AUTHOR Shuangyu Chang]], [[Yifan Gong|AUTHOR Yifan Gong]]
</p><p class="cpabstractcardaffiliationlist">Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3941–3945
</span></p></div>
<div class="cpabstractcardabstract"><p>A crowdsourcing approach for collecting high-quality speech transcriptions is presented. The approach addresses typical weakness of traditional semi-supervised transcription strategies that show ASR hypotheses to transcribers to help them cope with unclear or ambiguous audio and speed up transcriptions. We explain how the traditional methods introduce bias into transcriptions that make it difficult to objectively measure system improvements against existing baselines, and suggest a two-stage crowdsourcing alternative that, first, iteratively collects transcription hypotheses and, then, asks a different crowd to pick the best of them. We show that this alternative not only outperforms the traditional method in a side-by-side comparison, but it also leads to ASR improvements due to superior quality of acoustic and language models trained on the transcribed data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Manu Airaksinen|AUTHOR Manu Airaksinen]], [[Paavo Alku|AUTHOR Paavo Alku]]
</p><p class="cpabstractcardaffiliationlist">Aalto University, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3946–3950
</span></p></div>
<div class="cpabstractcardabstract"><p>Glottal volume velocity waveform, the acoustical excitation of voiced speech, cannot be acquired through direct measurements in normal production of continuous speech. Glottal inverse filtering (GIF), however, can be used to estimate the glottal flow from recorded speech signals. Unfortunately, the usefulness of GIF algorithms is limited since they are sensitive to noise and call for high-quality recordings. Recently, efforts have been taken to expand the use of GIF by training deep neural networks (DNNs) to learn a statistical mapping between frame-level acoustic features and glottal pulses estimated by GIF. This framework has been successfully utilized in statistical speech synthesis in the form of the GlottDNN vocoder which uses a DNN to generate glottal pulses to be used as the synthesizer’s excitation waveform. In this study, we investigate how the DNN-based generation of glottal pulses is affected by training data variety. The evaluation is done using both objective measures as well as subjective listening tests of synthetic speech. The results suggest that the performance of the glottal pulse generation with DNNs is affected particularly by how well the training corpus suits GIF: processing low-pitched male speech and sustained phonations shows better performance than processing high-pitched female voices or continuous speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Simone Hantke|AUTHOR Simone Hantke]], [[Zixing Zhang|AUTHOR Zixing Zhang]], [[Björn Schuller|AUTHOR Björn Schuller]]
</p><p class="cpabstractcardaffiliationlist">Universität Passau, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3951–3955
</span></p></div>
<div class="cpabstractcardabstract"><p>In this contribution, we combine the advantages of traditional crowdsourcing with contemporary machine learning algorithms with the aim of ultimately obtaining reliable training data for audio processing in a faster, cheaper and therefore more efficient manner than has been previously possible. We propose a novel crowdsourcing approach, which brings a simulated active learning annotation scenario into a real world environment creating an intelligent and gamified crowdsourcing platform for manual audio annotation. Our platform combines two active learning query strategies with an internally calculated trustability score to efficiently reduce manual labelling efforts. This reduction is achieved in a twofold manner: first our system automatically decides if an instance requires annotation; second, it dynamically decides, depending on the quality of previously gathered annotations, on exactly how many annotations are needed to reliably label an instance. Results presented indicate that our approach drastically reduces the annotation load and is considerably more efficient than conventional methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Michael McAuliffe|AUTHOR Michael McAuliffe]]^^1^^, [[Elias Stengel-Eskin|AUTHOR Elias Stengel-Eskin]]^^1^^, [[Michaela Socolof|AUTHOR Michaela Socolof]]^^2^^, [[Morgan Sonderegger|AUTHOR Morgan Sonderegger]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^McGill University, Canada; ^^2^^University of Maryland, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3887–3891
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech datasets from many languages, styles, and sources exist in the world, representing significant potential for scientific studies of speech — particularly given structural similarities among all speech datasets. However, studies using multiple speech corpora remain difficult in practice, due to corpus size, complexity, and differing formats. We introduce open-source software for unified corpus analysis: integrating speech corpora and querying across them. Corpora are stored in a custom ‘polyglot persistence’ scheme that combines three sub-databases mirroring different data types: a Neo4j graph database to represent temporal annotation graph structure, and SQL and InfluxDB databases to represent meta- and acoustic data. This scheme abstracts away from the idiosyncratic formats of different speech corpora, while mirroring the structure of different data types improves speed and scalability. A Python API and a GUI both allow for: enriching the database with positional, hierarchical, temporal, and signal measures (e.g. utterance boundaries, f0) that are useful for linguistic analysis; querying the database using a simple query language; and exporting query results to standard formats for further analysis. We describe the software, summarize two case studies using it to examine effects on pitch and duration across languages, and outline planned future development.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Vincent Hughes|AUTHOR Vincent Hughes]], [[Philip Harrison|AUTHOR Philip Harrison]], [[Paul Foulkes|AUTHOR Paul Foulkes]], [[Peter French|AUTHOR Peter French]], [[Colleen Kavanagh|AUTHOR Colleen Kavanagh]], [[Eugenia San Segundo|AUTHOR Eugenia San Segundo]]
</p><p class="cpabstractcardaffiliationlist">University of York, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3892–3896
</span></p></div>
<div class="cpabstractcardabstract"><p>In forensic voice comparison, there is increasing focus on the integration of automatic and phonetic methods to improve the validity and reliability of voice evidence to the courts. In line with this, we present a comparison of long-term measures of the speech signal to assess the extent to which they capture complementary speaker-specific information. Likelihood ratio-based testing was conducted using MFCCs and (linear and Mel-weighted) long-term formant distributions (LTFDs). Fusing automatic and semi-automatic systems yielded limited improvement in performance over the baseline MFCC system, indicating that these measures capture essentially the same speaker-specific information. The output from the best performing system was used to evaluate the contribution of auditory-based analysis of supralaryngeal (filter) and laryngeal (source) voice quality in system testing. Results suggest that the problematic speakers for the (semi-)automatic system are, to some extent, predictable from their supralaryngeal voice quality profiles, with the least distinctive speakers producing the weakest evidence and most misclassifications. However, the misclassified pairs were still easily differentiated via auditory analysis. Laryngeal voice quality may thus be useful in resolving problematic pairs for (semi-)automatic systems, potentially improving their overall performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Pablo Arantes|AUTHOR Pablo Arantes]]^^1^^, [[Anders Eriksson|AUTHOR Anders Eriksson]]^^2^^, [[Suska Gutzeit|AUTHOR Suska Gutzeit]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universidade Federal de São Carlos, Brazil; ^^2^^Stockholm University, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3897–3901
</span></p></div>
<div class="cpabstractcardabstract"><p>In this study, we compared three long-term fundamental frequency estimates — mean, median and base value — with respect to how fast they approach a stable value, as a function of language, speaking style and speaker. The base value concept was developed in search for an f,,0,, value which should be invariant under prosodic variation. It has since also been tested in forensic phonetics as a possible speaker-specific f,,0,, value. Data used in this study — recorded speech by male and female speakers in seven languages and three speaking styles, spontaneous, phrase reading and word list reading — had been recorded for a previous project. Average stabilisation times for the mean, median and base value are 9.76, 9.67 and 8.01 s. Base values stabilise significantly faster. Languages differ in both average and variability of the stabilisation times. Values range from 7.14 to 11.41 (mean), 7.5 to 11.33 (median) and 6.74 to 9.34 (base value). Spontaneous speech yields the most variable stabilisation times for the three estimators in Italian and Swedish, for the median in French and Portuguese and base value in German. Speakers within each language do not differ significantly in terms of stabilisation time variability for the three estimators.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jan Volín|AUTHOR Jan Volín]]^^1^^, [[Tereza Tykalová|AUTHOR Tereza Tykalová]]^^2^^, [[Tomáš Bořil|AUTHOR Tomáš Bořil]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Charles University, Czech Republic; ^^2^^CTU, Czech Republic</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3902–3906
</span></p></div>
<div class="cpabstractcardabstract"><p>The indexical function of speech prosody signals the membership of a speaker in a social group. The factors of age and gender are relatively easy to establish but their reflection in speech characteristics can be less straightforward as they interact with other social aspects. Therefore, diverse speaker communities should be investigated with the aim of their subsequent comparison. Our study provides data for the population of adult speakers of Czech — a West Slavic language of Central Europe. The sample consists of six age groups (20 to 80 years of age) with balanced representation of gender. The search for age and gender related attributes covered both global acoustic descriptors and linguistically informed prosodic feature extraction. Apart from commonly used measures and methods we also exploited Legendre polynomials, k-means clustering and a newly designed Cumulative Slope Index (CSI). The results specify general deceleration of articulation rate with age and lowering of F0 in aging Czech women, and reveal an increase in CSI of both F0 tracks and intensity curves with age. Furthermore, various melodic shapes were found to be distributed unequally across the age groups.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Julien Plante-Hébert|AUTHOR Julien Plante-Hébert]]^^1^^, [[Victor J. Boucher|AUTHOR Victor J. Boucher]]^^1^^, [[Boutheina Jemel|AUTHOR Boutheina Jemel]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Université de Montréal, Canada; ^^2^^Hôpital Rivière-des-Prairies, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3907–3910
</span></p></div>
<div class="cpabstractcardabstract"><p>Our previous work using voice lineups has established that listeners can recognize with near-perfect accuracy the voice of familiar individuals. In a forensic perspective, however, there are limitations to the application of voice lineups in that some witnesses may not wish to recognize the familiar voice of a parent or close friend or else provide unreliable responses. Considering this problem, the present study aimed to isolate the electrophysiological markers of voice familiarity. We recorded the evoked response potentials (ERPs) of 11 participants as they listened to a set of similar voices in varying utterances (standards of voice lineups were used in selecting voices). Within the presented set, only one voice was familiar to the listener (the voice of a parent, close friend, etc.). The ERPs showed a marked difference for heard familiar voices compared to an unfamiliar set. These are the first findings of a neural marker of voice recognition based on voices that are actually familiar to a listener and which take into account utterances rather than isolated vowels. The present results thus indicate that protocols of near-perfect voice recognition can be devised without using behavioral responses.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jamison Cooper-Leavitt|AUTHOR Jamison Cooper-Leavitt]]^^1^^, [[Lori Lamel|AUTHOR Lori Lamel]]^^1^^, [[Annie Rialland|AUTHOR Annie Rialland]]^^2^^, [[Martine Adda-Decker|AUTHOR Martine Adda-Decker]]^^1^^, [[Gilles Adda|AUTHOR Gilles Adda]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LIMSI, France; ^^2^^LPP (UMR 7018), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3911–3915
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper investigates vowel elision and morpheme deletion in Embosi (Bantu C25), an under-resourced language spoken in the Republic of Congo. We propose that the observed morpheme deletion is morphological, and that vowel elision is phonological. The study focuses on vowel elision that occurs across word boundaries between the contact of long/short vowels (i.e. CV[long] # V[short].CV), and between the contact of short/short vowels (CV[short] # V[short].CV). Several different categories of morphemes are explored: (i) prepositions ( ya, mo), (ii) class-noun nominal prefixes ( ba, etc.), (iii) singular subject pronouns ( ngá, nɔ, wa). For example, the preposition, ya, regularly deletes allowing for vowel elision if vowel contact occurs between the head of the noun phrase and the previous word. Phonetically motivated speech variants are proposed in the lexicon used for forced alignment (segmentation) enabling these phenomena to be quantified in the corpus so as to develop a dictionary containing relevant phonetic variants.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Andy Murphy|AUTHOR Andy Murphy]], [[Irena Yanushevskaya|AUTHOR Irena Yanushevskaya]], [[Ailbhe Ní Chasaide|AUTHOR Ailbhe Ní Chasaide]], [[Christer Gobl|AUTHOR Christer Gobl]]
</p><p class="cpabstractcardaffiliationlist">Trinity College Dublin, Ireland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3916–3920
</span></p></div>
<div class="cpabstractcardabstract"><p>This study uses the R,,d,, glottal waveshape parameter to simulate the phonatory tense-lax continuum and to explore its affective correlates in terms of activation and valence. Based on a natural utterance which was inverse filtered and source-parameterised, a range of synthesized stimuli varying along the tense-lax continuum were generated using R,,d,, as a control parameter. Two additional stimuli were included, which were versions of the most lax stimuli with additional creak (lax-creaky voice). In a listening test, participants chose an emotion from a set of affective labels and indicated its perceived strength. They also indicated the naturalness of the stimulus and their confidence in their judgment. Results showed that stimuli at the tense end of the range were most frequently associated with angry, at the lax end of the range the association was with sad, and in the intermediate range, the association was with content. Results also indicate, as was found in our earlier work, that a particular stimulus can be associated with more than one affect. Overall these results show that R,,d,, can be used as a single control parameter to generate variation along the tense-lax continuum of phonation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Plínio A. Barbosa|AUTHOR Plínio A. Barbosa]]^^1^^, [[Sandra Madureira|AUTHOR Sandra Madureira]]^^2^^, [[Philippe Boula de Mareüil|AUTHOR Philippe Boula de Mareüil]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universidade Estadual de Campinas, Brazil; ^^2^^Universidade de São Paulo, Brazil; ^^3^^LIMSI, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3921–3925
</span></p></div>
<div class="cpabstractcardabstract"><p>This work investigates acoustic and perceptual differences in four language varieties by using a corpus of professional and non-professional speaking styles. The professional stimuli are composed of excerpts of broadcast news and political discourses from six subjects in each case. The non-professional stimuli are made up of recordings of 10 subjects who read a long story and narrated it subsequently. All this material was obtained in four language varieties: Brazilian and European Portuguese, standard French and German. The corpus is balanced for gender. Eight melodic and intensity parameters were automatically obtained from excerpts of 10 to 20 seconds. We showed that 6 out of 8 parameters partially distinguish professional from non-professional style in the four language varieties. Classification and discrimination tests carried out with 12 Brazilian listeners using delexicalised speech showed that these subjects are able to distinguish professional style from non-professional style with about 2/3 of hits irrespective of language. In comparison, an automatic classification using an LDA model performed better in classifying non-professional (96%) against professional styles, but not in classifying professional (42%) against non-professional styles.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gustav Eje Henter|AUTHOR Gustav Eje Henter]], [[Jaime Lorenzo-Trueba|AUTHOR Jaime Lorenzo-Trueba]], [[Xin Wang|AUTHOR Xin Wang]], [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]
</p><p class="cpabstractcardaffiliationlist">NII, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3956–3960
</span></p></div>
<div class="cpabstractcardabstract"><p>For building flexible and appealing high-quality speech synthesisers, it is desirable to be able to accommodate and reproduce fine variations in vocal expression present in natural speech. Synthesisers can enable control over such output properties by adding adjustable control parameters in parallel to their text input. If not annotated in training data, the values of these control inputs can be optimised jointly with the model parameters. We describe how this established method can be seen as approximate maximum likelihood and MAP inference in a latent variable model. This puts previous ideas of (learned) synthesiser inputs such as sentence-level control vectors on a more solid theoretical footing. We furthermore extend the method by restricting the latent variables to orthogonal subspaces via a sparse prior. This enables us to learn dimensions of variation present also within classes in coarsely annotated speech. As an example, we train an LSTM-based TTS system to learn nuances in emotional expression from a speech database annotated with seven different acted emotions. Listening tests show that our proposal successfully can synthesise speech with discernible differences in expression within each emotion, without compromising the recognisability of synthesised emotions compared to an identical system without learned nuances.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Merlijn Blaauw|AUTHOR Merlijn Blaauw]], [[Jordi Bonada|AUTHOR Jordi Bonada]]
</p><p class="cpabstractcardaffiliationlist">Universitat Pompeu Fabra, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4001–4005
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a new model for singing synthesis based on a modified version of the WaveNet architecture. Instead of modeling raw waveform, we model features produced by a parametric vocoder that separates the influence of pitch and timbre. This allows conveniently modifying pitch to match any target melody, facilitates training on more modest dataset sizes, and significantly reduces training and generation times. Our model makes frame-wise predictions using mixture density outputs rather than categorical outputs in order to reduce the required parameter count. As we found overfitting to be an issue with the relatively small datasets used in our experiments, we propose a method to regularize the model and make the autoregressive generation process more robust to prediction errors. Using a simple multi-stream architecture, harmonic, aperiodic and voiced/unvoiced components can all be predicted in a coherent manner. We compare our method to existing parametric statistical and state-of-the-art concatenative methods using quantitative metrics and a listening test. While naive implementations of the autoregressive generation algorithm tend to be inefficient, using a smart algorithm we can greatly speed up the process and obtain a system that’s competitive in both speed and quality.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuxuan Wang|AUTHOR Yuxuan Wang]]^^1^^, [[R.J. Skerry-Ryan|AUTHOR R.J. Skerry-Ryan]]^^1^^, [[Daisy Stanton|AUTHOR Daisy Stanton]]^^1^^, [[Yonghui Wu|AUTHOR Yonghui Wu]]^^1^^, [[Ron J. Weiss|AUTHOR Ron J. Weiss]]^^1^^, [[Navdeep Jaitly|AUTHOR Navdeep Jaitly]]^^1^^, [[Zongheng Yang|AUTHOR Zongheng Yang]]^^1^^, [[Ying Xiao|AUTHOR Ying Xiao]]^^1^^, [[Zhifeng Chen|AUTHOR Zhifeng Chen]]^^1^^, [[Samy Bengio|AUTHOR Samy Bengio]]^^1^^, [[Quoc Le|AUTHOR Quoc Le]]^^1^^, [[Yannis Agiomyrgiannakis|AUTHOR Yannis Agiomyrgiannakis]]^^2^^, [[Rob Clark|AUTHOR Rob Clark]]^^2^^, [[Rif A. Saurous|AUTHOR Rif A. Saurous]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Google, USA; ^^2^^Google, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4006–4010
</span></p></div>
<div class="cpabstractcardabstract"><p>A text-to-speech synthesis system typically consists of multiple stages, such as a text analysis frontend, an acoustic model and an audio synthesis module. Building these components often requires extensive domain expertise and may contain brittle design choices. In this paper, we present Tacotron, an end-to-end generative text-to-speech model that synthesizes speech directly from characters. Given &lt;text, audio&gt; pairs, the model can be trained completely from scratch with random initialization. We present several key techniques to make the sequence-to-sequence framework perform well for this challenging task. Tacotron achieves a 3.82 subjective 5-scale mean opinion score on US English, outperforming a production parametric system in terms of naturalness. In addition, since Tacotron generates speech at the frame level, it’s substantially faster than sample-level autoregressive methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tim Capes|AUTHOR Tim Capes]], [[Paul Coles|AUTHOR Paul Coles]], [[Alistair Conkie|AUTHOR Alistair Conkie]], [[Ladan Golipour|AUTHOR Ladan Golipour]], [[Abie Hadjitarkhani|AUTHOR Abie Hadjitarkhani]], [[Qiong Hu|AUTHOR Qiong Hu]], [[Nancy Huddleston|AUTHOR Nancy Huddleston]], [[Melvyn Hunt|AUTHOR Melvyn Hunt]], [[Jiangchuan Li|AUTHOR Jiangchuan Li]], [[Matthias Neeracher|AUTHOR Matthias Neeracher]], [[Kishore Prahallad|AUTHOR Kishore Prahallad]], [[Tuomo Raitio|AUTHOR Tuomo Raitio]], [[Ramya Rasipuram|AUTHOR Ramya Rasipuram]], [[Greg Townsend|AUTHOR Greg Townsend]], [[Becci Williamson|AUTHOR Becci Williamson]], [[David Winarsky|AUTHOR David Winarsky]], [[Zhizheng Wu|AUTHOR Zhizheng Wu]], [[Hepeng Zhang|AUTHOR Hepeng Zhang]]
</p><p class="cpabstractcardaffiliationlist">Apple, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4011–4015
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes Apple’s hybrid unit selection speech synthesis system, which provides the voices for Siri with the requirement of naturalness, personality and expressivity. It has been deployed into hundreds of millions of desktop and mobile devices (e.g. iPhone, iPad, Mac, etc.) via iOS and macOS in multiple languages. The system is following the classical unit selection framework with the advantage of using deep learning techniques to boost the performance. In particular, deep and recurrent mixture density networks are used to predict the target and concatenation reference distributions for respective costs during unit selection. In this paper, we present an overview of the run-time TTS engine and the voice building process. We also describe various techniques that enable on-device capability such as preselection optimization, caching for low latency, and unit pruning for low footprint, as well as techniques that improve the naturalness and expressivity of the voice such as the use of long units.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Daan van Esch|AUTHOR Daan van Esch]], [[Richard Sproat|AUTHOR Richard Sproat]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4016–4020
</span></p></div>
<div class="cpabstractcardabstract"><p>We describe an expanded taxonomy of semiotic classes for text normalization, building upon the work in [1]. We add a large number of categories of non-standard words (NSWs) that we believe a robust real-world text normalization system will have to be able to process. Our new categories are based upon empirical findings encountered while building text normalization systems across many languages, for both speech recognition and speech synthesis purposes. We believe our new taxonomy is useful both for ensuring high coverage when writing manual grammars, as well as for eliciting training data to build machine learning-based text normalization systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Toru Nakashika|AUTHOR Toru Nakashika]]^^1^^, [[Shinji Takaki|AUTHOR Shinji Takaki]]^^2^^, [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Electro-Communications, Japan; ^^2^^NII, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4021–4025
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose a new energy-based probabilistic model where a restricted Boltzmann machine (RBM) is extended to deal with complex-valued visible units. The RBM that automatically learns the relationships between visible units and hidden units (but without connections in the visible or the hidden units) has been widely used as a feature extractor, a generator, a classifier, pre-training of deep neural networks, etc. However, all the conventional RBMs have assumed the visible units to be either binary-valued or real-valued, and therefore complex-valued data cannot be fed to the RBM.
In various applications, however, complex-valued data is frequently used; such examples include complex spectra of speech, fMRI images, wireless signals, and acoustic intensity. For the direct learning of such complex-valued data, we define the new model called “complex-valued RBM (CRBM)” where the conditional probability of the complex-valued visible units given the hidden units forms a complex-Gaussian distribution. Another important characteristic of the CRBM is to have connections between real and imaginary parts of each of the visible units unlike the conventional real-valued RBM. Our experiments demonstrated that the proposed CRBM can directly encode complex spectra of speech signals without decoupling imaginary number or phase from the complex-value data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shinnosuke Takamichi|AUTHOR Shinnosuke Takamichi]]^^1^^, [[Tomoki Koriyama|AUTHOR Tomoki Koriyama]]^^2^^, [[Hiroshi Saruwatari|AUTHOR Hiroshi Saruwatari]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Tokyo, Japan; ^^2^^Tokyo Institute of Technology, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3961–3965
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents sampling-based speech parameter generation using moment-matching networks for Deep Neural Network (DNN)-based speech synthesis. Although people never produce exactly the same speech even if we try to express the same linguistic and para-linguistic information, typical statistical speech synthesis produces completely the same speech, i.e., there is no inter-utterance variation in synthetic speech. To give synthetic speech natural inter-utterance variation, this paper builds DNN acoustic models that make it possible to randomly sample speech parameters. The DNNs are trained so that they make the moments of generated speech parameters close to those of natural speech parameters. Since the variation of speech parameters is compressed into a low-dimensional simple prior noise vector, our algorithm has lower computation cost than direct sampling of speech parameters. As the first step towards generating synthetic speech that has natural inter-utterance variation, this paper investigates whether or not the proposed sampling-based generation deteriorates synthetic speech quality. In evaluation, we compare speech quality of conventional maximum likelihood-based generation and proposed sampling-based generation. The result demonstrates the proposed generation causes no degradation in speech quality.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Vincent Pollet|AUTHOR Vincent Pollet]]^^1^^, [[Enrico Zovato|AUTHOR Enrico Zovato]]^^2^^, [[Sufian Irhimeh|AUTHOR Sufian Irhimeh]]^^1^^, [[Pier Batzu|AUTHOR Pier Batzu]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Nuance Communications, Belgium; ^^2^^Nuance Communications, Italy</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3966–3970
</span></p></div>
<div class="cpabstractcardabstract"><p>Bidirectional recurrent neural nets have demonstrated state-of-the-art performance for parametric speech synthesis. In this paper, we introduce a top-down application of recurrent neural net models to unit-selection synthesis. A hierarchical cascaded network graph predicts context phone duration, speech unit encoding and frame-level logF0 information that serves as targets for the search of units. The new approach is compared with an existing state-of-art hybrid system that uses Hidden Markov Models as basis for the statistical unit search.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Erica Cooper|AUTHOR Erica Cooper]]^^1^^, [[Xinyue Wang|AUTHOR Xinyue Wang]]^^1^^, [[Alison Chang|AUTHOR Alison Chang]]^^2^^, [[Yocheved Levitan|AUTHOR Yocheved Levitan]]^^1^^, [[Julia Hirschberg|AUTHOR Julia Hirschberg]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Columbia University, USA; ^^2^^Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3971–3975
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes experiments in training HMM-based text-to-speech (TTS) voices on data collected for Automatic Speech Recognition (ASR) training. We compare a number of filtering techniques designed to identify the best utterances from a noisy, multi-speaker corpus for training voices, to exclude speech containing noise and to include speech close in nature to more traditionally-collected TTS corpora. We also evaluate the use of automatic speech recognizers for intelligibility assessment in comparison with crowdsourcing methods. While the goal of this work is to develop natural-sounding and intelligible TTS voices in Low Resource Languages (LRLs) rapidly and easily, without the expense of recording data specifically for this purpose, we focus on English initially to identify the best filtering techniques and evaluation methods. We find that, when a large amount of data is available, selecting from the corpus based on criteria such as standard deviation of f0, fast speaking rate, and hypo-articulation produces the most intelligible voices.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Andrew Rosenberg|AUTHOR Andrew Rosenberg]], [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]]
</p><p class="cpabstractcardaffiliationlist">IBM, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3976–3980
</span></p></div>
<div class="cpabstractcardabstract"><p>Listening tests and Mean Opinion Scores (MOS) are the most commonly used techniques for the evaluation of speech synthesis quality and naturalness. These are invaluable in the assessment of subjective qualities of machine generated stimuli. However, there are a number of challenges in understanding the MOS scores that come out of listening tests.
Primarily, we advocate for the use of non-parametric statistical tests in the calculation of statistical significance when comparing listening test results.
Additionally, based on the results of 46 legacy listening tests, we measure the impact of two sources of bias. Bias introduced by individual participants and synthesized text can have a dramatic impact on observed MOS scores. For example, we find that on average the mean difference between the highest and lowest scoring rater is over 2 MOS points (on a 5 point scale). From this observation, we caution against using any statistical test without adjusting for this bias, and provide specific non-parametric recommendations.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nagaraj Adiga|AUTHOR Nagaraj Adiga]], [[S.R. Mahadeva Prasanna|AUTHOR S.R. Mahadeva Prasanna]]
</p><p class="cpabstractcardaffiliationlist">IIT Guwahati, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3981–3985
</span></p></div>
<div class="cpabstractcardabstract"><p>The conventional statistical parametric speech synthesis (SPSS) focuses on characteristics of the magnitude spectrum of speech for speech synthesis by ignoring phase characteristics of speech. In this work, the role of phase information to improve the naturalness of synthetic speech is explored. The phase characteristics of excitation signal are estimated from the integrated linear prediction residual (ILPR) using an all-pass (AP) filter. The coefficients of the AP filter are estimated by minimizing an entropy based objective function from the cosine phase of the analytical signal obtained from ILPR signal. The AP filter coefficients (APCs) derived from the AP filter are used as features for modeling phase in SPSS. During synthesis time, to generate the excitation signal, frame wise generated APCs are used to add the group delay to the impulse excitation. The proposed method is compared with the group delay based phase excitation used in the STRAIGHT method. The experimental results show that the proposed phase modeling has better perceptual synthesis quality when compared with the STRAIGHT method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jose A. Gonzalez|AUTHOR Jose A. Gonzalez]]^^1^^, [[Lam A. Cheah|AUTHOR Lam A. Cheah]]^^2^^, [[Phil D. Green|AUTHOR Phil D. Green]]^^1^^, [[James M. Gilbert|AUTHOR James M. Gilbert]]^^2^^, [[Stephen R. Ell|AUTHOR Stephen R. Ell]]^^3^^, [[Roger K. Moore|AUTHOR Roger K. Moore]]^^1^^, [[Ed Holdsworth|AUTHOR Ed Holdsworth]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Sheffield, UK; ^^2^^University of Hull, UK; ^^3^^Hull and East Yorkshire Hospitals Trust, UK; ^^4^^Practical Control, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3986–3990
</span></p></div>
<div class="cpabstractcardabstract"><p>To help people who have lost their voice following total laryngectomy, we present a speech restoration system that produces audible speech from articulator movement. The speech articulators are monitored by sensing changes in magnetic field caused by movements of small magnets attached to the lips and tongue. Then, articulator movement is mapped to a sequence of speech parameter vectors using a transformation learned from simultaneous recordings of speech and articulatory data. In this work, this transformation is performed using a type of recurrent neural network (RNN) with fixed latency, which is suitable for real-time processing. The system is evaluated on a phonetically-rich database with simultaneous recordings of speech and articulatory data made by non-impaired subjects. Experimental results show that our RNN-based mapping obtains more accurate speech reconstructions (evaluated using objective quality metrics and a listening test) than articulatory-to-acoustic mappings using Gaussian mixture models (GMMs) or deep neural networks (DNNs). Moreover, our fixed-latency RNN architecture provides comparable performance to an utterance-level batch mapping using bidirectional RNNs (BiRNNs).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[David Greenwood|AUTHOR David Greenwood]], [[Stephen Laycock|AUTHOR Stephen Laycock]], [[Iain Matthews|AUTHOR Iain Matthews]]
</p><p class="cpabstractcardaffiliationlist">University of East Anglia, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3991–3995
<a href="./IS2017/MEDIA/0894" class="externallinkbutton" target="_blank">{{$:/causal/Multimedia Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>Natural movement plays a significant role in realistic speech animation. Numerous studies have demonstrated the contribution visual cues make to the degree we, as human observers, find an animation acceptable.
Rigid head motion is one visual mode that universally co-occurs with speech, and so it is a reasonable strategy to seek a transformation from the speech mode to predict the head pose. Several previous authors have shown that prediction is possible, but experiments are typically confined to rigidly produced dialogue. Natural, expressive, emotive and prosodic speech exhibit motion patterns that are far more difficult to predict with considerable variation in expected head pose.
Recently, Long Short Term Memory (LSTM) networks have become an important tool for modelling speech and natural language tasks. We employ Deep Bi-Directional LSTMs (BLSTM) capable of learning long-term structure in language, to model the relationship that speech has with rigid head motion. We then extend our model by conditioning with prior motion. Finally, we introduce a generative head motion model, conditioned on audio features using a Conditional Variational Autoencoder (CVAE). Each approach mitigates the problems of the one to many mapping that a speech to head pose model must accommodate.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mirjam Wester|AUTHOR Mirjam Wester]], [[David A. Braude|AUTHOR David A. Braude]], [[Blaise Potard|AUTHOR Blaise Potard]], [[Matthew P. Aylett|AUTHOR Matthew P. Aylett]], [[Francesca Shaw|AUTHOR Francesca Shaw]]
</p><p class="cpabstractcardaffiliationlist">CereProc, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3996–4000
</span></p></div>
<div class="cpabstractcardabstract"><p>The ability to be interrupted and react in a realistic manner is a key requirement for interactive speech interfaces. While previous systems have long implemented techniques such as ‘barge in’ where speech output can be halted at word or phrase boundaries, less work has explored how to mimic human speech output responses to real-time events like interruptions which require a reaction from the system. Unlike previous work which has focused on incremental production, here we explore a novel re-planning approach. The proposed system is versatile and offers a large range of possible ways to react. A focus group was used to evaluate the approach, where participants interacted with a system reading out a text. The system would react to audio interruptions, either with no reactions, passive reactions, or active negative reactions (i.e. getting increasingly irritated). Participants preferred a reactive system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bartosz Ziółko|AUTHOR Bartosz Ziółko]], [[Tomasz Pȩdzima̧ż|AUTHOR Tomasz Pȩdzima̧ż]], [[Szymon Pałka|AUTHOR Szymon Pałka]]
</p><p class="cpabstractcardaffiliationlist">AGH UST, Poland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4026–4027
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a 3D realtime audio engine which utilizes frustum tracing to create realistic audio auralization, modifying speech in architectural walkthroughs. All audio effects are computed based on both the geometrical (e.g. walls, furniture) and acoustical scene properties (e.g. materials, air attenuation). The sound changes dynamically as we change the point of perception and sound sources. The engine can be configured to use as little as 10 percent of available processing power. Our demonstration will be based on listening to radio samples in rooms with similar shape, but different acoustical properties. The described system is a component of a virtual reality trainer for firefighters using Oculus Rift. It allows users to conduct dialogues with victims and to locate them based on sound cues.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Takayuki Arai|AUTHOR Takayuki Arai]]
</p><p class="cpabstractcardaffiliationlist">Sophia University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4028–4029
</span></p></div>
<div class="cpabstractcardabstract"><p>Our physical models of the human vocal tract successfully demonstrate theories such as the source-filter theory of speech production, mechanisms such as the relationship between vocal-tract configuration and vowel quality, and phenomena such as formant frequency estimation. Earlier models took one of two directions: either simplification, showing only a few target themes, or diversification, simulating human articulation more broadly. In this study, we have designed a static, hybrid model. Each model of this type produces one vowel. However, the model also simulates the human articulators more broadly, including the lips, teeth, and tongue. The sagittal block is enclosed with transparent plates so that the inside of the vocal tract is visible from the outside. We also colored the articulators to make them more easily identified. In testing, we confirmed that the vocal-tract models can produce the target vowel. These models have great potential, with applications not only in acoustics and phonetics education, but also pronunciation training in language learning and speech therapy in the clinical setting.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ikuyo Masuda-Katsuse|AUTHOR Ikuyo Masuda-Katsuse]]
</p><p class="cpabstractcardaffiliationlist">Kindai University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4030–4031
</span></p></div>
<div class="cpabstractcardabstract"><p>A remote articulation test system with multimedia communication has been developed in order that outside speech-language-hearing therapists (STs) can examine pronunciations of the students in special education classes in regular elementary schools and give advice to their teachers. The proposed system has video and voice communication and image transmission functions based on WebRTC. Using image transmission, the ST presents picture cards for the word test to the student and asks what is depicted. Using video / voice communication, the ST confirms the student’s voice and articulation movement. Compared to our previous system in which written words were presented, the proposed system enables a more formal and accurate articulation test.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[H. Timothy Bunnell|AUTHOR H. Timothy Bunnell]], [[Jason Lilley|AUTHOR Jason Lilley]], [[Kathleen McGrath|AUTHOR Kathleen McGrath]]
</p><p class="cpabstractcardaffiliationlist">Nemours Biomedical Research, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4032–4033
</span></p></div>
<div class="cpabstractcardabstract"><p>The Nemours ModelTalker supports voice banking for users diagnosed with ALS/MND and related neurodegenerative diseases. Users record up to 1600 sentences from which a synthetic voice is constructed. For the past two years we have focused on extending and refining a web-based recording tool to support this process. In this demonstration, we illustrate the features of the web-based pipeline that guides patients through the process of setting up to record at home, recording a standard speech inventory, adding custom recordings, and screening alternative versions of their voice and alternative synthesis parameter settings. Finally, we summarize results from 352 individuals with a wide range of speaking ability, who have recently used this voice banking pipeline. </p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wilbert Heeringa|AUTHOR Wilbert Heeringa]], [[Hans Van de Velde|AUTHOR Hans Van de Velde]]
</p><p class="cpabstractcardaffiliationlist">Fryske Akademy, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4034–4035
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents Visible Vowels, a web app that visualizes variation in f0, formants and duration. It combines user friendliness with maximum functionality and flexibility, using a live plot view.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Achuth Rao M.V.|AUTHOR Achuth Rao M.V.]], [[Shivani Yadav|AUTHOR Shivani Yadav]], [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]
</p><p class="cpabstractcardaffiliationlist">Indian Institute of Science, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3502–3506
</span></p></div>
<div class="cpabstractcardabstract"><p>Snoring is a common symptom of serious chronic disease known as obstructive sleep apnea (OSA). Knowledge about the location of obstruction site (V—Velum, O—Oropharyngeal lateral walls, T—Tongue, E—Epiglottis) in the upper airways is necessary for proper surgical treatment. In this paper we propose a dual source-filter model similar to the source-filter model of speech to approximate the generation process of snore audio. The first filter models the vocal tract from lungs to the point of obstruction with white noise excitation from the lungs. The second filter models the vocal tract from the obstruction point to the lips/nose with impulse train excitation which represents vibrations at the point of obstruction. The filter coefficients are estimated using the closed and open phases of the snore beat cycle. VOTE classification is done by using SVM classifier and filter coefficients as features. The classification experiments are performed on the development set (283 snore audios) of the MUNICH-PASSAU SNORE SOUND CORPUS (MPSSC). We obtain an unweighted average recall (UAR) of 49.58%, which is higher than the INTERSPEECH-2017 snoring sub-challenge baseline technique by ~3% (absolute).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Michael Freitag|AUTHOR Michael Freitag]], [[Shahin Amiriparian|AUTHOR Shahin Amiriparian]], [[Nicholas Cummins|AUTHOR Nicholas Cummins]], [[Maurice Gerczuk|AUTHOR Maurice Gerczuk]], [[Björn Schuller|AUTHOR Björn Schuller]]
</p><p class="cpabstractcardaffiliationlist">Universität Passau, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3507–3511
</span></p></div>
<div class="cpabstractcardabstract"><p>Whilst snoring itself is usually not harmful to a person’s health, it can be an indication of Obstructive Sleep Apnoea (OSA), a serious sleep-related disorder. As a result, studies into using snoring as acoustic based marker of OSA are gaining in popularity. Motivated by this, the INTERSPEECH 2017 ComParE Snoring sub-challenge requires classification from which areas in the upper airways different snoring sounds originate. This paper explores a hybrid approach combining evolutionary feature selection based on competitive swarm optimisation and deep convolutional neural networks (CNN). Feature selection is applied to novel deep spectrum features extracted directly from spectrograms using pre-trained image classification CNN. Key results presented demonstrate that our hybrid approach can substantially increase the performance of a linear support vector machine on a set of low-level features extracted from the Snoring sub-challenge data. Even without subset selection, the deep spectrum features are sufficient to outperform the challenge baseline, and competitive swarm optimisation further improves system performance. In comparison to the challenge baseline, unweighted average recall is increased from 40.6% to 57.6% on the development partition, and from 58.5% to 66.5% on the test partition, using 2246 of the 4096 deep spectrum features.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shahin Amiriparian|AUTHOR Shahin Amiriparian]], [[Maurice Gerczuk|AUTHOR Maurice Gerczuk]], [[Sandra Ottl|AUTHOR Sandra Ottl]], [[Nicholas Cummins|AUTHOR Nicholas Cummins]], [[Michael Freitag|AUTHOR Michael Freitag]], [[Sergey Pugachevskiy|AUTHOR Sergey Pugachevskiy]], [[Alice Baird|AUTHOR Alice Baird]], [[Björn Schuller|AUTHOR Björn Schuller]]
</p><p class="cpabstractcardaffiliationlist">Universität Passau, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3512–3516
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose a method for automatically detecting various types of snore sounds using image classification convolutional neural network (CNN) descriptors extracted from audio file spectrograms. The descriptors, denoted as deep spectrum features, are derived from forwarding spectrograms through very deep task-independent pre-trained CNNs. Specifically, activations of fully connected layers from two common image classification CNNs, AlexNet and VGG19, are used as feature vectors. Moreover, we investigate the impact of differing spectrogram colour maps and two CNN architectures on the performance of the system. Results presented indicate that deep spectrum features extracted from the activations of the second fully connected layer of AlexNet using a viridis colour map are well suited to the task. This feature space, when combined with a support vector classifier, outperforms the more conventional knowledge-based features of 6 373 acoustic functionals used in the INTERSPEECH ComParE 2017 Snoring sub-challenge baseline system. In comparison to the baseline, unweighted average recall is increased from 40.6% to 44.8% on the development partition, and from 58.5% to 67.0% on the test partition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[David Tavarez|AUTHOR David Tavarez]], [[Xabier Sarasola|AUTHOR Xabier Sarasola]], [[Agustin Alonso|AUTHOR Agustin Alonso]], [[Jon Sanchez|AUTHOR Jon Sanchez]], [[Luis Serrano|AUTHOR Luis Serrano]], [[Eva Navas|AUTHOR Eva Navas]], [[Inma Hernáez|AUTHOR Inma Hernáez]]
</p><p class="cpabstractcardaffiliationlist">Universidad del País Vasco, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3517–3521
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper introduces the different systems developed by Aholab Signal Processing Laboratory for The INTERSPEECH 2017 Computational Paralinguistics Challenge, which includes three different subtasks: Addressee, Cold and Snoring classification. Several classification strategies and features related with the spectrum, prosody and phase have been tested separately and further combined by using different fusion techniques, such as early fusion by means of multi-feature vectors, late fusion of the standalone classifier scores and label fusion via weighted voting. The obtained results show that the applied fusion methods improve the performance of the standalone detectors and provide systems capable of outperforming the baseline systems in terms of UAR.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gábor Gosztolya|AUTHOR Gábor Gosztolya]]^^1^^, [[Róbert Busa-Fekete|AUTHOR Róbert Busa-Fekete]]^^2^^, [[Tamás Grósz|AUTHOR Tamás Grósz]]^^3^^, [[László Tóth|AUTHOR László Tóth]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Szeged, Hungary; ^^2^^Yahoo!, USA; ^^3^^University of Szeged, Hungary; ^^4^^MTA-SZTE RGAI, Hungary</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3522–3526
</span></p></div>
<div class="cpabstractcardabstract"><p>In this study we deal with the three sub-challenges of the Interspeech ComParE Challenge 2017, where the goal is to identify child-directed speech, speakers having a cold, and different types of snoring sounds. For the first two sub-challenges we propose a simple, two-step feature extraction and classification scheme: first we perform frame-level classification via Deep Neural Networks (DNNs), and then we extract utterance-level features from the DNN outputs. By utilizing these features for classification, we were able to match the performance of the standard paralinguistic approach (which involves extracting thousands of features, many of them being completely irrelevant to the actual task). As for the Snoring Sub-Challenge, we divided the recordings into segments, and averaged out some frame-level features segment-wise, which were then used for utterance-level classification. When combining the predictions of the proposed approaches with those got by the standard paralinguistic approach, we managed to outperform the baseline values of the Cold and Snoring sub-challenges on the hidden test sets.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Heysem Kaya|AUTHOR Heysem Kaya]]^^1^^, [[Alexey A. Karpov|AUTHOR Alexey A. Karpov]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Namık Kemal Üniversitesi, Turkey; ^^2^^Russian Academy of Sciences, Russia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3527–3531
</span></p></div>
<div class="cpabstractcardabstract"><p>The field of paralinguistics is growing rapidly with a wide range of applications that go beyond recognition of emotions, laughter and personality. The research flourishes in multiple directions such as signal representation and classification, addressing the issues of the domain. Apart from the noise robustness, an important issue with real life data is the imbalanced nature: some classes of states/traits are under-represented. Combined with the high dimensionality of the feature vectors used in the state-of-the-art analysis systems, this issue poses the threat of over-fitting. While the kernel trick can be employed to handle the dimensionality issue, regular classifiers inherently aim to minimize the misclassification error and hence are biased towards the majority class. A solution to this problem is over-sampling of the minority class(es). However, this brings increased memory/computational costs, while not bringing any new information to the classifier. In this work, we propose a new weighting scheme on instances of the original dataset, employing Weighted Kernel Extreme Learning Machine, and inspired from that, introducing the Weighted Partial Least Squares Regression based classifier. The proposed methods are applied on all three INTERSPEECH ComParE 2017 challenge corpora, giving better or competitive results compared to the challenge baselines.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Stefan Steidl|AUTHOR Stefan Steidl]]
</p><p class="cpabstractcardaffiliationlist">FAU Erlangen-Nürnberg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} </span></p></div>
<div class="cpabstractcardabstract"><p>(No abstract available at the time of publication)</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Björn Schuller|AUTHOR Björn Schuller]]^^1^^, [[Anton Batliner|AUTHOR Anton Batliner]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Imperial College London, UK; ^^2^^Universität Passau, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} </span></p></div>
<div class="cpabstractcardabstract"><p>(No abstract available at the time of publication)</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Björn Schuller|AUTHOR Björn Schuller]]^^1^^, [[Stefan Steidl|AUTHOR Stefan Steidl]]^^2^^, [[Anton Batliner|AUTHOR Anton Batliner]]^^3^^, [[Elika Bergelson|AUTHOR Elika Bergelson]]^^4^^, [[Jarek Krajewski|AUTHOR Jarek Krajewski]]^^5^^, [[Christoph Janott|AUTHOR Christoph Janott]]^^6^^, [[Andrei Amatuni|AUTHOR Andrei Amatuni]]^^4^^, [[Marisa Casillas|AUTHOR Marisa Casillas]]^^7^^, [[Amanda Seidl|AUTHOR Amanda Seidl]]^^8^^, [[Melanie Soderstrom|AUTHOR Melanie Soderstrom]]^^9^^, [[Anne S. Warlaumont|AUTHOR Anne S. Warlaumont]]^^10^^, [[Guillermo Hidalgo|AUTHOR Guillermo Hidalgo]]^^5^^, [[Sebastian Schnieder|AUTHOR Sebastian Schnieder]]^^5^^, [[Clemens Heiser|AUTHOR Clemens Heiser]]^^6^^, [[Winfried Hohenhorst|AUTHOR Winfried Hohenhorst]]^^11^^, [[Michael Herzog|AUTHOR Michael Herzog]]^^12^^, [[Maximilian Schmitt|AUTHOR Maximilian Schmitt]]^^3^^, [[Kun Qian|AUTHOR Kun Qian]]^^6^^, [[Yue Zhang|AUTHOR Yue Zhang]]^^13^^, [[George Trigeorgis|AUTHOR George Trigeorgis]]^^13^^, [[Panagiotis Tzirakis|AUTHOR Panagiotis Tzirakis]]^^13^^, [[Stefanos Zafeiriou|AUTHOR Stefanos Zafeiriou]]^^13^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Imperial College London, UK; ^^2^^FAU Erlangen-Nürnberg, Germany; ^^3^^Universität Passau, Germany; ^^4^^Duke University, USA; ^^5^^Bergische Universität Wuppertal, Germany; ^^6^^Technische Universität München, Germany; ^^7^^MPI for Psycholinguistics, The Netherlands; ^^8^^Purdue University, USA; ^^9^^University of Manitoba, Canada; ^^10^^University of California at Merced, USA; ^^11^^Alfried Krupp Krankenhaus, Germany; ^^12^^Carl-Thiem-Klinikum Cottbus, Germany; ^^13^^Imperial College London, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3442–3446
</span></p></div>
<div class="cpabstractcardabstract"><p>The INTERSPEECH 2017 Computational Paralinguistics Challenge addresses three different problems for the first time in research competition under well-defined conditions: In the Addressee sub-challenge, it has to be determined whether speech produced by an adult is directed towards another adult or towards a child; in the Cold sub-challenge, speech under cold has to be told apart from ‘healthy’ speech; and in the Snoring sub-challenge, four different types of snoring have to be classified. In this paper, we describe these sub-challenges, their conditions, and the baseline feature extraction and classifiers, which include data-learnt feature representations by end-to-end learning with convolutional and recurrent neural networks, and bag-of-audio-words for the first time in the challenge series.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jarek Krajewski|AUTHOR Jarek Krajewski]]^^1^^, [[Sebastian Schnieder|AUTHOR Sebastian Schnieder]]^^2^^, [[Anton Batliner|AUTHOR Anton Batliner]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Bergische Universität Wuppertal, Germany; ^^2^^Bergische Universität Wuppertal, Germany; ^^3^^Universität Passau, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} </span></p></div>
<div class="cpabstractcardabstract"><p>(No abstract available at the time of publication)</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Christoph Janott|AUTHOR Christoph Janott]]^^1^^, [[Anton Batliner|AUTHOR Anton Batliner]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Technische Universität München, Germany; ^^2^^Universität Passau, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} </span></p></div>
<div class="cpabstractcardabstract"><p>(No abstract available at the time of publication)</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Elika Bergelson|AUTHOR Elika Bergelson]]^^1^^, [[Andrei Amatuni|AUTHOR Andrei Amatuni]]^^1^^, [[Marisa Casillas|AUTHOR Marisa Casillas]]^^2^^, [[Amanda Seidl|AUTHOR Amanda Seidl]]^^3^^, [[Melanie Soderstrom|AUTHOR Melanie Soderstrom]]^^4^^, [[Anne S. Warlaumont|AUTHOR Anne S. Warlaumont]]^^5^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Duke University, USA; ^^2^^MPI for Psycholinguistics, The Netherlands; ^^3^^Purdue University, USA; ^^4^^University of Manitoba, Canada; ^^5^^University of California at Merced, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} </span></p></div>
<div class="cpabstractcardabstract"><p>(No abstract available at the time of publication)</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mark Huckvale|AUTHOR Mark Huckvale]]^^1^^, [[András Beke|AUTHOR András Beke]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University College London, UK; ^^2^^MTA, Hungary</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3447–3451
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes an evaluation of four different voice feature sets for detecting symptoms of the common cold in speech as part of the Interspeech 2017 Computational Paralinguistics Challenge. The challenge corpus consists of 630 speakers in three partitions, of which approximately one third had a “severe” cold at the time of recording. Success on the task is measured in terms of unweighted average recall of cold/not-cold classification from short extracts of the recordings. In this paper we review previous voice features used for studying changes in health and devise four basic types of features for evaluation: voice quality features, vowel spectra features, modulation spectra features, and spectral distribution features. The evaluation shows that each feature set provides some useful information to the task, with features from the modulation spectrogram being most effective. Feature-level fusion of the feature sets shows small performance improvements on the development test set. We discuss the results in terms of the most suitable features for detecting symptoms of cold and address issues arising from the design of the challenge.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Danwei Cai|AUTHOR Danwei Cai]]^^1^^, [[Zhidong Ni|AUTHOR Zhidong Ni]]^^1^^, [[Wenbo Liu|AUTHOR Wenbo Liu]]^^1^^, [[Weicheng Cai|AUTHOR Weicheng Cai]]^^1^^, [[Gang Li|AUTHOR Gang Li]]^^2^^, [[Ming Li|AUTHOR Ming Li]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Sun Yat-sen University, China; ^^2^^JSC, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3452–3456
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose an end-to-end deep learning framework to detect speech paralinguistics using perception aware spectrum as input. Existing studies show that speech under cold has distinct variations of energy distribution on low frequency components compared with the speech under ‘healthy’ condition. This motivates us to use perception aware spectrum as the input to an end-to-end learning framework with small scale dataset. In this work, we try both Constant Q Transform (CQT) spectrum and Gammatone spectrum in different end-to-end deep learning networks, where both spectrums are able to closely mimic the human speech perception and transform it into 2D images. Experimental results show the effectiveness of the proposed perception aware spectrum with end-to-end deep learning approach on Interspeech 2017 Computational Paralinguistics Cold sub-Challenge. The final fusion result of our proposed method is 8% better than that of the provided baseline in terms of UAR.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Johannes Wagner|AUTHOR Johannes Wagner]]^^1^^, [[Thiago Fraga-Silva|AUTHOR Thiago Fraga-Silva]]^^2^^, [[Yvan Josse|AUTHOR Yvan Josse]]^^2^^, [[Dominik Schiller|AUTHOR Dominik Schiller]]^^1^^, [[Andreas Seiderer|AUTHOR Andreas Seiderer]]^^1^^, [[Elisabeth André|AUTHOR Elisabeth André]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität Augsburg, Germany; ^^2^^Vocapia Research, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3457–3461
</span></p></div>
<div class="cpabstractcardabstract"><p>The realization of language through vocal sounds involves a complex interplay between the lungs, the vocal cords, and a series of resonant chambers (e.g. mouth and nasal cavities). Due to their connection to the outside world, these body parts are popular spots for viruses and bacteria to enter the human organism. Affected people may suffer from an upper respiratory tract infection (URTIC) and consequently their voice often sounds breathy, raspy or sniffly. In this paper, we investigate the audible effects of a cold on a phonetic level. Results on a German corpus show that the articulation of consonants is more impaired than that of vowels. Surprisingly, nasal sounds do not follow this trend in our experiments. We finally try to predict a speaker’s health condition by fusing decisions we derive from single phonemes. The presented work is part of the INTERSPEECH 2017 Computational Paralinguistics Challenge.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Akshay Kalkunte Suresh|AUTHOR Akshay Kalkunte Suresh]]^^1^^, [[Srinivasa Raghavan K.M.|AUTHOR Srinivasa Raghavan K.M.]]^^2^^, [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^PES Institute of Technology, India; ^^2^^Indian Institute of Science, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3462–3466
</span></p></div>
<div class="cpabstractcardabstract"><p>We consider the problem of automatically detecting if a speaker is suffering from common cold from his/her speech. When a speaker has symptoms of cold, his/her voice quality changes compared to the normal one. We hypothesize that such a change in voice quality could be reflected in lower likelihoods from a model built using normal speech. In order to capture this, we compute a 120-dimensional posteriorgram feature in each frame using Gaussian mixture model from 120 states of 40 three-states phonetic hidden Markov models trained on approximately 16.4 hours of normal English speech. Finally, a fixed 5160-dimensional phoneme state posteriorgram (PSP) feature vector for each utterance is obtained by computing statistics from the posteriorgram feature trajectory. Experiments on the 2017-Cold sub-challenge data show that when the decisions from bag-of-audio-words (BoAW) and end-to-end (e2e) are combined with those from PSP features with unweighted majority rule, the UAR on the development set becomes 69% which is 2.9% (absolute) better than the best of the UARs obtained by the baseline schemes. When the decisions from ComParE, BoAW and PSP features are combined with simple majority rule, it results in a UAR of 68.52% on the test set.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tin Lay Nwe|AUTHOR Tin Lay Nwe]], [[Huy Dat Tran|AUTHOR Huy Dat Tran]], [[Wen Zheng Terence Ng|AUTHOR Wen Zheng Terence Ng]], [[Bin Ma|AUTHOR Bin Ma]]
</p><p class="cpabstractcardaffiliationlist">A*STAR, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3467–3471
</span></p></div>
<div class="cpabstractcardabstract"><p>Snoring is caused by the narrowing of the upper airway and it is excited by different locations within the upper airways. This irregularity could lead to the presence of Obstructive Sleep Apnea Syndrome (OSAS). Diagnosis of OSAS could therefore be made by snoring sound analysis. This paper proposes a novel method to automatically classify snoring sounds by their excitation locations for ComParE2017 challenge. We propose 3 sub-systems for classification. In the first system, we propose to integrate Bhattacharyya distance based Gaussian Mixture Model (GMM) supervectors to a set of static features provided by ComParE2017 challenge. The Bhattacharyya distance based GMM supervectors characterize the spectral dissimilarity measure among snore sounds excited by different locations. And, we employ Support Vector Machine (SVM) for classification. In the second system, we perform feature selection on static features provided by the challenge and conduct classification using Random Forest. In the third system, we extract spectrogram from audio and employ Convolutional Neural Network (CNN) for snore sound classification. Then, we fuse 3 sub-systems to produce final classification results. The experimental results show that the proposed system performs better than the challenge baseline.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tatsuya Kitamura|AUTHOR Tatsuya Kitamura]]^^1^^, [[Hironori Takemoto|AUTHOR Hironori Takemoto]]^^2^^, [[Hisanori Makinae|AUTHOR Hisanori Makinae]]^^3^^, [[Tetsutaro Yamaguchi|AUTHOR Tetsutaro Yamaguchi]]^^4^^, [[Kotaro Maki|AUTHOR Kotaro Maki]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Konan University, Japan; ^^2^^Chiba Institute of Technology, Japan; ^^3^^National Research Institute of Police Science, Japan; ^^4^^Showa University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3472–3476
</span></p></div>
<div class="cpabstractcardabstract"><p>The nasal and paranasal cavities have a labyrinthine shape and their acoustic properties affect speech sounds. In this study, we explored the transfer function of the nasal and paranasal cavities, as well as the contribution of each paranasal cavity, using acoustical and numerical methods. A physical model of the nasal and paranasal cavities was formed using data from a high-resolution 3D X-ray CT and a 3D printer. The data was acquired from a female subject during silent nasal breathing. The transfer function of the physical model was then measured by introducing a white noise signal at the glottis and measuring its acoustic response at a point 20 mm away from the nostrils. We also calculated the transfer function of the 3D model using a finite-difference time-domain or FDTD method. The results showed that the gross shape and the frequency of peaks and dips of the measured and calculated transfer functions were similar, suggesting that both methods used in this study were reliable. The results of FDTD simulations evaluating the paranasal sinuses individually suggested that they contribute not only to spectral dips but also to peaks, which is contrary to the traditional theories regarding the production of speech sounds.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Marc Arnela|AUTHOR Marc Arnela]]^^1^^, [[Saeed Dabbaghchian|AUTHOR Saeed Dabbaghchian]]^^2^^, [[Oriol Guasch|AUTHOR Oriol Guasch]]^^1^^, [[Olov Engwall|AUTHOR Olov Engwall]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universitat Ramon Llull, Spain; ^^2^^KTH, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3477–3481
</span></p></div>
<div class="cpabstractcardabstract"><p>Three-dimensional computational acoustic models need very detailed 3D vocal tract geometries to generate high quality sounds. Static geometries can be obtained from Magnetic Resonance Imaging (MRI), but it is not currently possible to capture dynamic MRI-based geometries with sufficient spatial and time resolution. One possible solution consists in interpolating between static geometries, but this is a complex task. We instead propose herein to use a semi-polar grid to extract 2D cross-sections from the static 3D geometries, and then interpolate them to obtain the vocal tract dynamics. Other approaches such as the adaptive grid have also been explored. In this method, cross-sections are defined perpendicular to the vocal tract midline, as typically done in 1D to obtain the vocal tract area functions. However, intersections between adjacent cross-sections may occur during the interpolation process, especially when the vocal tract midline quickly changes its orientation. In contrast, the semi-polar grid prevents these intersections because the plane orientations are fixed over time. Finite element simulations of static vowels are first conducted, showing that 3D acoustic wave propagation is not significantly altered when the semi-polar grid is used instead of the adaptive grid. The vowel-vowel sequence [ʀiptai] is finally simulated to demonstrate the method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Arvind Vasudevan|AUTHOR Arvind Vasudevan]]^^1^^, [[Victor Zappi|AUTHOR Victor Zappi]]^^2^^, [[Peter Anderson|AUTHOR Peter Anderson]]^^1^^, [[Sidney Fels|AUTHOR Sidney Fels]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of British Columbia, Canada; ^^2^^Istituto Italiano di Tecnologia, Italy</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3482–3486
</span></p></div>
<div class="cpabstractcardabstract"><p>A balance between the simplicity and speed of lumped-element vocal fold models and the completeness and complexity of continuum-models is required to achieve fast high-quality articulatory speech synthesis. We develop and implement a novel self-oscillating vocal-fold model, composed of a 1D unsteady fluid model loosely coupled with a 2D FEM structural model. The flow model is capable of robustly handling irregular geometries, different boundary conditions, closure of the glottis and unsteady flow states. A method for a fast decoupled solution of the flow equations that does not require the computation of the Jacobian is provided. The model is coupled with a 2D real-time finite-difference wave-solver for simulating vocal tract acoustics and a 1D wave-reflection analog representation of the trachea. The simulation results are shown to agree with existing data in literature, and give realistic pressure-velocity distributions, glottal width and glottal flow values. In addition, the model is more than an order of magnitude faster to run than comparable 2D Navier-Stokes fluid solvers, while better capturing transitional flow than simple Bernoulli-based flow models. The vocal fold model provides an alternative to simple lumped-element models for faster higher-quality articulatory speech synthesis.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tiina Murtola|AUTHOR Tiina Murtola]], [[Jarmo Malinen|AUTHOR Jarmo Malinen]]
</p><p class="cpabstractcardaffiliationlist">Aalto University, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3487–3491
</span></p></div>
<div class="cpabstractcardabstract"><p>A time-domain model of vowel production is used to simulate fundamental frequency glides over the first vocal tract resonance. A vocal tract geometry extracted from MRI data of a female speaker pronouncing [i] is used. The model contains direct feedback from the acoustic loads to vocal fold tissues and the inertial effect of the full air column on the glottal flow. The simulations reveal that a perturbation pattern in the fundamental frequency, namely, a jump and locking to the vocal tract resonance, is accompanied by a specific pattern of glottal waveform changes.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Niyazi Cem Degirmenci|AUTHOR Niyazi Cem Degirmenci]]^^1^^, [[Johan Jansson|AUTHOR Johan Jansson]]^^2^^, [[Johan Hoffman|AUTHOR Johan Hoffman]]^^1^^, [[Marc Arnela|AUTHOR Marc Arnela]]^^3^^, [[Patricia Sánchez-Martín|AUTHOR Patricia Sánchez-Martín]]^^3^^, [[Oriol Guasch|AUTHOR Oriol Guasch]]^^3^^, [[Sten Ternström|AUTHOR Sten Ternström]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^KTH, Sweden; ^^2^^BCAM, Spain; ^^3^^Universitat Ramon Llull, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3492–3496
</span></p></div>
<div class="cpabstractcardabstract"><p>A unified approach for the numerical simulation of vowels is presented, which accounts for the self-oscillations of the vocal folds including contact, the generation of acoustic waves and their propagation through the vocal tract, and the sound emission outwards the mouth. A monolithic incompressible fluid-structure interaction model is used to simulate the interaction between the glottal jet and the vocal folds, whereas the contact model is addressed by means of a level set application of the Eikonal equation. The coupling with acoustics is done through an acoustic analogy stemming from a simplification of the acoustic perturbation equations. This coupling is one-way in the sense that there is no feedback from the acoustics to the flow and mechanical fields.
All the involved equations are solved together at each time step and in a single computational run, using the finite element method (FEM). As an application, the production of vowel [i] has been addressed. Despite the complexity of all physical phenomena to be simulated simultaneously, which requires resorting to massively parallel computing, the formant locations of vowel [i] have been well recovered.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Saeed Dabbaghchian|AUTHOR Saeed Dabbaghchian]]^^1^^, [[Marc Arnela|AUTHOR Marc Arnela]]^^2^^, [[Olov Engwall|AUTHOR Olov Engwall]]^^1^^, [[Oriol Guasch|AUTHOR Oriol Guasch]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^KTH, Sweden; ^^2^^Universitat Ramon Llull, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3497–3501
<a href="./IS2017/MEDIA/1614" class="externallinkbutton" target="_blank">{{$:/causal/Multimedia Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a method to automatically generate deformable 3D vocal tract geometries from the surrounding structures in a biomechanical model. This allows us to couple 3D biomechanics and acoustics simulations. The basis of the simulations is muscle activation trajectories in the biomechanical model, which move the articulators to the desired articulatory positions. The muscle activation trajectories for a vowel-vowel utterance are here defined through interpolation between the determined activations of the start and end vowel. The resulting articulatory trajectories of flesh points on the tongue surface and jaw are similar to corresponding trajectories measured using Electromagnetic Articulography, hence corroborating the validity of interpolating muscle activation. At each time step in the articulatory transition, a 3D vocal tract tube is created through a cavity extraction method based on first slicing the geometry of the articulators with a semi-polar grid to extract the vocal tract contour in each plane and then reconstructing the vocal tract through a smoothed 3D mesh-generation using the extracted contours. A finite element method applied to these changing 3D geometries simulates the acoustic wave propagation. We present the resulting acoustic pressure changes on the vocal tract boundary and the formant transitions for the utterance [ʀiptai].</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[James Allen|AUTHOR James Allen]]
</p><p class="cpabstractcardaffiliationlist">University of Rochester, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 833
</span></p></div>
<div class="cpabstractcardabstract"><p>I will describe the current status of a long-term effort at developing dialogue systems that go beyond simple task execution models to systems that involve collaborative problem solving. Such systems involve open-ended discussion and the tasks cannot be accomplished without extensive interaction (e.g., 10 turns or more). The key idea is that dialogue itself arises from an agent’s ability for collaborative problem solving (CPS). In such dialogues, agents may introduce, modify and negotiate goals; propose and discuss the merits of possible paths to solutions; explicitly discuss progress as the two agents work towards the goals; and evaluate how well a goal was accomplished. To complicate matters, user utterances in such settings are much more complex than seen in simple task execution dialogues and require full semantic parsing. A key question we have been exploring in the past few years is how much of dialogue can be accounted for by domain-independent mechanisms. I will discuss these issues and draw examples from a dialogue system we have built that, except for the specialized domain reasoning required in each case, uses the same architecture to perform three different tasks: collaborative blocks world planning, when the system and user build structures and may have differing goals; biocuration, in which a biologist and the system interact in order to build executable causal models of biological pathways; and collaborative composition, where the user and system collaborate to compose simple pieces of music.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rohit Prabhavalkar|AUTHOR Rohit Prabhavalkar]]^^1^^, [[Kanishka Rao|AUTHOR Kanishka Rao]]^^1^^, [[Tara N. Sainath|AUTHOR Tara N. Sainath]]^^1^^, [[Bo Li|AUTHOR Bo Li]]^^1^^, [[Leif Johnson|AUTHOR Leif Johnson]]^^1^^, [[Navdeep Jaitly|AUTHOR Navdeep Jaitly]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Google, USA; ^^2^^NVIDIA, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 939–943
</span></p></div>
<div class="cpabstractcardabstract"><p>In this work, we conduct a detailed evaluation of various all-neural, end-to-end trained, sequence-to-sequence models applied to the task of speech recognition. Notably, each of these systems directly predicts graphemes in the written domain, without using an external pronunciation lexicon, or a separate language model. We examine several sequence-to-sequence models including connectionist temporal classification (CTC), the recurrent neural network (RNN) transducer, an attention-based model, and a model which augments the RNN transducer with an attention mechanism.
We find that the sequence-to-sequence models are competitive with traditional state-of-the-art approaches on dictation test sets, although the baseline, which uses a separate pronunciation and language model, outperforms these models on voice-search test sets.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Albert Zeyer|AUTHOR Albert Zeyer]], [[Eugen Beck|AUTHOR Eugen Beck]], [[Ralf Schlüter|AUTHOR Ralf Schlüter]], [[Hermann Ney|AUTHOR Hermann Ney]]
</p><p class="cpabstractcardaffiliationlist">RWTH Aachen University, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 944–948
</span></p></div>
<div class="cpabstractcardabstract"><p>We formulate a generalized hybrid HMM-NN training procedure using the full-sum over the hidden state-sequence and identify CTC as a special case of it. We present an analysis of the alignment behavior of such a training procedure and explain the strong localization of label output behavior of full-sum training (also referred to as peaky or spiky behavior). We show how to avoid that behavior by using a state prior. We discuss the temporal decoupling between output label position/time-frame, and the corresponding evidence in the input observations when this is trained with BLSTM models. We also show a way how to overcome this by jointly training a FFNN. We implemented the Baum-Welch alignment algorithm in CUDA to be able to do fast soft realignments on GPU. We have published this code along with some of our experiments as part of RETURNN, RWTH’s extensible training framework for universal recurrent neural networks. We finish with experimental validation of our study on WSJ and Switchboard.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Takaaki Hori|AUTHOR Takaaki Hori]]^^1^^, [[Shinji Watanabe|AUTHOR Shinji Watanabe]]^^1^^, [[Yu Zhang|AUTHOR Yu Zhang]]^^2^^, [[William Chan|AUTHOR William Chan]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^MERL, USA; ^^2^^MIT, USA; ^^3^^Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 949–953
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a state-of-the-art end-to-end Automatic Speech Recognition (ASR) model. We learn to listen and write characters with a joint Connectionist Temporal Classification (CTC) and attention-based encoder-decoder network. The encoder is a deep Convolutional Neural Network (CNN) based on the VGG network. The CTC network sits on top of the encoder and is jointly trained with the attention-based decoder. During the beam search process, we combine the CTC predictions, the attention-based decoder predictions and a separately trained LSTM language model. We achieve a 5–10% error reduction compared to prior systems on spontaneous Japanese and Chinese speech, and our end-to-end model beats out traditional hybrid ASR systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Liang Lu|AUTHOR Liang Lu]]^^1^^, [[Lingpeng Kong|AUTHOR Lingpeng Kong]]^^2^^, [[Chris Dyer|AUTHOR Chris Dyer]]^^3^^, [[Noah A. Smith|AUTHOR Noah A. Smith]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^TTIC, USA; ^^2^^Carnegie Mellon University, USA; ^^3^^DeepMind, UK; ^^4^^University of Washington, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 954–958
</span></p></div>
<div class="cpabstractcardabstract"><p>Segmental conditional random fields (SCRFs) and connectionist temporal classification (CTC) are two sequence labeling methods used for end-to-end training of speech recognition models. Both models define a transcription probability by marginalizing decisions about latent segmentation alternatives to derive a sequence probability: the former uses a globally normalized joint model of segment labels and durations, and the latter classifies each frame as either an output symbol or a “continuation” of the previous label. In this paper, we train a recognition model by optimizing an interpolation between the SCRF and CTC losses, where the same recurrent neural network (RNN) encoder is used for feature extraction for both outputs. We find that this multitask objective improves recognition accuracy when decoding with either the SCRF or CTC models. Additionally, we show that CTC can also be used to pretrain the RNN encoder, which improves the convergence rate when learning the joint model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kartik Audhkhasi|AUTHOR Kartik Audhkhasi]], [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]], [[George Saon|AUTHOR George Saon]], [[Michael Picheny|AUTHOR Michael Picheny]], [[David Nahamoo|AUTHOR David Nahamoo]]
</p><p class="cpabstractcardaffiliationlist">IBM, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 959–963
</span></p></div>
<div class="cpabstractcardabstract"><p>Recent work on end-to-end automatic speech recognition (ASR) has shown that the connectionist temporal classification (CTC) loss can be used to convert acoustics to phone or character sequences. Such systems are used with a dictionary and separately-trained Language Model (LM) to produce word sequences. However, they are not truly end-to-end in the sense of mapping acoustics directly to words without an intermediate phone representation. In this paper, we present the first results employing direct acoustics-to-word CTC models on two well-known public benchmark tasks: Switchboard and CallHome. These models do not require an LM or even a decoder at run-time and hence recognize speech with minimal complexity. However, due to the large number of word output units, CTC word models require orders of magnitude more data to train reliably compared to traditional systems. We present some techniques to mitigate this issue. Our CTC word model achieves a word error rate of 13.0%/18.8% on the Hub5-2000 Switchboard/CallHome test sets without any LM or decoder compared with 9.6%/16.0% for phone-based CTC with a 4-gram LM. We also present rescoring results on CTC word model lattices to quantify the performance benefits of a LM, and contrast the performance of word and phone CTC models.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bo Li|AUTHOR Bo Li]], [[Tara N. Sainath|AUTHOR Tara N. Sainath]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 964–968
</span></p></div>
<div class="cpabstractcardabstract"><p>Long Short-Term Memory Recurrent Neural Networks (LSTMs) are good at modeling temporal variations in speech recognition tasks, and have become an integral component of many state-of-the-art ASR systems. More recently, LSTMs have been extended to model variations in the speech signal in two dimensions, namely time and frequency [1, 2]. However, one of the problems with two-dimensional LSTMs, such as Grid-LSTMs, is that the processing in both time and frequency occurs sequentially, thus increasing computational complexity. In this work, we look at minimizing the dependence of the Grid-LSTM with respect to previous time and frequency points in the sequence, thus reducing computational complexity. Specifically, we compare reducing computation using a bidirectional Grid-LSTM (biGrid-LSTM) with non-overlapping frequency sub-band processing, a PyraMiD-LSTM [3] and a frequency-block Grid-LSTM (fbGrid-LSTM) for parallel time-frequency processing. We find that the fbGrid-LSTM can reduce computation costs by a factor of four with no loss in accuracy, on a 12,500 hour Voice Search task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Aharon Satt|AUTHOR Aharon Satt]], [[Shai Rozenberg|AUTHOR Shai Rozenberg]], [[Ron Hoory|AUTHOR Ron Hoory]]
</p><p class="cpabstractcardaffiliationlist">IBM, Israel</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1089–1093
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a new implementation of emotion recognition from the para-lingual information in the speech, based on a deep neural network, applied directly to spectrograms. This new method achieves higher recognition accuracy compared to previously published results, while also limiting the latency. It processes the speech input in smaller segments — up to 3 seconds, and splits a longer input into non-overlapping parts to reduce the prediction latency.
The deep network comprises common neural network tools — convolutional and recurrent networks — which are shown to effectively learn the information that represents emotions directly from spectrograms. Convolution-only lower-complexity deep network achieves a prediction accuracy of 66% over four emotions (tested on IEMOCAP — a common evaluation corpus), while a combined convolution-LSTM higher-complexity model achieves 68%.
The use of spectrograms in the role of speech-representing features enables effective handling of background non-speech signals such as music (excl. singing) and crowd noise, even at noise levels comparable with the speech signal levels. Using harmonic modeling to remove non-speech components from the spectrogram, we demonstrate significant improvement of the emotion recognition accuracy in the presence of unknown background non-speech signals.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ruo Zhang|AUTHOR Ruo Zhang]], [[Ando Atsushi|AUTHOR Ando Atsushi]], [[Satoshi Kobashikawa|AUTHOR Satoshi Kobashikawa]], [[Yushi Aono|AUTHOR Yushi Aono]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1094–1097
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper we propose a novel emotion recognition method modeling interaction and transition in dialogue. Conventional emotion recognition utilizes intra-features such as MFCCs or F0s within an individual utterance. However, humans perceive emotions not only through individual utterances but also through contextual information. The proposed method takes into account the contextual effect of utterances in dialogue, which the conventional method fails to do. The proposed method introduces Emotion Interaction and Transition (EIT) models, which are constructed with end-to-end LSTMs. The inputs of the EIT model are the previous emotions of both the target and opponent speaker, estimated by a state-of-the-art utterance emotion recognition model. The experimental results show that the proposed method improves overall accuracy and average precision by a relative error reduction of 18.8% and 22.6%, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[John Gideon|AUTHOR John Gideon]]^^1^^, [[Soheil Khorram|AUTHOR Soheil Khorram]]^^1^^, [[Zakaria Aldeneh|AUTHOR Zakaria Aldeneh]]^^1^^, [[Dimitrios Dimitriadis|AUTHOR Dimitrios Dimitriadis]]^^2^^, [[Emily Mower Provost|AUTHOR Emily Mower Provost]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Michigan, USA; ^^2^^IBM, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1098–1102
</span></p></div>
<div class="cpabstractcardabstract"><p>Many paralinguistic tasks are closely related and thus representations learned in one domain can be leveraged for another. In this paper, we investigate how knowledge can be transferred between three paralinguistic tasks: speaker, emotion, and gender recognition. Further, we extend this problem to cross-dataset tasks, asking how knowledge captured in one emotion dataset can be transferred to another. We focus on progressive neural networks and compare these networks to the conventional deep learning method of pre-training and fine-tuning. Progressive neural networks provide a way to transfer knowledge and avoid the forgetting effect present when pre-training neural networks on different tasks. Our experiments demonstrate that: (1) emotion recognition can benefit from using representations originally learned for different paralinguistic tasks and (2) transfer learning can effectively leverage additional datasets to improve the performance of emotion recognition systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Srinivas Parthasarathy|AUTHOR Srinivas Parthasarathy]], [[Carlos Busso|AUTHOR Carlos Busso]]
</p><p class="cpabstractcardaffiliationlist">University of Texas at Dallas, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1103–1107
</span></p></div>
<div class="cpabstractcardabstract"><p>An appealing representation of emotions is the use of emotional attributes such as arousal (passive versus active), valence (negative versus positive) and dominance (weak versus strong). While previous studies have considered these dimensions as orthogonal descriptors to represent emotions, there is strong theoretical and practical evidence showing the interrelation between these emotional attributes. This observation suggests that predicting emotional attributes with a unified framework should outperform machine learning algorithms that separately predict each attribute. This study presents methods to jointly learn emotional attributes by exploiting their interdependencies. The framework relies on multi-task learning (MTL) implemented with deep neural networks (DNN) with shared hidden layers. The framework provides a principled approach to learn shared feature representations that maximize the performance of regression models. The results of within-corpus and cross-corpora evaluation show the benefits of MTL over single task learning (STL). MTL achieves gains on concordance correlation coefficient (CCC) as high as 4.7% for within-corpus evaluations, and 14.0% for cross-corpora evaluations. The visualization of the activations of the last hidden layers illustrates that MTL creates better feature representation. The best structure has shared layers followed by attribute-dependent layers, capturing better the relation between attributes.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Duc Le|AUTHOR Duc Le]], [[Zakaria Aldeneh|AUTHOR Zakaria Aldeneh]], [[Emily Mower Provost|AUTHOR Emily Mower Provost]]
</p><p class="cpabstractcardaffiliationlist">University of Michigan, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1108–1112
</span></p></div>
<div class="cpabstractcardabstract"><p>Estimating continuous emotional states from speech as a function of time has traditionally been framed as a regression problem. In this paper, we present a novel approach that moves the problem into the classification domain by discretizing the training labels at different resolutions. We employ a multi-task deep bidirectional long-short term memory (BLSTM) recurrent neural network (RNN) trained with cost-sensitive Cross Entropy loss to model these labels jointly. We introduce an emotion decoding algorithm that incorporates long- and short-term temporal properties of the signal to produce more robust time series estimates. We show that our proposed approach achieves competitive audio-only performance on the RECOLA dataset, relative to previously published works as well as other strong regression baselines. This work provides a link between regression and classification, and contributes an alternative approach for continuous emotion recognition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jaebok Kim|AUTHOR Jaebok Kim]], [[Gwenn Englebienne|AUTHOR Gwenn Englebienne]], [[Khiet P. Truong|AUTHOR Khiet P. Truong]], [[Vanessa Evers|AUTHOR Vanessa Evers]]
</p><p class="cpabstractcardaffiliationlist">University of Twente, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1113–1117
</span></p></div>
<div class="cpabstractcardabstract"><p>One of the challenges in Speech Emotion Recognition (SER) “in the wild” is the large mismatch between training and test data (e.g. speakers and tasks). In order to improve the generalisation capabilities of the emotion models, we propose to use Multi-Task Learning (MTL) and use gender and naturalness as auxiliary tasks in deep neural networks. This method was evaluated in within-corpus and various cross-corpus classification experiments that simulate conditions “in the wild”. In comparison to Single-Task Learning (STL) based state of the art methods, we found that our proposed MTL method improved performance significantly. Particularly, models using both gender and naturalness achieved more gains than those using either gender or naturalness separately. This benefit was also found in the high-level representations of the feature space, obtained from our proposed method, where discriminative emotional clusters could be observed.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jorge C. Lucero|AUTHOR Jorge C. Lucero]]
</p><p class="cpabstractcardaffiliationlist">Universidade de Brasília, Brazil</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 969–973
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper shows the application of a functional version of principal component analysis to build a parametrization of vocal tract area functions for vowel production. Sets of measured area values for ten vowels are expressed as smooth functional data and next decomposed into a mean area function and a basis of orthogonal eigenfunctions. Interpretations of the first four eigenfunctions are provided in terms of tongue movements and vocal tract length variations. Also, an alternative set of eigenfunctions with closer association to specific regions of the vocal tract is obtained via a varimax rotation. The general intention of the paper is to show the benefits of a functional approach to analyze vocal tract shapes and motivate further applications.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ganesh Sivaraman|AUTHOR Ganesh Sivaraman]]^^1^^, [[Carol Espy-Wilson|AUTHOR Carol Espy-Wilson]]^^1^^, [[Martijn Wieling|AUTHOR Martijn Wieling]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Maryland, USA; ^^2^^Rijksuniversiteit Groningen, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 974–978
</span></p></div>
<div class="cpabstractcardabstract"><p>The focus of this paper is estimating articulatory movements of the tongue and lips from acoustic speech data. While there are several potential applications of such a method in speech therapy and pronunciation training, performance of such acoustic-to-articulatory inversion systems is not very high due to limited availability of simultaneous acoustic and articulatory data, substantial speaker variability, and variable methods of data collection. This paper therefore evaluates the impact of speaker, language and accent variability on the performance of an acoustic-to-articulatory speech inversion system. The articulatory dataset used in this study consists of 21 Dutch speakers reading Dutch and English words and sentences, and 22 UK English speakers reading English words and sentences. We trained several acoustic-to-articulatory speech inversion systems both based on deep and shallow neural network architectures in order to estimate electromagnetic articulography (EMA) sensor positions, as well as vocal tract variables (TVs). Our results show that with appropriate feature and target normalization, a speaker-independent speech inversion system trained on data from one language is able to estimate sensor positions (or TVs) for the same language correlating at about r = 0.53 with the actual sensor positions (or TVs). Cross-language results show a reduced performance of r = 0.47.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Takayuki Arai|AUTHOR Takayuki Arai]]
</p><p class="cpabstractcardaffiliationlist">Sophia University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 979–983
<a href="./IS2017/MEDIA/0617" class="externallinkbutton" target="_blank">{{$:/causal/Multimedia Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>We have developed two types of mechanical models of the human vocal tract. The first model was designed for the retroflex approximant [r] and the alveolar lateral approximant [l]. It consisted of the main vocal tract and a flapping tongue, where the front half of the tongue can be rotated against the palate. When the tongue is short and rotated approximately 90 degrees, the retroflex approximant [r] is produced. The second model was designed for [b], [m], and [w]. Besides the main vocal tract, this model contains a movable lower lip for lip closure and a nasal cavity with a controllable velopharyngeal port. In the present study, we joined these two mechanical models to form a new model containing the main vocal tract, the flapping tongue, the movable lower lip, and the nasal cavity with the controllable velopharyngeal port. This integrated model now makes it possible to produce consonant sequences. Therefore, we examined the sequence [br], in particular, adjusting the timing of the lip and lingual gestures to produce the best sound. Because the gestures are visually observable from the outside of this model, the timing of the gestures were examined with the use of a high-speed video camera.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Leonardo Badino|AUTHOR Leonardo Badino]]^^1^^, [[Luca Franceschi|AUTHOR Luca Franceschi]]^^1^^, [[Raman Arora|AUTHOR Raman Arora]]^^2^^, [[Michele Donini|AUTHOR Michele Donini]]^^1^^, [[Massimiliano Pontil|AUTHOR Massimiliano Pontil]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Istituto Italiano di Tecnologia, Italy; ^^2^^Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 984–988
</span></p></div>
<div class="cpabstractcardabstract"><p>We address the speaker-independent acoustic inversion (AI) problem, also referred to as acoustic-to-articulatory mapping. The scarce availability of multi-speaker articulatory data makes it difficult to learn a mapping which generalizes from a limited number of training speakers and reliably reconstructs the articulatory movements of unseen speakers. In this paper, we propose a Multi-task Learning (MTL)-based approach that explicitly separates the modeling of each training speaker AI peculiarities from the modeling of AI characteristics that are shared by all speakers. Our approach stems from the well known Regularized MTL approach and extends it to feed-forward deep neural networks (DNNs). Given multiple training speakers, we learn for each an acoustic-to-articulatory mapping represented by a DNN. Then, through an iterative procedure, we search for a canonical speaker-independent DNN that is “similar” to all speaker-dependent DNNs. The degree of similarity is controlled by a regularization parameter. We report experiments on the University of Wisconsin X-ray Microbeam Database under different training/testing experimental settings. The results obtained indicate that our MTL-trained canonical DNN largely outperforms a standardly trained (i.e., single task learning-based) speaker independent DNN.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hidetsugu Uchida|AUTHOR Hidetsugu Uchida]], [[Daisuke Saito|AUTHOR Daisuke Saito]], [[Nobuaki Minematsu|AUTHOR Nobuaki Minematsu]]
</p><p class="cpabstractcardaffiliationlist">University of Tokyo, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 989–993
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose a novel acoustic-to-articulatory mapping model based on mixture of probabilistic canonical correlation analysis (mPCCA). In PCCA, it is assumed that two different kinds of data are observed as results from different linear transforms of a common latent variable. It is expected that this variable represents a common factor which is inherent in the different domains, such as acoustic and articulatory feature spaces. mPCCA is an expansion of PCCA and it can model a much more complex structure. In mPCCA, covariance matrices of a joint probabilistic distribution of acoustic-articulatory data are structuralized reasonably by using transformation coefficients of the linear transforms. Even if the number of components in mPCCA increases, the structuralized covariance matrices can be expected to avoid over-fitting. Training and mapping processes of the mPCCA-based mapping model are reasonably derived by using the EM algorithm. Experiments using MOCHA-TIMIT show that the proposed mapping method has achieved better mapping performance than the conventional GMM-based mapping.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tanner Sorensen|AUTHOR Tanner Sorensen]]^^1^^, [[Asterios Toutios|AUTHOR Asterios Toutios]]^^1^^, [[Johannes Töger|AUTHOR Johannes Töger]]^^2^^, [[Louis Goldstein|AUTHOR Louis Goldstein]]^^1^^, [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Southern California, USA; ^^2^^Lund University, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 994–998
</span></p></div>
<div class="cpabstractcardabstract"><p>Real-time magnetic resonance imaging (rtMRI) provides information about the dynamic shaping of the vocal tract during speech production. This paper introduces and evaluates a method for quantifying articulatory strategies using rtMRI. The method decomposes the formation and release of a constriction in the vocal tract into the contributions of individual articulators such as the jaw, tongue, lips, and velum. The method uses an anatomically guided factor analysis and dynamical principles from the framework of Task Dynamics. We evaluated the method within a test-retest repeatability framework. We imaged healthy volunteers (n = 8, 4 females, 4 males) in two scans on the same day and quantified inter-study agreement with the intraclass correlation coefficient and mean within-subject standard deviation. The evaluation established a limit on effect size and intra-group differences in articulatory strategy which can be studied using the method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[David Snyder|AUTHOR David Snyder]], [[Daniel Garcia-Romero|AUTHOR Daniel Garcia-Romero]], [[Daniel Povey|AUTHOR Daniel Povey]], [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]]
</p><p class="cpabstractcardaffiliationlist">Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 999–1003
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper investigates replacing i-vectors for text-independent speaker verification with embeddings extracted from a feed-forward deep neural network. Long-term speaker characteristics are captured in the network by a temporal pooling layer that aggregates over the input speech. This enables the network to be trained to discriminate between speakers from variable-length speech segments. After training, utterances are mapped directly to fixed-dimensional speaker embeddings and pairs of embeddings are scored using a PLDA-based backend. We compare performance with a traditional i-vector baseline on NIST SRE 2010 and 2016. We find that the embeddings outperform i-vectors for short speech segments and are competitive on long duration test conditions. Moreover, the two representations are complementary, and their fusion improves on the baseline at all operating points. Similar systems have recently shown promising results when trained on very large proprietary datasets, but to the best of our knowledge, these are the best results reported for speaker-discriminative neural networks when trained and tested on publicly available corpora.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jesús Villalba|AUTHOR Jesús Villalba]]^^1^^, [[Niko Brümmer|AUTHOR Niko Brümmer]]^^2^^, [[Najim Dehak|AUTHOR Najim Dehak]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Johns Hopkins University, USA; ^^2^^Nuance Communications, South Africa</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1004–1008
</span></p></div>
<div class="cpabstractcardabstract"><p>Probabilistic linear discriminant analysis (PLDA) is the de facto standard for backends in i-vector speaker recognition. If we try to extend the PLDA paradigm using non-linear models, e.g., deep neural networks, the posterior distributions of the latent variables and the marginal likelihood become intractable. In this paper, we propose to approach this problem using stochastic gradient variational Bayes. We generalize the PLDA model to let i-vectors depend non-linearly on the latent factors. We approximate the evidence lower bound (ELBO) by Monte Carlo sampling using the reparametrization trick. This enables us to optimize the ELBO using backpropagation to jointly estimate the parameters that define the model and the approximate posteriors of the latent factors. We also present a reformulation of the likelihood ratio, which we call Q-scoring. Q-scoring makes it possible to efficiently score the speaker verification trials for this model. Experimental results on NIST SRE10 suggest that more data might be required to exploit the potential of this method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shivesh Ranjan|AUTHOR Shivesh Ranjan]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]]
</p><p class="cpabstractcardaffiliationlist">University of Texas at Dallas, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1009–1013
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes a novel framework to improve performance of gender independent i-Vector PLDA based speaker recognition using convolutional neural network (CNN). Convolutional layers of a CNN offer robustness to variations in input features including those due to gender. A CNN is trained for ASR with a linear bottleneck layer. Bottleneck features extracted using the CNN are then used to train a gender-independent UBM to obtain frame posteriors for training an i-Vector extractor matrix. To preserve speaker specific information, a hybrid approach to training the i-Vector extractor matrix using MFCC features with corresponding frame posteriors derived from bottleneck features is proposed. On the NIST SRE10 C5 condition pooled trials, our approach reduces the EER and minDCF 2010 by +14.62% and +14.42% respectively compared to a standard MFCC-based gender-independent speaker recognition system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Suwon Shon|AUTHOR Suwon Shon]]^^1^^, [[Seongkyu Mun|AUTHOR Seongkyu Mun]]^^1^^, [[Wooil Kim|AUTHOR Wooil Kim]]^^2^^, [[Hanseok Ko|AUTHOR Hanseok Ko]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Korea University, Korea; ^^2^^Incheon National University, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1014–1018
</span></p></div>
<div class="cpabstractcardabstract"><p>In real-life conditions, mismatch between development and test domain degrades speaker recognition performance. To solve the issue, many researchers explored domain adaptation approaches using matched in-domain dataset. However, adaptation would not be effective if the dataset is insufficient to estimate channel variability of the domain. In this paper, we explore the problem of performance degradation under such a situation of insufficient channel information. In order to exploit limited in-domain dataset effectively, we propose an unsupervised domain adaptation approach using Autoencoder based Domain Adaptation (AEDA). The proposed approach combines an autoencoder with a denoising autoencoder to adapt resource-rich development dataset to test domain. The proposed technique is evaluated on the Domain Adaptation Challenge 13 experimental protocols that are widely used in speaker recognition for domain mismatched condition. The results show significant improvements over baselines and results from other prior studies.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Abbas Khosravani|AUTHOR Abbas Khosravani]], [[Mohammad Mehdi Homayounpour|AUTHOR Mohammad Mehdi Homayounpour]]
</p><p class="cpabstractcardaffiliationlist">Amirkabir University of Technology, Iran</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1019–1023
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper we propose to estimate the parameters of the probabilistic linear discriminant analysis (PLDA) in text-independent i-vector speaker verification framework using a nonparametric form rather than maximum likelihood estimation (MLE) obtained by an EM algorithm. In this approach the between-speaker covariance matrix that represents global information about the speaker variability is replaced with a local estimation computed on a nearest neighbor basis for each target speaker. The nonparametric between- and within-speaker scatter matrices can better exploit the discriminant information in training data and is more adapted to sample distribution especially when it does not satisfy Gaussian assumption as in i-vectors without length-normalization. We evaluated this approach on the recent NIST 2016 speaker recognition evaluation (SRE) as well as NIST 2010 core condition and found significant performance improvement compared with a generatively trained PLDA model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jesús Jorrín|AUTHOR Jesús Jorrín]], [[Paola García|AUTHOR Paola García]], [[Luis Buera|AUTHOR Luis Buera]]
</p><p class="cpabstractcardaffiliationlist">Nuance Communications, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1024–1028
</span></p></div>
<div class="cpabstractcardabstract"><p>In this work, we explore deep neural network bottleneck features (BNF) in the context of speaker clustering. A straightforward manner to deal with speaker clustering is to reuse the bottleneck features extracted from speaker recognition. However, the selection of a bottleneck architecture or nonlinearity impacts the performance of both systems. In this work, we analyze the bottleneck features obtained for speaker recognition and test them in a speaker clustering scenario. We observe that there are deep neural network topologies that work better for both cases, even when their classification criteria (senone classification) is loosely met. We present results that outperform a traditional MFCC system by 21% for speaker recognition and between 20% and 37% in clustering using the same topology.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kätlin Aare|AUTHOR Kätlin Aare]]^^1^^, [[Pärtel Lippus|AUTHOR Pärtel Lippus]]^^2^^, [[Juraj Šimko|AUTHOR Juraj Šimko]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Tartu, Estonia; ^^2^^University of Tartu, Estonia; ^^3^^University of Helsinki, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1029–1033
</span></p></div>
<div class="cpabstractcardabstract"><p>In addition to typological, turn-taking or sociolinguistic factors, presence of creaky voice in spontaneous interaction is also influenced by the syntactic and phonological properties of speech. For example, creaky voice is reportedly more frequent in function words than content words, has been observed to accompany unstressed syllables and ends of phrases, and is associated with relaxation and reduced speech.
In Estonian, creaky voice is frequently used by all speakers. In this paper, we observe the use of creaky voice in spontaneous Estonian in connection to syllabic properties of words, lexical stress, word class, lengthening, and timing in phrases.
The results indicate that creak occurs less in syllables with primary stress than in unstressed syllables. However, syllables with secondary stress are most frequently creaky. In content words, the primary stressed syllables creak less frequently and unstressed syllables more frequently compared to function words. The stress-related pattern is similar in both function and content words, but more contrastive in content words. The probability of creakiness increases considerably with non-final lengthening within words, and for all syllables towards the end of the intonational phrase.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Irena Yanushevskaya|AUTHOR Irena Yanushevskaya]], [[Ailbhe Ní Chasaide|AUTHOR Ailbhe Ní Chasaide]], [[Christer Gobl|AUTHOR Christer Gobl]]
</p><p class="cpabstractcardaffiliationlist">Trinity College Dublin, Ireland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1034–1038
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes cross-speaker variation in the voice source correlates of focal accentuation and deaccentuation. A set of utterances with varied narrow focus placement as well as broad focus and deaccented renditions were produced by six speakers of English. These were manually inverse filtered and parameterized on a pulse-by-pulse basis using the LF source model. Z-normalized F0, EE, OQ and RD parameters (selected through correlation and factor analysis) were used to generate speaker specific baseline voice profiles and to explore cross-speaker variation in focal and non-focal (post- and prefocal) syllables. As expected, source parameter values were found to differ in the focal and postfocal portions of the utterance. For four of the six speakers the measures revealed a trend of tenser phonation on the focal syllable (an increase in EE and F0 and typically, a decrease in OQ and RD) as well as increased laxness in the postfocal part of the utterance. For two of the speakers, however, the measurements showed a different trend. These speakers had very high F0 and often high EE on the focal accent. In these cases, RD and OQ values tended to be raised rather than lowered. The possible reasons for these differences are discussed.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sishir Kalita|AUTHOR Sishir Kalita]], [[Wendy Lalhminghlui|AUTHOR Wendy Lalhminghlui]], [[Luke Horo|AUTHOR Luke Horo]], [[Priyankoo Sarmah|AUTHOR Priyankoo Sarmah]], [[S.R. Mahadeva Prasanna|AUTHOR S.R. Mahadeva Prasanna]], [[Samarendra Dandapat|AUTHOR Samarendra Dandapat]]
</p><p class="cpabstractcardaffiliationlist">IIT Guwahati, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1039–1043
</span></p></div>
<div class="cpabstractcardabstract"><p>The present work proposes an approach to characterize the word-final glottal stops in the Mizo and Assam Sora languages. Generally, glottal stops have stronger glottal and ventricular constriction at the coda position than at the onset. However, the primary source characteristics of glottal stops are irregular glottal cycles, abrupt glottal closing, and reduced open cycle. These changes will not only affect the vocal quality parameters but may also significantly affect the vocal tract characteristics due to changes in the subglottal coupling behavior. This motivates analyzing the dynamic vocal tract characteristics in terms of source behavior, apart from the excitation source features computed from the Linear Prediction (LP) residual, for the acoustic characterization of the word-final glottal stops. The dominant resonance frequency (DRF) of the vocal tract using Hilbert Envelope of Numerator Group Delay (HNGD) is extracted at every sample instant as a cue to study this deviation. A gradual increase in the DRF and a significantly lower duration for which subglottal coupling occurs are observed for the glottal stop region in both languages.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Parham Mokhtari|AUTHOR Parham Mokhtari]], [[Hiroshi Ando|AUTHOR Hiroshi Ando]]
</p><p class="cpabstractcardaffiliationlist">NICT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1044–1048
</span></p></div>
<div class="cpabstractcardabstract"><p>Iterative adaptive inverse filtering (IAIF) [1] remains among the state-of-the-art algorithms for estimating glottal flow from the recorded speech signal. Here, we re-examine IAIF in light of its foundational, classical model of voiced (non-nasalized) speech, wherein the overall spectral tilt is caused only by lip-radiation and glottal effects, while the vocal-tract transfer function contains formant peaks but is otherwise not tilted. In contrast, IAIF initially models and cancels the formants after only a first-order preemphasis of the speech signal, which is generally not enough to completely remove spectral tilt.
Iterative optimal preemphasis (IOP) is therefore proposed to replace IAIF’s initial step. IOP is a rapidly converging algorithm that models a signal (then inverse-filters it) with one real pole (zero) at a time, until spectral tilt is flattened. IOP-IAIF is evaluated on sustained /a/ in a range of voice qualities from weak-breathy to shouted-tense. Compared with standard IAIF, IOP-IAIF yields: (i) an acceptable glottal flow even for a weak breathy voice that the standard algorithm failed to handle; (ii) generally smoother glottal flows that nevertheless retain pulse shape and closed phase; and (iii) enhanced separation of voice qualities in both normalized amplitude quotient (NAQ) and glottal harmonic spectra.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yaniv Sheena|AUTHOR Yaniv Sheena]]^^1^^, [[Míša Hejná|AUTHOR Míša Hejná]]^^2^^, [[Yossi Adi|AUTHOR Yossi Adi]]^^1^^, [[Joseph Keshet|AUTHOR Joseph Keshet]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Bar-Ilan University, Israel; ^^2^^Aarhus University, Denmark</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1049–1053
</span></p></div>
<div class="cpabstractcardabstract"><p>Pre-aspiration is defined as the period of glottal friction occurring in sequences of vocalic/consonantal sonorants and phonetically voiceless obstruents. We propose two machine learning methods for automatic measurement of pre-aspiration duration: a feedforward neural network, which works at the frame level; and a structured prediction model, which relies on manually designed feature functions, and works at the segment level. The input for both algorithms is a speech signal of an arbitrary length containing a single obstruent, and the output is a pair of times which constitutes the pre-aspiration boundaries. We train both models on a set of manually annotated examples. Results suggest that the structured model is superior to the frame-based model as it yields higher accuracy in predicting the boundaries and generalizes to new speakers and new languages. Finally, we demonstrate the applicability of our structured prediction algorithm by replicating linguistic analysis of pre-aspiration in Aberystwyth English with high correlation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kiranpreet Nara|AUTHOR Kiranpreet Nara]]
</p><p class="cpabstractcardaffiliationlist">University of Toronto, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1054–1058
</span></p></div>
<div class="cpabstractcardabstract"><p>While all languages of the world use modal phonation, many also rely on other phonation types such as breathy or creaky voice. For example, Gujarati, an Indo-Aryan language, makes a distinction between breathy and modal phonation among consonants and vowels: /b^^ɦ^^aɾ/ ‘burden’, /baɾ/ ‘twelve’, and /ba̤ɾ/ ‘outside’ [1, 2]. This study, which is a replication and an extension of Khan [3], aims to determine the acoustic and articulatory parameters that distinguish breathy and modal vowels. The participants of this study are heritage and native Gujarati speakers.
The materials consisted of 40 target words with the modal and breathy pairs of the three vowel qualities: /a/ vs /a̤/, /e/ vs /e̤/, and /o/ vs /o̤/. The participants uttered the words in the context of a sentence. Acoustic measurements such as H1-H2, H1-A1, harmonic-to-noise ratio and articulatory measurements such as contact quotient were calculated throughout the vowel duration.
The results of the Smoothing Spline ANOVA analyses indicated that measures such as H1-A1, harmonic to noise ratio, and contact quotient distinguished modal and breathy vowels for native speakers. Heritage speakers also had a contrast between breathy and modal vowels, however the contrast is not as robust as that of native speakers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xin Wang|AUTHOR Xin Wang]], [[Shinji Takaki|AUTHOR Shinji Takaki]], [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]
</p><p class="cpabstractcardaffiliationlist">NII, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1059–1063
</span></p></div>
<div class="cpabstractcardabstract"><p>A recurrent-neural-network-based F0 model for text-to-speech (TTS) synthesis that generates F0 contours given textual features is proposed. In contrast to related F0 models, the proposed one is designed to learn the temporal correlation of F0 contours at multiple levels. The frame-level correlation is covered by feeding back the F0 output of the previous frame as the additional input of the current frame; meanwhile, the correlation over long-time spans is similarly modeled but by using F0 features aggregated over the phoneme and syllable. Another difference is that the output of the proposed model is not the interpolated continuous-valued F0 contour but rather a sequence of discrete symbols, including quantized F0 levels and a symbol for the unvoiced condition. By using the discrete F0 symbols, the proposed model avoids the influence of artificially interpolated F0 curves. Experiments demonstrated that the proposed F0 model, which was trained using a dropout strategy, generated smooth F0 contours with relatively better perceived quality than those from baseline RNN models.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Viacheslav Klimkov|AUTHOR Viacheslav Klimkov]]^^1^^, [[Adam Nadolski|AUTHOR Adam Nadolski]]^^1^^, [[Alexis Moinet|AUTHOR Alexis Moinet]]^^2^^, [[Bartosz Putrycz|AUTHOR Bartosz Putrycz]]^^1^^, [[Roberto Barra-Chicote|AUTHOR Roberto Barra-Chicote]]^^2^^, [[Thomas Merritt|AUTHOR Thomas Merritt]]^^2^^, [[Thomas Drugman|AUTHOR Thomas Drugman]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Amazon.com, Poland; ^^2^^Amazon.com, UK; ^^3^^Amazon.com, Belgium</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1064–1068
</span></p></div>
<div class="cpabstractcardabstract"><p>Phrasing structure is one of the most important factors in increasing the naturalness of text-to-speech (TTS) systems, in particular for long-form reading. Most existing TTS systems are optimized for isolated short sentences, and completely discard the larger context or structure of the text.
This paper presents how we have built phrasing models based on data extracted from audiobooks. We investigate how various types of textual features can improve phrase break prediction: part-of-speech (POS), guess POS (GPOS), dependency tree features and word embeddings. These features are fed into a bidirectional LSTM or a CART baseline. The resulting systems are compared using both objective and subjective evaluations. Using BiLSTM and word embeddings proves to be beneficial.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kou Tanaka|AUTHOR Kou Tanaka]]^^1^^, [[Hirokazu Kameoka|AUTHOR Hirokazu Kameoka]]^^2^^, [[Tomoki Toda|AUTHOR Tomoki Toda]]^^3^^, [[Satoshi Nakamura|AUTHOR Satoshi Nakamura]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NAIST, Japan; ^^2^^NTT, Japan; ^^3^^Nagoya University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1069–1073
</span></p></div>
<div class="cpabstractcardabstract"><p>Electrolaryngeal (EL) speech produced by a laryngectomee using an electrolarynx to mechanically generate artificial excitation sounds severely suffers from unnatural fundamental frequency (F,,0,,) patterns caused by monotonic excitation sounds. To address this issue, we have previously proposed EL speech enhancement systems using statistical F,,0,, pattern prediction methods based on a Gaussian Mixture Model (GMM), making it possible to predict the underlying F,,0,, pattern of EL speech from its spectral feature sequence. Our previous work revealed that the naturalness of the predicted F,,0,, pattern can be improved by incorporating a physically based generative model of F,,0,, patterns into the GMM-based statistical F,,0,, prediction system within a Product-of-Expert framework. However, one drawback of this method is that it requires an iterative procedure to obtain a predicted F,,0,, pattern, making it difficult to realize a real-time system. In this paper, we propose yet another approach to physically based statistical F,,0,, pattern prediction by using a HMM-GMM framework. This approach is noteworthy in that it allows generating an F,,0,, pattern that is both statistically likely and physically natural without iterative procedures. Experimental results demonstrated that the proposed method was capable of generating F,,0,, patterns more similar to those in normal speech than the conventional GMM-based method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nobukatsu Hojo|AUTHOR Nobukatsu Hojo]]^^1^^, [[Yasuhito Ohsugi|AUTHOR Yasuhito Ohsugi]]^^2^^, [[Yusuke Ijima|AUTHOR Yusuke Ijima]]^^1^^, [[Hirokazu Kameoka|AUTHOR Hirokazu Kameoka]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NTT, Japan; ^^2^^University of Tokyo, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1074–1078
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes a method to extract prosodic features from a speech signal by leveraging auxiliary linguistic information. A prosodic feature extractor called the statistical phrase/accent command estimation (SPACE) has recently been proposed. This extractor is based on a statistical model formulated as a stochastic counterpart of the Fujisaki model, a well-founded mathematical model representing the control mechanism of vocal fold vibration. The key idea of this approach is that a phrase/accent command pair sequence is modeled as an output sequence of a path-restricted hidden Markov model (HMM) so that estimating the state transition amounts to estimating the phrase/accent commands. Since the phrase and accent commands are related to linguistic information, we may expect to improve the command estimation accuracy by using them as auxiliary information for the inference. To model the relationship between the phrase/accent commands and linguistic information, we construct a deep neural network (DNN) that maps the linguistic feature vectors to the state posterior probabilities of the HMM. Thus, given a pitch contour and linguistic information, we can estimate phrase/accent commands via state decoding. We call this method “DNN-SPACE.” Experimental results revealed that using linguistic information was effective in improving the command estimation accuracy.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zofia Malisz|AUTHOR Zofia Malisz]]^^1^^, [[Harald Berthelsen|AUTHOR Harald Berthelsen]]^^2^^, [[Jonas Beskow|AUTHOR Jonas Beskow]]^^1^^, [[Joakim Gustafson|AUTHOR Joakim Gustafson]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^KTH, Sweden; ^^2^^STTS, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1079–1083
<a href="./IS2017/MEDIA/1355" class="externallinkbutton" target="_blank">{{$:/causal/Multimedia Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>This work aims to improve text-to-speech synthesis for Wikipedia by advancing and implementing models of prosodic prominence. We propose a new system architecture with explicit prominence modeling and test the first component of the architecture. We automatically extract a phonetic feature related to prominence from the speech signal in the ARCTIC corpus. We then modify the label files and train an experimental TTS system based on the feature using Merlin, a statistical-parametric DNN-based engine. Test sentences with contrastive prominence on the word-level are synthesised and separate listening tests a) evaluating the level of prominence control in generated speech, and b) naturalness, are conducted. Our results show that the prominence feature-enhanced system successfully places prominence on the appropriate words and increases perceived naturalness relative to the baseline.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Simon Betz|AUTHOR Simon Betz]], [[Jana Voße|AUTHOR Jana Voße]], [[Sina Zarrieß|AUTHOR Sina Zarrieß]], [[Petra Wagner|AUTHOR Petra Wagner]]
</p><p class="cpabstractcardaffiliationlist">Universität Bielefeld, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1084–1088
</span></p></div>
<div class="cpabstractcardabstract"><p>Lengthening is the ideal hesitation strategy for synthetic speech and dialogue systems: it is unobtrusive and hard to notice, because it occurs frequently in everyday speech before phrase boundaries, in accentuation, and in hesitation. Despite its elusiveness, it allows valuable extra time for computing or information highlighting in incremental spoken dialogue systems. The elusiveness of the matter, however, poses a challenge for extracting lengthening instances from corpus data: we suspect a recall problem, as human annotators might not be able to consistently label lengthening instances. We address this issue by filtering corpus data for instances of lengthening, using a simple classification method, based on a threshold for normalized phone duration. The output is then manually labeled for disfluency. This is compared to an existing, fully manual disfluency annotation, showing that recall is significantly higher with semi-automatic pre-classification. This shows that it is inevitable to use semi-automatic pre-selection to gather enough candidate data points for manual annotation and subsequent lengthening analyses. Also, it is desirable to further increase the performance of the automatic classification. We evaluate in detail human versus semi-automatic annotation and train another classifier on the resulting dataset to check the integrity of the disfluent – non-disfluent distinction.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Akira Tamamori|AUTHOR Akira Tamamori]], [[Tomoki Hayashi|AUTHOR Tomoki Hayashi]], [[Kazuhiro Kobayashi|AUTHOR Kazuhiro Kobayashi]], [[Kazuya Takeda|AUTHOR Kazuya Takeda]], [[Tomoki Toda|AUTHOR Tomoki Toda]]
</p><p class="cpabstractcardaffiliationlist">Nagoya University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1118–1122
</span></p></div>
<div class="cpabstractcardabstract"><p>In this study, we propose a speaker-dependent WaveNet vocoder, a method of synthesizing speech waveforms with WaveNet, by utilizing acoustic features from existing vocoder as auxiliary features of WaveNet. It is expected that WaveNet can learn a sample-by-sample correspondence between speech waveform and acoustic features. The advantage of the proposed method is that it does not require (1) explicit modeling of excitation signals and (2) various assumptions, which are based on prior knowledge specific to speech. We conducted both subjective and objective evaluation experiments on CMU-ARCTIC database. From the results of the objective evaluation, it was demonstrated that the proposed method could generate high-quality speech with phase information recovered, which was lost by a mel-cepstrum vocoder. From the results of the subjective evaluation, it was demonstrated that the sound quality of the proposed method was significantly improved from mel-cepstrum vocoder, and the proposed method could capture source excitation information more accurately.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yu Gu|AUTHOR Yu Gu]], [[Zhen-Hua Ling|AUTHOR Zhen-Hua Ling]]
</p><p class="cpabstractcardaffiliationlist">USTC, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1123–1127
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a waveform modeling and generation method for speech bandwidth extension (BWE) using stacked dilated convolutional neural networks (CNNs) with causal or non-causal convolutional layers. Such dilated CNNs describe the predictive distribution for each wideband or high-frequency speech sample conditioned on the input narrowband speech samples. Distinguished from conventional frame-based BWE approaches, the proposed methods can model the speech waveforms directly and therefore avert the spectral conversion and phase estimation problems. Experimental results prove that the BWE methods proposed in this paper can achieve better performance than the state-of-the-art frame-based approach utilizing recurrent neural networks (RNNs) incorporating long short-term memory (LSTM) cells in subjective preference tests.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shinji Takaki|AUTHOR Shinji Takaki]]^^1^^, [[Hirokazu Kameoka|AUTHOR Hirokazu Kameoka]]^^2^^, [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NII, Japan; ^^2^^NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1128–1132
</span></p></div>
<div class="cpabstractcardabstract"><p>In statistical parametric speech synthesis (SPSS) systems using the high-quality vocoder, acoustic features such as mel-cepstrum coefficients and F0 are predicted from linguistic features in order to utilize the vocoder to generate speech waveforms. However, the generated speech waveform generally suffers from quality deterioration such as buzziness caused by utilizing the vocoder. Although several attempts such as improving an excitation model have been investigated to alleviate the problem, it is difficult to completely avoid it if the SPSS system is based on the vocoder. To overcome this problem, there have recently been attempts to directly model waveform samples. Superior performance has been demonstrated, but computation time and latency are still issues. With the aim to construct another type of DNN-based speech synthesizer with neither the vocoder nor computational explosion, we investigated direct modeling of frequency spectra and waveform generation based on phase recovery. In this framework, STFT spectral amplitudes that include harmonic information derived from F0 are directly predicted through a DNN-based acoustic model and we use Griffin and Lim’s approach to recover phase and generate waveforms. The experimental results showed that the proposed system synthesized speech without buzziness and outperformed one generated from a conventional system using the vocoder.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Srikanth Ronanki|AUTHOR Srikanth Ronanki]], [[Oliver Watts|AUTHOR Oliver Watts]], [[Simon King|AUTHOR Simon King]]
</p><p class="cpabstractcardaffiliationlist">University of Edinburgh, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1133–1137
</span></p></div>
<div class="cpabstractcardabstract"><p>Current approaches to statistical parametric speech synthesis using Neural Networks generally require input at the same temporal resolution as the output, typically a frame every 5ms, or in some cases at waveform sampling rate. It is therefore necessary to fabricate highly-redundant frame-level (or sample-level) linguistic features at the input. This paper proposes the use of a hierarchical encoder-decoder model to perform the sequence-to-sequence regression in a way that takes the input linguistic features at their original timescales, and preserves the relationships between words, syllables and phones. The proposed model is designed to make more effective use of suprasegmental features than conventional architectures, as well as being computationally efficient. Experiments were conducted on prosodically-varied audiobook material because the use of suprasegmental features is thought to be particularly important in this case. Both objective measures and results from subjective listening tests, which asked listeners to focus on prosody, show that the proposed method performs significantly better than a conventional architecture that requires the linguistic input to be at the acoustic frame rate.
We provide code and a recipe to enable our system to be reproduced using the Merlin toolkit.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kazuhiro Kobayashi|AUTHOR Kazuhiro Kobayashi]], [[Tomoki Hayashi|AUTHOR Tomoki Hayashi]], [[Akira Tamamori|AUTHOR Akira Tamamori]], [[Tomoki Toda|AUTHOR Tomoki Toda]]
</p><p class="cpabstractcardaffiliationlist">Nagoya University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1138–1142
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a statistical voice conversion (VC) technique with the WaveNet-based waveform generation. VC based on a Gaussian mixture model (GMM) makes it possible to convert the speaker identity of a source speaker into that of a target speaker. However, in the conventional vocoding process, various factors such as F₀ extraction errors, parameterization errors and over-smoothing effects of converted feature trajectory cause the modeling errors of the speech waveform, which usually bring about sound quality degradation of the converted voice. To address this issue, we apply a direct waveform generation technique based on a WaveNet vocoder to VC. In the proposed method, first, the acoustic features of the source speaker are converted into those of the target speaker based on the GMM. Then, the waveform samples of the converted voice are generated based on the WaveNet vocoder conditioned on the converted acoustic features. In this paper, to investigate the modeling accuracies of the converted speech waveform, we compare several types of the acoustic features for training and synthesizing based on the WaveNet vocoder. The experimental results confirmed that the proposed VC technique achieves higher conversion accuracy on speaker individuality with comparable sound quality compared to the conventional VC technique.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Vincent Wan|AUTHOR Vincent Wan]]^^1^^, [[Yannis Agiomyrgiannakis|AUTHOR Yannis Agiomyrgiannakis]]^^1^^, [[Hanna Silen|AUTHOR Hanna Silen]]^^1^^, [[Jakub Vít|AUTHOR Jakub Vít]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Google, UK; ^^2^^University of West Bohemia, Czech Republic</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1143–1147
</span></p></div>
<div class="cpabstractcardabstract"><p>A neural network model that significantly improves unit-selection-based Text-To-Speech synthesis is presented. The model employs a sequence-to-sequence LSTM-based autoencoder that compresses the acoustic and linguistic features of each unit to a fixed-size vector referred to as an embedding. Unit-selection is facilitated by formulating the target cost as an L₂ distance in the embedding space. In open-domain speech synthesis the method achieves a 0.2 improvement in the MOS, while for limited-domain it reaches the cap of 4.5 MOS. Furthermore, the new TTS system halves the gap between the previous unit-selection system and WaveNet in terms of quality while retaining low computational cost and latency.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hiroyuki Miyoshi|AUTHOR Hiroyuki Miyoshi]], [[Yuki Saito|AUTHOR Yuki Saito]], [[Shinnosuke Takamichi|AUTHOR Shinnosuke Takamichi]], [[Hiroshi Saruwatari|AUTHOR Hiroshi Saruwatari]]
</p><p class="cpabstractcardaffiliationlist">University of Tokyo, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1268–1272
</span></p></div>
<div class="cpabstractcardabstract"><p>Voice conversion (VC) using sequence-to-sequence learning of context posterior probabilities is proposed. Conventional VC using shared context posterior probabilities predicts target speech parameters from the context posterior probabilities estimated from the source speech parameters. Although conventional VC can be built from non-parallel data, it is difficult to convert speaker individuality such as phonetic property and speaking rate contained in the posterior probabilities because the source posterior probabilities are directly used for predicting target speech parameters. In this work, we assume that the training data partly include parallel speech data and propose sequence-to-sequence learning between the source and target posterior probabilities. The conversion models perform non-linear and variable-length transformation from the source probability sequence to the target one. Further, we propose a joint training algorithm for the modules. In contrast to conventional VC, which separately trains the speech recognition that estimates posterior probabilities and the speech synthesis that predicts target speech parameters, our proposed method jointly trains these modules along with the proposed probability conversion modules. Experimental results demonstrate that our approach outperforms the conventional VC.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wei-Ning Hsu|AUTHOR Wei-Ning Hsu]], [[Yu Zhang|AUTHOR Yu Zhang]], [[James Glass|AUTHOR James Glass]]
</p><p class="cpabstractcardaffiliationlist">MIT, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1273–1277
</span></p></div>
<div class="cpabstractcardabstract"><p>An ability to model a generative process and learn a latent representation for speech in an unsupervised fashion will be crucial to process vast quantities of unlabelled speech data. Recently, deep probabilistic generative models such as Variational Autoencoders (VAEs) have achieved tremendous success in modeling natural images. In this paper, we apply a convolutional VAE to model the generative process of natural speech. We derive latent space arithmetic operations to disentangle learned latent representations. We demonstrate the capability of our model to modify the phonetic content or the speaker identity for speech segments using the derived operations, without the need for parallel supervisory data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tetsuya Hashimoto|AUTHOR Tetsuya Hashimoto]], [[Hidetsugu Uchida|AUTHOR Hidetsugu Uchida]], [[Daisuke Saito|AUTHOR Daisuke Saito]], [[Nobuaki Minematsu|AUTHOR Nobuaki Minematsu]]
</p><p class="cpabstractcardaffiliationlist">University of Tokyo, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1278–1282
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes a novel approach to parallel-data-free and many-to-many voice conversion (VC). As 1-to-1 conversion has less flexibility, researchers focus on many-to-many conversion, where speaker identity is often represented using speaker space bases. In this case, utterances of the same sentences have to be collected from many speakers. This study aims at overcoming this constraint to realize a parallel-data-free and many-to-many conversion. This is made possible by integrating deep neural networks (DNNs) with eigenspace using a non-parallel speech corpus. In our previous study, many-to-many conversion was implemented using DNN, whose training was assisted by EVGMM conversion. By realizing the function of EVGMM equivalently by constructing eigenspace with a non-parallel speech corpus, the desired conversion is made possible. A key technique here is to estimate covariance terms without given parallel data between source and target speakers. Experiments show that objective assessment scores are comparable to those of the baseline system trained with parallel data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Takuhiro Kaneko|AUTHOR Takuhiro Kaneko]], [[Hirokazu Kameoka|AUTHOR Hirokazu Kameoka]], [[Kaoru Hiramatsu|AUTHOR Kaoru Hiramatsu]], [[Kunio Kashino|AUTHOR Kunio Kashino]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1283–1287
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a training framework for sequence-to-sequence voice conversion (SVC). A well-known problem regarding a conventional VC framework is that acoustic-feature sequences generated from a converter tend to be over-smoothed, resulting in buzzy-sounding speech. This is because a particular form of similarity metric or distribution for parameter training of the acoustic model is assumed so that the generated feature sequence that averagely fits the training target example is considered optimal. This over-smoothing occurs as long as a manually constructed similarity metric is used. To overcome this limitation, our proposed SVC framework uses a similarity metric implicitly derived from a generative adversarial network, enabling the measurement of the distance in the high-level abstract space. This would enable the model to mitigate the over-smoothing problem caused in the low-level data space. Furthermore, we use convolutional neural networks to model the long-range context-dependencies. This also enables the similarity metric to have a shift-invariant property; thus, making the model robust against misalignment errors involved in the parallel data. We tested our framework on a non-native-to-native VC task. The experimental results revealed that the use of the proposed framework had a certain effect in improving naturalness, clarity, and speaker individuality.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Luc Ardaillon|AUTHOR Luc Ardaillon]], [[Axel Roebel|AUTHOR Axel Roebel]]
</p><p class="cpabstractcardaffiliationlist">STMS (UMR 9912), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1288–1292
</span></p></div>
<div class="cpabstractcardabstract"><p>Improving expressiveness in singing voice synthesis systems requires to perform realistic timbre transformations, e.g. for varying voice intensity. In order to sing louder, singers tend to open their mouth more widely, which changes the vocal tract’s shape and resonances. This study shows, by means of signal analysis and simulations, that the main effect of mouth opening is an increase of the 1^^st^^ formant’s frequency (F,,1,,) and a decrease of its bandwidth (BW,,1,,). From these observations, we then propose a rule for producing a mouth opening effect, by modifying F,,1,, and BW,,1,,, and an approach to apply this effect on real voice sounds. This approach is based on pole modification, by changing the AR coefficients of an estimated all-pole model of the spectral envelope. Finally, listening tests have been conducted to evaluate the effectiveness of the proposed effect.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Seyed Hamidreza Mohammadi|AUTHOR Seyed Hamidreza Mohammadi]], [[Alexander Kain|AUTHOR Alexander Kain]]
</p><p class="cpabstractcardaffiliationlist">Oregon Health & Science University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1293–1297
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose an architecture called siamese autoencoders for extracting and switching pre-determined styles of speech signals while retaining the content. We apply this architecture to a voice conversion task in which we define the content to be the linguistic message and the style to be the speaker’s voice. We assume two or more data streams with the same content but unique styles. The architecture is composed of two or more separate but shared-weight autoencoders that are joined by loss functions at the hidden layers. A hidden vector is composed of style and content sub-vectors and the loss functions constrain the encodings to decompose style and content. We can select an intended target speaker either by supplying the associated style vector, or by extracting a new style vector from a new utterance, using a proposed style extraction algorithm. We focus on in-training speakers but perform some initial experiments for out-of-training speakers as well. We propose and study several types of loss functions. The experiment results show that the proposed many-to-many model is able to convert voices successfully; however, its performance does not surpass that of the state-of-the-art one-to-one model’s.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alexander Kain|AUTHOR Alexander Kain]]^^1^^, [[Max Del Giudice|AUTHOR Max Del Giudice]]^^2^^, [[Kris Tjaden|AUTHOR Kris Tjaden]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Oregon Health & Science University, USA; ^^2^^Independent Researcher, USA; ^^3^^University at Buffalo, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1148–1152
</span></p></div>
<div class="cpabstractcardabstract"><p>We examine existing and novel automatically-derived acoustic metrics that are predictive of speech intelligibility. We hypothesize that the degree of variability in feature space is correlated with the extent of a speaker’s phonemic inventory, their degree of articulatory displacements, and thus with their degree of perceived speech intelligibility. We begin by using fully-automatic F1/F2 formant frequency trajectories for both vowel space area calculation and as input to a proposed class-separability metric. We then switch to representing vowels by means of short-term spectral features, and measure vowel separability in that space. Finally, we consider the case where phonetic labeling is unavailable; here we calculate short-term spectral features for the entire speech utterance and then estimate their entropy based on the length of a minimum spanning tree. In an alternative approach, we propose to first segment the speech signal using a hidden Markov model, and then calculate spectral feature separability based on the automatically-derived classes. We apply all approaches to a database with healthy controls as well as speakers with mild dysarthria, and report the resulting coefficients of determination.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Toshio Irino|AUTHOR Toshio Irino]]^^1^^, [[Eri Takimoto|AUTHOR Eri Takimoto]]^^1^^, [[Toshie Matsui|AUTHOR Toshie Matsui]]^^1^^, [[Roy D. Patterson|AUTHOR Roy D. Patterson]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Wakayama University, Japan; ^^2^^University of Cambridge, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1153–1157
</span></p></div>
<div class="cpabstractcardabstract"><p>An auditory model was developed to explain the results of behavioral experiments on perception of speaker size with voiced speech sounds. It is based on the dynamic, compressive gammachirp (dcGC) filterbank and a weighting function (SSI weight) derived from a theory of size-shape segregation in the auditory system. Voiced words with and without high-frequency emphasis (+6 dB/octave) were produced using a speech vocoder (STRAIGHT). The SSI weighting function reduces the effect of glottal pulse excitation in voiced speech, which, in turn, makes it possible for the model to explain the individual subject variability in the data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[L. ten Bosch|AUTHOR L. ten Bosch]], [[L. Boves|AUTHOR L. Boves]], [[M. Ernestus|AUTHOR M. Ernestus]]
</p><p class="cpabstractcardaffiliationlist">Radboud Universiteit Nijmegen, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1158–1162
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper investigates the processes in comprehending spoken noun-noun compounds, using data from the BALDEY database. BALDEY contains lexicality judgments and reaction times (RTs) for Dutch stimuli for which also linguistic information is included. Two different approaches are combined. The first is based on regression by Dynamic Survival Analysis, which models decisions and RTs as a consequence of the fact that a cumulative density function exceeds some threshold. The parameters of that function are estimated from the observed RT data. The second approach is based on DIANA, a process-oriented computational model of human word comprehension, which simulates the comprehension process with the acoustic stimulus as input. DIANA gives the identity and the number of the word candidates that are activated at each 10 ms time step.
Both approaches show how the processes involved in comprehending compounds change during a stimulus. Survival Analysis shows that the impact of word duration varies during the course of a stimulus. The density of word and non-word hypotheses in DIANA shows a corresponding pattern with different regimes. We show how the approaches complement each other, and discuss additional ways in which data and process models can be combined.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mohsen Zareian Jahromi|AUTHOR Mohsen Zareian Jahromi]], [[Jan Østergaard|AUTHOR Jan Østergaard]], [[Jesper Jensen|AUTHOR Jesper Jensen]]
</p><p class="cpabstractcardaffiliationlist">Aalborg University, Denmark</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1163–1167
</span></p></div>
<div class="cpabstractcardabstract"><p>Inspired by the DANTALE II listening test paradigm, which is used for determining the intelligibility of noisy speech, we assess the hypothesis that humans maximize the probability of correct decision when recognizing words contaminated by additive Gaussian, speech-shaped noise. We first propose a statistical Gaussian communication and classification scenario, where word models are built from short term spectra of human speech, and optimal classifiers in the sense of maximum a posteriori estimation are derived. Then, we perform a listening test, where the participants are instructed to make their best guess of words contaminated with speech-shaped Gaussian noise. Comparing the human’s performance to that of the optimal classifier reveals that at high SNR, humans perform comparable to the optimal classifier. However, at low SNR, the human performance is inferior to that of the optimal classifier. This shows that, at least in this specialized task, humans are generally not able to maximize the probability of correct decision, when recognizing words.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rainer Huber|AUTHOR Rainer Huber]], [[Constantin Spille|AUTHOR Constantin Spille]], [[Bernd T. Meyer|AUTHOR Bernd T. Meyer]]
</p><p class="cpabstractcardaffiliationlist">Carl von Ossietzky Universität Oldenburg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1168–1172
</span></p></div>
<div class="cpabstractcardabstract"><p>A new, single-ended, i.e. reference-free measure for the prediction of perceived listening effort of noisy speech is presented. It is based on phoneme posterior probabilities (or posteriorgrams) obtained from a deep neural network of an automatic speech recognition system. Additive noisy or other distortions of speech tend to smear the posteriorgrams. The smearing is quantified by a performance measure, which is used as a predictor for the perceived listening effort required to understand the noisy speech. The proposed measure was evaluated using a database obtained from the subjective evaluation of noise reduction algorithms of commercial hearing aids. Listening effort ratings of processed noisy speech samples were gathered from 20 hearing-impaired subjects. Averaged subjective ratings were compared with corresponding predictions computed by the proposed new method, the ITU-T standard P.563 for single-ended speech quality assessment, the American National Standard ANIQUE+ for single-ended speech quality assessment, and a single-ended SNR estimator. The proposed method achieved a good correlation with mean subjective ratings and clearly outperformed the standard speech quality measures and the SNR estimator.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chris Neufeld|AUTHOR Chris Neufeld]]
</p><p class="cpabstractcardaffiliationlist">University of Maryland, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1173–1177
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper demonstrates that a low-level, linear description of the response properties of auditory neurons can exhibit some of the high-level properties of the categorical perception of human speech. In particular, it is shown that the non-linearities observed in the human perception of speech sounds which span a categorical boundary can be understood as arising rather naturally from a low-level statistical description of phonemic contrasts in the time-frequency plane, understood here as the receptive field of auditory neurons. The TIMIT database was used to train a model auditory neuron which discriminates between /s/ and /sh/, and a computer simulation was conducted which demonstrates that the neuron responds categorically to a linear continuum of synthetic fricative sounds which span the /s/-/sh/ boundary. The response of the model provides a good fit to human labeling behavior, and in addition, is able to account for asymmetries in reaction time across the two categories.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yannan Wang|AUTHOR Yannan Wang]]^^1^^, [[Jun Du|AUTHOR Jun Du]]^^1^^, [[Li-Rong Dai|AUTHOR Li-Rong Dai]]^^1^^, [[Chin-Hui Lee|AUTHOR Chin-Hui Lee]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^USTC, China; ^^2^^Georgia Institute of Technology, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1178–1182
</span></p></div>
<div class="cpabstractcardabstract"><p>In contrast to the conventional minimum mean squared error (MMSE) training criterion for nonlinear spectral mapping based on deep neural networks (DNNs), we propose a probabilistic learning framework to estimate the DNN parameters for single-channel speech separation. A statistical analysis of the prediction error vector at the DNN output reveals that it follows a unimodal density for each log power spectral component. By characterizing the prediction error vector as a multivariate Gaussian density with zero mean vector and an unknown covariance matrix, we present a maximum likelihood (ML) approach to DNN parameter learning. Our experiments on the Speech Separation Challenge (SSC) corpus show that the proposed learning approach can achieve a better generalization capability and a faster convergence than MMSE-based DNN learning. Furthermore, we demonstrate that the ML-trained DNN consistently outperforms MMSE-trained DNN in all the objective measures of speech quality and intelligibility in single-channel speech separation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Takuya Higuchi|AUTHOR Takuya Higuchi]], [[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Kateřina Žmolíková|AUTHOR Kateřina Žmolíková]], [[Tomohiro Nakatani|AUTHOR Tomohiro Nakatani]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1183–1187
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper extends a deep clustering algorithm for use with time-frequency masking-based beamforming and performs separation with an unknown number of sources. Deep clustering is a recently proposed single-channel source separation algorithm, which projects inputs into the embedding space and performs clustering in the embedding domain. In deep clustering, bi-directional long short-term memory (BLSTM) recurrent neural networks are trained to make embedding vectors orthogonal for different speakers and concurrent for the same speaker. Then, by clustering the embedding vectors at test time, we can estimate time-frequency masks for separation. In this paper, we extend the deep clustering algorithm to a multiple microphone setup and incorporate deep clustering-based time-frequency mask estimation into masking-based beamforming, which has been shown to be more effective than masking for automatic speech recognition. Moreover, we perform source counting by computing the rank of the covariance matrix of the embedding vectors. With our proposed approach, we can perform masking-based beamforming in a multiple-speaker case without knowing the number of speakers. Experimental results show that our proposed deep clustering-based beamformer achieves comparable source separation performance to that obtained with a complex Gaussian mixture model-based beamformer, which requires the number of sources in advance for mask estimation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shadi Pirhosseinloo|AUTHOR Shadi Pirhosseinloo]], [[Kostas Kokkinakis|AUTHOR Kostas Kokkinakis]]
</p><p class="cpabstractcardaffiliationlist">University of Kansas, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1188–1192
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we address the problem of speech source separation by relying on time-frequency binary masks to segregate binaural mixtures. We describe an algorithm which can tackle reverberant mixtures and can extract the original sources while preserving their original spatial locations. The performance of the proposed algorithm is evaluated objectively and subjectively, by assessing the estimated interaural time differences versus their theoretical values and by testing for localization acuity in normal-hearing listeners for different spatial locations in a reverberant room. Experimental results indicate that the proposed algorithm is capable of preserving the spatial information of the recovered source signals while keeping the signal-to-distortion and signal-to-interference ratios high.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jen-Tzung Chien|AUTHOR Jen-Tzung Chien]], [[Kuan-Ting Kuo|AUTHOR Kuan-Ting Kuo]]
</p><p class="cpabstractcardaffiliationlist">National Chiao Tung University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1193–1197
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a new stochastic learning machine for speech separation based on the variational recurrent neural network (VRNN). This VRNN is constructed from the perspectives of generative stochastic network and variational auto-encoder. The idea is to faithfully characterize the randomness of hidden state of a recurrent neural network through variational learning. The neural parameters under this latent variable model are estimated by maximizing the variational lower bound of log marginal likelihood. An inference network driven by the variational distribution is trained from a set of mixed signals and the associated source targets. A novel supervised VRNN is developed for speech separation. The proposed VRNN provides a stochastic point of view which accommodates the uncertainty in hidden states and facilitates the analysis of model construction. The masking function is further employed in network outputs for speech separation. The benefit of using VRNN is demonstrated by the experiments on monaural speech separation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Valentin Andrei|AUTHOR Valentin Andrei]], [[Horia Cucu|AUTHOR Horia Cucu]], [[Corneliu Burileanu|AUTHOR Corneliu Burileanu]]
</p><p class="cpabstractcardaffiliationlist">UPB, Romania</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1198–1202
</span></p></div>
<div class="cpabstractcardabstract"><p>The intent of this work is to demonstrate how deep learning techniques can be successfully used to detect overlapped speech on independent short timeframes. A secondary objective is to provide an understanding on how the duration of the signal frame influences the accuracy of the method. We trained a deep neural network with heterogeneous layers and obtained close to 80% inference accuracy on frames going as low as 25 milliseconds. The proposed system provides higher detection quality than existing work and can predict overlapped speech with up to 3 simultaneous speakers. The method exposes low response latency and does not require a high amount of computing power.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xu Li|AUTHOR Xu Li]], [[Junfeng Li|AUTHOR Junfeng Li]], [[Yonghong Yan|AUTHOR Yonghong Yan]]
</p><p class="cpabstractcardaffiliationlist">Chinese Academy of Sciences, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1203–1207
</span></p></div>
<div class="cpabstractcardabstract"><p>Monaural speech segregation is an important problem in robust speech processing and has been formulated as a supervised learning problem. In supervised learning methods, the ideal binary mask (IBM) is usually used as the target because of its simplicity and large speech intelligibility gains. Recently, the ideal ratio mask (IRM) has been found to improve the speech quality over the IBM. However, the IRM was originally defined in anechoic conditions and did not consider the effect of reverberation. In this paper, the IRM is extended to reverberant conditions where the direct sound and early reflections of target speech are regarded as the desired signal. Deep neural networks (DNNs) are employed to estimate the extended IRM in the noisy reverberant conditions. The estimated IRM is then applied to the noisy reverberant mixture for speech segregation. Experimental results show that the estimated IRM provides substantial improvements in speech intelligibility and speech quality over the unprocessed mixture signals under various noisy and reverberant conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sergio I. Quiroz|AUTHOR Sergio I. Quiroz]], [[Marzena Żygis|AUTHOR Marzena Żygis]]
</p><p class="cpabstractcardaffiliationlist">Leibniz-ZAS, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1208–1212
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper investigates calling melodies produced by 21 Standard German native speakers on a discourse completion task across two contexts: (i) routine context — calling a child from afar to come in for dinner; (ii) urgent context — calling a child from afar for a chastising. The intent of this investigation is to bring attention to other calling melodies found in German beside the vocative chant and to give an insight into their acoustic profile.
Three major melodies were identified in the two contexts: vocative chant (100% of routine context productions), urgent call (100% of male urgent context productions, 52.2% female productions), and stern call (47.8% female urgent context productions). A subsequent quantitative analysis was carried out on these calls across these parameters: (i) tonal scaling at tonal landmarks; (ii) proportional alignment of selected tonal landmarks with respect to the stressed or last vowel; and (iii) amplitude (integral and RMS) and (iv) duration of the stressed vowel, stressed syllable, and word. The resulting data were analyzed using a linear mixed model approach.
The results point to significant differences in the contours produced in the aforementioned parameters. We also proposed a phonological description of the contours in the framework of Autosegmental-Metrical Phonology.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Juraj Šimko|AUTHOR Juraj Šimko]], [[Antti Suni|AUTHOR Antti Suni]], [[Katri Hiovain|AUTHOR Katri Hiovain]], [[Martti Vainio|AUTHOR Martti Vainio]]
</p><p class="cpabstractcardaffiliationlist">University of Helsinki, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1213–1217
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a novel, data-driven approach to assessing mutual similarities and differences among a group of languages, based on purely prosodic characteristics, namely f,,0,, and energy envelope signals. These signals are decomposed using continuous wavelet transform; the components represent f,,0,, and energy patterns on three levels of prosodic hierarchy roughly corresponding to syllables, words and phrases. Unigram language models with states derived from a combination of Δ-features obtained from these components are trained and compared using a mutual perplexity measure. In this pilot study we apply this approach to a small corpus of spoken material from seven languages (Estonian, Finnish, Hungarian, German, Swedish, Russian and Slovak) with a rich history of mutual language contacts. We present similarity trees (dendrograms) derived from the models using the hierarchically decomposed prosodic signals separately as well as combined, and compare them with patterns obtained from non-decomposed signals. We show that (1) plausible similarity patterns, reflecting language family relationships and the known contact history can be obtained even from a relatively small data set, and (2) the hierarchical decomposition approach using both f,,0,, and energy provides the most comprehensive results.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Martin Ho Kwan Ip|AUTHOR Martin Ho Kwan Ip]], [[Anne Cutler|AUTHOR Anne Cutler]]
</p><p class="cpabstractcardaffiliationlist">Western Sydney University, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1218–1222
</span></p></div>
<div class="cpabstractcardabstract"><p>In English and Dutch, listeners entrain to prosodic contours to predict where focus will fall in an utterance. However, is this strategy universally available, even in languages with different phonological systems? In a phoneme detection experiment, we examined whether prosodic entrainment is also found in Mandarin Chinese, a tone language, where in principle the use of pitch for lexical identity may take precedence over the use of pitch cues to salience. Consistent with the results from Germanic languages, response times were facilitated when preceding intonation predicted accent on the target-bearing word. Acoustic analyses revealed greater F0 range in the preceding intonation of the predicted-accent sentences. These findings have implications for how universal and language-specific mechanisms interact in the processing of salience.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Katharina Zahner|AUTHOR Katharina Zahner]]^^1^^, [[Heather Kember|AUTHOR Heather Kember]]^^2^^, [[Bettina Braun|AUTHOR Bettina Braun]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität Konstanz, Germany; ^^2^^Western Sydney University, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1223–1227
</span></p></div>
<div class="cpabstractcardabstract"><p>Intonation languages signal pragmatic functions (e.g. information structure) by means of different pitch accent types. Acoustically, pitch accent types differ in the alignment of pitch peaks (and valleys) in regard to stressed syllables, which makes the position of pitch peaks an unreliable cue to lexical stress (even though pitch peaks and lexical stress often coincide in intonation languages). We here investigate the effect of pitch accent type on lexical activation in English. Results of a visual-world eye-tracking study show that Australian English listeners temporarily activate SWW-words (musical) if presented with WSW-words (museum) with early-peak accents (H+!H*), compared to medial-peak accents (L+H*). Thus, in addition to signalling pragmatic functions, the alignment of tonal targets immediately affects lexical activation in English.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Luca Rognoni|AUTHOR Luca Rognoni]], [[Judith Bishop|AUTHOR Judith Bishop]], [[Miriam Corris|AUTHOR Miriam Corris]]
</p><p class="cpabstractcardaffiliationlist">Appen, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1228–1232
</span></p></div>
<div class="cpabstractcardabstract"><p>A hand-labelled Pashto speech data set containing spontaneous conversations is analysed in order to propose an intonational inventory of Pashto. Basic intonation patterns observed in the language are summarised. The relationship between pitch accent and part of speech (PoS), which was also annotated for each word in the data set, is briefly addressed.
The results are compared with the intonational literature on Persian, a better-described and closely-related language. The results show that Pashto intonation patterns are similar to Persian, as well as reflecting common intonation patterns such as falling tone for statements and WH-questions, and yes/no questions ending in a rising tone. The data also show that the most frequently used intonation pattern in Pashto is the so-called hat pattern. The distribution of pitch accent is quite free both in Persian and Pashto, but there is a stronger association of pitch accent with content than with function words, as is typical of stress-accent languages.
The phonetic realisation of focus appears to be conveyed with the same acoustic cues as in Persian, with a higher pitch excursion and longer duration of the stressed syllable of the word in focus. The data also suggest that post-focus compression (PFC) is present in Pashto.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kikuo Maekawa|AUTHOR Kikuo Maekawa]]
</p><p class="cpabstractcardaffiliationlist">NINJAL, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1233–1237
</span></p></div>
<div class="cpabstractcardabstract"><p>F0 downtrend observed in spontaneous monologues in the Corpus of Spontaneous Japanese was analyzed with special attention to the modeling of final lowering. In addition to the previous finding that the domain of final lowering covers all tones in the final accentual phrase, it turned out that the last L tone in the penultimate accentual phrase played an important role in the control of final lowering. It is this tone that first reached the bottom of the speaker’s pitch range in the time course of utterance; it also turned out that the phonetic realization of this tone is the most stable of all tones in terms of the F0 variability. A regression model of F0 downtrends is generated by generalized linear mixed-effect modeling and evaluated by cross-validation. The mean prediction error of z-normalized F0 values in the best model was 0.25 standard deviation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xi Ma|AUTHOR Xi Ma]], [[Zhiyong Wu|AUTHOR Zhiyong Wu]], [[Jia Jia|AUTHOR Jia Jia]], [[Mingxing Xu|AUTHOR Mingxing Xu]], [[Helen Meng|AUTHOR Helen Meng]], [[Lianhong Cai|AUTHOR Lianhong Cai]]
</p><p class="cpabstractcardaffiliationlist">Tsinghua University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1238–1242
</span></p></div>
<div class="cpabstractcardabstract"><p>In this work, an emotion-pair based framework is proposed for speech emotion recognition, which constructs more discriminative feature subspaces for every two different emotions (emotion-pair) to generate more precise emotion bi-classification results. Furthermore, it is found that in the dimensional emotion space, the distances between some of the archetypal emotions are closer than the others. Motivated by this, a Naive Bayes classifier based decision fusion strategy is proposed, which aims at capturing such useful emotion distribution information in deciding the final emotion category for emotion recognition. We evaluated the classification framework on the USC IEMOCAP database. Experimental results demonstrate that the proposed method outperforms the hierarchical binary decision tree approach on both weighted accuracy (WA) and unweighted accuracy (UA). Moreover, our framework possesses the advantages that it can be fully automatically generated without empirical guidance and is easier to parallelize.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Saurabh Sahu|AUTHOR Saurabh Sahu]]^^1^^, [[Rahul Gupta|AUTHOR Rahul Gupta]]^^2^^, [[Ganesh Sivaraman|AUTHOR Ganesh Sivaraman]]^^1^^, [[Wael AbdAlmageed|AUTHOR Wael AbdAlmageed]]^^3^^, [[Carol Espy-Wilson|AUTHOR Carol Espy-Wilson]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Maryland, USA; ^^2^^Amazon.com, USA; ^^3^^University of Southern California, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1243–1247
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, generative adversarial networks and adversarial auto-encoders have gained a lot of attention in machine learning community due to their exceptional performance in tasks such as digit classification and face recognition. They map the auto-encoder’s bottleneck layer output (termed as code vectors) to different noise Probability Distribution Functions (PDFs), that can be further regularized to cluster based on class information. In addition, they also allow a generation of synthetic samples by sampling the code vectors from the mapped PDFs. Inspired by these properties, we investigate the application of adversarial auto-encoders to the domain of emotion recognition. Specifically, we conduct experiments on the following two aspects: (i) their ability to encode high dimensional feature vector representations for emotional utterances into a compressed space (with a minimal loss of emotion class discriminability in the compressed space), and (ii) their ability to regenerate synthetic samples in the original feature space, to be later used for purposes such as training emotion recognition classifiers. We demonstrate promise of adversarial auto-encoders with regards to these aspects on the Interactive Emotional Dyadic Motion Capture (IEMOCAP) corpus and present our analysis.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ting Dang|AUTHOR Ting Dang]], [[Vidhyasaharan Sethu|AUTHOR Vidhyasaharan Sethu]], [[Julien Epps|AUTHOR Julien Epps]], [[Eliathamby Ambikairajah|AUTHOR Eliathamby Ambikairajah]]
</p><p class="cpabstractcardaffiliationlist">University of New South Wales, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1248–1252
</span></p></div>
<div class="cpabstractcardabstract"><p>Existing continuous emotion prediction systems implicitly assume that prediction certainty does not vary with time. However, perception differences among raters and other possible sources of variability suggest that prediction certainty varies with time, which warrants deeper consideration. In this paper, the correlation between the inter-rater variability and the uncertainty of predicted emotion is firstly studied. A new paradigm that estimates the uncertainty in prediction is proposed based on the strong correlation uncovered in the RECOLA database. This is implemented by including the inter-rater variability as a representation of the uncertainty information in a probabilistic Gaussian Mixture Regression (GMR) model. In addition, we investigate the correlation between the uncertainty and the performance of a typical emotion prediction system utilizing average rating as the ground truth, by comparing the prediction performance in the lower and higher uncertainty regions. As expected, it is observed that the performance in lower uncertainty regions is better than that in higher uncertainty regions, providing a path for improving emotion prediction systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Soheil Khorram|AUTHOR Soheil Khorram]]^^1^^, [[Zakaria Aldeneh|AUTHOR Zakaria Aldeneh]]^^1^^, [[Dimitrios Dimitriadis|AUTHOR Dimitrios Dimitriadis]]^^2^^, [[Melvin McInnis|AUTHOR Melvin McInnis]]^^1^^, [[Emily Mower Provost|AUTHOR Emily Mower Provost]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Michigan, USA; ^^2^^IBM, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1253–1257
</span></p></div>
<div class="cpabstractcardabstract"><p>The goal of continuous emotion recognition is to assign an emotion value to every frame in a sequence of acoustic features. We show that incorporating long-term temporal dependencies is critical for continuous emotion recognition tasks. To this end, we first investigate architectures that use dilated convolutions. We show that even though such architectures outperform previously reported systems, the output signals produced from such architectures undergo erratic changes between consecutive time steps. This is inconsistent with the slow moving ground-truth emotion labels that are obtained from human annotators. To deal with this problem, we model a downsampled version of the input signal and then generate the output signal through upsampling. Not only does the resulting downsampling/upsampling network achieve good performance, it also generates smooth output trajectories. Our method yields the best known audio-only performance on the RECOLA dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ailbhe Ní Chasaide|AUTHOR Ailbhe Ní Chasaide]], [[Irena Yanushevskaya|AUTHOR Irena Yanushevskaya]], [[Christer Gobl|AUTHOR Christer Gobl]]
</p><p class="cpabstractcardaffiliationlist">Trinity College Dublin, Ireland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1258–1262
</span></p></div>
<div class="cpabstractcardabstract"><p>Modulations of the voice convey affect, and the precise mapping of voice-to-affect may vary for different languages. However, affect-related modulations occur relative to the baseline affect-neutral voice, which tends to differ from language to language. Little is known about the characteristic long-term voice settings for different languages, and how they influence the use of voice quality to signal affect. In this paper, data from a voice-to-affect perception test involving Russian, English, Spanish and Japanese subjects is re-examined to glean insights concerning likely baseline settings in these languages. The test used synthetic stimuli with different voice qualities (modelled on a male voice), with or without extreme f,,0,, contours as might be associated with affect. Cross-language differences in affect ratings for modal and tense voice suggest that the baseline in Spanish and Japanese is inherently tenser than in Russian and English, and that as a corollary, tense voice serves as a more potent cue to high-activation affects in the latter languages. A relatively tenser baseline in Japanese and Spanish is further suggested by the fact that tense voice can be associated with intimate, a low activation state, just as readily as with the high-activation state interested.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Michael Neumann|AUTHOR Michael Neumann]], [[Ngoc Thang Vu|AUTHOR Ngoc Thang Vu]]
</p><p class="cpabstractcardaffiliationlist">Universität Stuttgart, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1263–1267
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech emotion recognition is an important and challenging task in the realm of human-computer interaction. Prior work proposed a variety of models and feature sets for training a system. In this work, we conduct extensive experiments using an attentive convolutional neural network with multi-view learning objective function. We compare system performance using different lengths of the input signal, different types of acoustic features and different types of emotion speech (improvised/scripted). Our experimental results on the Interactive Emotional Dyadic Motion Capture (IEMOCAP) database reveal that the recognition performance strongly depends on the type of speech data independent of the choice of input features. Furthermore, we achieved state-of-the-art results on the improvised speech data of IEMOCAP.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Haşim Sak|AUTHOR Haşim Sak]], [[Matt Shannon|AUTHOR Matt Shannon]], [[Kanishka Rao|AUTHOR Kanishka Rao]], [[Françoise Beaufays|AUTHOR Françoise Beaufays]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1298–1302
</span></p></div>
<div class="cpabstractcardabstract"><p>We introduce an encoder-decoder recurrent neural network model called Recurrent Neural Aligner (RNA) that can be used for sequence to sequence mapping tasks. Like connectionist temporal classification (CTC) models, RNA defines a probability distribution over target label sequences including blank labels corresponding to each time step in input. The probability of a label sequence is calculated by marginalizing over all possible blank label positions. Unlike CTC, RNA does not make a conditional independence assumption for label predictions; it uses the predicted label at time t-1 as an additional input to the recurrent model when predicting the label at time t. We apply this model to end-to-end speech recognition. RNA is capable of streaming recognition since the decoder does not employ an attention mechanism. The model is trained on transcribed acoustic data to predict graphemes and no external language and pronunciation models are used for decoding. We employ an approximate dynamic programming method to optimize negative log likelihood, and a sampling-based sequence discriminative training technique to fine-tune the model to minimize expected word error rate. We show that the model achieves competitive accuracy without using an external language model or doing beam search decoding.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Golan Pundak|AUTHOR Golan Pundak]], [[Tara N. Sainath|AUTHOR Tara N. Sainath]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1303–1307
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, very deep networks, with as many as hundreds of layers, have shown great success in image classification tasks. One key component that has enabled such deep models is the use of “skip connections”, including either residual or highway connections, to alleviate the vanishing and exploding gradient problems. While these connections have been explored for speech, they have mainly been explored for feed-forward networks. Since recurrent structures, such as LSTMs, have produced state-of-the-art results on many of our Voice Search tasks, the goal of this work is to thoroughly investigate different approaches to adding depth to recurrent structures. Specifically, we experiment with novel Highway-LSTM models with bottleneck skip connections and show that a 10 layer model can outperform a state-of-the-art 5 layer LSTM model with the same number of parameters by 2% relative WER. In addition, we experiment with Recurrent Highway layers and find these to be on par with Highway-LSTM models, when given sufficient depth.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mirco Ravanelli|AUTHOR Mirco Ravanelli]]^^1^^, [[Philemon Brakel|AUTHOR Philemon Brakel]]^^2^^, [[Maurizio Omologo|AUTHOR Maurizio Omologo]]^^1^^, [[Yoshua Bengio|AUTHOR Yoshua Bengio]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^FBK, Italy; ^^2^^Université de Montréal, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1308–1312
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech recognition is largely taking advantage of deep learning, showing that substantial benefits can be obtained by modern Recurrent Neural Networks (RNNs). The most popular RNNs are Long Short-Term Memory (LSTMs), which typically reach state-of-the-art performance in many tasks thanks to their ability to learn long-term dependencies and robustness to vanishing gradients. Nevertheless, LSTMs have a rather complex design with three multiplicative gates, that might impair their efficient implementation. An attempt to simplify LSTMs has recently led to Gated Recurrent Units (GRUs), which are based on just two multiplicative gates.
This paper builds on these efforts by further revising GRUs and proposing a simplified architecture potentially more suitable for speech recognition. The contribution of this work is two-fold. First, we suggest to remove the reset gate in the GRU design, resulting in a more efficient single-gate architecture. Second, we propose to replace tanh with ReLU activations in the state update equations. Results show that, in our implementation, the revised architecture reduces the per-epoch training time by more than 30% and consistently improves recognition performance across different tasks, input features, and noisy conditions when compared to a standard GRU.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jen-Tzung Chien|AUTHOR Jen-Tzung Chien]], [[Chen Shen|AUTHOR Chen Shen]]
</p><p class="cpabstractcardaffiliationlist">National Chiao Tung University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1313–1317
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a new stochastic learning approach to construct a latent variable model for recurrent neural network (RNN) based speech recognition. A hybrid generative and discriminative stochastic network is implemented to build a deep classification model. In the implementation, we conduct stochastic modeling for hidden states of recurrent neural network based on the variational auto-encoder. The randomness of hidden neurons is represented by the Gaussian distribution with mean and variance parameters driven by neural weights and learned from variational inference. Importantly, the class labels of input speech frames are incorporated to regularize this deep model to sample the informative and discriminative features for reconstruction of classification outputs. We accordingly propose the stochastic RNN (SRNN) to reflect the probabilistic property in RNN classification system. A stochastic error backpropagation algorithm is implemented. The experiments on speech recognition using TIMIT and Aurora4 show the merit of the proposed SRNN.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Martin Ratajczak|AUTHOR Martin Ratajczak]]^^1^^, [[Sebastian Tschiatschek|AUTHOR Sebastian Tschiatschek]]^^2^^, [[Franz Pernkopf|AUTHOR Franz Pernkopf]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Technische Universität Graz, Austria; ^^2^^ETH Zürich, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1318–1322
</span></p></div>
<div class="cpabstractcardabstract"><p>We introduce a simple and efficient frame and segment level RNN model (FS-RNN) for phone classification. It processes the input at frame level and segment level by bidirectional gated RNNs. This type of processing is important to exploit the (temporal) information more effectively compared to (i) models which solely process the input at frame level and (ii) models which process the input on segment level using features obtained by heuristic aggregation of frame level features. Furthermore, we incorporated the activations of the last hidden layer of the FS-RNN as an additional feature type in a neural higher-order CRF (NHO-CRF). In experiments, we demonstrated excellent performance on the TIMIT phone classification task, reporting a performance of 13.8% phone error rate for the FS-RNN model and 11.9% when combined with the NHO-CRF. In both cases we significantly exceeded the state-of-the-art performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kyu J. Han|AUTHOR Kyu J. Han]], [[Seongjun Hahm|AUTHOR Seongjun Hahm]], [[Byung-Hak Kim|AUTHOR Byung-Hak Kim]], [[Jungsuk Kim|AUTHOR Jungsuk Kim]], [[Ian Lane|AUTHOR Ian Lane]]
</p><p class="cpabstractcardaffiliationlist">Capio, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1323–1327
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we explore the effectiveness of a variety of Deep Learning-based acoustic models for conversational telephony speech, specifically TDNN, bLSTM and CNN-bLSTM models. We evaluated these models on both research testsets, such as Switchboard and CallHome, as well as recordings from a real-world call-center application. Our best single system, consisting of a single CNN-bLSTM acoustic model, obtained a WER of 5.7% on the Switchboard testset, and in combination with other models a WER of 5.3% was obtained. On the CallHome testset a WER of 10.1% was achieved with model combination. On the test data collected from real-world call-centers, even with model adaptation using application specific data, the WER was significantly higher at 15.0%. We performed an error analysis on the real-world data and highlight the areas where speech recognition still has challenges.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nigel G. Ward|AUTHOR Nigel G. Ward]]^^1^^, [[Jason C. Carlson|AUTHOR Jason C. Carlson]]^^1^^, [[Olac Fuentes|AUTHOR Olac Fuentes]]^^1^^, [[Diego Castan|AUTHOR Diego Castan]]^^2^^, [[Elizabeth E. Shriberg|AUTHOR Elizabeth E. Shriberg]]^^2^^, [[Andreas Tsiartas|AUTHOR Andreas Tsiartas]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Texas at El Paso, USA; ^^2^^SRI International, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1447–1451
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech conveys many things beyond content, including aspects of stance and attitude that have not been much studied. Considering 14 aspects of stance as they occur in radio news stories, we investigated the extent to which they could be inferred from prosody. By using time-spread prosodic features and by aggregating local estimates, many aspects of stance were at least somewhat predictable, with results significantly better than chance for many stance aspects, including, across English, Mandarin and Turkish, good, typical, local, background, new information, and relevant to a large group.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gina-Anne Levow|AUTHOR Gina-Anne Levow]], [[Richard A. Wright|AUTHOR Richard A. Wright]]
</p><p class="cpabstractcardaffiliationlist">University of Washington, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1452–1456
</span></p></div>
<div class="cpabstractcardabstract"><p>Stance-taking, the expression of opinions or attitudes, informs the process of negotiation, argumentation, and decision-making. While receiving significant attention in text materials in work on the related areas of subjectivity and sentiment analysis, the expression of stance in speech remains less explored. Prior analysis of the acoustics of stance-expression in conversational speech has identified some significant differences across dimensions of stance-related behavior. However, that analysis, as in much prior work, relied on simple functionals of pitch, energy, and duration, including maxima, minima, means, and ranges. In contrast, the current work focuses on exploiting measures that capture the dynamics of the pitch and energy contour. We employ features based on subband autocorrelation measures of pitch change and variants of the modulation spectrum. Using a corpus of conversational speech manually annotated for dimensions of stance-taking, we demonstrate that these measures of pitch and energy dynamics can help to characterize and distinguish among stance-related behaviors in speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Valentin Barriere|AUTHOR Valentin Barriere]], [[Chloé Clavel|AUTHOR Chloé Clavel]], [[Slim Essid|AUTHOR Slim Essid]]
</p><p class="cpabstractcardaffiliationlist">LTCI, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1457–1461
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, the main goal is to detect a movie reviewer’s opinion using hidden conditional random fields. This model allows us to capture the dynamics of the reviewer’s opinion in the transcripts of long unsegmented audio reviews that are analyzed by our system. High level linguistic features are computed at the level of inter-pausal segments. The features include syntactic features, a statistical word embedding model and subjectivity lexicons. The proposed system is evaluated on the ICT-MMMO corpus. We obtain an F1-score of 82%, which is better than logistic regression and recurrent neural network approaches. We also offer a discussion that sheds some light on the capacity of our system to adapt the word embedding model learned from general written text data to spoken movie reviews and thus model the dynamics of the opinion.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Qinyi Luo|AUTHOR Qinyi Luo]]^^1^^, [[Rahul Gupta|AUTHOR Rahul Gupta]]^^2^^, [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tsinghua University, China; ^^2^^University of Southern California, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1462–1466
</span></p></div>
<div class="cpabstractcardabstract"><p>Transfer learning (TL) involves leveraging information from sources outside the domain at hand for enhancing model performances. Popular TL methods either directly use the data or adapt the models learned on out-of-domain resources and incorporate them within in-domain models. TL methods have shown promise in several applications such as text classification, cross-domain language classification and emotion recognition. In this paper, we propose TL methods to computational human behavioral trait modeling. Many behavioral traits are abstract constructs (e.g., sincerity of an individual), and are often conceptually related to other constructs (e.g., level of deception) making TL methods an attractive option for their modeling. We consider the problem of automatically predicting human sincerity and deception from behavioral data while leveraging transfer of knowledge from each other. We compare our methods against baseline models trained only on in-domain data. Our best models achieve an Unweighted Average Recall (UAR) of 72.02% in classifying deception (baseline: 69.64%). Similarly, applied methods achieve Spearman’s/Pearson’s correlation values of 49.37%/48.52% between true and predicted sincerity scores (baseline: 46.51%/41.58%), indicating the success and the potential of TL for such human behavior tasks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Anne Schröder|AUTHOR Anne Schröder]], [[Simon Stone|AUTHOR Simon Stone]], [[Peter Birkholz|AUTHOR Peter Birkholz]]
</p><p class="cpabstractcardaffiliationlist">Technische Universität Dresden, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1467–1471
</span></p></div>
<div class="cpabstractcardabstract"><p>The detection of deception in human speech is a difficult task but can be performed above chance level by human listeners even when only audio data is provided. Still, it is highly contested, which speech features could be used to help identify lies. In this study, we examined a set of phonetic and paralinguistic cues and their influence on the credibility of speech using an analysis-by-synthesis approach. 33 linguistically neutral utterances with different manipulated cues (unfilled pauses, phonation type, higher speech rate, tremolo and raised F0) were synthesized using articulatory synthesis. These utterances were presented to 50 subjects who were asked to choose the more credible utterance. From those choices, a credibility score was calculated for each cue. The results show a significant increase in credibility when a tremolo is inserted or the breathiness is increased, and a decrease in credibility when a pause is inserted or the F0 is raised. Other cues also had a significant, but less pronounced influence on the credibility while some only showed trends. In summary, the study showed that the credibility of a factually unverifiable utterance is in parts controlled by the presented paralinguistic cues.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gideon Mendels|AUTHOR Gideon Mendels]], [[Sarah Ita Levitan|AUTHOR Sarah Ita Levitan]], [[Kai-Zhan Lee|AUTHOR Kai-Zhan Lee]], [[Julia Hirschberg|AUTHOR Julia Hirschberg]]
</p><p class="cpabstractcardaffiliationlist">Columbia University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1472–1476
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic deception detection is an important problem with far-reaching implications for many disciplines. We present a series of experiments aimed at automatically detecting deception from speech. We use the Columbia X-Cultural Deception (CXD) Corpus, a large-scale corpus of within-subject deceptive and non-deceptive speech, for training and evaluating our models. We compare the use of spectral, acoustic-prosodic, and lexical feature sets, using different machine learning models. Finally, we design a single hybrid deep model with both acoustic and lexical features trained jointly that achieves state-of-the-art results on the CXD corpus.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kong Aik Lee|AUTHOR Kong Aik Lee]]^^1^^, [[SRE’16 I4U Group|AUTHOR SRE’16 I4U Group]]
</p><p class="cpabstractcardaffiliationlist">^^1^^A*STAR, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1328–1332
</span></p></div>
<div class="cpabstractcardabstract"><p>The 2016 speaker recognition evaluation (SRE’16) is the latest edition in the series of benchmarking events conducted by the National Institute of Standards and Technology (NIST). I4U is a joint entry to SRE’16 as the result from the collaboration and active exchange of information among researchers from sixteen ''I''nstitutes and ''U''niversities across ''4'' continents. The joint submission and several of its 32 sub-systems were among top-performing systems. A lot of efforts have been devoted to two major challenges, namely, unlabeled training data and dataset shift from Switchboard-Mixer to the new Call My Net dataset. This paper summarizes the lessons learned, presents our shared view from the sixteen research groups on recent advances, major paradigm shift, and common tool chain used in speaker recognition as we have witnessed in SRE’16. More importantly, we look into the intriguing question of fusing a large ensemble of sub-systems and the potential benefit of large-scale collaboration.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Pedro A. Torres-Carrasquillo|AUTHOR Pedro A. Torres-Carrasquillo]]^^1^^, [[Fred Richardson|AUTHOR Fred Richardson]]^^1^^, [[Shahan Nercessian|AUTHOR Shahan Nercessian]]^^1^^, [[Douglas Sturim|AUTHOR Douglas Sturim]]^^1^^, [[William Campbell|AUTHOR William Campbell]]^^1^^, [[Youngjune Gwon|AUTHOR Youngjune Gwon]]^^1^^, [[Swaroop Vattam|AUTHOR Swaroop Vattam]]^^1^^, [[Najim Dehak|AUTHOR Najim Dehak]]^^2^^, [[Harish Mallidi|AUTHOR Harish Mallidi]]^^2^^, [[Phani Sankar Nidadavolu|AUTHOR Phani Sankar Nidadavolu]]^^2^^, [[Ruizhi Li|AUTHOR Ruizhi Li]]^^2^^, [[Reda Dehak|AUTHOR Reda Dehak]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^MIT Lincoln Laboratory, USA; ^^2^^Johns Hopkins University, USA; ^^3^^EPITA LRDE, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1333–1337
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, the NIST 2016 SRE system that resulted from the collaboration between MIT Lincoln Laboratory and the team at Johns Hopkins University is presented. The submissions for the 2016 evaluation consisted of three fixed condition submissions and a single system open condition submission. The primary submission on the fixed (and core) condition resulted in an actual DCF of .618. Details of the submissions are discussed along with some discussion and observations of the 2016 evaluation campaign.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Daniele Colibro|AUTHOR Daniele Colibro]]^^1^^, [[Claudio Vair|AUTHOR Claudio Vair]]^^1^^, [[Emanuele Dalmasso|AUTHOR Emanuele Dalmasso]]^^1^^, [[Kevin Farrell|AUTHOR Kevin Farrell]]^^2^^, [[Gennady Karvitsky|AUTHOR Gennady Karvitsky]]^^3^^, [[Sandro Cumani|AUTHOR Sandro Cumani]]^^4^^, [[Pietro Laface|AUTHOR Pietro Laface]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Nuance Communications, Italy; ^^2^^Nuance Communications, USA; ^^3^^Nuance Communications, Israel; ^^4^^Politecnico di Torino, Italy</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1338–1342
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes the Nuance–Politecnico di Torino (NPT) speaker recognition system submitted to the NIST SRE16 evaluation campaign. Included are the results of post-evaluation tests, focusing on the analysis of the performance of generative and discriminative classifiers, and of score normalization. The submitted system combines the results of four GMM-IVector models, two DNN-IVector models and a GMM-SVM acoustic system. Each system exploits acoustic front-end parameters that differ by feature type and dimension. We analyze the main components of our submission, which contributed to obtaining 8.1% EER and 0.532 actual C,,primary,, in the challenging SRE16 Fixed condition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chunlei Zhang|AUTHOR Chunlei Zhang]], [[Fahimeh Bahmaninezhad|AUTHOR Fahimeh Bahmaninezhad]], [[Shivesh Ranjan|AUTHOR Shivesh Ranjan]], [[Chengzhu Yu|AUTHOR Chengzhu Yu]], [[Navid Shokouhi|AUTHOR Navid Shokouhi]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]]
</p><p class="cpabstractcardaffiliationlist">University of Texas at Dallas, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1343–1347
</span></p></div>
<div class="cpabstractcardabstract"><p>This study describes systems submitted by the Center for Robust Speech Systems (CRSS) from the University of Texas at Dallas (UTD) to the 2016 National Institute of Standards and Technology (NIST) Speaker Recognition Evaluation (SRE). We developed 4 UBM and DNN i-vector based speaker recognition systems with alternate data sets and feature representations. Given that the emphasis of the NIST SRE 2016 is on language mismatch between training and enrollment/test data, so-called domain mismatch, in our system development we focused on: (i) utilizing unlabeled in-domain data for centralizing i-vectors to alleviate the domain mismatch; (ii) selecting the proper data sets and optimizing configurations for training LDA/PLDA; (iii) introducing a newly proposed dimension reduction technique which incorporates unlabeled in-domain data before PLDA training; (iv) unsupervised speaker clustering of unlabeled data and using them alone or with previous SREs for PLDA training, and finally (v) score calibration using unlabeled data with “pseudo” speaker labels generated from speaker clustering. NIST evaluations show that our proposed methods were very successful for the given task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Oldřich Plchot|AUTHOR Oldřich Plchot]]^^1^^, [[Pavel Matějka|AUTHOR Pavel Matějka]]^^1^^, [[Anna Silnova|AUTHOR Anna Silnova]]^^1^^, [[Ondřej Novotný|AUTHOR Ondřej Novotný]]^^1^^, [[Mireia Diez Sánchez|AUTHOR Mireia Diez Sánchez]]^^1^^, [[Johan Rohdin|AUTHOR Johan Rohdin]]^^1^^, [[Ondřej Glembek|AUTHOR Ondřej Glembek]]^^1^^, [[Niko Brümmer|AUTHOR Niko Brümmer]]^^2^^, [[Albert Swart|AUTHOR Albert Swart]]^^2^^, [[Jesús Jorrín-Prieto|AUTHOR Jesús Jorrín-Prieto]]^^3^^, [[Paola García|AUTHOR Paola García]]^^3^^, [[Luis Buera|AUTHOR Luis Buera]]^^3^^, [[Patrick Kenny|AUTHOR Patrick Kenny]]^^4^^, [[Jahangir Alam|AUTHOR Jahangir Alam]]^^4^^, [[Gautam Bhattacharya|AUTHOR Gautam Bhattacharya]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Brno University of Technology, Czech Republic; ^^2^^Nuance Communications, South Africa; ^^3^^Nuance Communications, Spain; ^^4^^CRIM, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1348–1352
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a condensed description and analysis of the joint submission for NIST SRE 2016, by Agnitio, BUT and CRIM (ABC). We concentrate on challenges that arose during development and we analyze the results obtained on the evaluation data and on our development sets. We show that testing on mismatched, non-English and short duration data introduced in NIST SRE 2016 is a difficult problem for current state-of-the-art systems. Testing on this data brought back the issue of score normalization and it also revealed that the bottleneck features (BN), which are superior when used for telephone English, are lacking in performance against the standard acoustic features like Mel Frequency Cepstral Coefficients (MFCCs). We offer ABC’s insights, findings and suggestions for building a robust system suitable for mismatched, non-English and relatively noisy data such as those in NIST SRE 2016.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Seyed Omid Sadjadi|AUTHOR Seyed Omid Sadjadi]]^^1^^, [[Timothée Kheyrkhah|AUTHOR Timothée Kheyrkhah]]^^1^^, [[Audrey Tong|AUTHOR Audrey Tong]]^^1^^, [[Craig Greenberg|AUTHOR Craig Greenberg]]^^1^^, [[Douglas Reynolds|AUTHOR Douglas Reynolds]]^^2^^, [[Elliot Singer|AUTHOR Elliot Singer]]^^2^^, [[Lisa Mason|AUTHOR Lisa Mason]]^^3^^, [[Jaime Hernandez-Cordero|AUTHOR Jaime Hernandez-Cordero]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NIST, USA; ^^2^^MIT Lincoln Laboratory, USA; ^^3^^DoD, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1353–1357
</span></p></div>
<div class="cpabstractcardabstract"><p>In 2016, the National Institute of Standards and Technology (NIST) conducted the most recent in an ongoing series of speaker recognition evaluations (SRE) to foster research in robust text-independent speaker recognition, as well as measure performance of current state-of-the-art systems. Compared to previous NIST SREs, SRE16 introduced several new aspects including: an entirely online evaluation platform, a fixed training data condition, more variability in test segment duration (uniformly distributed between 10s and 60s), the use of non-English (Cantonese, Cebuano, Mandarin and Tagalog) conversational telephone speech (CTS) collected outside North America, and providing labeled and unlabeled development (a.k.a. validation) sets for system hyperparameter tuning and adaptation. The introduction of the new non-English CTS data made SRE16 more challenging due to domain/channel and language mismatches as compared to previous SREs. A total of 66 research organizations from industry and academia registered for SRE16, out of which 43 teams submitted 121 valid system outputs that produced scores. This paper presents an overview of the evaluation and analysis of system performance over all primary evaluation conditions. Initial results indicate that effective use of the development data was essential for the top performing systems, and that domain/channel, language, and duration mismatch had an adverse impact on system performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hideki Kawahara|AUTHOR Hideki Kawahara]]^^1^^, [[Ken-Ichi Sakakibara|AUTHOR Ken-Ichi Sakakibara]]^^2^^, [[Masanori Morise|AUTHOR Masanori Morise]]^^3^^, [[Hideki Banno|AUTHOR Hideki Banno]]^^4^^, [[Tomoki Toda|AUTHOR Tomoki Toda]]^^5^^, [[Toshio Irino|AUTHOR Toshio Irino]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Wakayama University, Japan; ^^2^^Health Science University of Hokkaido, Japan; ^^3^^University of Yamanashi, Japan; ^^4^^Meijo University, Japan; ^^5^^Nagoya University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1358–1362
</span></p></div>
<div class="cpabstractcardabstract"><p>We formulated and implemented a procedure to generate aliasing-free excitation source signals. It uses a new antialiasing filter in the continuous time domain followed by an IIR digital filter for response equalization. We introduced a cosine-series-based general design procedure for the new antialiasing function. We applied this new procedure to implement the antialiased Fujisaki-Ljungqvist model. We also applied it to revise our previous implementation of the antialiased Fant-Liljencrants model. A combination of these signals and a lattice implementation of the time varying vocal tract model provides a reliable and flexible basis to test f,,o,, extractors and source aperiodicity analysis methods. MATLAB implementations of these antialiased excitation source models are available as part of our open source tools for speech science.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ana Ramírez López|AUTHOR Ana Ramírez López]], [[Shreyas Seshadri|AUTHOR Shreyas Seshadri]], [[Lauri Juvela|AUTHOR Lauri Juvela]], [[Okko Räsänen|AUTHOR Okko Räsänen]], [[Paavo Alku|AUTHOR Paavo Alku]]
</p><p class="cpabstractcardaffiliationlist">Aalto University, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1363–1367
</span></p></div>
<div class="cpabstractcardabstract"><p>Speaking style conversion is the technology of converting natural speech signals from one style to another. In this study, we focus on normal-to-Lombard conversion. This can be used, for example, to enhance the intelligibility of speech in noisy environments. We propose a parametric approach that uses a vocoder to extract speech features. These features are mapped using Bayesian GMMs from utterances spoken in normal style to the corresponding features of Lombard speech. Finally, the mapped features are converted to a Lombard speech waveform with the vocoder. Two vocoders were compared in the proposed normal-to-Lombard conversion: a recently developed glottal vocoder that decomposes speech into glottal flow excitation and vocal tract, and the widely used STRAIGHT vocoder. The conversion quality was evaluated in two subjective listening tests measuring subjective similarity and naturalness. The similarity test results show that the system is able to convert normal speech into Lombard speech for the two vocoders. However, the subjective naturalness of the converted Lombard speech was clearly better using the glottal vocoder in comparison to STRAIGHT.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lauri Juvela|AUTHOR Lauri Juvela]]^^1^^, [[Bajibabu Bollepalli|AUTHOR Bajibabu Bollepalli]]^^1^^, [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]^^2^^, [[Paavo Alku|AUTHOR Paavo Alku]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Aalto University, Finland; ^^2^^NII, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1368–1372
</span></p></div>
<div class="cpabstractcardabstract"><p>Neural network-based models that generate glottal excitation waveforms from acoustic features have been found to give improved quality in statistical parametric speech synthesis. Until now, however, these models have been trained separately from the acoustic model. This creates mismatch between training and synthesis, as the synthesized acoustic features used for the excitation model input differ from the original inputs, with which the model was trained on. Furthermore, due to the errors in predicting the vocal tract filter, the original excitation waveforms do not provide perfect reconstruction of the speech waveform even if predicted without error. To address these issues and to make the excitation model more robust against errors in acoustic modeling, this paper proposes two modifications to the excitation model training scheme. First, the excitation model is trained in a connected manner, with inputs generated by the acoustic model. Second, the target glottal waveforms are re-estimated by performing glottal inverse filtering with the predicted vocal tract filters. The results show that both of these modifications improve performance measured in MSE and MFCC distortion, and slightly improve the subjective quality of the synthetic speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alexander Sorin|AUTHOR Alexander Sorin]], [[Slava Shechtman|AUTHOR Slava Shechtman]], [[Asaf Rendel|AUTHOR Asaf Rendel]]
</p><p class="cpabstractcardaffiliationlist">IBM, Israel</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1373–1377
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, a glottal vocoder has been integrated in the IBM concatenative TTS system and certain configurable global voice transformations were defined in the vocoder parameter space. The vocoder analysis employs a novel robust glottal source parameter estimation strategy. The vocoder is applied to the voiced speech only, while unvoiced speech is kept unparameterized, thus contributing to the perceived naturalness of the synthesized speech.
The semi-parametric system enables independent modifications of the glottal source and vocal tract components on-the-fly by embedding the voice transformations in the synthesis process. The transformations' effect ranges from slight voice altering to a complete change of the perceived speaker personality. Pitch modifications enhance these changes. At the same time, the voice transformations are simple enough to be easily controlled externally to the system. This allows the users either to fine tune the voice sound or to create instantly multiple distinct virtual voices. In both cases, the synthesis is based on a large and meticulously cleaned concatenative TTS voice with a broad phonetic coverage. In this paper we present the system and provide subjective evaluations of its voice modification capabilities.
The technology presented in this paper is implemented in IBM Watson TTS service.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rodrigo Manríquez|AUTHOR Rodrigo Manríquez]]^^1^^, [[Sean D. Peterson|AUTHOR Sean D. Peterson]]^^2^^, [[Pavel Prado|AUTHOR Pavel Prado]]^^1^^, [[Patricio Orio|AUTHOR Patricio Orio]]^^3^^, [[Matías Zañartu|AUTHOR Matías Zañartu]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universidad Técnica Federico Santa María, Chile; ^^2^^University of Waterloo, Canada; ^^3^^Universidad de Valparaíso, Chile</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1378–1382
</span></p></div>
<div class="cpabstractcardabstract"><p>Physiologically-based synthesis using low order lumped-mass models of phonation have been shown to mimic and predict complex physical phenomena observed in normal and pathological speech production, and have received significant attention due to their ability to efficiently perform comprehensive parametric investigations that are cost prohibitive with more advanced computational tools. Even though these numerical models have been shown to be useful research and clinical tools, several physiological aspects of them remain to be explored. One of the key components that has been neglected is the natural fluctuation of the laryngeal muscle activity that affects the configuration of the model parameters. In this study, a physiologically-based laryngeal muscle activation model that accounts for random fluctuations is proposed. The method is expected to improve the ability to model muscle related pathologies, such as muscle tension dysphonia and Parkinson’s disease. The mathematical framework and underlying assumptions are described, and the effects of the added random muscle activity are tested in a well-known body-cover model of the vocal folds with acoustic propagation and interaction. Initial simulations illustrate that the random fluctuations in the muscle activity impact the resulting kinematics to varying degrees depending on the laryngeal configuration.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Felipe Espic|AUTHOR Felipe Espic]], [[Cassia Valentini Botinhao|AUTHOR Cassia Valentini Botinhao]], [[Simon King|AUTHOR Simon King]]
</p><p class="cpabstractcardaffiliationlist">University of Edinburgh, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1383–1387
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a simple new representation for the FFT spectrum tailored to statistical parametric speech synthesis. It consists of four feature streams that describe magnitude, phase and fundamental frequency using real numbers. The proposed feature extraction method does not attempt to decompose the speech structure (e.g., into source+filter or harmonics+noise). By avoiding the simplifications inherent in decomposition, we can dramatically reduce the “phasiness” and “buzziness” typical of most vocoders. The method uses simple and computationally cheap operations and can operate at a lower frame rate than the 200 frames-per-second typical in many systems. It avoids heuristics and methods requiring approximate or iterative solutions, including phase unwrapping.
Two DNN-based acoustic models were built — from male and female speech data — using the Merlin toolkit. Subjective comparisons were made with a state-of-the-art baseline, using the STRAIGHT vocoder. In all variants tested, and for both male and female voices, the proposed method substantially outperformed the baseline. We provide source code to enable our complete system to be replicated.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Heather Kember|AUTHOR Heather Kember]]^^1^^, [[Ann-Kathrin Grohe|AUTHOR Ann-Kathrin Grohe]]^^2^^, [[Katharina Zahner|AUTHOR Katharina Zahner]]^^3^^, [[Bettina Braun|AUTHOR Bettina Braun]]^^3^^, [[Andrea Weber|AUTHOR Andrea Weber]]^^2^^, [[Anne Cutler|AUTHOR Anne Cutler]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Western Sydney University, Australia; ^^2^^Universität Tübingen, Germany; ^^3^^Universität Konstanz, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1388–1392
</span></p></div>
<div class="cpabstractcardabstract"><p>English and German have similar prosody, but their speakers realize some pitch falls (not rises) in subtly different ways. We here test for asymmetry in perception. An ABX discrimination task requiring F0 slope or duration judgements on isolated vowels revealed no cross-language difference in duration or F0 fall discrimination, but discrimination of rises (realized similarly in each language) was less accurate for English than for German listeners. This unexpected finding may reflect greater sensitivity to rising patterns by German listeners, or reduced sensitivity by English listeners as a result of extensive exposure to phrase-final rises (“uptalk”) in their language.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Luying Hou|AUTHOR Luying Hou]], [[Bert Le Bruyn|AUTHOR Bert Le Bruyn]], [[René Kager|AUTHOR René Kager]]
</p><p class="cpabstractcardaffiliationlist">Universiteit Utrecht, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1393–1397
</span></p></div>
<div class="cpabstractcardabstract"><p>It has been observed that the interpretation of pronouns can depend on their accentuation patterns in parallel sentences such as “John hit Bill and then George hit him”, in which ‘him’ refers to Bill when unaccented but shifts to John when accented. While accentuation is widely regarded as a means of disambiguation, some studies have noticed that it also extends to unambiguous anaphors [7–10]. From the perspective of production, however, no strong experimental confirmation was found for the ‘shift’ function of accented pronouns, which is due to the fact that production research has mainly focused on corpora [5, 6]. Hence, the nature of the accent on anaphors still remains obscure. By manipulating referential shift and ambiguity, this study explores the role of prosody in anaphora production in strictly Mandarin parallel structures. The results reveal a significantly higher F₀ and longer duration for anaphors in referentially shifted conditions, suggesting that anaphoric accentuation signals a referential change in strictly parallel structures in Mandarin. No evidence was found that ambiguity plays a role in anaphoric accentuation. This finding challenges the general view on accented pronouns and will deepen our understanding of the semantics-prosody relationship.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Angeliki Athanasopoulou|AUTHOR Angeliki Athanasopoulou]]^^1^^, [[Irene Vogel|AUTHOR Irene Vogel]]^^2^^, [[Hossep Dolatian|AUTHOR Hossep Dolatian]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of California at San Diego, USA; ^^2^^University of Delaware, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1398–1402
</span></p></div>
<div class="cpabstractcardabstract"><p>Languages are often categorized as having either predictable (fixed or quantity-sensitive) or non-predictable stress. Despite their name, fixed stress languages may have exceptions, so in fact, their stress does not always appear in the same position. Since predictability has been shown to affect certain speech phenomena, with additional or redundant acoustic cues being provided when the linguistic content is less predictable (e.g., Smooth Signal Redundancy Hypothesis), we investigate whether, and to what extent, the predictability of stress position affects the manifestation of stress in different languages. We examine the acoustic properties of stress in three languages classified as having fixed stress (Turkish, French, Armenian), with exceptions, and in one language with non-predictable-stress, Brazilian Portuguese. Specifically, we compare the manifestation of stress in the canonical stress (typically “fixed”) position with its manifestation in the non-canonical (exceptional) position, where it would potentially be less predictable. We also compare these patterns with the manifestation of stress in Portuguese, in both the “default” penultimate and the less common final position. Our results show that stress is manifested quite similarly in canonical and non-canonical positions in the “fixed” stress languages and stress is most clearly produced when it is least predictable.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Leendert Plug|AUTHOR Leendert Plug]]^^1^^, [[Rachel Smith|AUTHOR Rachel Smith]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Leeds, UK; ^^2^^University of Glasgow, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1403–1406
</span></p></div>
<div class="cpabstractcardabstract"><p>Studies of speech tempo commonly use syllable or segment rate as a proxy measure for perceived tempo. In languages whose phonologies allow substantial syllable complexity these measures can produce figures on quite different scales; however, little is known about the correlation between syllable and segment rate measurements on the one hand and naïve listeners’ tempo judgements on the other.
We follow up on the findings of one relevant study on German [1], which suggest that listeners attend to both syllable and segment rates in making tempo estimates, through a weighted average of the rates in which syllable rate carries more weight. We report on an experiment in which we manipulate phonological complexity in English utterance pairs that are constant in syllable rate. Listeners decide for each pair which utterance sounds faster. Our results suggest that differences in segment rate that do not correspond to differences in syllable rate have little impact on perceived speech tempo in English.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jing Yang|AUTHOR Jing Yang]]^^1^^, [[Yu Zhang|AUTHOR Yu Zhang]]^^2^^, [[Aijun Li|AUTHOR Aijun Li]]^^3^^, [[Li Xu|AUTHOR Li Xu]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Central Arkansas, USA; ^^2^^Ohio University, USA; ^^3^^Chinese Academy of Social Sciences, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1407–1411
</span></p></div>
<div class="cpabstractcardabstract"><p>The present study compared the duration of Mandarin tones in three types of speech contexts: isolated monosyllables, formal text-reading passages, and casual conversations. A total of 156 adult speakers were recruited. The speech materials included 44 monosyllables recorded from each of 121 participants, 18 passages read by 2 participants, and 20 conversations conducted by 33 participants. The duration pattern of the four lexical tones in the isolated monosyllables was consistent with the pattern described in previous literature. However, the duration of the four lexical tones became much shorter and tended to converge to that of the neutral tone (i.e., tone 0) in the text-reading and conversational speech. The maximum-likelihood estimator revealed that the durational cue contributed to tone recognition in the isolated monosyllables. With a single speaker, the average tone recognition based on duration alone could reach approximately 65% correct. As the number of speakers increased (e.g., ≥ 4), tone recognition performance dropped to approximately 45% correct. In conversational speech, the maximum likelihood estimation of tones based on duration cues was only 23% correct. The tone duration provided little useful cue to differentiate Mandarin tonal identity in everyday situations.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Otto Ewald|AUTHOR Otto Ewald]]^^1^^, [[Eva Liina Asu|AUTHOR Eva Liina Asu]]^^2^^, [[Susanne Schötz|AUTHOR Susanne Schötz]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Lund University, Sweden; ^^2^^University of Tartu, Estonia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1412–1416
</span></p></div>
<div class="cpabstractcardabstract"><p>This study compares the acoustic realisation of /iː yː ʉː uː/ in three varieties of Swedish: Central Swedish, Estonian Swedish, and Finland Swedish. Vowel tokens were extracted from isolated words produced by six elderly female speakers from each variety. Trajectories of the first three formants were modelled with discrete cosine transform (DCT) coefficients, enabling the comparison of the formant means as well as the direction and magnitude of the formant movement. Cross-dialectal differences were found in all measures and in all vowels. The most noteworthy feature of the Estonian Swedish long close vowel inventory is the lack of /yː/. For Finland Swedish it was shown that /iː/ and /yː/ are closer than in Central Swedish. The realisation of /ʉː/ varies from front in Central Swedish, to central in Estonian Swedish, and back in Finland Swedish. On average, the Central Swedish vowels exhibited a higher degree of formant movement than the vowels in the other two varieties. In the present study, regional variation in Swedish vowels was for the first time investigated using DCT coefficients. The results stress the importance of taking formant dynamics into account even in the analysis of nominal monophthongs.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yao Qian|AUTHOR Yao Qian]], [[Keelan Evanini|AUTHOR Keelan Evanini]], [[Xinhao Wang|AUTHOR Xinhao Wang]], [[Chong Min Lee|AUTHOR Chong Min Lee]], [[Matthew Mulholland|AUTHOR Matthew Mulholland]]
</p><p class="cpabstractcardaffiliationlist">Educational Testing Service, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1417–1421
</span></p></div>
<div class="cpabstractcardabstract"><p>Recent advances in ASR and spoken language processing have led to improved systems for automated assessment for spoken language. However, it is still challenging for automated scoring systems to achieve high performance in terms of the agreement with human experts when applied to non-native children’s spontaneous speech. The subpar performance is mainly caused by the relatively low recognition rate on non-native children’s speech. In this paper, we investigate different neural network architectures for improving non-native children’s speech recognition and the impact of the features extracted from the corresponding ASR output on the automated assessment of speaking proficiency. Experimental results show that bidirectional LSTM-RNN can outperform feed-forward DNN in ASR, with an overall relative WER reduction of 13.4%. The improved speech recognition can then boost the language proficiency assessment performance. Correlations between the rounded automated scores and expert scores range from 0.66 to 0.70 for the three speaking tasks studied, similar to the human-human agreement levels for these tasks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Junwei Yue|AUTHOR Junwei Yue]]^^1^^, [[Fumiya Shiozawa|AUTHOR Fumiya Shiozawa]]^^1^^, [[Shohei Toyama|AUTHOR Shohei Toyama]]^^1^^, [[Yutaka Yamauchi|AUTHOR Yutaka Yamauchi]]^^2^^, [[Kayoko Ito|AUTHOR Kayoko Ito]]^^3^^, [[Daisuke Saito|AUTHOR Daisuke Saito]]^^1^^, [[Nobuaki Minematsu|AUTHOR Nobuaki Minematsu]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Tokyo, Japan; ^^2^^Tokyo International University, Japan; ^^3^^Kyoto University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1422–1426
</span></p></div>
<div class="cpabstractcardabstract"><p>Shadowing has become a well-known method to improve learners’ overall proficiency. Our previous studies realized automatic scoring of shadowing speech using HMM phoneme posteriors, called GOP (Goodness of Pronunciation) and learners’ TOEIC scores were predicted adequately. In this study, we enhance our studies from multiple angles: 1) a much larger amount of shadowing speech is collected, 2) manual scoring of these utterances is done by two native teachers, 3) DNN posteriors are introduced instead of HMM ones, 4) language-independent shadowing assessment based on posteriors-based DTW (Dynamic Time Warping) is examined. Experiments suggest that, compared to HMM, DNN can improve teacher-machine correlation substantially (by 0.37) and DTW based on DNN posteriors shows as high correlation as 0.74 even when posterior calculation is done using a different language from the target language of learning.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chong Min Lee|AUTHOR Chong Min Lee]], [[Su-Youn Yoon|AUTHOR Su-Youn Yoon]], [[Xihao Wang|AUTHOR Xihao Wang]], [[Matthew Mulholland|AUTHOR Matthew Mulholland]], [[Ikkyu Choi|AUTHOR Ikkyu Choi]], [[Keelan Evanini|AUTHOR Keelan Evanini]]
</p><p class="cpabstractcardaffiliationlist">Educational Testing Service, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1427–1431
</span></p></div>
<div class="cpabstractcardabstract"><p>In this study, we developed an off-topic response detection system to be used in the context of the automated scoring of non-native English speakers’ spontaneous speech. Based on transcriptions generated from an ASR system trained on non-native speakers’ speech and various semantic similarity features, the system classified each test response as an on-topic or off-topic response. The recent success of deep neural networks (DNN) in text similarity detection led us to explore DNN-based document similarity features. Specifically, we used a siamese adaptation of the convolutional network, due to its efficiency in learning similarity patterns simultaneously from both responses and questions used to elicit responses. In addition, a baseline system was developed using a standard vector space model (VSM) trained on sample responses for each question. The accuracy of the siamese CNN-based system was 0.97 and there was a 50% relative error reduction compared to the standard VSM-based system. Furthermore, the accuracy of the siamese CNN-based system was consistent across different questions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Vipul Arora|AUTHOR Vipul Arora]]^^1^^, [[Aditi Lahiri|AUTHOR Aditi Lahiri]]^^1^^, [[Henning Reetz|AUTHOR Henning Reetz]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Oxford, UK; ^^2^^Goethe-Universität Frankfurt, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1432–1436
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a phonological feature based computer aided pronunciation training system for the learners of a new language (L2). Phonological features allow analysing the learners’ mispronunciations systematically and rendering the feedback more effectively. The proposed acoustic model consists of a multi-task deep neural network, which uses a shared representation for estimating the phonological features and HMM state probabilities. Moreover, an active learning based scheme is proposed to efficiently deal with the cost of annotation, which is done by expert teachers, by selecting the most informative samples for annotation. Experimental evaluations are carried out for German and Italian native-speakers speaking English. For mispronunciation detection, the proposed feature-based system outperforms conventional GOP measure and classifier based methods, while providing more detailed diagnosis. Evaluations also demonstrate the advantage of active learning based sampling over random sampling.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jorge Proença|AUTHOR Jorge Proença]]^^1^^, [[Carla Lopes|AUTHOR Carla Lopes]]^^1^^, [[Michael Tjalve|AUTHOR Michael Tjalve]]^^2^^, [[Andreas Stolcke|AUTHOR Andreas Stolcke]]^^2^^, [[Sara Candeias|AUTHOR Sara Candeias]]^^3^^, [[Fernando Perdigão|AUTHOR Fernando Perdigão]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Instituto de Telecomunicações, Portugal; ^^2^^Microsoft, USA; ^^3^^Microsoft, Portugal</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1437–1441
</span></p></div>
<div class="cpabstractcardabstract"><p>To automatically evaluate the performance of children reading aloud or to follow a child’s reading in reading tutor applications, different types of reading disfluencies and mispronunciations must be accounted for. In this work, we aim to detect most of these disfluencies in sentence and pseudoword reading. Detecting incorrectly pronounced words, and quantifying the quality of word pronunciations, is arguably the hardest task. We approach the challenge as a two-step process. First, a segmentation using task-specific lattices is performed, while detecting repetitions and false starts and providing candidate segments for words. Then, candidates are classified as mispronounced or not, using multiple features derived from likelihood ratios based on phone decoding and forced alignment, as well as additional meta-information about the word. Several classifiers were explored (linear fit, neural networks, support vector machines) and trained after a feature selection stage to avoid overfitting. Improved results are obtained using feature combination compared to using only the log likelihood ratio of the reference word (22% versus 27% miss rate at constant 5% false alarm rate).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[David Escudero-Mancebo|AUTHOR David Escudero-Mancebo]]^^1^^, [[César González-Ferreras|AUTHOR César González-Ferreras]]^^1^^, [[Lourdes Aguilar|AUTHOR Lourdes Aguilar]]^^2^^, [[Eva Estebas-Vilaplana|AUTHOR Eva Estebas-Vilaplana]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universidad de Valladolid, Spain; ^^2^^Universidad Autónoma de Barcelona, Spain; ^^3^^UNED, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1442–1446
</span></p></div>
<div class="cpabstractcardabstract"><p>The aim of this paper is to investigate how automatic prosodic labeling systems contribute to the evaluation of non-native pronunciation. In particular, it examines the efficiency of a group of metrics to evaluate the prosodic competence of non-native speakers, based on the information provided by sequences of labels in the analysis of both native and non-native speech. A group of Sp_ToBI labels were obtained by means of an automatic labeling system for the speech of native and non-native speakers who read the same texts. The metrics assessed the differences in the prosodic labels for both speech samples. The results showed the efficiency of the metrics to set apart both groups of speakers. Furthermore, they exhibited how non-native speakers (American and Japanese speakers) improved their Spanish productions after doing a set of listening and repeating activities. Finally, this study also shows that the results provided by the metrics are correlated with the scores given by human evaluators on the productions of the different speakers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Albert Swart|AUTHOR Albert Swart]], [[Niko Brümmer|AUTHOR Niko Brümmer]]
</p><p class="cpabstractcardaffiliationlist">Nuance Communications, South Africa</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1477–1481
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a theoretical framework for thinking about score normalization, which confirms that normalization is not needed under (admittedly fragile) ideal conditions. If, however, these conditions are not met, e.g. under data-set shift between training and runtime, our theory reveals dependencies between scores that could be exploited by strategies such as score normalization. Indeed, it has been demonstrated over and over experimentally, that various ad-hoc score normalization recipes do work. We present a first attempt at using probability theory to design a generative score-space normalization model which gives similar improvements to ZT-norm on the text-dependent RSR 2015 database.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Soo Jin Park|AUTHOR Soo Jin Park]], [[Gary Yeung|AUTHOR Gary Yeung]], [[Jody Kreiman|AUTHOR Jody Kreiman]], [[Patricia A. Keating|AUTHOR Patricia A. Keating]], [[Abeer Alwan|AUTHOR Abeer Alwan]]
</p><p class="cpabstractcardaffiliationlist">University of California at Los Angeles, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1522–1526
</span></p></div>
<div class="cpabstractcardabstract"><p>Due to within-speaker variability in phonetic content and/or speaking style, the performance of automatic speaker verification (ASV) systems degrades especially when the enrollment and test utterances are short. This study examines how different types of variability influence performance of ASV systems. Speech samples (< 2 sec) from the UCLA Speaker Variability Database containing 5 different read sentences by 200 speakers were used to study content variability. Other samples (about 5 sec) that contained speech directed towards pets, characterized by exaggerated prosody, were used to analyze style variability. Using the i-vector/PLDA framework, the ASV system error rate with MFCCs had a relative increase of at least 265% and 730% in content-mismatched and style-mismatched trials, respectively. A set of features that represents voice quality (F0, F1, F2, F3, H1-H2, H2-H4, H4-H2k, A1, A2, A3, and CPP) was also used. Using score fusion with MFCCs, all conditions saw decreases in error rates. In addition, using the NIST SRE10 database, score fusion provided relative improvements of 11.78% for 5-second utterances, 12.41% for 10-second utterances, and a small improvement for long utterances (about 5 min). These results suggest that voice quality features can improve short-utterance text-independent ASV system performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kong Aik Lee|AUTHOR Kong Aik Lee]]^^1^^, [[Haizhou Li|AUTHOR Haizhou Li]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^A*STAR, Singapore; ^^2^^NUS, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1527–1531
</span></p></div>
<div class="cpabstractcardabstract"><p>I-vector is widely described as a compact and effective representation of speech utterances for speaker recognition. Standard i-vector extraction could be an expensive task for applications where computing resource is limited, for instance, on handheld devices. Fast approximate inference of i-vector aims to reduce the computational cost required in i-vector extraction where run-time requirement is critical. Most fast approaches hinge on certain assumptions to approximate the i-vector inference formulae with little loss of accuracy. In this paper, we analyze the uniform assumption that we had proposed earlier. We show that the assumption generally hold for long utterances but inadequate for utterances of short duration. We then propose to compensate for the negative effects by applying a simple gain factor on the i-vectors estimated from short utterances. The assertion is confirmed through analysis and experiments conducted on NIST SRE’08 and SRE’10 datasets.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hee-soo Heo|AUTHOR Hee-soo Heo]], [[Jee-weon Jung|AUTHOR Jee-weon Jung]], [[IL-ho Yang|AUTHOR IL-ho Yang]], [[Sung-hyun Yoon|AUTHOR Sung-hyun Yoon]], [[Ha-jin Yu|AUTHOR Ha-jin Yu]]
</p><p class="cpabstractcardaffiliationlist">University of Seoul, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1532–1536
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose an expanded end-to-end DNN architecture for speaker verification based on b-vectors as well as d-vectors. We embedded the components of a speaker verification system such as modeling frame-level features, extracting utterance-level features, dimensionality reduction of utterance-level features, and trial-level scoring in an expanded end-to-end DNN architecture. The main contribution of this paper is that, instead of using DNNs as parts of the system trained independently, we train the whole system jointly with a fine-tune cost after pre-training each part. The experimental results show that the proposed system outperforms the baseline d-vector system and i-vector PLDA system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Subhadeep Dey|AUTHOR Subhadeep Dey]], [[Srikanth Madikeri|AUTHOR Srikanth Madikeri]], [[Petr Motlicek|AUTHOR Petr Motlicek]], [[Marc Ferras|AUTHOR Marc Ferras]]
</p><p class="cpabstractcardaffiliationlist">Idiap Research Institute, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1482–1486
</span></p></div>
<div class="cpabstractcardabstract"><p>Subspace based techniques, such as i-vector and Joint Factor Analysis (JFA) have shown to provide state-of-the-art performance for fixed phrase based text-dependent speaker verification. However, the error rates of such systems on the random digit task of RSR dataset are higher than that of Gaussian Mixture Model-Universal Background Model (GMM-UBM). In this paper, we aim at improving i-vector system by normalizing the content of the enrollment data to match the test data. We estimate i-vectors for each frames of a speech utterance (also called online i-vectors). The largest similarity scores across frames between enrollment and test are taken using these online i-vectors to obtain speaker verification scores. Experiments on Part3 of RSR corpora show that the proposed approach achieves 12% relative improvement in equal error rate over a GMM-UBM based baseline system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chunlei Zhang|AUTHOR Chunlei Zhang]]^^1^^, [[Kazuhito Koishida|AUTHOR Kazuhito Koishida]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Texas at Dallas, USA; ^^2^^Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1487–1491
</span></p></div>
<div class="cpabstractcardabstract"><p>Text-independent speaker verification against short utterances is still challenging despite of recent advances in the field of speaker recognition with i-vector framework. In general, to get a robust i-vector representation, a satisfying amount of data is needed in the MAP adaptation step, which is hard to meet under short duration constraint. To overcome this, we present an end-to-end system which directly learns a mapping from speech features to a compact fixed length speaker discriminative embedding where the Euclidean distance is employed for measuring similarity within trials. To learn the feature mapping, a modified Inception Net with residual block is proposed to optimize the triplet loss function. The input of our end-to-end system is a fixed length spectrogram converted from an arbitrary length utterance. Experiments show that our system consistently outperforms a conventional i-vector system on short duration speaker verification tasks. To test the limit under various duration conditions, we also demonstrate how our end-to-end system behaves with different duration from 2s–4s.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hong Yu|AUTHOR Hong Yu]]^^1^^, [[Zheng-Hua Tan|AUTHOR Zheng-Hua Tan]]^^2^^, [[Zhanyu Ma|AUTHOR Zhanyu Ma]]^^1^^, [[Jun Guo|AUTHOR Jun Guo]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^BUPT, China; ^^2^^Aalborg University, Denmark</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1492–1496
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose a noise robust bottleneck feature representation which is generated by an adversarial network (AN). The AN includes two cascade connected networks, an encoding network (EN) and a discriminative network (DN). Mel-frequency cepstral coefficients (MFCCs) of clean and noisy speech are used as input to the EN and the output of the EN is used as the noise robust feature. The EN and DN are trained in turn, namely, when training the DN, noise types are selected as the training labels and when training the EN, all labels are set as the same, i.e., the clean speech label, which aims to make the AN features invariant to noise and thus achieve noise robustness. We evaluate the performance of the proposed feature on a Gaussian Mixture Model-Universal Background Model based speaker verification system, and make comparison to MFCC features of speech enhanced by short-time spectral amplitude minimum mean square error (STSA-MMSE) and deep neural network-based speech enhancement (DNN-SE) methods. Experimental results on the RSR2015 database show that the proposed AN bottleneck feature (AN-BN) dramatically outperforms the STSA-MMSE and DNN-SE based MFCCs for different noise types and signal-to-noise ratios. Furthermore, the AN-BN feature is able to improve the speaker verification performance under the clean condition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shuai Wang|AUTHOR Shuai Wang]], [[Yanmin Qian|AUTHOR Yanmin Qian]], [[Kai Yu|AUTHOR Kai Yu]]
</p><p class="cpabstractcardaffiliationlist">Shanghai Jiao Tong University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1497–1501
</span></p></div>
<div class="cpabstractcardabstract"><p>Developing a good speaker embedding has received tremendous interest in the speech community. Speaker representations such as i-vector, d-vector have shown their superiority in speaker recognition, speaker adaptation and other related tasks. However, not much is known about which properties are exactly encoded in these speaker embeddings. In this work, we make an in-depth investigation on three kinds of speaker embeddings, i.e. i-vector, d-vector and RNN/LSTM based sequence-vector (s-vector). Classification tasks are carefully designed to facilitate better understanding of these encoded speaker representations. Their abilities of encoding different properties are revealed and compared, such as speaker identity, gender, speaking rate, text content and channel information. Moreover, a new architecture is proposed to integrate different speaker embeddings, so that the advantages can be combined. The new advanced speaker embedding (i-s-vector) outperforms the others, and shows a more than 50% EER reduction compared to the i-vector baseline on the RSR2015 content mismatch trials.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jianbo Ma|AUTHOR Jianbo Ma]]^^1^^, [[Vidhyasaharan Sethu|AUTHOR Vidhyasaharan Sethu]]^^1^^, [[Eliathamby Ambikairajah|AUTHOR Eliathamby Ambikairajah]]^^1^^, [[Kong Aik Lee|AUTHOR Kong Aik Lee]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of New South Wales, Australia; ^^2^^A*STAR, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1502–1506
</span></p></div>
<div class="cpabstractcardabstract"><p>State-of-the-art speaker verification systems are based on the total variability model to compactly represent the acoustic space. However, short duration utterances only contain limited phonetic content, potentially resulting in an incomplete representation being captured by the total variability model thus leading to poor speaker verification performance. In this paper, a technique to incorporate component-wise local acoustic variability information into the speaker verification framework is proposed. Specifically, Gaussian Probabilistic Linear Discriminant Analysis (G-PLDA) of the supervector space, with a block diagonal covariance assumption, is used in conjunction with the traditional total variability model. Experimental results obtained using the NIST SRE 2010 dataset show that the incorporation of the proposed method leads to relative improvements of 20.48% and 18.99% in the 3 second condition for male and female speech respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jinghua Zhong|AUTHOR Jinghua Zhong]]^^1^^, [[Wenping Hu|AUTHOR Wenping Hu]]^^2^^, [[Frank K. Soong|AUTHOR Frank K. Soong]]^^2^^, [[Helen Meng|AUTHOR Helen Meng]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Chinese University of Hong Kong, China; ^^2^^Microsoft, China; ^^3^^Chinese University of Hong Kong, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1507–1511
</span></p></div>
<div class="cpabstractcardabstract"><p>We investigate how to improve the performance of DNN i-vector based speaker verification for short, text-constrained test utterances, e.g. connected digit strings. A text-constrained verification, due to its smaller, limited vocabulary, can deliver better performance than a text-independent one for a short utterance. We study the problem with “phonetically aware” Deep Neural Net (DNN) in its capability on “stochastic phonetic-alignment” in constructing supervectors and estimating the corresponding i-vectors with two speech databases: a large vocabulary, conversational, speaker independent database (Fisher) and a small vocabulary, continuous digit database (RSR2015 Part III). The phonetic alignment efficiency and resultant speaker verification performance are compared with differently sized senone sets which can characterize the phonetic pronunciations of utterances in the two databases. Performance on RSR2015 Part III evaluation shows a relative improvement of EER, i.e., 7.89% for male speakers and 3.54% for female speakers with only digit related senones. The DNN bottleneck features were also studied to investigate their capability of extracting phonetic sensitive information which is useful for text-independent or text-constrained speaker verifications. We found that by tandeming MFCC with bottleneck features, EERs can be further reduced.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ville Vestman|AUTHOR Ville Vestman]]^^1^^, [[Dhananjaya Gowda|AUTHOR Dhananjaya Gowda]]^^2^^, [[Md. Sahidullah|AUTHOR Md. Sahidullah]]^^1^^, [[Paavo Alku|AUTHOR Paavo Alku]]^^3^^, [[Tomi Kinnunen|AUTHOR Tomi Kinnunen]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Eastern Finland, Finland; ^^2^^Samsung Electronics, Korea; ^^3^^Aalto University, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1512–1516
</span></p></div>
<div class="cpabstractcardabstract"><p>In poor room acoustics conditions, speech signals received by a microphone might become corrupted by the signals’ delayed versions that are reflected from the room surfaces (e.g. wall, floor). This phenomenon, reverberation, drops the accuracy of automatic speaker verification systems by causing mismatch between the training and testing. Since reverberation causes temporal smearing to the signal, one way to tackle its effects is to study robust feature extraction, particularly based on long-time temporal feature extraction. This approach has been adopted previously in the form of 2-dimensional autoregressive (2DAR) feature extraction scheme by using frequency domain linear prediction (FDLP). In 2DAR, FDLP processing is followed by time domain linear prediction (TDLP). In the current study, we propose modifying the latter part of the 2DAR feature extraction scheme by replacing TDLP with time-varying linear prediction (TVLP) to add an extra layer of temporal processing. Our speaker verification experiments using the proposed features with the text-dependent RedDots corpus show small but consistent improvements in clean and reverberant conditions (up to 6.5%) over the 2DAR features and large improvements over the MFCC features in reverberant conditions (up to 46.5%).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gautam Bhattacharya|AUTHOR Gautam Bhattacharya]]^^1^^, [[Jahangir Alam|AUTHOR Jahangir Alam]]^^2^^, [[Patrick Kenny|AUTHOR Patrick Kenny]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^McGill University, Canada; ^^2^^CRIM, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1517–1521
</span></p></div>
<div class="cpabstractcardabstract"><p>The performance of a state-of-the-art speaker verification system is severely degraded when it is presented with trial recordings of short duration. In this work we propose to use deep neural networks to learn short-duration speaker embeddings. We focus on the 5s-5s condition, wherein both sides of a verification trial are 5 seconds long. In our previous work we established that learning a non-linear mapping from i-vectors to speaker labels is beneficial for speaker verification [1]. In this work we take the idea of learning a speaker classifier one step further — we apply deep neural networks directly to time-frequency speech representations. We propose two feed-forward network architectures for this task. Our best model is based on a deep convolutional architecture wherein recordings are treated as images. From our experimental findings we advocate treating utterances as images or ‘speaker snapshots’, much like in face recognition. Our convolutional speaker embeddings perform significantly better than i-vectors when scoring is done using cosine distance, where the relative improvement is 23.5%. The proposed deep embeddings combined with cosine distance also outperform a state-of-the-art i-vector verification system by 1%, providing further empirical evidence in favor of our learned speaker features.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chen Chen|AUTHOR Chen Chen]], [[Jiqing Han|AUTHOR Jiqing Han]], [[Yilin Pan|AUTHOR Yilin Pan]]
</p><p class="cpabstractcardaffiliationlist">Harbin Institute of Technology, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1537–1541
</span></p></div>
<div class="cpabstractcardabstract"><p>The i-vector framework is one of the most popular methods in speaker verification, and estimating a total variability space (TVS) is a key part in the i-vector framework. Current estimation methods pay less attention on the discrimination of TVS, but the discrimination is so important that it will influence the improvement of performance. So we focus on the discrimination of TVS to achieve a better performance. In this paper, a discriminative estimating method of TVS based on probabilistic partial least squares (PPLS) is proposed. In this method, the discrimination is improved by using the priori information (labels) of speaker, so both the correlation of intra-class and the discrimination of interclass are fully utilized. Meanwhile, it also introduces a probabilistic view of the partial least squares (PLS) method to overcome the disadvantage of high computational complexity and the inability of channel compensation. And also this proposed method can achieve a better performance than the traditional TVS estimation method as well as the PLS-based method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Md. Hafizur Rahman|AUTHOR Md. Hafizur Rahman]]^^1^^, [[Ivan Himawan|AUTHOR Ivan Himawan]]^^2^^, [[David Dean|AUTHOR David Dean]]^^1^^, [[Sridha Sridharan|AUTHOR Sridha Sridharan]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Queensland University of Technology, Australia; ^^2^^Queensland University of Technology, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1581–1585
</span></p></div>
<div class="cpabstractcardabstract"><p>The state-of-the-art i-vector based probabilistic linear discriminant analysis (PLDA) trained on non-target (or out-domain) data significantly affects the speaker verification performance due to the domain mismatch between training and evaluation data. To improve the speaker verification performance, sufficient amount of domain mismatch compensated out-domain data must be used to train the PLDA models successfully. In this paper, we propose a domain mismatch modeling (DMM) technique using maximum-a-posteriori (MAP) estimation to model and compensate the domain variability from the out-domain training i-vectors. From our experimental results, we found that the DMM technique can achieve at least a 24% improvement in EER over an out-domain only baseline when speaker labels are available. Further improvement of 3% is obtained when combining DMM with domain-invariant covariance normalization (DICN) approach. The DMM/DICN combined technique is shown to perform better than in-domain PLDA system with only 200 labeled speakers or 2,000 unlabeled i-vectors.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lantian Li|AUTHOR Lantian Li]], [[Yixiang Chen|AUTHOR Yixiang Chen]], [[Ying Shi|AUTHOR Ying Shi]], [[Zhiyuan Tang|AUTHOR Zhiyuan Tang]], [[Dong Wang|AUTHOR Dong Wang]]
</p><p class="cpabstractcardaffiliationlist">Tsinghua University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1542–1546
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently deep neural networks (DNNs) have been used to learn speaker features. However, the quality of the learned features is not sufficiently good, so a complex back-end model, either neural or probabilistic, has to be used to address the residual uncertainty when applied to speaker verification. This paper presents a convolutional time-delay deep neural network structure (CT-DNN) for speaker feature learning. Our experimental results on the Fisher database demonstrated that this CT-DNN can produce high-quality speaker features: even with a single feature (0.3 seconds including the context), the EER can be as low as 7.68%. This effectively confirmed that the speaker trait is largely a deterministic short-time property rather than a long-time distributional pattern, and therefore can be extracted from just dozens of frames.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Pierre-Michel Bousquet|AUTHOR Pierre-Michel Bousquet]], [[Mickael Rouvier|AUTHOR Mickael Rouvier]]
</p><p class="cpabstractcardaffiliationlist">LIA (EA 4128), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1547–1551
</span></p></div>
<div class="cpabstractcardabstract"><p>Duration mismatch between enrollment and test utterances still remains a major concern for reliability of real-life speaker recognition applications. Two approaches are proposed here to deal with this case when using the i-vector representation. The first one is an adaptation of Gaussian Probabilistic Linear Discriminant Analysis (PLDA) modeling, which can be extended to the case of any shift between i-vectors drawn from two distinct distributions. The second one attempts to map i-vectors of truncated segments of an utterance to the i-vector of the full segment, by the use of deep neural networks (DNN). Our results show that both new approaches outperform the standard PLDA by about 10% relative, noting that these back-end methods could complement those quantifying the i-vector uncertainty during its extraction process, in the case of duration gap.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alan McCree|AUTHOR Alan McCree]], [[Gregory Sell|AUTHOR Gregory Sell]], [[Daniel Garcia-Romero|AUTHOR Daniel Garcia-Romero]]
</p><p class="cpabstractcardaffiliationlist">Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1552–1556
</span></p></div>
<div class="cpabstractcardabstract"><p>Probabilistic Linear Discriminant Analysis (PLDA) continues to be the most effective approach for speaker recognition in the i-vector space. This paper extends the PLDA model to include both enrollment and test cut duration as well as to distinguish between session and channel variability. In addition, we address the task of unsupervised adaptation to unknown new domains in two ways: speaker-dependent PLDA parameters and cohort score normalization using Bayes rule. Experimental results on the NIST SRE16 task show that these principled techniques provide state-of-the-art performance with negligible increase in complexity over a PLDA baseline.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bengt J. Borgström|AUTHOR Bengt J. Borgström]]^^1^^, [[Elliot Singer|AUTHOR Elliot Singer]]^^1^^, [[Douglas Reynolds|AUTHOR Douglas Reynolds]]^^1^^, [[Seyed Omid Sadjadi|AUTHOR Seyed Omid Sadjadi]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^MIT Lincoln Laboratory, USA; ^^2^^NIST, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1557–1561
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper addresses speaker verification domain adaptation with inadequate in-domain data. Specifically, we explore the cases where in-domain data sets do not include speaker labels, contain speakers with few samples, or contain speakers with low channel diversity. Existing domain adaptation methods are reviewed, and their shortcomings are discussed. We derive an unsupervised version of fully Bayesian adaptation which reduces the reliance on rich in-domain data. When applied to domain adaptation with inadequate in-domain data, the proposed approach yields competitive results when the samples per speaker are reduced, and outperforms existing supervised methods when the channel diversity is low, even without requiring speaker labels. These results are validated on the NIST SRE16, which uses a highly inadequate in-domain data set.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhili Tan|AUTHOR Zhili Tan]], [[Man-Wai Mak|AUTHOR Man-Wai Mak]]
</p><p class="cpabstractcardaffiliationlist">Hong Kong Polytechnic University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1562–1566
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes applying multi-task learning to train deep neural networks (DNNs) for calibrating the PLDA scores of speaker verification systems under noisy environments. To facilitate the DNNs to learn the main task (calibration), several auxiliary tasks were introduced, including the prediction of SNR and duration from i-vectors and classifying whether an i-vector pair belongs to the same speaker or not. The possibility of replacing the PLDA model by a DNN during the scoring stage is also explored. Evaluations on noise contaminated speech suggest that the auxiliary tasks are important for the DNNs to learn the main calibration task and that the uncalibrated PLDA scores are an essential input to the DNNs. Without this input, the DNNs can only predict the score shifts accurately, suggesting that the PLDA model is indispensable.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Pavel Matějka|AUTHOR Pavel Matějka]], [[Ondřej Novotný|AUTHOR Ondřej Novotný]], [[Oldřich Plchot|AUTHOR Oldřich Plchot]], [[Lukáš Burget|AUTHOR Lukáš Burget]], [[Mireia Diez Sánchez|AUTHOR Mireia Diez Sánchez]], [[Jan Černocký|AUTHOR Jan Černocký]]
</p><p class="cpabstractcardaffiliationlist">Brno University of Technology, Czech Republic</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1567–1571
</span></p></div>
<div class="cpabstractcardabstract"><p>NIST Speaker Recognition Evaluation 2016 has revealed the importance of score normalization for mismatched data conditions. This paper analyzes several score normalization techniques for test conditions with multiple languages. The best performing one for a PLDA classifier is an adaptive s-norm with 30% relative improvement over the system without any score normalization. The analysis shows that the adaptive score normalization (using top scoring files per trial) selects cohorts that in 68% of cases contain recordings from the same language and in 92% of cases from the same gender as the enrollment and test recordings. Our results suggest that the data to select score normalization cohorts should be a pool of several languages and channels and if possible, its subset should contain data from the target domain.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Anna Silnova|AUTHOR Anna Silnova]], [[Lukáš Burget|AUTHOR Lukáš Burget]], [[Jan Černocký|AUTHOR Jan Černocký]]
</p><p class="cpabstractcardaffiliationlist">Brno University of Technology, Czech Republic</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1572–1575
</span></p></div>
<div class="cpabstractcardabstract"><p>Just like in other areas of automatic speech processing, feature extraction based on bottleneck neural networks was recently found very effective for the speaker verification task. However, better results are usually reported with more complex neural network architectures (e.g. stacked bottlenecks), which are difficult to reproduce. In this work, we experiment with the so called deep features, which are based on a simple feed-forward neural network architecture. We study various forms of applying deep features to i-vector/PLDA based speaker verification. With proper settings, better verification performance can be obtained by means of this simple architecture as compared to the more elaborate bottleneck features. Also, we further experiment with multi-task training, where the neural network is trained for both speaker recognition and senone recognition objectives. Results indicate that, with a careful weighting of the two objectives, multi-task training can result in significantly better performing deep features.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ruchir Travadi|AUTHOR Ruchir Travadi]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]
</p><p class="cpabstractcardaffiliationlist">University of Southern California, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1576–1580
</span></p></div>
<div class="cpabstractcardabstract"><p>The Total Variability Model (TVM) [1] has been widely used in audio signal processing as a framework for capturing differences in feature space distributions across variable length sequences by mapping them into a fixed-dimensional representation. Its formulation requires making an assumption about the source data distribution being a Gaussian Mixture Model (GMM). In this paper, we show that it is possible to arrive at the same model formulation without requiring such an assumption about distribution of the data, by showing asymptotic normality of the statistics used to estimate the model. We highlight some connections between TVM and heteroscedastic Principal Component Analysis (PCA), as well as the matrix completion problem, which lead to a computationally efficient formulation of the Maximum Likelihood estimation problem for the model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gaofeng Cheng|AUTHOR Gaofeng Cheng]]^^1^^, [[Vijayaditya Peddinti|AUTHOR Vijayaditya Peddinti]]^^2^^, [[Daniel Povey|AUTHOR Daniel Povey]]^^2^^, [[Vimal Manohar|AUTHOR Vimal Manohar]]^^2^^, [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]]^^2^^, [[Yonghong Yan|AUTHOR Yonghong Yan]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Chinese Academy of Sciences, China; ^^2^^Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1586–1590
</span></p></div>
<div class="cpabstractcardabstract"><p>Long Short-Term Memory networks (LSTMs) are a component of many state-of-the-art DNN-based speech recognition systems. Dropout is a popular method to improve generalization in DNN training. In this paper we describe extensive experiments in which we investigated the best way to combine dropout with LSTMs — specifically, projected LSTMs (LSTMP). We investigated various locations in the LSTM to place the dropout (and various combinations of locations), and a variety of dropout schedules. Our optimized recipe gives consistent improvements in WER across a range of datasets, including Switchboard, TED-LIUM and AMI.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jaeyoung Kim|AUTHOR Jaeyoung Kim]], [[Mostafa El-Khamy|AUTHOR Mostafa El-Khamy]], [[Jungwon Lee|AUTHOR Jungwon Lee]]
</p><p class="cpabstractcardaffiliationlist">Samsung Semiconductor, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1591–1595
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, a novel architecture for a deep recurrent neural network, residual LSTM is introduced. A plain LSTM has an internal memory cell that can learn long term dependencies of sequential data. It also provides a temporal shortcut path to avoid vanishing or exploding gradients in the temporal domain. The residual LSTM provides an additional spatial shortcut path from lower layers for efficient training of deep networks with multiple LSTM layers. Compared with the previous work, highway LSTM, residual LSTM separates the spatial shortcut path from the temporal one by using output layers, which can help to avoid a conflict between spatial and temporal-domain gradient flows. Furthermore, residual LSTM reuses the output projection matrix and the output gate of LSTM to control the spatial information flow instead of additional gate networks, which effectively reduces more than 10% of network parameters. An experiment for distant speech recognition on the AMI SDM corpus shows that 10-layer plain and highway LSTM networks presented 13.7% and 6.2% increase in WER over 3-layer baselines, respectively. On the contrary, 10-layer residual LSTM networks provided the lowest WER 41.0%, which corresponds to 3.3% and 2.8% WER reduction over plain and highway LSTM networks, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Dung T. Tran|AUTHOR Dung T. Tran]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Shigeki Karita|AUTHOR Shigeki Karita]], [[Michael Hentschel|AUTHOR Michael Hentschel]], [[Atsunori Ogawa|AUTHOR Atsunori Ogawa]], [[Tomohiro Nakatani|AUTHOR Tomohiro Nakatani]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1596–1600
</span></p></div>
<div class="cpabstractcardabstract"><p>Recurrent neural networks (RNNs) with jump ahead connections have been used in computer vision tasks. Still, they have not been investigated well for automatic speech recognition (ASR) tasks. On the other hand, the unfolded RNN has been shown to be an effective model for acoustic modeling tasks. This paper investigates how to elaborate a sophisticated unfolded deep RNN architecture in which recurrent connections use a convolutional neural network (CNN) to model a short-term dependence between hidden states. In this study, our unfolded RNN architecture is a CNN that processes a sequence of input features sequentially. At each time step, the CNN inputs a small block of the input features and the output of the hidden layer from the preceding block in order to compute the output of its hidden layer. In addition, by exploiting either one or multiple jump ahead connections between time steps, our network can learn long-term dependencies more effectively. We carried out experiments on the CHiME 3 task showing the effectiveness of our proposed approach.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shigeki Karita|AUTHOR Shigeki Karita]], [[Atsunori Ogawa|AUTHOR Atsunori Ogawa]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Tomohiro Nakatani|AUTHOR Tomohiro Nakatani]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1601–1605
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic speech recognition (ASR) performance has greatly improved with the introduction of convolutional neural network (CNN) or long short-term memory (LSTM) for acoustic modeling. Recently, a convolutional LSTM (CLSTM) has been proposed to directly use convolution operation within the LSTM blocks and combine the advantages of both CNN and LSTM structures into a single architecture. This paper presents the first attempt to use CLSTMs for acoustic modeling. In addition, we propose a new forward-backward architecture to exploit long-term left/right context efficiently. The proposed scheme combines forward and backward LSTMs at different time points of an utterance with the aim of modeling long term frame invariant information such as speaker characteristics, channel etc. Furthermore, the proposed forward-backward architecture can be trained with truncated back-propagation-through-time unlike conventional bidirectional LSTM (BLSTM) architectures. Therefore, we are able to train deeply stacked CLSTM acoustic models, which is practically challenging with conventional BLSTMs. Experimental results show that both CLSTM and forward-backward LSTM improve word error rates significantly compared to standard CNN and LSTM architectures.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sercan Ö. Arık|AUTHOR Sercan Ö. Arık]], [[Markus Kliegl|AUTHOR Markus Kliegl]], [[Rewon Child|AUTHOR Rewon Child]], [[Joel Hestness|AUTHOR Joel Hestness]], [[Andrew Gibiansky|AUTHOR Andrew Gibiansky]], [[Chris Fougner|AUTHOR Chris Fougner]], [[Ryan Prenger|AUTHOR Ryan Prenger]], [[Adam Coates|AUTHOR Adam Coates]]
</p><p class="cpabstractcardaffiliationlist">Baidu Research, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1606–1610
</span></p></div>
<div class="cpabstractcardabstract"><p>Keyword spotting (KWS) constitutes a major component of human-technology interfaces. Maximizing the detection accuracy at a low false alarm (FA) rate, while minimizing the footprint size, latency and complexity are the goals for KWS. Towards achieving them, we study Convolutional Recurrent Neural Networks (CRNNs). Inspired by large-scale state-of-the-art speech recognition systems, we combine the strengths of convolutional layers and recurrent layers to exploit local structure and long-range context. We analyze the effect of architecture parameters, and propose training strategies to improve performance. With only ~230k parameters, our CRNN model yields acceptably low latency, and achieves 97.71% accuracy at 0.5 FA/hour for 5 dB signal-to-noise ratio.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chunyang Wu|AUTHOR Chunyang Wu]], [[Mark J.F. Gales|AUTHOR Mark J.F. Gales]]
</p><p class="cpabstractcardaffiliationlist">University of Cambridge, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1611–1615
</span></p></div>
<div class="cpabstractcardabstract"><p>Deep learning approaches achieve state-of-the-art performance in a range of applications, including speech recognition. However, the parameters of the deep neural network (DNN) are hard to interpret, which makes regularisation and adaptation to speaker or acoustic conditions challenging. This paper proposes the deep activation mixture model (DAMM) to address these problems. The output of one hidden layer is modelled as the sum of a mixture and residual models. The mixture model forms an activation function contour while the residual one models fluctuations around the contour. The use of the mixture model gives two advantages: First, it introduces a novel regularisation on the DNN. Second, it allows novel adaptation schemes. The proposed approach is evaluated on a large-vocabulary U.S. English broadcast news task. It yields a slightly better performance than the DNN baselines, and on the utterance-level unsupervised adaptation, the adapted DAMM acquires further performance gains.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Michael Heck|AUTHOR Michael Heck]]^^1^^, [[Masayuki Suzuki|AUTHOR Masayuki Suzuki]]^^1^^, [[Takashi Fukuda|AUTHOR Takashi Fukuda]]^^1^^, [[Gakuto Kurata|AUTHOR Gakuto Kurata]]^^1^^, [[Satoshi Nakamura|AUTHOR Satoshi Nakamura]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IBM, Japan; ^^2^^NAIST, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1616–1620
</span></p></div>
<div class="cpabstractcardabstract"><p>We present our work on constructing multi-scale deep convolutional neural networks for automatic speech recognition. Several VGG nets have been trained that differ solely in the kernel size of the convolutional layers. The general idea is that receptive fields of varying sizes match structures of different scales, thus supporting more robust recognition when combined appropriately. We construct a large multi-scale system by means of system combination. We use ROVER and the fusion of posterior predictions as examples of late combination, and knowledge distillation using soft labels from a model ensemble as a way of early combination. In this work, distillation is approached from the perspective of knowledge transfer pre-training, which is followed by a fine-tuning on the original hard labels. Our results show that it is possible to bundle the individual recognition strengths of the VGGs in a much simpler CNN architecture that yields equal performance with the best late combination.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tamás Grósz|AUTHOR Tamás Grósz]]^^1^^, [[Gábor Gosztolya|AUTHOR Gábor Gosztolya]]^^1^^, [[László Tóth|AUTHOR László Tóth]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Szeged, Hungary; ^^2^^MTA-SZTE RGAI, Hungary</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1621–1625
</span></p></div>
<div class="cpabstractcardabstract"><p>In current HMM/DNN speech recognition systems, the purpose of the DNN component is to estimate the posterior probabilities of tied triphone states. In most cases the distribution of these states is uneven, meaning that we have a markedly different number of training samples for the various states. This imbalance of the training data is a source of suboptimality for most machine learning algorithms, and DNNs are no exception. A straightforward solution is to re-sample the data, either by upsampling the rarer classes or by downsampling the more common classes. Here, we experiment with the so-called probabilistic sampling method that applies downsampling and upsampling at the same time. For this, it defines a new class distribution for the training data, which is a linear combination of the original and the uniform class distributions. As an extension to previous studies, we propose a new method to re-estimate the class priors, which is required to remedy the mismatch between the training and the test data distributions introduced by re-sampling. Using probabilistic sampling and the proposed modification we report 5% and 6% relative error rate reductions on the TED-LIUM and on the AMI corpora, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tamás Grósz|AUTHOR Tamás Grósz]]^^1^^, [[Gábor Gosztolya|AUTHOR Gábor Gosztolya]]^^1^^, [[László Tóth|AUTHOR László Tóth]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Szeged, Hungary; ^^2^^MTA-SZTE RGAI, Hungary</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1626–1630
</span></p></div>
<div class="cpabstractcardabstract"><p>Deep neural network (DNN) based speech recognizers have recently replaced Gaussian mixture (GMM) based systems as the state-of-the-art. While some of the modeling techniques developed for the GMM based framework may directly be applied to HMM/DNN systems, others may be inappropriate. One such example is the creation of context-dependent tied states, for which an efficient decision tree state tying method exists. The tied states used to train DNNs are usually obtained using the same tying algorithm, even though it is based on likelihoods of Gaussians, hence it is more appropriate for HMM/GMMs. Recently, however, several refinements have been published which seek to adapt the state tying algorithm to the HMM/DNN hybrid architecture. Unfortunately, these studies reported results on different (and sometimes very small) datasets, which does not allow their direct comparison. Here, we tested four of these methods on the same LVCSR task, and compared their performance under the same circumstances. We found that, besides changing the input of the context-dependent state tying algorithm, it is worth adjusting the tying criterion as well. The methods which utilized a decision criterion designed directly for neural networks consistently, and significantly, outperformed those which employed the standard Gaussian-based algorithm.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yiming Wang|AUTHOR Yiming Wang]], [[Vijayaditya Peddinti|AUTHOR Vijayaditya Peddinti]], [[Hainan Xu|AUTHOR Hainan Xu]], [[Xiaohui Zhang|AUTHOR Xiaohui Zhang]], [[Daniel Povey|AUTHOR Daniel Povey]], [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]]
</p><p class="cpabstractcardaffiliationlist">Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1631–1635
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper we describe a modification to Stochastic Gradient Descent (SGD) that improves generalization to unseen data. It consists of doing two steps for each minibatch: a backward step with a small negative learning rate, followed by a forward step with a larger learning rate. The idea was initially inspired by ideas from adversarial training, but we show that it can be viewed as a crude way of canceling out certain systematic biases that come from training on finite data sets. The method gives ~ 10% relative improvement over our best acoustic models based on lattice-free MMI, across multiple datasets with 100–300 hours of data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ryu Takeda|AUTHOR Ryu Takeda]]^^1^^, [[Kazuhiro Nakadai|AUTHOR Kazuhiro Nakadai]]^^2^^, [[Kazunori Komatani|AUTHOR Kazunori Komatani]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Osaka University, Japan; ^^2^^Honda Research Institute Japan, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1636–1640
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes a node-pruning method for an acoustic model based on deep neural networks (DNNs). Node pruning is a promising method to reduce the memory usage and computational cost of DNNs. A score function is defined to measure the importance of each node, and less important nodes are pruned. The entropy of the activity of each node has been used as a score function to find nodes with outputs that do not change at all. We introduce entropy of weights of each node to consider the number of weights and their patterns of each node. Because the number of weights and the patterns differ at each layer, the importance of the node should also be measured using the related weights of the target node. We then propose a score function that integrates the entropy of weights and node activity, which will prune less important nodes more efficiently. Experimental results showed that the proposed pruning method successfully reduced the number of parameters by about 6% without any accuracy loss compared with a score function based only on the entropy of node activity.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ehsan Variani|AUTHOR Ehsan Variani]], [[Tom Bagby|AUTHOR Tom Bagby]], [[Erik McDermott|AUTHOR Erik McDermott]], [[Michiel Bacchiani|AUTHOR Michiel Bacchiani]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1641–1645
</span></p></div>
<div class="cpabstractcardabstract"><p>This article discusses strategies for end-to-end training of state-of-the-art acoustic models for Large Vocabulary Continuous Speech Recognition (LVCSR), with the goal of leveraging TensorFlow components so as to make efficient use of large-scale training sets, large model sizes, and high-speed computation units such as Graphical Processing Units (GPUs). Benchmarks are presented that evaluate the efficiency of different approaches to batching of training data, unrolling of recurrent acoustic models, and device placement of TensorFlow variables and operations. An overall training architecture developed in light of those findings is then described. The approach makes it possible to take advantage of both data parallelism and high speed computation on GPU for state-of-the-art sequence training of acoustic models. The effectiveness of the design is evaluated for different training schemes and model sizes, on a 15,000 hour Voice Search task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Khe Chai Sim|AUTHOR Khe Chai Sim]], [[Arun Narayanan|AUTHOR Arun Narayanan]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1646–1650
</span></p></div>
<div class="cpabstractcardabstract"><p>The forward-backward algorithm is commonly used to train neural network acoustic models when optimizing a sequence objective like MMI and sMBR. Recent work on lattice-free MMI training of neural network acoustic models shows that the forward-backward algorithm can be computed efficiently in the probability domain as a series of sparse matrix multiplications using GPUs. In this paper, we present a more efficient way of computing forward-backward using a dense matrix multiplication approach. We do this by exploiting the block-diagonal structure of the n-gram state transition matrix; instead of multiplying large sparse matrices, the proposed method involves a series of smaller dense matrix multiplications, which can be computed in parallel. Efficient implementation can be easily achieved by leveraging on the optimized matrix multiplication routines provided by standard libraries, such as NumPy and TensorFlow. Runtime benchmarks show that the dense multiplication method is consistently faster than the sparse multiplication method (on both CPUs and GPUs), when applied to a 4-gram phone language model. This is still the case even when the sparse multiplication method uses a more compact finite state model representation by excluding unseen n-grams.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zoltán Tüske|AUTHOR Zoltán Tüske]], [[Wilfried Michel|AUTHOR Wilfried Michel]], [[Ralf Schlüter|AUTHOR Ralf Schlüter]], [[Hermann Ney|AUTHOR Hermann Ney]]
</p><p class="cpabstractcardaffiliationlist">RWTH Aachen University, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1651–1655
</span></p></div>
<div class="cpabstractcardabstract"><p>The combination of acoustic models or features is a standard approach to exploit various knowledge sources. This paper investigates the concatenation of different bottleneck (BN) neural network (NN) outputs for tandem acoustic modeling. Thus, combination of NN features is performed via Gaussian mixture models (GMM). Complementarity between the NN feature representations is attained by using various network topologies: LSTM recurrent, feed-forward, and hierarchical, as well as different non-linearities: hyperbolic tangent, sigmoid, and rectified linear units. Speech recognition experiments are carried out on various tasks: telephone conversations, Skype calls, as well as broadcast news and conversations. Results indicate that LSTM based tandem approach is still competitive, and such tandem model can challenge comparable hybrid systems. The traditional steps of tandem modeling, speaker adaptive and sequence discriminative GMM training, improve the tandem results further. Furthermore, these “old-fashioned” steps remain applicable after the concatenation of multiple neural network feature streams. Exploiting the parallel processing of input feature streams, it is shown that 2–5% relative improvement could be achieved over the single best BN feature set. Finally, we also report results after neural network based language model rescoring and examine the system combination possibilities using such complex tandem models.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Qingming Tang|AUTHOR Qingming Tang]], [[Weiran Wang|AUTHOR Weiran Wang]], [[Karen Livescu|AUTHOR Karen Livescu]]
</p><p class="cpabstractcardaffiliationlist">TTIC, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1656–1660
</span></p></div>
<div class="cpabstractcardabstract"><p>We study the problem of acoustic feature learning in the setting where we have access to another (non-acoustic) modality for feature learning but not at test time. We use deep variational canonical correlation analysis (VCCA), a recently proposed deep generative method for multi-view representation learning. We also extend VCCA with improved latent variable priors and with adversarial learning. Compared to other techniques for multi-view feature learning, VCCA’s advantages include an intuitive latent variable interpretation and a variational lower bound objective that can be trained end-to-end efficiently. We compare VCCA and its extensions with previous feature learning methods on the University of Wisconsin X-ray Microbeam Database, and show that VCCA-based feature learning improves over previous methods for speaker-independent phonetic recognition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ryo Masumura|AUTHOR Ryo Masumura]], [[Taichi Asami|AUTHOR Taichi Asami]], [[Hirokazu Masataki|AUTHOR Hirokazu Masataki]], [[Ryo Ishii|AUTHOR Ryo Ishii]], [[Ryuichiro Higashinaka|AUTHOR Ryuichiro Higashinaka]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1661–1665
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a novel modeling called stacked time-asynchronous sequential networks (STASNs) for online end-of-turn detection. An online end-of-turn detection that determines turn-taking points in a real-time manner is an essential component for human-computer interaction systems. In this study, we use long-range sequential information of multiple time-asynchronous sequential features, such as prosodic, phonetic, and lexical sequential features, to enhance online end-of-turn detection performance. Our key idea is to embed individual sequential features in a fixed-length continuous representation by using sequential networks. This enables us to simultaneously handle multiple time-asynchronous sequential features for end-of-turn detection. STASNs can embed all of the sequential information between a start-of-conversation and the current end-of-utterance in a fixed-length continuous representation that can be directly used for classification by stacking multiple sequential networks. Experiments show that STASNs outperforms conventional modeling with limited sequential information. Furthermore, STASNs with senone bottleneck features extracted using senone-based deep neural networks have superior performance without requiring lexical features decoded by an automatic speech recognition process.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Paul Crook|AUTHOR Paul Crook]], [[Alex Marin|AUTHOR Alex Marin]]
</p><p class="cpabstractcardaffiliationlist">Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1706–1710
<a href="./IS2017/MEDIA/0161" class="externallinkbutton" target="_blank">{{$:/causal/Multimedia Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>User simulators are a principal offline method for training and evaluating human-computer dialog systems. In this paper, we examine simple sequence-to-sequence neural network architectures for training end-to-end, natural language to natural language, user simulators, using only raw logs of previous interactions without any additional human labelling. We compare the neural network-based simulators with a language model (LM)-based approach for creating natural language user simulators. Using both an automatic evaluation using LM perplexity and a human evaluation, we demonstrate that the sequence-to-sequence approaches outperform the LM-based method. We show correlation between LM perplexity and the human evaluation on this task, and discuss the benefits of different neural network architecture variations.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Vikram Ramanarayanan|AUTHOR Vikram Ramanarayanan]], [[Patrick L. Lange|AUTHOR Patrick L. Lange]], [[Keelan Evanini|AUTHOR Keelan Evanini]], [[Hillary R. Molloy|AUTHOR Hillary R. Molloy]], [[David Suendermann-Oeft|AUTHOR David Suendermann-Oeft]]
</p><p class="cpabstractcardaffiliationlist">Educational Testing Service, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1711–1715
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a spoken dialog-based framework for the computer-assisted language learning (CALL) of conversational English. In particular, we leveraged the open-source HALEF dialog framework to develop a job interview conversational application. We then used crowdsourcing to collect multiple interactions with the system from non-native English speakers. We analyzed human-rated scores of the recorded dialog data on three different scoring dimensions critical to the delivery of conversational English — fluency, pronunciation and intonation/stress — and further examined the efficacy of automatically-extracted, hand-curated speech features in predicting each of these sub-scores. Machine learning experiments showed that trained scoring models generally perform at par with the human inter-rater agreement baseline in predicting human-rated scores of conversational proficiency.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Atsushi Ando|AUTHOR Atsushi Ando]], [[Ryo Masumura|AUTHOR Ryo Masumura]], [[Hosana Kamiyama|AUTHOR Hosana Kamiyama]], [[Satoshi Kobashikawa|AUTHOR Satoshi Kobashikawa]], [[Yushi Aono|AUTHOR Yushi Aono]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1716–1720
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a joint modeling of both turn-level and call-level customer satisfaction in contact center dialogue. Our key idea is to directly apply turn-level estimation results to call-level estimation and optimize them jointly; previous work treated both estimations as being independent. Proposed joint modeling is achieved by stacking two types of long short-term memory recurrent neural networks (LSTM-RNNs). The lower layer employs LSTM-RNN for sequential labeling of turn-level customer satisfaction in which each label is estimated from context information extracted from not only the target turn but also the surrounding turns. The upper layer uses another LSTM-RNN to estimate call-level customer satisfaction labels from all information of estimated turn-level customer satisfaction. These two networks can be efficiently optimized by joint learning of both types of labels. Experiments show that the proposed method outperforms a conventional support vector machine based method in terms of both turn-level and call-level customer satisfaction with relative error reductions of over 20%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Stefan Ultes|AUTHOR Stefan Ultes]], [[Paweł Budzianowski|AUTHOR Paweł Budzianowski]], [[Iñigo Casanueva|AUTHOR Iñigo Casanueva]], [[Nikola Mrkšić|AUTHOR Nikola Mrkšić]], [[Lina Rojas-Barahona|AUTHOR Lina Rojas-Barahona]], [[Pei-Hao Su|AUTHOR Pei-Hao Su]], [[Tsung-Hsien Wen|AUTHOR Tsung-Hsien Wen]], [[Milica Gašić|AUTHOR Milica Gašić]], [[Steve Young|AUTHOR Steve Young]]
</p><p class="cpabstractcardaffiliationlist">University of Cambridge, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1721–1725
</span></p></div>
<div class="cpabstractcardabstract"><p>Learning suitable and well-performing dialogue behaviour in statistical spoken dialogue systems has been in the focus of research for many years. While most work which is based on reinforcement learning employs an objective measure like task success for modelling the reward signal, we propose to use a reward based on user satisfaction. We will show in simulated experiments that a live user satisfaction estimation model may be applied resulting in higher estimated satisfaction whilst achieving similar success rates. Moreover, we will show that one satisfaction estimation model which has been trained on one domain may be applied in many other domains which cover a similar task. We will verify our findings by employing the model to one of the domains for learning a policy from real users and compare its performance to policies using the user satisfaction and task success acquired directly from the users as reward.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shizuka Nakamura|AUTHOR Shizuka Nakamura]], [[Ryosuke Nakanishi|AUTHOR Ryosuke Nakanishi]], [[Katsuya Takanashi|AUTHOR Katsuya Takanashi]], [[Tatsuya Kawahara|AUTHOR Tatsuya Kawahara]]
</p><p class="cpabstractcardaffiliationlist">Kyoto University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1726–1730
</span></p></div>
<div class="cpabstractcardabstract"><p>Fillers are involved in the ease of understanding by listeners and turn-taking. However, the knowledge about its prosodic features is insufficient, and its modeling has not been done either. For these reasons, there is insufficient knowledge to generate natural and appropriate fillers in a dialog system at present. Therefore, for the purpose of clarifying the prosodic features of fillers, its relationship with occurrence positions or forms were analyzed in this research. ‘Ano’ and ‘Eto’ were used as forms, non-/boundary of Dialog Act and non-/turn-taking for occurrence positions. Duration, F0, and intensity were utilized as prosodic features. As a result, the followings were found out: the prosodic features are different depending on the difference of the occurrence positions even for fillers of the same form, and similar prosodic features are found between the same occurrence positions even in different forms.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Syeda Narjis Fatima|AUTHOR Syeda Narjis Fatima]], [[Engin Erzin|AUTHOR Engin Erzin]]
</p><p class="cpabstractcardaffiliationlist">Koç Üniversitesi, Turkey</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1731–1735
</span></p></div>
<div class="cpabstractcardabstract"><p>Dyadic interactions encapsulate rich emotional exchange between interlocutors suggesting a multimodal, cross-speaker and cross-dimensional continuous emotion dependency. This study explores the dynamic inter-attribute emotional dependency at the cross-subject level with implications to continuous emotion recognition based on speech and body motion cues. We propose a novel two-stage Gaussian Mixture Model mapping framework for the continuous emotion recognition problem. In the first stage, we perform continuous emotion recognition (CER) of both speakers from speech and body motion modalities to estimate activation, valence and dominance (AVD) attributes. In the second stage, we improve the first stage estimates by performing CER of the selected speaker using her/his speech and body motion modalities as well as using the estimated affective attribute(s) of the other speaker. Our experimental evaluations indicate that the second stage, cross-subject continuous emotion recognition (CSCER), provides complementary information to recognize the affective state, and delivers promising improvements for the continuous emotion recognition problem.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Marcin Włodarczak|AUTHOR Marcin Włodarczak]]^^1^^, [[Kornel Laskowski|AUTHOR Kornel Laskowski]]^^2^^, [[Mattias Heldner|AUTHOR Mattias Heldner]]^^1^^, [[Kätlin Aare|AUTHOR Kätlin Aare]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Stockholm University, Sweden; ^^2^^Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1666–1670
</span></p></div>
<div class="cpabstractcardabstract"><p>One consequence of situated face-to-face conversation is the co-observability of participants’ respiratory movements and sounds. We explore whether this information can be exploited in predicting incipient speech activity. Using a methodology called stochastic turn-taking modeling, we compare the performance of a model trained on speech activity alone to one additionally trained on static and dynamic lung volume features. The methodology permits automatic discovery of temporal dependencies across participants and feature types. Our experiments show that respiratory information substantially lowers cross-entropy rates, and that this generalizes to unseen data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Peter A. Heeman|AUTHOR Peter A. Heeman]], [[Rebecca Lunsford|AUTHOR Rebecca Lunsford]]
</p><p class="cpabstractcardaffiliationlist">Oregon Health & Science University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1671–1675
</span></p></div>
<div class="cpabstractcardabstract"><p>A number of researchers have studied turn-taking offsets in human-human dialogues. However, that work collapses over a wide number of different turn-taking contexts. In this work, we delve into the turn-taking delays based on different contexts. We show that turn-taking behavior, both who tends to take the turn next, and the turn-taking delays, are dependent on the previous speech act type, the upcoming speech act, and the nature of the dialogue. This strongly suggests that in studying turn-taking, all turn-taking events should not be grouped together. This also suggests that delays are due to cognitive processing of what to say, rather than whether a speaker should take the turn.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Angelika Maier|AUTHOR Angelika Maier]], [[Julian Hough|AUTHOR Julian Hough]], [[David Schlangen|AUTHOR David Schlangen]]
</p><p class="cpabstractcardaffiliationlist">Universität Bielefeld, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1676–1680
</span></p></div>
<div class="cpabstractcardabstract"><p>We address the challenge of improving live end-of-turn detection for situated spoken dialogue systems. While traditionally silence thresholds have been used to detect the user’s end-of-turn, such an approach limits the system’s potential fluidity in interaction, restricting it to a purely reactive paradigm. By contrast, here we present a system which takes a predictive approach. The user’s end-of-turn is predicted live as acoustic features and words are consumed by the system. We compare the benefits of live lexical and acoustic information by feature analysis and testing equivalent models with different feature sets with a common deep learning architecture, a Long Short-Term Memory (LSTM) network. We show the usefulness of incremental enriched language model features in particular. Training and testing on Wizard-of-Oz data collected to train an agent in a simple virtual world, we are successful in improving over a reactive baseline in terms of reducing latency whilst minimising the cut-in rate.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuichi Ishimoto|AUTHOR Yuichi Ishimoto]]^^1^^, [[Takehiro Teraoka|AUTHOR Takehiro Teraoka]]^^2^^, [[Mika Enomoto|AUTHOR Mika Enomoto]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NINJAL, Japan; ^^2^^Tokyo University of Technology, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1681–1685
</span></p></div>
<div class="cpabstractcardabstract"><p>This study is aimed at uncovering a way that participants in conversation predict end-of-utterance for spontaneous Japanese speech. In spontaneous everyday conversation, the participants must predict the ends of utterances of a speaker to perform smooth turn-taking without too much gap. We consider that they utilize not only syntactic factors but also prosodic factors for the end-of-utterance prediction because of the difficulty of prediction of a syntactic completion point in spontaneous Japanese. In previous studies, we found that prosodic features changed significantly in the final accentual phrase. However, it is not clear what prosodic features support the prediction. In this paper, we focused on dependency structure among bunsetsu-phrases as the syntactic factor, and investigated the relation between the phrase-dependency and prosodic features. The results showed that the average fundamental frequency and the average intensity for accentual phrases did not decline until the modified phrase appeared. Next, to predict the end of utterance from the syntactic and prosodic features, we constructed a generalized linear mixed model. The model provided higher accuracy than using the prosodic features only. These suggest the possibility that prosodic changes and phrase-dependency relations inform the hearer that the utterance is approaching its end.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chaoran Liu|AUTHOR Chaoran Liu]], [[Carlos Ishi|AUTHOR Carlos Ishi]], [[Hiroshi Ishiguro|AUTHOR Hiroshi Ishiguro]]
</p><p class="cpabstractcardaffiliationlist">ATR HIL, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1686–1690
</span></p></div>
<div class="cpabstractcardabstract"><p>A natural conversation involves rapid exchanges of turns while talking. Taking turns at appropriate timing or intervals is a requisite feature for a dialog system as a conversation partner. This paper proposes a model that estimates the timing of turn-taking during verbal interactions. Unlike previous studies, our proposed model does not rely on a silence region between sentences since a dialog system must respond without large gaps or overlaps. We propose a Recurrent Neural Network (RNN) based model that takes the joint embedding of lexical and prosodic contents as its input to classify utterances into turn-taking related classes and estimates the turn-taking timing. To this end, we trained a neural network to embed the lexical contents, the fundamental frequencies, and the speech power into a joint embedding space. To learn meaningful embedding spaces, the prosodic features from each single utterance are pre-trained using RNN and combined with utterance lexical embedding as the input of our proposed model. We tested this model on a spontaneous conversation dataset and confirmed that it outperformed the use of word embedding-based features.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hirofumi Inaguma|AUTHOR Hirofumi Inaguma]], [[Koji Inoue|AUTHOR Koji Inoue]], [[Masato Mimura|AUTHOR Masato Mimura]], [[Tatsuya Kawahara|AUTHOR Tatsuya Kawahara]]
</p><p class="cpabstractcardaffiliationlist">Kyoto University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1691–1695
</span></p></div>
<div class="cpabstractcardabstract"><p>Non-verbal speech cues such as laughter and fillers, which are collectively called social signals, play an important role in human communication. Therefore, detection of them would be useful for dialogue systems to infer speaker’s intentions, emotions and engagements. The conventional approaches are based on frame-wise classifiers, which require precise time-alignment of these events for training. This work investigates the Connectionist Temporal Classification (CTC) approach which can learn an alignment between the input and its target label sequence. This allows for robust detection of the events and efficient training without precise time information. Experimental evaluations with various settings demonstrate that CTC based on bidirectional LSTM outperforms the conventional DNN and HMM based methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zahra Rahimi|AUTHOR Zahra Rahimi]]^^1^^, [[Anish Kumar|AUTHOR Anish Kumar]]^^1^^, [[Diane Litman|AUTHOR Diane Litman]]^^1^^, [[Susannah Paletz|AUTHOR Susannah Paletz]]^^2^^, [[Mingzhi Yu|AUTHOR Mingzhi Yu]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Pittsburgh, USA; ^^2^^University of Maryland, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1696–1700
</span></p></div>
<div class="cpabstractcardabstract"><p>Linguistic entrainment, the phenomena whereby dialogue partners speak more similarly to each other in a variety of dimensions, is key to the success and naturalness of interactions. While there is considerable evidence for both lexical and acoustic-prosodic entrainment, little work has been conducted to investigate the relationship between these two different modalities using the same measures in the same dialogues, specifically in multi-party dialogue. In this paper, we measure lexical and acoustic-prosodic entrainment for multi-party teams to explore whether entrainment occurs at multiple levels during conversation and to understand the relationship between these two modalities.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Justine Reverdy|AUTHOR Justine Reverdy]], [[Carl Vogel|AUTHOR Carl Vogel]]
</p><p class="cpabstractcardaffiliationlist">Trinity College Dublin, Ireland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1701–1705
</span></p></div>
<div class="cpabstractcardabstract"><p>In many contexts from casual everyday conversations to formal discussions, people tend to repeat their interlocutors, and themselves. This phenomenon not only yields random repetitions one might expect from a natural Zipfian distribution of linguistic forms, but also projects underlying discourse mechanisms and rhythms that researchers have suggested establishes conversational involvement and may support communicative progress towards mutual understanding. In this paper, advances in an automated method for assessing interlocutor synchrony in task-based Human-to-Human interactions are reported. The method focuses on dialogue structure, rather than temporal distance, measuring repetition between speakers and their interlocutors last n-turns (n = 1, however far back in the conversation that might have been) rather than utterances during a prior window fixed by duration. The significance of distinct linguistic levels of repetition are assessed by observing contrasts between actual and randomized dialogues, in order to provide a quantifying measure of communicative success. Definite patterns of repetitions were identified, notably in contrasting the role of participants (as information giver or follower). The extent to which those interacted sometimes surprisingly with gender, eye-contact and familiarity is the principal contribution of this work.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Micha Elsner|AUTHOR Micha Elsner]], [[Kiwako Ito|AUTHOR Kiwako Ito]]
</p><p class="cpabstractcardaffiliationlist">Ohio State University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1736–1740
</span></p></div>
<div class="cpabstractcardabstract"><p>Forced alignment would enable phonetic analyses of child directed speech (CDS) corpora which have existing transcriptions. But existing alignment systems are inaccurate due to the atypical phonetics of CDS. We adapt a Kaldi forced alignment system to CDS by extending the dictionary and providing it with heuristically-derived hints for vowel locations. Using this system, we present a new time-aligned CDS corpus with a million aligned segments. We manually correct a subset of the corpus and demonstrate that our system is 70% accurate. Both our automatic and manually corrected alignments are publically available at ‘osf.io/ke44q‘.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gintarė Grigonytė|AUTHOR Gintarė Grigonytė]]^^1^^, [[Gerold Schneider|AUTHOR Gerold Schneider]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Stockholm University, Sweden; ^^2^^Universität Zürich, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1779–1783
</span></p></div>
<div class="cpabstractcardabstract"><p>We use n-gram language models to investigate how far language approximates an optimal code for human communication in terms of Information Theory [1], and what differences there are between Learner proficiency levels. Although the language of lower level learners is simpler, it is less optimal in terms of information theory, and as a consequence more difficult to process.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Adriana Hanulíková|AUTHOR Adriana Hanulíková]]^^1^^, [[Jenny Ekström|AUTHOR Jenny Ekström]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Albert-Ludwigs-Universität Freiburg, Germany; ^^2^^University of Stockholm, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1784–1788
</span></p></div>
<div class="cpabstractcardabstract"><p>Listeners usually adjust rapidly to unfamiliar regional and foreign accents in their native (L1) language. Non-native (L2) listeners, however, usually struggle when confronted with unfamiliar accents in their non-native language. The present study asks how native language background of L2 speakers influences lexical adjustments in a novel accent of German, in which several vowels were systematically lowered. We measured word judgments on a lexical decision task before and after exposure to a 15-min story in the novel dialect, and compared German, Swedish and Finnish listeners’ performance. Swedish is a Germanic language and shares with German a number of lexical roots and a relatively large vowel inventory. Finnish is a Finno-Ugric language and differs substantially from Germanic languages in both lexicon and phonology. The results were as predicted: descriptively, all groups showed a similar pattern of adaptation to the accented speech, but only German and Swedish participants showed a significant effect. Lexical and phonological relatedness between the native and non-native languages may thus positively influence lexical adaptation in an unfamiliar accent.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alejandra Keidel Fernández|AUTHOR Alejandra Keidel Fernández]], [[Thomas Hörberg|AUTHOR Thomas Hörberg]]
</p><p class="cpabstractcardaffiliationlist">Stockholm University, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1789–1793
</span></p></div>
<div class="cpabstractcardabstract"><p>Third language (L3) acquisition differs from first language (L1) and second language (L2) acquisition. There are different views on whether L1 or L2 is of primary influence on L3 acquisition in terms of transfer. This study examines differences in the event-related brain potentials (ERP) response to agreement incongruencies between L1 Spanish speakers and L3 Spanish learners, comparing response differences to incongruencies that are transferrable from the learners’ L1 (Swedish), or their L2 (English). Whereas verb incongruencies, available in L3 learners’ L2 but not their L1, engendered a similar response for L1 speakers and L3 learners, adjective incongruencies, available in L3 learners’ L1 but not their L2, elicited responses that differed between groups: Adjective incongruencies engendered a negativity in the 450–550 ms time window for L1 speakers only. Both congruent and incongruent adjectives also engendered an enhanced P3 wave in L3 learners compared to L1 speakers. Since the P300 correlates with task-related, strategic processing, this indicates that L3 learners process grammatical features that are transferrable from their L1 in a less automatic mode than features that are transferrable from their L2. L3 learners therefore seem to benefit more from their knowledge of their L2 than their knowledge of their L1.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Johan Sjons|AUTHOR Johan Sjons]]^^1^^, [[Thomas Hörberg|AUTHOR Thomas Hörberg]]^^1^^, [[Robert Östling|AUTHOR Robert Östling]]^^1^^, [[Johannes Bjerva|AUTHOR Johannes Bjerva]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Stockholm University, Sweden; ^^2^^Rijksuniversiteit Groningen, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1794–1798
</span></p></div>
<div class="cpabstractcardabstract"><p>In earlier work, we have shown that articulation rate in Swedish child-directed speech (CDS) increases as a function of the age of the child, even when utterance length and differences in articulation rate between subjects are controlled for. In this paper we show on utterance level in spontaneous Swedish speech that i) for the youngest children, articulation rate in CDS is lower than in adult-directed speech (ADS), ii) there is a significant negative correlation between articulation rate and surprisal (the negative log probability) in ADS, and iii) the increase in articulation rate in Swedish CDS as a function of the age of the child holds, even when surprisal along with utterance length and differences in articulation rate between speakers are controlled for. These results indicate that adults adjust their articulation rate to make it fit the linguistic capacity of the child.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kaile Zhang|AUTHOR Kaile Zhang]], [[Gang Peng|AUTHOR Gang Peng]]
</p><p class="cpabstractcardaffiliationlist">Hong Kong Polytechnic University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1799–1803
</span></p></div>
<div class="cpabstractcardabstract"><p>To further investigate the relationship between non-native tone perception and production, the present study trained Mandarin speakers to learn Cantonese lexical tones with a speech shadowing paradigm. After two weeks’ training, both Mandarin speakers’ Cantonese tone perception and their production had improved significantly. The overall performances in Cantonese tone perception and production are moderately correlated, but the degree of performance change after training among the two modalities shows no correlation, suggesting that non-native tone perception and production might be partially correlated, but that the improvement of the two modalities is not synchronous. A comparison between the present study and previous studies on non-native tone learning indicates that experience in lexical tone processing might be important in forming the correlation between tone perception and production. Mandarin speakers showed greater improvement in Cantonese tone perception than in production after training, indicating that second language (L2) perception might precede production. Besides, both the first language (L1) and L2 tonal systems showed an influence on Mandarin speakers’ learning of Cantonese tones.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ellen Marklund|AUTHOR Ellen Marklund]], [[Elísabet Eir Cortes|AUTHOR Elísabet Eir Cortes]], [[Johan Sjons|AUTHOR Johan Sjons]]
</p><p class="cpabstractcardaffiliationlist">Stockholm University, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1804–1808
</span></p></div>
<div class="cpabstractcardabstract"><p>The aim of the present study is to further the understanding of the relationship between perceptual categorization and exposure to different frequency distributions of sounds. Previous studies have shown that speech sound discrimination proficiency is influenced by exposure to different distributions of speech sound continua varying along one or several acoustic dimensions, both in adults and in infants. In the current study, adults were presented with either a bimodal or a unimodal frequency distribution of spectrally rotated sounds along a continuum (a vowel continuum before rotation). Categorization of the sounds, quantified as amplitude of the event-related potential (ERP) component mismatch negativity (MMN) in response to two of the sounds, was measured before and after exposure. It was expected that the bimodal group would have a larger MMN amplitude after exposure whereas the unimodal group would have a smaller MMN amplitude after exposure. Contrary to expectations, the MMN amplitude was smaller overall after exposure, and no difference was found between groups. This suggests that either the previously reported sensitivity to frequency distributions of speech sounds is not present for non-speech sounds, or the MMN amplitude is not a sensitive enough measure of categorization to detect an influence from passive exposure, or both.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ocke-Schwen Bohn|AUTHOR Ocke-Schwen Bohn]], [[Trine Askjær-Jørgensen|AUTHOR Trine Askjær-Jørgensen]]
</p><p class="cpabstractcardaffiliationlist">Aarhus University, Denmark</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1741–1744
</span></p></div>
<div class="cpabstractcardabstract"><p>The present study used a sentence verification task to assess the processing cost involved in native Danish listeners’ attempts to comprehend true/false statements spoken in Danish, Norwegian, Swedish, and English. Three groups of native Danish listeners heard 40 sentences each which were translation equivalents, and assessed the truth value of these statements. Group 1 heard sentences in Danish and Norwegian, Group 2 in Danish and Swedish, and Group 3 in Danish and English. Response time and proportion of correct responses were used as indices of processing cost. Both measures indicate that the processing cost for native Danish listeners in comprehending Danish and English statements is equivalent, whereas Norwegian and Swedish statements incur a much higher cost, both in terms of response time and correct assessments. The results are discussed with regard to the costs of inter-Scandinavian and English lingua franca communication.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Felicitas Kleber|AUTHOR Felicitas Kleber]]
</p><p class="cpabstractcardaffiliationlist">LMU München, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1745–1749
</span></p></div>
<div class="cpabstractcardabstract"><p>This study is part of a larger project investigating the acquisition of stable vowel-plus-consonant timing patterns needed to convey the phonemic vowel length and the voicing contrast in German. The research is motivated by findings showing greater temporal variability in children until the age of 12. The specific aims of the current study were to test (1) whether temporal variability in the production of the vowel length contrast decreases with increasing age (in general and more so when the variability is speech rate induced) and (2) whether duration cues are perceived more categorically with increasing age. Production and perception data were obtained from eleven preschool, five school children and eleven adults. Results revealed that children produce the quantity contrast with temporal patterns that are similar to adults’ patterns, although vowel duration was overall longer and variability slightly higher in faster speech and younger children. Apart from that, the two groups of children did not differ in production. In perception, however, school children’s response patterns to a continuum from a long vowel to a short vowel word were in between those of adults and preschool children. Findings are discussed with respect to motor control and phonemic abstraction.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Patrick F. Reidy|AUTHOR Patrick F. Reidy]]^^1^^, [[Mary E. Beckman|AUTHOR Mary E. Beckman]]^^2^^, [[Jan Edwards|AUTHOR Jan Edwards]]^^3^^, [[Benjamin Munson|AUTHOR Benjamin Munson]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Texas at Dallas, USA; ^^2^^Ohio State University, USA; ^^3^^University of Maryland, USA; ^^4^^University of Minnesota, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1750–1754
</span></p></div>
<div class="cpabstractcardabstract"><p>Both perceptual and acoustic studies of children’s speech independently suggest that phonological contrasts are continuously refined during acquisition. This paper considers two traditional acoustic features for the ‘s’-vs.-‘sh’ contrast (centroid and peak frequencies) and a novel feature learned from data, evaluating these features relative to perceptual ratings of children’s productions.
Productions of sibilant fricatives were elicited from 16 adults and 69 preschool children. A second group of adults rated the children’s productions on a visual analog scale (VAS). Each production was rated by multiple listeners; mean VAS score for each production was used as its perceptual goodness rating. For each production from the repetition task, a psychoacoustic spectrum was estimated by passing it through a filter bank that modeled the auditory periphery. From these spectra centroid and peak frequencies were computed, two traditional features for a sibilant fricative’s place of articulation. A novel acoustic measure was derived by inputting the spectra to a graph-based dimensionality-reduction algorithm.
Simple regression analyses indicated that a greater amount of variance in the VAS scores was explained by the novel feature (adjusted R^^2^^ = 0.569) than by either centroid (adjusted R^^2^^ = 0.468) or peak frequency (adjusted R^^2^^ = 0.254).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yujia Xiao|AUTHOR Yujia Xiao]]^^1^^, [[Frank K. Soong|AUTHOR Frank K. Soong]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^SCUT, China; ^^2^^Microsoft, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1755–1759
</span></p></div>
<div class="cpabstractcardabstract"><p>We investigate how to assess the prosody quality of an ESL learner’s spoken sentence against native speaker’s natural recording or TTS synthesized voice. A spoken English utterance read by an ESL learner is compared with the recording of a native speaker, or TTS voice. The corresponding F0 contours (with voicings) and breaks are compared at the mapped syllable level via a DTW. The correlations between the prosody patterns of learner and native speaker (or TTS voice) of the same sentence are computed after the speech rates and F0 distributions between speakers are equalized. Based upon collected native and non-native speakers’ databases and correlation coefficients, we use Gaussian mixtures to model them as continuous distributions for training a two-class (native vs non-native) neural net classifier. We found that classification accuracy between using native speaker’s and TTS reference is close, i.e., 91.2% vs 88.1%. To assess the prosody proficiency of an ESL learner with one sentence input, the prosody patterns of our high quality TTS is almost as effective as those of native speakers’ recordings, which are more expensive and inconvenient to collect.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Si Chen|AUTHOR Si Chen]]^^1^^, [[Yunjuan He|AUTHOR Yunjuan He]]^^2^^, [[Chun Wah Yuen|AUTHOR Chun Wah Yuen]]^^1^^, [[Bei Li|AUTHOR Bei Li]]^^1^^, [[Yike Yang|AUTHOR Yike Yang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Hong Kong Polytechnic University, China; ^^2^^University of North Georgia, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1760–1764
</span></p></div>
<div class="cpabstractcardabstract"><p>This study is the first to examine acquisition of two Mandarin tone sandhi rules by Cantonese speakers. It designs both real and different types of wug words to test whether learners may exploit a lexical or computation mechanism in tone sandhi rule application. We also statistically compared their speech production with Beijing Mandarin speakers. The results of functional data analysis showed that non-native speakers applied tone sandhi rules both to real and wug words in a similar manner, indicating that they might utilize a computation mechanism and compute the rules under phonological conditions. No significant differences in applying these two phonological rules on reading wug words also suggest no bias in the application of these two rules. However, their speech production differed from native speakers. The application of third tone sandhi rule was more categorical than native speakers in that Cantonese speakers tended to neutralize the sandhi Tone 3 more with Tone 2 produced in isolation compared to native speakers. Also, Cantonese speakers might not have applied half-third tone sandhi rule fully since they tended to raise f0 values more at the end of vowels.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Seth Wiener|AUTHOR Seth Wiener]]
</p><p class="cpabstractcardaffiliationlist">Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1765–1769
</span></p></div>
<div class="cpabstractcardabstract"><p>This study examined how cue-weighting of a non-native speech cue changes during early adult second language (L2) acquisition. Ten native English speaking learners of Mandarin Chinese performed a speeded AX-discrimination task during months 1, 2, and 3 of a first-year Chinese course. Results were compared to ten native Mandarin speakers. Learners’ reaction time and d-prime results became more native-like after two months of classroom study but plateaued thereafter. Multidimensional scaling results showed a similar shift to more native-like cue-weighting as learners attended more to pitch direction and less to pitch height. Despite the improvements, learners’ month 3 configuration of cue-weighting differed from that of native speakers; learners appeared to weight pitch end points rather than overall pitch directions. These results suggest that learners’ warping of the weights of dimensions underlying the perceptual space changes rapidly during early acquisition and can plateau like other measures of L2 acquisition. Previous perceptual learning studies may have only captured initial L2 perception gains, not the learning plateau that often follows. New methods of perceptual learning, especially for tonal languages, are needed to advance learners off the plateau.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ying Chen|AUTHOR Ying Chen]]^^1^^, [[Eric Pederson|AUTHOR Eric Pederson]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NUST, China; ^^2^^University of Oregon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1770–1774
</span></p></div>
<div class="cpabstractcardabstract"><p>Previous studies have shown that directing learners’ attention during perceptual training facilitates detection and learning of unfamiliar consonant categories [1, 2]. The current study asks whether this attentional directing can also facilitate other types of phonetic learning. Monolingual Mandarin speakers were divided into two groups directed to learn either 1) the consonants or 2) the tones in an identification training task with the same set of Southern Min monosyllabic words containing the consonants /p^^h^^, p, b, k^^h^^, k, ʀiptg, tɕ^^h^^, tɕ, ɕ/ and the tones (55, 33, 22, 24, 41). All subjects were also tested with an AXB discrimination task (with a distinct set of Southern Min words) before and after the training. Unsurprisingly, both groups improved accuracy in the sound type to which they attended. However, the consonant-attending group did not improve in discriminating tones after training and neither did the tone-attending group in discriminating consonants — despite both groups having equal exposure to the same training stimuli. When combined with previous results for consonant and vowel training, these results suggest that explicitly directing learners’ attention has a broadly facilitative effect on phonetic learning including of tonal contrasts.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Dean Luo|AUTHOR Dean Luo]]^^1^^, [[Ruxin Luo|AUTHOR Ruxin Luo]]^^2^^, [[Lixin Wang|AUTHOR Lixin Wang]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Shenzhen Institute of Information Technology, China; ^^2^^Shenzhen Polytechnic, China; ^^3^^Shenzhen Seaskyland Technologies, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1775–1778
</span></p></div>
<div class="cpabstractcardabstract"><p>This study investigates how different prosodic features affect native speakers’ naturalness judgement of L2 English speech by Chinese students. Through subjective judgment by native speakers and objectively measured prosodic features, timing and pitch related prosodic features, as well as segmental goodness of pronunciation have been found to play key roles in native speakers’ perception of naturalness. In order to eliminate segmental factors, we used accent conversion techniques that modify native reference speech with learners’ erroneous prosodic cues without altering segmental properties. Experimental results show that without interference of segmental factors, both timing and pitch features affect naturalness of L2 speech. Timing plays a more crucial role in naturalness than pitch. Accent modification that corrects timing or pitch errors can improve naturalness of the speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Visar Berisha|AUTHOR Visar Berisha]]^^1^^, [[Julie Liss|AUTHOR Julie Liss]]^^1^^, [[Timothy Huston|AUTHOR Timothy Huston]]^^1^^, [[Alan Wisler|AUTHOR Alan Wisler]]^^1^^, [[Yishan Jiao|AUTHOR Yishan Jiao]]^^1^^, [[Jonathan Eig|AUTHOR Jonathan Eig]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Arizona State University, USA; ^^2^^Independent Author, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1809–1813
</span></p></div>
<div class="cpabstractcardabstract"><p>Early identification of the onset of neurological disease is critical for testing drugs or interventions to halt or slow progression. Speech production has been proposed as an early indicator of neurological impairment. However, for speech to be useful for early detection, speech changes should be measurable from uncontrolled conversational speech collected passively in natural recording environments over extended periods of time. Such longitudinal speech data sets for testing the robustness of algorithms are difficult to acquire. In this paper, we exploit YouTube interviews from Muhammad Ali from 1968 to 1981, before his 1984 diagnosis of parkinsonism. The interviews are unscripted, conversational in nature, and of varying fidelity. We measured changes in speech production from the Ali interviews and analyzed these changes relative to a coded registry of blows Mr. Ali received in each of his boxing matches over time. This provided a rich and unique opportunity to evaluate speech change as both a function of disease progression and as a function of fight history. Multivariate analyses revealed changes in prosody and articulation consistent with hypokinetic dysarthria over time, and a relationship between reduced speech intonation and the amount of time elapsed since the most recent fight preceding the interview.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bhavik Vachhani|AUTHOR Bhavik Vachhani]], [[Chitralekha Bhat|AUTHOR Chitralekha Bhat]], [[Biswajit Das|AUTHOR Biswajit Das]], [[Sunil Kumar Kopparapu|AUTHOR Sunil Kumar Kopparapu]]
</p><p class="cpabstractcardaffiliationlist">TCS Innovation Labs Mumbai, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1854–1858
</span></p></div>
<div class="cpabstractcardabstract"><p>Dysarthria is a motor speech disorder, resulting in mumbled, slurred or slow speech that is generally difficult to understand by both humans and machines. Traditional Automatic Speech Recognizers (ASR) perform poorly on dysarthric speech recognition tasks. In this paper, we propose the use of deep autoencoders to enhance the Mel Frequency Cepstral Coefficients (MFCC) based features in order to improve dysarthric speech recognition. Speech from healthy control speakers is used to train an autoencoder which is in turn used to obtain improved feature representation for dysarthric speech. Additionally, we analyze the use of severity based tempo adaptation followed by autoencoder based speech feature enhancement. All evaluations were carried out on Universal Access dysarthric speech corpus. An overall absolute improvement of 16% was achieved using tempo adaptation followed by autoencoder based speech front end representation for DNN-HMM based dysarthric speech recognition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jason Lilley|AUTHOR Jason Lilley]], [[Madhavi Ratnagiri|AUTHOR Madhavi Ratnagiri]], [[H. Timothy Bunnell|AUTHOR H. Timothy Bunnell]]
</p><p class="cpabstractcardaffiliationlist">Nemours Biomedical Research, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1859–1863
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech delay is characterized by a difficulty with producing or perceiving the sounds of language in comparison to one’s peers. It is a common problem in young children, occurring at a rate of about 5%. There are high rates of co-occurring problems with language, reading, learning, and social interactions, so intervention is needed for most. The Goldman-Fristoe Test of Articulation (GFTA) is a standardized tool for the assessment of consonant articulation in American English children. GFTA scores are normalized for age and can be used to help diagnose and assess speech delay. The GFTA was administered to 65 young children, a mixture of delayed children and controls. Their productions of the 39 GFTA words spoken in isolation were recorded and aligned to 3-state hidden Markov models. Seven measurements (state log likelihoods, state durations, and total duration) were extracted from each target segment in each word. From a subset of these measures, cross-validated statistical models were used to predict the children’s GFTA scores and whether they were delayed. The measurements most useful for prediction came primarily from approximants /r, l/. An analysis of the predictors and discussion of the implications will be provided.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Aijun Li|AUTHOR Aijun Li]]^^1^^, [[Hua Zhang|AUTHOR Hua Zhang]]^^2^^, [[Wen Sun|AUTHOR Wen Sun]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Chinese Academy of Social Sciences, China; ^^2^^Beijing Tongren Hospital, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1864–1868
</span></p></div>
<div class="cpabstractcardabstract"><p>“The Ling Six Sounds” are a range of speech sounds encompassing the speech frequencies that are widely used clinically to verify the effectiveness of hearing aid fitting in children. This study focused on the spectral features of the six sounds in Standard Chinese. We examined the frequency range of /m, u, a, i, ʂ, s/ as well as three consonants in syllables, i.e., /m(o)/, /ʂ(ʅ)/, and /s(ɿ)/. We presented the frequency distribution of these sounds. Based on this, we further proposed guidelines to improve “the Ling Six-Sound Test” regarding tones in Standard Chinese. We also suggested further studies in other dialects/languages spoken in China with regard to their phonological specifics.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wentao Gu|AUTHOR Wentao Gu]]^^1^^, [[Jiao Yin|AUTHOR Jiao Yin]]^^1^^, [[James Mahshie|AUTHOR James Mahshie]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Nanjing Normal University, China; ^^2^^George Washington University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1869–1873
</span></p></div>
<div class="cpabstractcardabstract"><p>This study investigated both production and perception of Mandarin speech, comparing two groups of 4-to-5-year-old children, a normal-hearing (NH) group and a cochlear-implanted (CI) hearing-impaired group; the perception ability of the CI group was tested under two conditions, with and without hearing aids. In the production study, the participants were asked to produce sustained vowels /a/, /i/ and /u/, on which a set of acoustic parameters were then measured. In comparison to the NH group, the CI group showed a higher F,,0,,, a higher H1-H2, and a smaller acoustic space for vowels, demonstrating both phonatory and articulatory impairments. In the perception study, the identification tests of two tone-pairs in Mandarin (T1-T2 and T1-T4) were conducted, using two sets of synthetic speech stimuli varying only along F,,0,, continua. All groups/conditions showed categorical effects in perception. The CI group in the unimodal condition showed little difference from normal, while in the bimodal condition the categorical effect became weaker in identifying the T1-T4 continuum, with the category boundary more biased to T4. This suggests that bimodal CI children may need more fine grain adjustments of hearing aids to take full advantage of the bimodal technology.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Antonella Castellana|AUTHOR Antonella Castellana]]^^1^^, [[Andreas Selamtzis|AUTHOR Andreas Selamtzis]]^^2^^, [[Giampiero Salvi|AUTHOR Giampiero Salvi]]^^2^^, [[Alessio Carullo|AUTHOR Alessio Carullo]]^^1^^, [[Arianna Astolfi|AUTHOR Arianna Astolfi]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Politecnico di Torino, Italy; ^^2^^KTH, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1814–1818
</span></p></div>
<div class="cpabstractcardabstract"><p>There is a growing interest in Cepstral and Entropy analyses of voice samples for defining a vocal health indicator, due to their reliability in investigating both regular and irregular voice signals. The purpose of this study is to determine whether the Cepstral Peak Prominence Smoothed (CPPS) and Sample Entropy (SampEn) could differentiate dysphonic speakers from normal speakers in vowels excerpted from readings and to compare their discrimination power. Results are reported for 33 patients and 31 controls, who read a standardized phonetically balanced passage while wearing a head mounted microphone. Vowels were excerpted from recordings using Automatic Speech Recognition and, after obtaining a measure for each vowel, individual distributions and their descriptive statistics were considered for CPPS and SampEn. The Receiver Operating Curve analysis revealed that the mean of the distributions was the parameter with the highest discrimination power for both CPPS and SampEn. CPPS showed a higher diagnostic precision than SampEn, exhibiting an Area Under Curve (AUC) of 0.85 compared to 0.72. A negative correlation between the parameters was found (Spearman; ρ = -0.61), with higher SampEn corresponding to lower CPPS. The automatic method used in this study could provide support for voice monitoring in clinics and during individuals’ daily activities.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Andrea Bandini|AUTHOR Andrea Bandini]]^^1^^, [[Jordan R. Green|AUTHOR Jordan R. Green]]^^2^^, [[Lorne Zinman|AUTHOR Lorne Zinman]]^^3^^, [[Yana Yunusova|AUTHOR Yana Yunusova]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University Health Network, Canada; ^^2^^MGH Institute of Health Professions, USA; ^^3^^Sunnybrook Health Sciences Centre, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1819–1823
</span></p></div>
<div class="cpabstractcardabstract"><p>Recent studies demonstrated that lip and jaw movements during speech may provide important information for the diagnosis of amyotrophic lateral sclerosis (ALS) and for understanding its progression. A thorough investigation of these movements is essential for the development of intelligent video- or optically-based facial tracking systems that could assist with early diagnosis and progress monitoring. In this paper, we investigated the potential for a novel and expanded set of kinematic features obtained from lips and jaw to classify articulatory data into three stages of bulbar disease progression (i.e., pre-symptomatic, early symptomatic, and late symptomatic). Feature selection methods (Relief-F and mRMR) and classification algorithm (SVM) were used for this purpose. Results showed that even with a limited number of kinematic features it was possible to obtain good classification accuracy (nearly 80%). Given the recent development of video-based markerless methods for tracking speech movements, these results provide strong rationale for supporting the development of portable and cheap systems for monitoring the orofacial function in ALS.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nagaraj Adiga|AUTHOR Nagaraj Adiga]]^^1^^, [[Vikram C.M.|AUTHOR Vikram C.M.]]^^2^^, [[Keerthi Pullela|AUTHOR Keerthi Pullela]]^^3^^, [[S.R. Mahadeva Prasanna|AUTHOR S.R. Mahadeva Prasanna]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IIT Guwahati, India; ^^2^^IIT Guwahati, India; ^^3^^VIT University, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1824–1828
</span></p></div>
<div class="cpabstractcardabstract"><p>Pitch period and amplitude perturbations are widely used parameters to discriminate normal and voice disorder speech. Instantaneous pitch period and amplitude of glottal vibrations directly from the speech waveform may not give an accurate estimation of jitter and shimmer. In this paper, the significance of epochs (glottal closure instants) and strength of excitation (SoE) derived from the zero-frequency filter (ZFF) are exploited to discriminate the voice disorder and normal speech. Pitch epoch derived from ZFF is used to compute the jitter, and SoE derived around each epoch is used to compute the shimmer. The derived epoch-based features are analyzed on some of the voice disorders like Parkinson’s disease, vocal fold paralysis, cyst, and gastroesophageal reflux disease. The significance of proposed epoch-based features for discriminating normal and pathological voices is analyzed and compared with the state-of-the-art methods using a support vector machine classifier. The results show that epoch-based features performed significantly better than other methods both in clean and noisy conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nikitha K.|AUTHOR Nikitha K.]]^^1^^, [[Sishir Kalita|AUTHOR Sishir Kalita]]^^2^^, [[C.M. Vikram|AUTHOR C.M. Vikram]]^^2^^, [[M. Pushpavathi|AUTHOR M. Pushpavathi]]^^1^^, [[S.R. Mahadeva Prasanna|AUTHOR S.R. Mahadeva Prasanna]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^AIISH, India; ^^2^^IIT Guwahati, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1829–1833
</span></p></div>
<div class="cpabstractcardabstract"><p>Vowel space area (VSA) refers to a two-dimensional area, which is bounded by lines joining F,,1,, and F,,2,, coordinates of vowels. In the speech of individuals with cleft lip and palate (CLP), the effect of hypernasality introduces the pole-zero pairs in the speech spectrum, which will shift the formants of a target sound. As a result, vowel space in hypernasal speech gets affected. In this work, the vowel space area in normal, mild and moderate-severe hypernasality groups is analyzed and compared across the three groups. Also, the effect of hypernasality severity ratings across different phonetic contexts, i.e., /p/, /t/, and /k/, is studied. The results revealed that VSA is reduced in CLP children, compared to control participants, across sustained vowels and different phonetic contexts. Compared to normal, the reduction in the vowel space is more for the moderate-severe hypernasality group than that of mild. The CLP group exhibited a trend of having larger VSA for /p/, followed by /t/, and lastly by /k/. The statistical analysis revealed overall significant difference among the three groups (p < 0.05).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Imed Laaridh|AUTHOR Imed Laaridh]]^^1^^, [[Waad Ben Kheder|AUTHOR Waad Ben Kheder]]^^1^^, [[Corinne Fredouille|AUTHOR Corinne Fredouille]]^^1^^, [[Christine Meunier|AUTHOR Christine Meunier]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LIA (EA 4128), France; ^^2^^LPL (UMR 7309), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1834–1838
</span></p></div>
<div class="cpabstractcardabstract"><p>During the last decades, automatic speech processing systems witnessed an important progress and achieved remarkable reliability. As a result, such technologies have been exploited in new areas and applications including medical practice. In disordered speech evaluation context, perceptual evaluation is still the most common method used in clinical practice for the diagnosing and the following of the condition progression of patients despite its well documented limits (such as subjectivity).
In this paper, we propose an automatic approach for the prediction of dysarthric speech evaluation metrics (intelligibility, severity, articulation impairment) based on the representation of the speech acoustics in the total variability subspace based on the i-vectors paradigm. The proposed approach, evaluated on 129 French dysarthric speakers from the DesPhoAPady and VML databases, is proven to be efficient for the modeling of patient’s production and capable of detecting the evolution of speech quality. Also, low RMSE and high correlation measures are obtained between automatically predicted metrics and perceptual evaluations.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Philipp Klumpp|AUTHOR Philipp Klumpp]]^^1^^, [[Thomas Janu|AUTHOR Thomas Janu]]^^1^^, [[Tomás Arias-Vergara|AUTHOR Tomás Arias-Vergara]]^^2^^, [[J.C. Vásquez-Correa|AUTHOR J.C. Vásquez-Correa]]^^2^^, [[Juan Rafael Orozco-Arroyave|AUTHOR Juan Rafael Orozco-Arroyave]]^^1^^, [[Elmar Nöth|AUTHOR Elmar Nöth]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^FAU Erlangen-Nürnberg, Germany; ^^2^^Universidad de Antioquia, Colombia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1839–1843
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper we want to present our work on a smartphone application which aims to provide a mobile monitoring solution for patients suffering from Parkinson’s disease. By unobtrusively analyzing the speech signal during phone calls and with a dedicated speech test, we want to be able to determine the severity and the progression of Parkinson’s disease for a patient much more frequently than it would be possible with regular check-ups.
The application consists of four major parts. There is a phone call detection which triggers the whole processing chain. Secondly, there is the phone call recording which has proven to be more challenging than expected. The signal analysis, another crucial component, is still in development for the phone call analysis. Additionally, the application collects several pieces of meta information about the calls to put the results into deeper context.
After describing how the speech signal is affected by Parkinson’s disease, we sketch the overall application architecture and explain the four major parts of the current implementation in further detail. We then present the promising results achieved with the first version of a dedicated speech test. In the end, we outline how the project could receive further improvements in the future.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jan Hlavnička|AUTHOR Jan Hlavnička]]^^1^^, [[Tereza Tykalová|AUTHOR Tereza Tykalová]]^^1^^, [[Roman Čmejla|AUTHOR Roman Čmejla]]^^1^^, [[Jiří Klempíř|AUTHOR Jiří Klempíř]]^^2^^, [[Evžen Růžička|AUTHOR Evžen Růžička]]^^2^^, [[Jan Rusz|AUTHOR Jan Rusz]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CTU, Czech Republic; ^^2^^Charles University, Czech Republic</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1844–1848
</span></p></div>
<div class="cpabstractcardabstract"><p>Parkinson’s disease (PD), progressive supranuclear palsy (PSP), and multiple system atrophy (MSA) are distinctive neurodegenerative disorders, which manifest similar motor features. Their differentiation is crucial but difficult. Dysfunctional speech, especially dysprosody, is a common symptom accompanying PD, PSP, and MSA from early stages. We hypothesized that automated analysis of monologue could provide speech patterns distinguishing PD, PSP, and MSA. We analyzed speech recordings of 16 patients with PSP, 20 patients with MSA, and 23 patients with PD. Our findings revealed that deviant pause production differentiated between PSP, MSA, and PD. In addition, PSP showed greater deficits in speech respiration when compared to MSA and PD. Automated analysis of connected speech is easy to administer and could provide valuable information about underlying pathology for differentiation between PSP, MSA, and PD.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ming Tu|AUTHOR Ming Tu]], [[Visar Berisha|AUTHOR Visar Berisha]], [[Julie Liss|AUTHOR Julie Liss]]
</p><p class="cpabstractcardaffiliationlist">Arizona State University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1849–1853
</span></p></div>
<div class="cpabstractcardabstract"><p>Improved performance in speech applications using deep neural networks (DNNs) has come at the expense of reduced model interpretability. For consumer applications this is not a problem; however, for health applications, clinicians must be able to interpret why a predictive model made the decision that it did. In this paper, we propose an interpretable model for objective assessment of dysarthric speech for speech therapy applications based on DNNs. Our model aims to predict a general impression of the severity of the speech disorder; however, instead of directly generating a severity prediction from a high-dimensional input acoustic feature space, we add an intermediate interpretable layer that acts as a bottle-neck feature extractor and constrains the solution space of the DNNs. During inference, the model provides an estimate of severity at the output of the network and a set of explanatory features from the intermediate layer of the network that explain the final decision. We evaluate the performance of the model on a dysarthric speech dataset and show that the proposed model provides an interpretable output that is highly correlated with the subjective evaluation of Speech-Language Pathologists (SLPs).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Anurag Kumar|AUTHOR Anurag Kumar]], [[Benjamin Elizalde|AUTHOR Benjamin Elizalde]], [[Bhiksha Raj|AUTHOR Bhiksha Raj]]
</p><p class="cpabstractcardaffiliationlist">Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1874–1878
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper we propose methods to extract geographically relevant information in a multimedia recording using its audio content. Our method is primarily based on the fact that an urban acoustic environment consists of a variety of sounds. Hence, location information can be inferred from the composition of sound events/classes present in the audio. More specifically, we adopt matrix factorization techniques to obtain the semantic content of a recording in terms of different sound classes. We use semi-NMF to perform audio semantic content analysis using MFCCs. This semantic information is then combined to identify the location of the recording. We show that this semantic-content-based geotagging can perform significantly better than state-of-the-art methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jeroen Zegers|AUTHOR Jeroen Zegers]], [[Hugo Van hamme|AUTHOR Hugo Van hamme]]
</p><p class="cpabstractcardaffiliationlist">Katholieke Universiteit Leuven, Belgium</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1919–1923
</span></p></div>
<div class="cpabstractcardabstract"><p>Lately there have been novel developments in deep learning towards solving the cocktail party problem. Initial results are very promising and allow for more research in the domain. One technique that has not yet been explored in the neural network approach to this task is speaker adaptation. Intuitively, information on the speakers that we are trying to separate seems fundamentally important for the speaker separation task. However, retrieving this speaker information is challenging since the speaker identities are not known a priori and multiple speakers are simultaneously active. There is thus some sort of chicken and egg problem. To tackle this, source signals and i-vectors are estimated alternately. We show that blind multi-speaker adaptation improves the results of the network and that (in our case) the network is not capable of adequately retrieving this useful speaker information itself.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bing Yang|AUTHOR Bing Yang]], [[Hong Liu|AUTHOR Hong Liu]], [[Cheng Pang|AUTHOR Cheng Pang]]
</p><p class="cpabstractcardaffiliationlist">Peking University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1924–1928
</span></p></div>
<div class="cpabstractcardabstract"><p>Multiple sound source localization remains a challenging issue due to the interaction between sources. Although traditional approaches can locate multiple sources effectively, most of them require the number of sound sources as a priori knowledge. However, the number of sound sources is generally unknown in practical applications. To overcome this problem, a spatial principal eigenvector based approach is proposed to estimate the number and the direction of arrivals (DOAs) of multiple speech sources. Firstly, a time-frequency (TF) bin weighting scheme is utilized to select the TF bins dominated by single source. Then, for these selected bins, the spatial principal eigenvectors are extracted to construct a contribution function which is used to simultaneously estimate the number of sources and corresponding coarse DOAs. Finally, the coarse DOA estimations are refined by iteratively optimizing the assignment of selected TF bins to each source. Experimental results validate that the proposed approach yields favorable performance for multiple sound source counting and localization in the environment with different levels of noise and reverberation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Girija Ramesan Karthik|AUTHOR Girija Ramesan Karthik]], [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]
</p><p class="cpabstractcardaffiliationlist">Indian Institute of Science, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1929–1933
</span></p></div>
<div class="cpabstractcardabstract"><p>We consider the task of speech source localization using binaural cues, namely interaural time and level difference (ITD & ILD). A typical approach is to process binaural speech using gammatone filters and calculate frame-level ITD and ILD in each subband. The ITD, ILD and their combination (ITLD) in each subband are statistically modelled using Gaussian mixture models for every direction during training. Given a binaural test-speech, the source is localized using maximum likelihood criterion assuming that the binaural cues in each subband are independent. We, in this work, investigate the robustness of each subband for localization and compare their performance against the full-band scheme with 32 gammatone filters. We propose a subband selection procedure using the training data where subbands are rank ordered based on their localization performance. Experiments on Subject 003 from the CIPIC database reveal that, for high SNRs, the ITD and ITLD of just one subband centered at 296Hz is sufficient to yield localization accuracy identical to that of the full-band scheme with a test-speech of duration 1sec. At low SNRs, in case of ITD, the selected subbands are found to perform better than the full-band scheme.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bo-Rui Chen|AUTHOR Bo-Rui Chen]], [[Huang-Yi Lee|AUTHOR Huang-Yi Lee]], [[Yi-Wen Liu|AUTHOR Yi-Wen Liu]]
</p><p class="cpabstractcardaffiliationlist">National Tsing Hua University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1934–1937
<a href="./IS2017/MEDIA/1227" class="externallinkbutton" target="_blank">{{$:/causal/Multimedia Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents and evaluates two frequency-domain methods for multi-channel sound source separation. The sources are assumed to couple to the microphones with unknown room responses. Independent component analysis (ICA) is applied in the frequency domain to obtain maximally independent amplitude envelopes (AEs) at every frequency. Due to the nature of ICA, the AEs across frequencies need to be de-permuted. To this end, we seek to assign AEs to the same source solely based on the correlation in their magnitude variation against time. The resulting time-varying spectra are inverse Fourier transformed to synthesize separated signals. Objective evaluation showed that both methods achieve a signal-to-interference ratio (SIR) that is comparable to Mazur et al. (2013). In addition, we created spoken Mandarin materials and recruited age-matched subjects to perform word-by-word transcription. Results showed that, first, speech intelligibility significantly improved after unmixing. Secondly, while both methods achieved similar SIR, the subjects preferred to listen to the results that were post-processed to ensure a speech-like spectral shape; the mean opinion scores were 2.9 vs. 4.3 (out of 5) between the two methods. The present results may provide suggestions regarding deployment of the correlation-based source separation algorithms into devices with limited computational resources.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Fei Tao|AUTHOR Fei Tao]], [[Carlos Busso|AUTHOR Carlos Busso]]
</p><p class="cpabstractcardaffiliationlist">University of Texas at Dallas, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1938–1942
</span></p></div>
<div class="cpabstractcardabstract"><p>Voice activity detection (VAD) is an important preprocessing step in speech-based systems, especially for emerging hand-free intelligent assistants. Conventional VAD systems relying on audio-only features are normally impaired by noise in the environment. An alternative approach to address this problem is audiovisual VAD (AV-VAD) systems. Modeling timing dependencies between acoustic and visual features is a challenge in AV-VAD. This study proposes a bimodal recurrent neural network (RNN) which combines audiovisual features in a principled, unified framework, capturing the timing dependency within modalities and across modalities. Each modality is modeled with separate bidirectional long short-term memory (BLSTM) networks. The output layers are used as input of another BLSTM network. The experimental evaluation considers a large audiovisual corpus with clean and noisy recordings to assess the robustness of the approach. The proposed approach outperforms audio-only VAD by 7.9% (absolute) under clean/ideal conditions (i.e., high definition (HD) camera, close-talk microphone). The proposed solution outperforms the audio-only VAD system by 18.5% (absolute) when the conditions are more challenging (i.e., camera and microphone from a tablet with noise in the environment). The proposed approach shows the best performance and robustness across a variety of conditions, demonstrating its potential for real-world applications.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Roland Maas|AUTHOR Roland Maas]], [[Ariya Rastrow|AUTHOR Ariya Rastrow]], [[Kyle Goehner|AUTHOR Kyle Goehner]], [[Gautam Tiwari|AUTHOR Gautam Tiwari]], [[Shaun Joseph|AUTHOR Shaun Joseph]], [[Björn Hoffmeister|AUTHOR Björn Hoffmeister]]
</p><p class="cpabstractcardaffiliationlist">Amazon.com, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1943–1947
</span></p></div>
<div class="cpabstractcardabstract"><p>The task of automatically detecting the end of a device-directed user request is particularly challenging in case of switching short command and long free-form utterances. While low-latency end-pointing configurations typically lead to good user experiences in the case of short requests, such as “play music”, it can be too aggressive in domains with longer free-form queries, where users tend to pause noticeably between words and hence are easily cut off prematurely. We previously proposed an approach for accurate end-pointing by continuously estimating pause duration features over all active recognition hypotheses. In this paper, we study the behavior of these pause duration features and infer domain-dependent parametrizations. We furthermore propose to adapt the end-pointer aggressiveness on-the-fly by comparing the Viterbi scores of active short command vs. long free-form decoding hypotheses. The experimental evaluation evidences a 18% relative reduction in word error rate on free-form requests while maintaining low latency on short queries.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Vinay Kothapally|AUTHOR Vinay Kothapally]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]]
</p><p class="cpabstractcardaffiliationlist">University of Texas at Dallas, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1948–1952
</span></p></div>
<div class="cpabstractcardabstract"><p>It is well known that in reverberant environments, the human auditory system has the ability to pre-process reverberant signals to compensate for reflections and obtain effective cues for improved recognition. In this study, we propose such a preprocessing technique for combined detection and enhancement of speech using a single microphone in reverberant environments for distant speech applications. The proposed system employs a framework where the target speech is synthesized using continuous auditory masks estimated from sub-band signals. Linear gammatone analysis/synthesis filter banks are used as an auditory model for sub-band processing. The performance of the proposed system is evaluated on the UT-DistantReverb corpus which consists of speech recorded in a reverberant racquetball court (T60~9000 msec). The current system shows an average improvement of 15% STNR over an existing single-channel dereverberation algorithm and 17% improvement in detecting speech frames over G729B, SOHN & Combo-SAD unsupervised speech activity detectors on actual reverberant and noisy environments.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhaoqiong Huang|AUTHOR Zhaoqiong Huang]], [[Zhanzhong Cao|AUTHOR Zhanzhong Cao]], [[Dongwen Ying|AUTHOR Dongwen Ying]], [[Jielin Pan|AUTHOR Jielin Pan]], [[Yonghong Yan|AUTHOR Yonghong Yan]]
</p><p class="cpabstractcardaffiliationlist">Chinese Academy of Sciences, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1879–1883
</span></p></div>
<div class="cpabstractcardabstract"><p>Bin-wise time delay is a valuable clue to form the time-frequency (TF) mask for speech source separation on the two-microphone array. On widely spaces microphones, however, the time delay estimation suffers from spatial aliasing. Although histogram is a simple and effective method to tackle the problem of spatial aliasing, it can not be directly applied on planar arrays. This paper proposes a histogram-based method to separate multiple speech sources on the arbitrary-size planar array, where the spatial aliasing is resisted. Time delay histogram is firstly utilized to estimate the delays of multiple sources on each microphone pair. The estimated delays on all pairs are then incorporated into an azimuth histogram by means of the pairwise combination test. From the azimuth histogram, the direction-of-arrivals (DOAs) and the number of sources are obtained. Eventually, the TF mask is determined based on the estimated DOAs. Some experiments were conducted under various conditions, confirming the superiority of the proposed method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gayadhar Pradhan|AUTHOR Gayadhar Pradhan]], [[Avinash Kumar|AUTHOR Avinash Kumar]], [[S. Shahnawazuddin|AUTHOR S. Shahnawazuddin]]
</p><p class="cpabstractcardaffiliationlist">NIT Patna, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1884–1888
</span></p></div>
<div class="cpabstractcardabstract"><p>The task of detecting the vowel regions in a given speech signal is a challenging problem. Over the years, several works on accurate detection of vowel regions and the corresponding vowel onset points (VOPs) and vowel end points (VEPs) have been reported. A novel front-end feature extraction technique exploiting the temporal and spectral characteristics of the excitation source information in the speech signal is proposed in this paper to improve the detection of vowel regions, VOPs and VEPs. To do the same, a three-class classifiers (vowel, non-vowel and silence) is developed on the TIMIT database using the proposed features as well as mel-frequency cepstral coefficients (MFCC). Statistical modeling based on deep neural network has been employed for learning the parameters. Using the developed three-class classifier, a given speech sample is then forced aligned against the trained acoustic models to detect the vowel regions. The use of proposed feature results in detection of vowel regions quite different from those obtained through the MFCC. Exploiting the differences in the evidences obtained by using the two kinds of features, a technique to combine the evidences is also proposed in order to get a better estimate of the VOPs and VEPs.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wei Gao|AUTHOR Wei Gao]], [[Roberto Togneri|AUTHOR Roberto Togneri]], [[Victor Sreeram|AUTHOR Victor Sreeram]]
</p><p class="cpabstractcardaffiliationlist">University of Western Australia, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1889–1893
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a contrast function and associated algorithm for blind separation of audio signals. The contrast function is based on second-order statistics to minimize the ratio between the product of the diagonal entries and the determinant of the covariance matrix. The contrast function can be minimized by a batch and adaptive gradient descent method to formulate a blind source separation algorithm. Experimental results on realistic audio signals show that the proposed algorithm yielded comparable separation performance with benchmark algorithms for speech signals, and outperformed benchmark algorithms for music signals.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chenglin Xu|AUTHOR Chenglin Xu]]^^1^^, [[Xiong Xiao|AUTHOR Xiong Xiao]]^^2^^, [[Sining Sun|AUTHOR Sining Sun]]^^3^^, [[Wei Rao|AUTHOR Wei Rao]]^^2^^, [[Eng Siong Chng|AUTHOR Eng Siong Chng]]^^1^^, [[Haizhou Li|AUTHOR Haizhou Li]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NTU, Singapore; ^^2^^TL@NTU, Singapore; ^^3^^Northwestern Polytechnical University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1894–1898
</span></p></div>
<div class="cpabstractcardabstract"><p>We study the estimation of time difference of arrival (TDOA) under noisy and reverberant conditions. Conventional TDOA estimation methods such as MUltiple SIgnal Classification (MUSIC) are not robust to noise and reverberation due to the distortion in the spatial covariance matrix (SCM). To address this issue, this paper proposes a robust SCM estimation method, called weighted SCM (WSCM). In the WSCM estimation, each time-frequency (TF) bin of the input signal is weighted by a TF mask which is 0 for non-speech TF bins and 1 for speech TF bins in ideal case. In practice, the TF mask takes values between 0 and 1 that are predicted by a long short term memory (LSTM) network trained from a large amount of simulated noisy and reverberant data. The use of mask weights significantly reduces the contribution of low SNR TF bins to the SCM estimation, hence improves the robustness of MUSIC. Experimental results on both simulated and real data show that we have significantly improved the robustness of MUSIC by using the weighted SCM.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Feng Guo|AUTHOR Feng Guo]]^^1^^, [[Yuhang Cao|AUTHOR Yuhang Cao]]^^2^^, [[Zheng Liu|AUTHOR Zheng Liu]]^^3^^, [[Jiaen Liang|AUTHOR Jiaen Liang]]^^2^^, [[Baoqing Li|AUTHOR Baoqing Li]]^^1^^, [[Xiaobing Yuan|AUTHOR Xiaobing Yuan]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Chinese Academy of Sciences, China; ^^2^^Beijing Unisound Information Technology, China; ^^3^^Huawei Technologies, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1899–1903
</span></p></div>
<div class="cpabstractcardabstract"><p>The differential microphone array (DMA) becomes more and more popular recently. In this paper, we derive the relationship between the direction-of-arrival (DoA) and DMA’s frequency-independent beampatterns. The derivation demonstrates that the DoA can be yielded by solving a trigonometric polynomial. Taking the dipoles as a special case of this relationship, we propose three methods to estimate the DoA based on the dipoles. However, we find these methods are vulnerable to the axial directions under the reverberation environment. Fortunately, they can complement each other owing to their robustness to different angles. Hence, to increase the robustness to the reverberation, we proposed another new approach by combining the advantages of these three dipole-based methods for the speaker DoA estimation. Both simulations and experiments show that the proposed method not only outperforms the traditional methods for small aperture array but also is much more computationally efficient with avoiding the spatial spectrum search.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xianyun Wang|AUTHOR Xianyun Wang]]^^1^^, [[Changchun Bao|AUTHOR Changchun Bao]]^^1^^, [[Feng Bao|AUTHOR Feng Bao]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Beijing University of Technology, China; ^^2^^University of Auckland, New Zealand</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1904–1908
</span></p></div>
<div class="cpabstractcardabstract"><p>In most approaches based on computational auditory scene analysis (CASA), the ideal binary mask (IBM) is often used for noise reduction. However, it is almost impossible to obtain the IBM result. The error in IBM estimation may greatly violate smooth evolution nature of speech because of the energy absence in many speech-dominated time-frequency (T-F) units. To reduce the error, the ideal ratio mask (IRM) via modeling the spatial dependencies of speech spectrum is used as an optimal target mask because the predictive ratio mask is less sensitive to the error than the predictive binary mask. In this paper, we introduce a data field (DF) to model the spatial dependencies of the cochleagram for obtaining the ratio mask. Firstly, initial T-F units of noise and speech are obtained from noisy speech. Then we can calculate the forms of the potentials of noise and speech. Subsequently, their optimal potentials which reflect their respective distribution of potential field are obtained by the optimal influence factors of speech and noise. Finally, we exploit the potentials of speech and noise to obtain the ratio mask. Experimental results show that the proposed method can obtain a better performance than the reference methods in speech quality.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Matt Shannon|AUTHOR Matt Shannon]], [[Gabor Simko|AUTHOR Gabor Simko]], [[Shuo-Yiin Chang|AUTHOR Shuo-Yiin Chang]], [[Carolina Parada|AUTHOR Carolina Parada]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1909–1913
</span></p></div>
<div class="cpabstractcardabstract"><p>In many streaming speech recognition applications such as voice search it is important to determine quickly and accurately when the user has finished speaking their query. A conventional approach to this task is to declare end-of-query whenever a fixed interval of silence is detected by a voice activity detector (VAD) trained to classify each frame as speech or silence. However silence detection and end-of-query detection are fundamentally different tasks, and the criterion used during VAD training may not be optimal. In particular the conventional approach ignores potential acoustic cues such as filler sounds and past speaking rate which may indicate whether a given pause is temporary or query-final. In this paper we present a simple modification to make the conventional VAD training criterion more closely related to end-of-query detection. A unidirectional long short-term memory architecture allows the system to remember past acoustic events, and the training criterion incentivizes the system to learn to use any acoustic cues relevant to predicting future user intent. We show experimentally that this approach improves latency at a given accuracy by around 100 ms for end-of-query detection for voice search.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Di He|AUTHOR Di He]]^^1^^, [[Zuofu Cheng|AUTHOR Zuofu Cheng]]^^2^^, [[Mark Hasegawa-Johnson|AUTHOR Mark Hasegawa-Johnson]]^^3^^, [[Deming Chen|AUTHOR Deming Chen]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Illinois at Urbana-Champaign, USA; ^^2^^Inspirit IoT, USA; ^^3^^University of Illinois at Urbana-Champaign, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1914–1918
</span></p></div>
<div class="cpabstractcardabstract"><p>Detecting human screaming, shouting, and other verbal manifestations of fear and anger are of great interest to security Audio Event Detection (AED) systems. The Internet of Things (IoT) approach allows wide-covering, powerful AED systems to be distributed across the Internet. But a good feature to pre-filter the audio is critical to these systems. This work evaluates the potential of detecting screaming and affective speech using Auditory Roughness and proposes a very light-weight approximation method. Our approximation uses a similar amount of Multiple Add Accumulate (MAA) compared to short-term energy (STE), and at least 10× less MAA than MFCC. We evaluated the performance of our approximated roughness on the Mandarin Affective Speech corpus and a subset of the Youtube AudioSet for screaming against other low-complexity features. We show that our approximated roughness returns higher accuracy.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yi-Chiao Wu|AUTHOR Yi-Chiao Wu]], [[Hsin-Te Hwang|AUTHOR Hsin-Te Hwang]], [[Syu-Siang Wang|AUTHOR Syu-Siang Wang]], [[Chin-Cheng Hsu|AUTHOR Chin-Cheng Hsu]], [[Yu Tsao|AUTHOR Yu Tsao]], [[Hsin-Min Wang|AUTHOR Hsin-Min Wang]]
</p><p class="cpabstractcardaffiliationlist">Academia Sinica, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1953–1957
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a novel difference compensation post-filtering approach based on the locally linear embedding (LLE) algorithm for speech enhancement (SE). The main goal of the proposed post-filtering approach is to further suppress residual noises in SE-processed signals to attain improved speech quality and intelligibility. The proposed system can be divided into offline and online stages. In the offline stage, we prepare paired differences: the estimated difference of SE-processed speech; noisy speech and the ground-truth difference of clean speech; noisy speech. In the online stage, on the basis of estimated difference of a test utterance, we first predict the corresponding ground-truth difference based on the LLE algorithm, and then compensate the noisy speech with the predicted difference. In this study, we integrate a deep denoising autoencoder (DDAE) SE method with the proposed LLE-based difference compensation post-filtering approach. The experiment results reveal that the proposed post-filtering approach obviously enhanced the speech quality and intelligibility of the DDAE-based SE-processed speech in different noise types and signal-to-noise-ratio levels.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Li Li|AUTHOR Li Li]]^^1^^, [[Hirokazu Kameoka|AUTHOR Hirokazu Kameoka]]^^2^^, [[Tomoki Toda|AUTHOR Tomoki Toda]]^^3^^, [[Shoji Makino|AUTHOR Shoji Makino]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Tsukuba, Japan; ^^2^^NTT, Japan; ^^3^^Nagoya University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1998–2002
</span></p></div>
<div class="cpabstractcardabstract"><p>Spectral domain speech enhancement algorithms based on non-negative spectrogram models such as non-negative matrix factorization (NMF) and non-negative matrix factor deconvolution are powerful in terms of signal recovery accuracy, however they do not directly lead to an enhancement in the feature domain (e.g., cepstral domain) or in terms of perceived quality. We have previously proposed a method that makes it possible to enhance speech in the spectral and cepstral domains simultaneously. Although this method was shown to be effective, the devised algorithm was computationally demanding. This paper proposes yet another formulation that allows for a fast implementation by replacing the regularization term with a divergence measure between the NMF model and the mel-generalized cepstral (MGC) representation of the target spectrum. Since the MGC is an auditory-motivated representation of an audio signal widely used in parametric speech synthesis, we also expect the proposed method to have an effect in enhancing the perceived quality. Experimental results revealed the effectiveness of the proposed method in terms of both the signal-to-distortion ratio and the cepstral distance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Danny Websdale|AUTHOR Danny Websdale]], [[Ben Milner|AUTHOR Ben Milner]]
</p><p class="cpabstractcardaffiliationlist">University of East Anglia, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2003–2007
</span></p></div>
<div class="cpabstractcardabstract"><p>This work proposes and compares perceptually motivated loss functions for deep learning based binary mask estimation for speech separation. Previous loss functions have focused on maximising classification accuracy of mask estimation but we now propose loss functions that aim to maximise the hit minus false-alarm (HIT-FA) rate which is known to correlate more closely to speech intelligibility. The baseline loss function is binary cross-entropy (CE), a standard loss function used in binary mask estimation, which maximises classification accuracy. We propose first a loss function that maximises the HIT-FA rate instead of classification accuracy. We then propose a second loss function that is a hybrid between CE and HIT-FA, providing a balance between classification accuracy and HIT-FA rate. Evaluations of the perceptually motivated loss functions with the GRID database show improvements to HIT-FA rate and ESTOI across babble and factory noises. Further tests then explore application of the perceptually motivated loss functions to a larger vocabulary dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Daniel Michelsanti|AUTHOR Daniel Michelsanti]], [[Zheng-Hua Tan|AUTHOR Zheng-Hua Tan]]
</p><p class="cpabstractcardaffiliationlist">Aalborg University, Denmark</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2008–2012
</span></p></div>
<div class="cpabstractcardabstract"><p>Improving speech system performance in noisy environments remains a challenging task, and speech enhancement (SE) is one of the effective techniques to solve the problem. Motivated by the promising results of generative adversarial networks (GANs) in a variety of image processing tasks, we explore the potential of conditional GANs (cGANs) for SE, and in particular, we make use of the image processing framework proposed by Isola et al. [1] to learn a mapping from the spectrogram of noisy speech to an enhanced counterpart. The SE cGAN consists of two networks, trained in an adversarial manner: a generator that tries to enhance the input noisy spectrogram, and a discriminator that tries to distinguish between enhanced spectrograms provided by the generator and clean ones from the database using the noisy spectrogram as a condition. We evaluate the performance of the cGAN method in terms of perceptual evaluation of speech quality (PESQ), short-time objective intelligibility (STOI), and equal error rate (EER) of speaker verification (an example application). Experimental results show that the cGAN method overall outperforms the classical short-time spectral amplitude minimum mean square error (STSA-MMSE) SE algorithm, and is comparable to a deep neural network-based SE approach (DNN-SE).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kaizhi Qian|AUTHOR Kaizhi Qian]]^^1^^, [[Yang Zhang|AUTHOR Yang Zhang]]^^1^^, [[Shiyu Chang|AUTHOR Shiyu Chang]]^^2^^, [[Xuesong Yang|AUTHOR Xuesong Yang]]^^1^^, [[Dinei Flor^encio|AUTHOR Dinei Flor^encio]]^^3^^, [[Mark Hasegawa-Johnson|AUTHOR Mark Hasegawa-Johnson]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Illinois at Urbana-Champaign, USA; ^^2^^IBM, USA; ^^3^^Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2013–2017
</span></p></div>
<div class="cpabstractcardabstract"><p>In recent years, deep learning has achieved great success in speech enhancement. However, there are two major limitations regarding existing works. First, the Bayesian framework is not adopted in many such deep-learning-based algorithms. In particular, the prior distribution for speech in the Bayesian framework has been shown useful by regularizing the output to be in the speech space, and thus improving the performance. Second, the majority of the existing methods operate on the frequency domain of the noisy speech, such as spectrogram and its variations. The clean speech is then reconstructed using the approach of overlap-add, which is limited by its inherent performance upper bound. This paper presents a Bayesian speech enhancement framework, called BaWN (Bayesian WaveNet), which directly operates on raw audio samples. It adopts the recently announced WaveNet, which is shown to be effective in modeling conditional distributions of speech samples while generating natural speech. Experiments show that BaWN is able to recover clean and natural speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xueliang Zhang|AUTHOR Xueliang Zhang]]^^1^^, [[DeLiang Wang|AUTHOR DeLiang Wang]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Inner Mongolia University, China; ^^2^^Ohio State University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2018–2022
</span></p></div>
<div class="cpabstractcardabstract"><p>Supervised learning has exhibited great potential for speech separation in recent years. In this paper, we focus on separating target speech in reverberant conditions from binaural inputs using supervised learning. Specifically, deep neural network (DNN) is constructed to map from both spectral and spatial features to a training target. For spectral features extraction, we first convert binaural inputs into a single signal by applying a fixed beamformer. A new spatial feature is proposed and extracted to complement spectral features. The training target is the recently suggested ideal ratio mask (IRM). Systematic evaluations and comparisons show that the proposed system achieves good separation performance and substantially outperforms existing algorithms under challenging multi-source and reverberant environments.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tudor-Cătălin Zorilă|AUTHOR Tudor-Cătălin Zorilă]], [[Yannis Stylianou|AUTHOR Yannis Stylianou]]
</p><p class="cpabstractcardaffiliationlist">Toshiba Research Europe, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2023–2027
</span></p></div>
<div class="cpabstractcardabstract"><p>Most current techniques for near-end speech intelligibility enhancement have focused on processing clean input signals, however, in realistic environments, the input is often noisy. Processing noisy speech for intelligibility enhancement using algorithms developed for clean signals can lower the perceptual quality of the samples when they are listened in quiet. Here we address the quality loss in these conditions by combining noise reduction with a multi-band version of a state-of-the-art intelligibility enhancer for clean speech that is based on spectral shaping and dynamic range compression (SSDRC). Subjective quality and intelligibility assessments with noisy input speech showed that: (a) In quiet near-end conditions, the proposed system outperformed the baseline SSDRC in terms of Mean Opinion Score (MOS); (b) In speech-shaped near-end noise, the proposed system improved the intelligibility of unprocessed speech by a factor larger than three at the lowest tested signal-to-noise ratio (SNR) however, overall, it yielded lower recognition scores than the standard SSDRC.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hui Zhang|AUTHOR Hui Zhang]], [[Xueliang Zhang|AUTHOR Xueliang Zhang]], [[Guanglai Gao|AUTHOR Guanglai Gao]]
</p><p class="cpabstractcardaffiliationlist">Inner Mongolia University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1958–1962
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech separation can be formulated as a supervised learning problem where a machine is trained to cast the acoustic features of the noisy speech to a time-frequency mask, or the spectrum of the clean speech. These two categories of speech separation methods can be generally referred as the masking-based and the mapping-based methods, but none of them can perfectly estimate the clean speech, since any target can only describe a part of the characteristics of the speech. However, the estimated masks and speech spectrum can, sometimes, be complementary as the speech is described from different perspectives. In this paper, by adopting an ensemble framework, a multi-target deep neural network (DNN) based method is proposed, which combines the masking-based and the mapping-based strategies, and the DNN is trained to jointly estimate the time-frequency masks and the clean spectrum. We show that as expected the mask and speech spectrum based targets yield partly complementary estimates, and the separation performance can be improved by merging these estimates. Furthermore, a merging model trained jointly with the multi-target DNN is developed. Experimental results indicate that the proposed multi-target DNN based method outperforms the DNN based algorithm which optimizes a single target.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Atsunori Ogawa|AUTHOR Atsunori Ogawa]], [[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Tomohiro Nakatani|AUTHOR Tomohiro Nakatani]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1963–1967
</span></p></div>
<div class="cpabstractcardabstract"><p>Example-based speech enhancement is a promising single-channel approach for coping with highly nonstationary noise. Given a noisy speech input, it first searches in a noisy speech corpus for the noisy speech examples that best match the input. Then, it concatenates the clean speech examples that are paired with the matched noisy examples to obtain an estimate of the underlying clean speech component in the input. The quality of the enhanced speech depends on how accurate an example search can be performed given a noisy speech input. The example search is conventionally performed using a Gaussian mixture model (GMM) with mel-frequency cepstral coefficient features (MFCCs). To improve the noise robustness of the GMM-based example search, instead of using noise sensitive MFCCs, we have proposed using bottleneck features (BNFs), which are extracted from a deep neural network-based acoustic model (DNN-AM) built for automatic speech recognition. In this paper, instead of using a GMM with noise robust BNFs, we propose the direct use of a DNN-AM in the example search to further improve its noise robustness. Experimental results on the Aurora4 corpus show that the DNN-AM-based example search steadily improves the enhanced speech quality compared with the GMM-based example search using BNFs.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Femke B. Gelderblom|AUTHOR Femke B. Gelderblom]], [[Tron V. Tronstad|AUTHOR Tron V. Tronstad]], [[Erlend Magnus Viggen|AUTHOR Erlend Magnus Viggen]]
</p><p class="cpabstractcardaffiliationlist">SINTEF, Norway</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1968–1972
</span></p></div>
<div class="cpabstractcardabstract"><p>Recent literature indicates increasing interest in deep neural networks for use in speech enhancement systems. Currently, these systems are mostly evaluated through objective measures of speech quality and/or intelligibility. Subjective intelligibility evaluations of these systems have so far not been reported. In this paper we report the results of a speech recognition test with 15 participants, where the participants were asked to pick out words in background noise before and after enhancement using a common deep neural network approach. We found that, although the objective measure STOI predicts that intelligibility should improve or at the very least stay the same, the speech recognition threshold, which is a measure of intelligibility, deteriorated by 4 dB. These results indicate that STOI is not a good predictor for the subjective intelligibility of deep neural network-based speech enhancement systems. We also found that the postprocessing technique of global variance normalisation does not significantly affect subjective intelligibility.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Maria Koutsogiannaki|AUTHOR Maria Koutsogiannaki]]^^1^^, [[Holly Francois|AUTHOR Holly Francois]]^^2^^, [[Kihyun Choo|AUTHOR Kihyun Choo]]^^3^^, [[Eunmi Oh|AUTHOR Eunmi Oh]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^BCBL, Spain; ^^2^^Samsung Electronics, UK; ^^3^^Samsung Electronics, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1973–1977
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, a novel approach is introduced for performing real-time speech modulation enhancement to increase speech intelligibility in noise. The proposed modulation enhancement technique operates independently in the frequency and time domains. In the frequency domain, a compression function is used to perform energy reallocation within a frame. This compression function contains novel scaling operations to ensure speech quality. In the time domain, a mathematical equation is introduced to reallocate energy from the louder to the quieter parts of the speech. This proposed mathematical equation ensures that the long-term energy of the speech is preserved independently of the amount of compression, hence gaining full control of the time-energy reallocation in real-time. Evaluations on intelligibility and quality show that the suggested approach increases the intelligibility of speech while maintaining the overall energy and quality of the speech signal.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hans-Günter Hirsch|AUTHOR Hans-Günter Hirsch]], [[Michael Gref|AUTHOR Michael Gref]]
</p><p class="cpabstractcardaffiliationlist">Hochschule Niederrhein, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1978–1982
</span></p></div>
<div class="cpabstractcardabstract"><p>Neural networks have proven their ability to be usefully applied as component of a speech enhancement system. This is based on the known feature of neural nets to map regions inside a feature space to other regions. It can be taken to map noisy magnitude spectra to clean spectra. This way the net can be used to substitute an adaptive filtering in the spectral domain. We set up such a system and compared its performance against a known adaptive filtering approach in terms of speech quality and in terms of recognition rate. It is a still not fully answered question how far the speech quality can be enhanced by modifying not only the magnitude but also the spectral phase and how this phase modification could be realized. Before trying to use a neural network for a possible modification of the phase spectrum we ran a set of oracle experiments to find out how far the quality can be improved by modifying the magnitude and/or the phase spectrum in voiced segments. It turns out that the simultaneous modification of magnitude and phase spectrum has the potential for a considerable improvement of the speech quality in comparison to modifying the magnitude or the phase only.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Robert Rehr|AUTHOR Robert Rehr]], [[Timo Gerkmann|AUTHOR Timo Gerkmann]]
</p><p class="cpabstractcardaffiliationlist">Technische Universität Hamburg-Harburg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1983–1987
</span></p></div>
<div class="cpabstractcardabstract"><p>For single-channel speech enhancement, most commonly, the noisy observation is described as the sum of the clean speech signal and the noise signal. For machine learning based enhancement schemes where speech and noise are modeled in the log-spectral domain, however, the log-spectrum of the noisy observation can be described as the maximum of the speech and noise log-spectrum to simplify statistical inference. This approximation is referred to as MixMax model or log-max approximation. In this paper, we show how this approximation can be used in combination with non-trained, blind speech and noise power estimators derived in the spectral domain. Our findings allow to interpret the MixMax based clean speech estimator as a super-Gaussian log-spectral amplitude estimator. This MixMax based estimator is embedded in a pre-trained speech enhancement scheme and compared to a log-spectral amplitude estimator based on an additive mixing model. Instrumental measures indicate that the MixMax based estimator causes less musical tones while it virtually yields the same quality for the enhanced speech signal.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ricard Marxer|AUTHOR Ricard Marxer]], [[Jon Barker|AUTHOR Jon Barker]]
</p><p class="cpabstractcardaffiliationlist">University of Sheffield, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1988–1992
</span></p></div>
<div class="cpabstractcardabstract"><p>In recent years, speech enhancement by analysis-resynthesis has emerged as an alternative to conventional noise filtering approaches. Analysis-resynthesis replaces noisy speech with a signal that has been reconstructed from a clean speech model. It can deliver high-quality signals with no residual noise, but at the expense of losing information from the original signal that is not well-represented by the model. A recent compromise solution, called constrained resynthesis, solves this problem by only resynthesising spectro-temporal regions that are estimated to be masked by noise (conditioned on the evidence in the unmasked regions). In this paper we first extend the approach by: i) introducing multi-condition training and a deep discriminative model for the analysis stage; ii) introducing an improved resynthesis model that captures within-state cross-frequency dependencies. We then extend the previous stationary-noise evaluation by using real domestic audio noise from the CHiME-2 evaluation. We compare various mask estimation strategies while varying the degree of constraint by tuning the threshold for reliable speech detection. PESQ and log-spectral distance measures show that although mask estimation remains a challenge, it is only necessary to estimate a few reliable signal regions in order to achieve performance close to that achieved with an optimal oracle mask.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Se Rim Park|AUTHOR Se Rim Park]]^^1^^, [[Jin Won Lee|AUTHOR Jin Won Lee]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Carnegie Mellon University, USA; ^^2^^Qualcomm, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1993–1997
</span></p></div>
<div class="cpabstractcardabstract"><p>The presence of babble noise degrades hearing intelligibility of human speech greatly. However, removing the babble without creating artifacts in human speech is a challenging task in a low SNR environment. Here, we sought to solve the problem by finding a ‘mapping’ between noisy speech spectra and clean speech spectra via supervised learning. Specifically, we propose using fully Convolutional Neural Networks, which consist of lesser number of parameters than fully connected networks. The proposed network, Redundant Convolutional Encoder Decoder (R-CED), demonstrates that a convolutional network can be 12 times smaller than a recurrent network and yet achieves better performance, which shows its applicability for an embedded system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ralf Meermeier|AUTHOR Ralf Meermeier]], [[Sean Colbath|AUTHOR Sean Colbath]]
</p><p class="cpabstractcardaffiliationlist">Raytheon BBN Technologies, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2028–2029
</span></p></div>
<div class="cpabstractcardabstract"><p>As a follow-up to our paper at Interspeech 2016 [1], we propose to showcase various applications that now all use BBN’s Sage Speech Processing Platform, demonstrating the platform’s versatility and ease of integration.
In particular, we will showcase 1) BBN TransTalk: A turn-based speech-to-speech translation program running entirely on an Android smartphone, alongside a custom 3D-printed peripheral for it. 2) A continuous transcription and translation application running on a Raspberry Pi. 3) An offline OCR application utilizing Sage, running on a COTS Windows laptop.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Milos Cernak|AUTHOR Milos Cernak]], [[Alain Komaty|AUTHOR Alain Komaty]], [[Amir Mohammadi|AUTHOR Amir Mohammadi]], [[André Anjos|AUTHOR André Anjos]], [[Sébastien Marcel|AUTHOR Sébastien Marcel]]
</p><p class="cpabstractcardaffiliationlist">Idiap Research Institute, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2030–2031
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper introduces and demonstrates Kaldi integration into Bob signal-processing and machine learning toolbox. The motivation for this integration is two-fold. Firstly, Bob benefits from using advanced speech processing tools developed in Kaldi. Secondly, Kaldi benefits from using complementary Bob modules, such as modulation-based VAD with an adaptive thresholding. In addition, Bob is designed as an open science tool, and this integration might offer to the Kaldi speech community a framework for better reproducibility of state-of-the-art research results.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Michał Lenarczyk|AUTHOR Michał Lenarczyk]]
</p><p class="cpabstractcardaffiliationlist">Polish Academy of Sciences, Poland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2032–2033
</span></p></div>
<div class="cpabstractcardabstract"><p>Pitch shifting in speech is presented based on the use of the phase vocoder in combination with spectral whitening and envelope reconstruction, applied respectively before and after the transformation. A band preservation technique is introduced to contain quality degradation when downscaling the pitch. The transposition ratio is fixed in advance by selecting analysis and synthesis window sizes. Real time performance is demonstrated for window sizes having adequate factorization required by fast Fourier transformation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nivedita Chennupati|AUTHOR Nivedita Chennupati]], [[B.H.V.S. Narayana Murthy|AUTHOR B.H.V.S. Narayana Murthy]], [[B. Yegnanarayana|AUTHOR B. Yegnanarayana]]
</p><p class="cpabstractcardaffiliationlist">IIIT Hyderabad, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2034–2035
</span></p></div>
<div class="cpabstractcardabstract"><p>Multi-speaker separation is necessary to increase intelligibility of speech signals or to improve accuracy of speech recognition systems. Ideal binary mask (IBM) has set a gold standard for speech separation by suppressing the undesired speakers and also by increasing intelligibility of the desired speech. In this work, single frequency filtering (SFF) analysis is used to estimate the mask closer to IBM for speaker separation. The SFF analysis gives good temporal resolution for extracting features such as glottal closure instants (GCIs), and high spectral resolution for resolving harmonics. The temporal resolution in SFF gives impulse locations, which are used to calculate the time delay. The delay compensation between two microphone signals reinforces the impulses corresponding to one of the speakers. The spectral resolution of the SFF is exploited to estimate the masks using the SFF magnitude spectra on the enhanced impulse-like sequence corresponding to one of the speakers. The estimated mask is used to refine the SFF magnitude. The refined SFF magnitude along with the phase of the mixed microphone signal is used to obtain speaker separation. Performance of proposed algorithm is demonstrated using multi-speaker data collected in a real room environment.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Georg Stemmer|AUTHOR Georg Stemmer]]^^1^^, [[Munir Georges|AUTHOR Munir Georges]]^^1^^, [[Joachim Hofer|AUTHOR Joachim Hofer]]^^1^^, [[Piotr Rozen|AUTHOR Piotr Rozen]]^^2^^, [[Josef Bauer|AUTHOR Josef Bauer]]^^1^^, [[Jakub Nowicki|AUTHOR Jakub Nowicki]]^^2^^, [[Tobias Bocklet|AUTHOR Tobias Bocklet]]^^1^^, [[Hannah R. Colett|AUTHOR Hannah R. Colett]]^^3^^, [[Ohad Falik|AUTHOR Ohad Falik]]^^4^^, [[Michael Deisher|AUTHOR Michael Deisher]]^^3^^, [[Sylvia J. Downing|AUTHOR Sylvia J. Downing]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Intel, Germany; ^^2^^Intel, Poland; ^^3^^Intel, USA; ^^4^^Intel, Israel</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2036–2037
</span></p></div>
<div class="cpabstractcardabstract"><p>A smart home controller that responds to natural language input is demonstrated on an Intel embedded processor. This device contains two DSP cores and a neural network co-processor which share 4MB SRAM. An embedded configuration of the Intel RealSpeech speech recognizer and intent extraction engine runs on the DSP cores with neural network operations offloaded to the co-processor. The prototype demonstrates that continuous speech recognition and understanding is possible on hardware with very low power consumption. As an example application, control of lights in a home via natural language is shown. An Intel development kit is demonstrated together with a set of tools. Conference attendees are encouraged to interact with the demo and development system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sho Tsuji|AUTHOR Sho Tsuji]]^^1^^, [[Christina Bergmann|AUTHOR Christina Bergmann]]^^2^^, [[Molly Lewis|AUTHOR Molly Lewis]]^^3^^, [[Mika Braginsky|AUTHOR Mika Braginsky]]^^4^^, [[Page Piccinini|AUTHOR Page Piccinini]]^^5^^, [[Michael C. Frank|AUTHOR Michael C. Frank]]^^6^^, [[Alejandrina Cristia|AUTHOR Alejandrina Cristia]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Pennsylvania, USA; ^^2^^LSCP (UMR 8554), France; ^^3^^University of Chicago, USA; ^^4^^MIT, USA; ^^5^^NPI (U955 E01), France; ^^6^^Stanford University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2038–2039
</span></p></div>
<div class="cpabstractcardabstract"><p>MetaLab is a growing database of meta-analyses, shared in a github repository and via an interactive website. This website contains interactive tools for community-augmented meta-analyses, power analyses, and experimental planning. It currently contains a dozen meta-analyses spanning a number of phenomena in early language acquisition research, including infants’ vowel discrimination, acoustic wordform segmentation, and distributional learning in the laboratory. During the Show and Tell, we will demonstrate how to use the online visualization tools, download data, and re-use our analysis scripts for other research purposes. We expect MetaLab data to be particularly useful to researchers interested in early speech perception. Additionally, the infrastructure and tools can be adopted by speech scientists seeking to perform and utilize (meta-)meta-analyses in other fields.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Adrien Daniel|AUTHOR Adrien Daniel]]
</p><p class="cpabstractcardaffiliationlist">NXP Semiconductors, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2040–2041
</span></p></div>
<div class="cpabstractcardabstract"><p>The paper describes a neuroevolution-based novel approach to train recurrent neural networks that can process and classify audio directly from the raw waveform signal, without any assumption on the signal itself, on the features that should be extracted, or on the required network topology to perform the task. Resulting networks are relatively small in memory size, and their usage in a streaming fashion makes them particularly suited to embedded real-time applications.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Milana Milošević|AUTHOR Milana Milošević]]^^1^^, [[Ulrike Glavitsch|AUTHOR Ulrike Glavitsch]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Belgrade, Serbia; ^^2^^EMPA, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2042–2043
<a href="./IS2017/MEDIA/2032" class="externallinkbutton" target="_blank">{{$:/causal/Multimedia Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>In most speaker recognition systems speech utterances are not constrained in content or language. In a text-dependent speaker recognition system lexical content of speech and language are known in advance. The goal of this paper is to show that this information can be used by a segmental features (SF) approach to improve a standard Gaussian mixture model with MFCC features (GMM-MFCC). Speech features such as mean energy, delta energy, pitch, delta pitch, the formants F1–F4 and their bandwidths B1–B4 and the difference between F2 and F1 are calculated on segments and are associated to phonemes and phoneme groups for each speaker. The SF and GMM-MFCC approaches are combined by multiplying the outputs of two classifiers. All the experiments are performed on the two versions of TEVOID: TEVOID16 with 16 and the upgraded TEVOID50 with 50 speakers. On TEVOID16, SF achieves 84.23%, GMM-MFCC 91.75%, and the combined approach gives 95.12% recognition rate. On TEVOID50, the SF approach gives 68.69%, while both GMM-MFCC and the combined model achieve 95.84% recognition rate. On both databases, the number of male/female confusions decreased for the combined model. These results are promising for using segmental features to improve the recognition rate of text-dependent systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gerhard Hagerer|AUTHOR Gerhard Hagerer]]^^1^^, [[Nicholas Cummins|AUTHOR Nicholas Cummins]]^^2^^, [[Florian Eyben|AUTHOR Florian Eyben]]^^1^^, [[Björn Schuller|AUTHOR Björn Schuller]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^audEERING, Germany; ^^2^^Universität Passau, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2044–2045
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper we describe a mobile and wearable devices app that recognises laughter from speech in real-time. The laughter detection is based on a deep neural network architecture, which runs smoothly and robustly, even natively on a smartwatch. Further, this paper presents results demonstrating that our approach achieves state-of-the-art laughter detection performance on the SSPNet Vocalization Corpus (SVC) from the 2013 Interspeech Computational Paralinguistics Challenge Social Signals Sub-Challenge. As this technology is tailored for mobile and wearable devices, it enables and motivates many new use cases, for example, deployment in health care settings such as laughter tracking for psychological coaching, depression monitoring, and therapies.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kwang Myung Jeon|AUTHOR Kwang Myung Jeon]], [[Nam Kyun Kim|AUTHOR Nam Kyun Kim]], [[Chan Woong Kwak|AUTHOR Chan Woong Kwak]], [[Jung Min Moon|AUTHOR Jung Min Moon]], [[Hong Kook Kim|AUTHOR Hong Kook Kim]]
</p><p class="cpabstractcardaffiliationlist">GIST, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2046–2047
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech broadcasting via loudspeakers is widely used in public transportation to send broadcast notifications. However, listeners often fail to catch spoken context from speech broadcasts due to excessive environmental noise. We propose an ultrasonic communication method that can be applied to loudspeaker-based speech broadcasting to cope with this issue. In other words, text notifications are modulated and carried over low-frequency ultrasonic waves through loudspeakers to the microphones of each potential listener’s mobile device. Then, the received ultrasonic stream is demodulated back into the text and the listener hears the notification context by a text-to-speech engine embedded in each mobile device. Such a transmission system is realized with a 20 kHz carrier frequency because it is inaudible to most listeners but capable of being used in communication between a loudspeaker and microphone. In addition, the performance of the proposed ultrasonic communication method is evaluated by measuring the success rate of transmitted words under various signal-to-noise ratio conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sean U.N. Wood|AUTHOR Sean U.N. Wood]], [[Jean Rouat|AUTHOR Jean Rouat]]
</p><p class="cpabstractcardaffiliationlist">Université de Sherbrooke, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2048–2049
</span></p></div>
<div class="cpabstractcardabstract"><p>We demonstrate a real-time, open source implementation of the online GCC-NMF stereo speech enhancement algorithm. While the system runs on a variety of operating systems and hardware platforms, we highlight its potential for real-world mobile use by presenting it on two embedded systems: the Raspberry Pi 3 and the NVIDIA Jetson TX1. The effect of various algorithm parameters on subjective enhancement quality may be explored interactively via a graphical user interface, with the results heard in real-time. The trade-off between interference suppression and target fidelity is controlled by manipulating the parameters of the coefficient masking function. Increasing the pre-learned dictionary size improves overall speech enhancement quality at increased computational cost. We show that real-time GCC-NMF has potential for real-world application, remaining purely unsupervised and retaining the simplicity and flexibility of offline GCC-NMF.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Aku Rouhe|AUTHOR Aku Rouhe]], [[Reima Karhila|AUTHOR Reima Karhila]], [[Peter Smit|AUTHOR Peter Smit]], [[Mikko Kurimo|AUTHOR Mikko Kurimo]]
</p><p class="cpabstractcardaffiliationlist">Aalto University, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2050–2051
</span></p></div>
<div class="cpabstractcardabstract"><p>We describe a recognition, validation and segmentation system as an intelligent preprocessor for automatic pronunciation evaluation. The system is developed for large-scale high stake foreign language tests, where it is necessary to reduce human workload and ensure fair evaluation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Brian Stasak|AUTHOR Brian Stasak]]^^1^^, [[Julien Epps|AUTHOR Julien Epps]]^^1^^, [[Roland Goecke|AUTHOR Roland Goecke]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of New South Wales, Australia; ^^2^^University of Canberra, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 834–838
</span></p></div>
<div class="cpabstractcardabstract"><p>Assessment of neurological and psychiatric disorders like depression is unusual from a speech processing perspective, in that speakers can be prompted or instructed in what they should say (e.g. as part of a clinical assessment). Despite prior speech-based depression studies that have used a variety of speech elicitation methods, there has been little evaluation of the best elicitation mode. One approach to understand this better is to analyze an existing database from the perspective of articulation effort, word affect, and linguistic complexity measures as proxies for depression sub-symptoms (e.g. psychomotor retardation, negative stimulus suppression, cognitive impairment). Here a novel measure for quantifying articulation effort is introduced, and when applied experimentally to the DAIC corpus shows promise for identifying speech data that are more discriminative of depression. Interestingly, experiment results demonstrate that by selecting speech with higher articulation effort, linguistic complexity, or word-based arousal/valence, improvements in acoustic speech-based feature depression classification performance can be achieved, serving as a guide for future elicitation design.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[José Novoa|AUTHOR José Novoa]]^^1^^, [[Jorge Wuth|AUTHOR Jorge Wuth]]^^1^^, [[Juan Pablo Escudero|AUTHOR Juan Pablo Escudero]]^^1^^, [[Josué Fredes|AUTHOR Josué Fredes]]^^1^^, [[Rodrigo Mahu|AUTHOR Rodrigo Mahu]]^^1^^, [[Richard M. Stern|AUTHOR Richard M. Stern]]^^2^^, [[Nestor Becerra Yoma|AUTHOR Nestor Becerra Yoma]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universidad de Chile, Chile; ^^2^^Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 839–843
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper addresses the problem of time-varying channels in speech-recognition-based human-robot interaction using Locally-Normalized Filter-Bank features (LNFB), and training strategies that compensate for microphone response and room acoustics. Testing utterances were generated by re-recording the Aurora-4 testing database using a PR2 mobile robot, equipped with a Kinect audio interface while performing head rotations and movements toward and away from a fixed source. Three training conditions were evaluated called Clean, 1-IR and 33-IR. With Clean training, the DNN-HMM system was trained using the Aurora-4 clean training database. With 1-IR training, the same training data were convolved with an impulse response estimated at one meter from the source with no rotation of the robot head. With 33-IR training, the Aurora-4 training data were convolved with impulse responses estimated at one, two and three meters from the source and 11 angular positions of the robot head. The 33-IR training method produced reductions in WER greater than 50% when compared with Clean training using both LNFB and conventional Mel filterbank features. Nevertheless, LNFB features provided a WER 23% lower than MelFB using 33-IR training. The use of 33-IR training and LNFB features reduced WER by 64% compared to Clean training and MelFB features.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bekir Berker Türker|AUTHOR Bekir Berker Türker]], [[Zana Buçinca|AUTHOR Zana Buçinca]], [[Engin Erzin|AUTHOR Engin Erzin]], [[Yücel Yemez|AUTHOR Yücel Yemez]], [[Metin Sezgin|AUTHOR Metin Sezgin]]
</p><p class="cpabstractcardaffiliationlist">Koç Üniversitesi, Turkey</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 844–848
</span></p></div>
<div class="cpabstractcardabstract"><p>We explore the effect of laughter perception and response in terms of engagement in human-robot interaction. We designed two distinct experiments in which the robot has two modes: laughter responsive and laughter non-responsive. In responsive mode, the robot detects laughter using a multimodal real-time laughter detection module and invokes laughter as a backchannel to users accordingly. In non-responsive mode, the robot has no utilization of detection, thus provides no feedback. In the experimental design, we use a straightforward question-answer based interaction scenario using a back-projected robot head. We evaluate the interactions with objective and subjective measurements of engagement and user experience.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alice Baird|AUTHOR Alice Baird]]^^1^^, [[Shahin Amiriparian|AUTHOR Shahin Amiriparian]]^^1^^, [[Nicholas Cummins|AUTHOR Nicholas Cummins]]^^1^^, [[Alyssa M. Alcorn|AUTHOR Alyssa M. Alcorn]]^^2^^, [[Anton Batliner|AUTHOR Anton Batliner]]^^1^^, [[Sergey Pugachevskiy|AUTHOR Sergey Pugachevskiy]]^^1^^, [[Michael Freitag|AUTHOR Michael Freitag]]^^1^^, [[Maurice Gerczuk|AUTHOR Maurice Gerczuk]]^^1^^, [[Björn Schuller|AUTHOR Björn Schuller]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität Passau, Germany; ^^2^^University College London, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 849–853
</span></p></div>
<div class="cpabstractcardabstract"><p>Humanoid robots have in recent years shown great promise for supporting the educational needs of children on the autism spectrum. To further improve the efficacy of such interactions, user-adaptation strategies based on the individual needs of a child are required. In this regard, the proposed study assesses the suitability of a range of speech-based classification approaches for automatic detection of autism severity according to the commonly used Social Responsiveness Scale second edition (SRS-2). Autism is characterised by socialisation limitations including child language and communication ability. When compared to neurotypical children of the same age these can be a strong indication of severity. This study introduces a novel dataset of 803 utterances recorded from 14 autistic children aged between 4–10 years, during Wizard-of-Oz interactions with a humanoid robot. Our results demonstrate the suitability of support vector machines (SVMs) which use acoustic feature sets from multiple Interspeech COMPARE challenges. We also evaluate deep spectrum features, extracted via an image classification convolutional neural network (CNN) from the spectrogram of autistic speech instances. At best, by using SVMs on the acoustic feature sets, we achieved a UAR of 73.7% for the proposed 3-class task. </p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Catharine Oertel|AUTHOR Catharine Oertel]], [[Patrik Jonell|AUTHOR Patrik Jonell]], [[Dimosthenis Kontogiorgos|AUTHOR Dimosthenis Kontogiorgos]], [[Joseph Mendelson|AUTHOR Joseph Mendelson]], [[Jonas Beskow|AUTHOR Jonas Beskow]], [[Joakim Gustafson|AUTHOR Joakim Gustafson]]
</p><p class="cpabstractcardaffiliationlist">KTH, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 854–858
</span></p></div>
<div class="cpabstractcardabstract"><p>Feedback generation is an important component of human-human communication. Humans can choose to signal support, understanding, agreement or also scepticism by means of feedback tokens. Many studies have focused on the timing of feedback behaviours. In the current study, however, we keep the timing constant and instead focus on the lexical form and prosody of feedback tokens as well as their sequential patterns.
For this we crowdsourced participant’s feedback behaviour in identical interactional contexts in order to model a virtual agent that is able to provide feedback as an attentive/supportive as well as attentive/sceptical listener. The resulting models were realised in a robot which was evaluated by third-party observers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Leonardo Lancia|AUTHOR Leonardo Lancia]]^^1^^, [[Thierry Chaminade|AUTHOR Thierry Chaminade]]^^2^^, [[Noël Nguyen|AUTHOR Noël Nguyen]]^^3^^, [[Laurent Prévot|AUTHOR Laurent Prévot]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LPP (UMR 7018), France; ^^2^^Institut de Neuroscience de la Timone, France; ^^3^^LPL (UMR 7309), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 859–863
</span></p></div>
<div class="cpabstractcardabstract"><p>According to accounts of inter-speaker coordination based on internal predictive models, speakers tend to imitate each other each time they need to coordinate their behavior. According to accounts based on the notion of dynamical coupling, imitation should be observed only if it helps stabilize the specific coordinative pattern produced by the interlocutors or if it is a direct consequence of inter-speaker coordination. To compare these accounts, we implemented an artificial agent designed to repeat a speech utterance while coordinating its behavior with that of a human speaker performing the same task. We asked 10 Italian speakers to repeat the utterance /topkop/ simultaneously with the agent during short time intervals. In some interactions, the agent was parameterized to cooperate with the speakers (by producing its syllables simultaneously with those of the human) while in others it was parameterized to compete with them (by producing its syllables in-between those of the human). A positive correlation between the stability of inter-speaker coordination and the degree of f0 imitation was observed only in cooperative interactions. However, in line with accounts based on prediction, speakers imitate the f0 of the agent regardless of whether this is parameterized to cooperate or to compete with them.</p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} </span></p></div>
<div class="cpabstractcardabstract"><p>(No abstract available at the time of publication)</p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} </span></p></div>
<div class="cpabstractcardabstract"><p>(No abstract available at the time of publication)</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Samuel Delalez|AUTHOR Samuel Delalez]]^^1^^, [[Christophe d’Alessandro|AUTHOR Christophe d’Alessandro]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LIMSI, France; ^^2^^∂’Alembert (UMR 7190), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 864–868
<a href="./IS2017/MEDIA/0396" class="externallinkbutton" target="_blank">{{$:/causal/Multimedia Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>Performative time and pitch scaling is a new research paradigm for prosodic analysis by synthesis. In this paper, a system for real-time recorded speech time and pitch scaling by the means of hands or feet gestures is designed and evaluated. Pitch is controlled with the preferred hand, using a stylus on a graphic tablet. Time is controlled using rhythmic frames, or constriction gestures, defined by pairs of control points. The “Arsis” corresponds to the constriction (weak beat of the syllable) and the “Thesis” corresponds to the vocalic nucleus (strong beat of the syllable). This biphasic control of rhythmic units is performed by the non-preferred hand using a button. Pitch and time scales are modified according to these gestural controls with the help of a real-time pitch synchronous overlap-add technique (RT-PSOLA). Rhythm and pitch control accuracy are assessed in a prosodic imitation experiment: the task is to reproduce intonation and rhythm of various sentences. The results show that inter-vocalic durations differ on average by only 20 ms. The system appears as a new and effective tool for performative speech and singing synthesis. Consequences and applications in speech prosody research are discussed.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Raheleh Saryazdi|AUTHOR Raheleh Saryazdi]], [[Craig G. Chambers|AUTHOR Craig G. Chambers]]
</p><p class="cpabstractcardaffiliationlist">University of Toronto, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 869–873
</span></p></div>
<div class="cpabstractcardabstract"><p>In conversation, speakers spontaneously produce manual gestures that can facilitate listeners’ comprehension of speech. However, various factors may affect listeners’ ability to use gesture cues. Here we examine a situation where a speaker is referring to physical objects in the contextual here-and-now. In this situation, objects for potential reference will compete with gestures for visual attention. In two experiments, a speaker provided instructions to pick up objects in the visual environment (“Pick up the candy”). On some trials, the speaker produced a “pick up” gesture that reflected the size/shape of the target object. Gaze position was recorded to evaluate how listeners allocated attention to scene elements. Experiment 1 showed that, although iconic gestures (when present) were rarely fixated directly, peripheral uptake of these cues speeded listeners’ visual identification of intended referents as the instruction unfolded. However, the benefit was mild and occurred primarily for small/hard-to-identify objects. In Experiment 2, background noise was added to reveal whether challenging auditory environments lead listeners to allocate additional visual attention to gesture cues in a compensatory manner. Interestingly, background noise actually reduced listeners’ use of gesture cues. Together the findings highlight how situational factors govern the use of visual cues during multimodal communication.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Carlos Ishi|AUTHOR Carlos Ishi]], [[Takashi Minato|AUTHOR Takashi Minato]], [[Hiroshi Ishiguro|AUTHOR Hiroshi Ishiguro]]
</p><p class="cpabstractcardaffiliationlist">ATR HIL, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 874–878
</span></p></div>
<div class="cpabstractcardabstract"><p>The background of our research is the generation of natural human-like motions during speech in android robots that have a highly human-like appearance. Mismatches in speech and motion are sources of unnaturalness, especially when emotion expressions are involved. Surprise expressions often occur in dialogue interactions, and they are often accompanied by verbal interjectional utterances. In this study, we analyze facial, head and body motions during several types of vocalized surprise expressions appearing in human-human dialogue interactions. The analysis results indicate an inter-dependence between motion types and different types of surprise expression (such as emotional, social or quoted) as well as different degrees of surprise expression. The synchronization between motion and surprise utterances is also analyzed.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Robin Ruede|AUTHOR Robin Ruede]]^^1^^, [[Markus Müller|AUTHOR Markus Müller]]^^1^^, [[Sebastian Stüker|AUTHOR Sebastian Stüker]]^^1^^, [[Alex Waibel|AUTHOR Alex Waibel]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^KIT, Germany; ^^2^^KIT, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 879–883
</span></p></div>
<div class="cpabstractcardabstract"><p>Backchannel responses like “uh-huh”, “yeah”, “right” are used by the listener in a social dialog as a way to provide feedback to the speaker. In the context of human-computer interaction, these responses can be used by an artificial agent to build rapport in conversations with users. In the past, multiple approaches have been proposed to detect backchannel cues and to predict the most natural timing to place those backchannel utterances. Most of these are based on manually optimized fixed rules, which may fail to generalize. Many systems rely on the location and duration of pauses and pitch slopes of specific lengths. In the past, we proposed an approach by training artificial neural networks on acoustic features such as pitch and power and also attempted to add word embeddings via word2vec. In this work, we refined this approach by evaluating different methods to add timed word embeddings via word2vec. Comparing the performance using various feature combinations, we could show that adding linguistic features improves the performance over a prediction system that only uses acoustic features.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Eran Raveh|AUTHOR Eran Raveh]], [[Ingmar Steiner|AUTHOR Ingmar Steiner]], [[Bernd Möbius|AUTHOR Bernd Möbius]]
</p><p class="cpabstractcardaffiliationlist">Universität des Saarlandes, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 884–888
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper introduces a model for segment-level phonetic responsiveness. It is based on behavior observed in human-human interaction, and is designed to be integrated into spoken dialogue systems to capture potential phonetic variation and simulate convergence capabilities. Each step in the process is responsible for an aspect of the interaction, including monitoring the input speech and appropriately analyzing it. Various parameters can be tuned to configure the speech handling and adjust the response style. Evaluation was performed by simulating simple end-to-end dialogue scenarios, including analyzing the synthesized output of the model. The results show promising ground for further extensions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Eustace Ebhotemhen|AUTHOR Eustace Ebhotemhen]], [[Volha Petukhova|AUTHOR Volha Petukhova]], [[Dietrich Klakow|AUTHOR Dietrich Klakow]]
</p><p class="cpabstractcardaffiliationlist">Universität des Saarlandes, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 889–893
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a machine learning based approach to incremental dialogue act classification with a focus on the recognition of communicative functions associated with dialogue segments in a multidimensional space, as defined in the ISO 24617-2 dialogue act annotation standard. The main goal is to establish the nature of an increment whose processing will result in a reliable overall system performance. We explore scenarios where increments are tokens or syntactically, semantically or prosodically motivated chunks. Combining local classification with meta-classifiers at a late fusion decision level, we obtained state-of-the-art classification performance. Experiments were carried out on manually corrected transcriptions and on potentially erroneous ASR output. Chunk-based classification yields better results on the manual transcriptions, whereas token-based classification shows a more robust performance on the ASR output. It is also demonstrated that layered hierarchical and cascade training procedures result in better classification performance than the single-layered approach based on a joint classification predicting complex class labels.</p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} </span></p></div>
<div class="cpabstractcardabstract"><p>(No abstract available at the time of publication)</p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} </span></p></div>
<div class="cpabstractcardabstract"><p>(No abstract available at the time of publication)</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Oliver Niebuhr|AUTHOR Oliver Niebuhr]]
</p><p class="cpabstractcardaffiliationlist">University of Southern Denmark, Denmark</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 894–898
</span></p></div>
<div class="cpabstractcardabstract"><p>Research on speech reduction is primarily concerned with analyzing, modeling, explaining, and, ultimately, predicting phonetic variation. That is, the focus is on the speech signal itself. The present paper adds a little side note to this fundamental line of research by addressing the question whether variation in the degree of reduction also has a systematic effect on the attributes we ascribe to the speaker who produces the speech signal. A perception experiment was carried out for German in which 46 listeners judged whether or not speakers showing 3 different combinations of segmental and prosodic reduction levels (unreduced, moderately reduced, strongly reduced) are appropriately described by 13 physical, social, and cognitive attributes. The experiment shows that clear speech is not mere speech, and less clear speech is not just reduced either. Rather, results revealed a complex interplay of reduction levels and perceived speaker attributes in which moderate reduction can make a better impression on listeners than no reduction. In addition to its relevance in reduction models and theories, this interplay is instructive for various fields of speech application from social robotics to charisma coaching.</p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} </span></p></div>
<div class="cpabstractcardabstract"><p>(No abstract available at the time of publication)</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Charlotte Kouklia|AUTHOR Charlotte Kouklia]], [[Nicolas Audibert|AUTHOR Nicolas Audibert]]
</p><p class="cpabstractcardaffiliationlist">LPP (UMR 7018), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 899–903
</span></p></div>
<div class="cpabstractcardabstract"><p>This study investigates the relationship between perceived hostility and speech timing features within extracts from Montreuil’s City Council sessions in 2013, marked by a tense political context at this time. A dataset of 118 speech extracts from the mayor (Dominique Voynet) and four of her political opponents during the City Council has been analyzed through the combination of perception tests and speech timing phenomena, estimated from classical timing-related measurements and custom metrics. We also develop a methodological framework for the phonetic analysis of nonscripted speech: a double perceptive evaluation of the original dataset (22 participants) allowed us to measure the difference of hostility perceived (dHost) between the original audio extracts and their read transcriptions, and the five speakers produced the same utterances in a controlled reading task to make the direct comparison with original extracts possible. Correlations between dHost and speech timing features differences between each original utterance and its control counterpart show that perceived hostility is mainly influenced by local deviations to the expected accentuation pattern in French combined with the insertion of silent pauses. Moreover, a finer-grained analysis of rhythmic features reveals different strategies amongst speakers, especially regarding the realization of interpausal speech rate variation and final syllables lengthening.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Laura Fernández Gallardo|AUTHOR Laura Fernández Gallardo]], [[Benjamin Weiss|AUTHOR Benjamin Weiss]]
</p><p class="cpabstractcardaffiliationlist">T-Labs, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 904–908
</span></p></div>
<div class="cpabstractcardabstract"><p>A great number of investigations on person characterization rely on the assessment of the Big-Five personality traits, a prevalent and widely accepted model with strong psychological foundation. However, in the context of characterizing unfamiliar individuals from their voices only, it may be hard for assessors to determine the Big-Five traits based on their first impression. In this study, a 28-item semantic differential rating scale has been completed by a total of 33 listeners who were presented with 15 male voice stimuli. A factor analysis on their responses enabled us to identify five perceptual factors of person attribution: (social and physical) attractiveness, confidence, apathy, serenity, and incompetence. A discussion on the relations of these dimensions of speaker attribution to the Big-Five factors is provided and speech features relevant to the automatic prediction of our dimensions are analyzed, together with SVM regression performance. Although more data are needed to validate our findings, we believe that our approach can lead to establish a space of person attributions with dimensions that can easily be detected from utterances in zero-acquaintance scenarios.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Carlos Ishi|AUTHOR Carlos Ishi]]^^1^^, [[Jun Arai|AUTHOR Jun Arai]]^^1^^, [[Norihiro Hagita|AUTHOR Norihiro Hagita]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^ATR HIL, Japan; ^^2^^ATR IRC, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 909–913
</span></p></div>
<div class="cpabstractcardabstract"><p>The term “attention drawing” refers to the action of sellers who call out to get the attention of people passing by in front of their stores or shops to invite them inside to buy or sample products. Since the speaking styles exhibited in such attention-drawing speech are clearly different from conversational speech, in this study, we focused on prosodic analyses of attention-drawing speech and collected the speech data of multiple people with previous attention-drawing experience by simulating several situations. We then investigated the effects of several factors, including background noise, interaction phases, and shop categories on the prosodic features of attention-drawing utterances. Analysis results indicate that compared to dialogue interaction utterances, attention-drawing utterances usually have higher power, higher mean F0s, smaller F0 ranges, and do not drop at the end of sentences, regardless of the presence or absence of background noise. Analysis of sentence-final syllable intonation indicates the presence of lengthened flat or rising tones in attention-drawing utterances.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Adrian P. Simpson|AUTHOR Adrian P. Simpson]]^^1^^, [[Riccarda Funk|AUTHOR Riccarda Funk]]^^2^^, [[Frederik Palmer|AUTHOR Frederik Palmer]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^FSU Jena, Germany; ^^2^^MLU Halle-Wittenberg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 914–918
</span></p></div>
<div class="cpabstractcardabstract"><p>This study investigates the perceptual and acoustic correlates of gender in the prepubertal voice. 23 German-speaking primary school pupils (13 female, 10 male) aged 8–9 years were recorded producing 10 sentences each. Two sentences from each speaker were presented in random order to a group of listeners who were asked to assign a gender to each stimulus. Single utterances from each of the three male and three female speakers whose gender was identified most reliably were played in a second experiment to two further groups of listeners who judged each stimulus against seven perceptual attribute pairs. Acoustic analysis of those parameters corresponding most directly to the perceptual attributes revealed a number of highly significant correlations, indicating some aspects of the voice and speech (f0, harmonics-to-noise ratio, tempo) that children use to construct and adults use to identify gender in the prepubertal voice.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Katrin Schweitzer|AUTHOR Katrin Schweitzer]], [[Michael Walsh|AUTHOR Michael Walsh]], [[Antje Schweitzer|AUTHOR Antje Schweitzer]]
</p><p class="cpabstractcardaffiliationlist">Universität Stuttgart, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 919–923
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper we look at convergence and divergence in intonation in the context of social qualities. Specifically we examine pitch accent realisations in the GECO corpus of German conversations. Pitch accents are represented as 6-dimensional vectors where each dimension corresponds to a characteristic of the accent’s shape. Convergence/divergence is then measured by calculating the distance between pitch accent realisations of conversational partners. A decrease of distance values over time indicates convergence, an increase divergence. The corpus comprises dialogue sessions in two modalities: partners either saw each other during the conversation or not. Linear mixed model analyses show convergence as well as divergence effects in the realisations of H*L accents. This convergence/divergence is strongly related to the modality and to how much speakers like their partners: generally, seeing the partner comes with divergence, whereas when the dialogue partners cannot see each other, there is convergence. The effect varies, however, depending on the extent to which a speaker likes their partner. Less liking entails a greater change in the realisations over time — stronger divergence when partners could see each other, and stronger convergence when they could not.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Melanie Weirich|AUTHOR Melanie Weirich]], [[Adrian P. Simpson|AUTHOR Adrian P. Simpson]]
</p><p class="cpabstractcardaffiliationlist">FSU Jena, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 924–928
</span></p></div>
<div class="cpabstractcardabstract"><p>Differences between male and female speakers have been explained in terms of biological inevitabilities but also in terms of behavioral and socially motivated factors. The aim of this study is to investigate the latter by examining gender-specific variability within the same gender.
The speech of 29 German men and women — all of them expecting their first child but varying in the time they plan to stay at home during their child’s first year (parental role) — is analyzed. Acoustic analyses comprise the vowel space size and the realization of the inter-sibilant contrast.
While the data is part of a larger longitudinal project investigating adult- and infant-directed speech during the infant’s first year of life, this study concentrates on the recordings made before the birth of the child. Inter-speaker variability is investigated in relation to 1) the chosen parental role and 2) self-ascribed ratings on positive feminine attributes (gender identity).
Results show that both factors (planned duration of parental leave and the femininity ratings) contribute to the variability found between, but also within the same gender. In particular, the vowel space size was found to be positively correlated with self-ascribed femininity ratings in male speakers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rubén Solera-Ureña|AUTHOR Rubén Solera-Ureña]]^^1^^, [[Helena Moniz|AUTHOR Helena Moniz]]^^1^^, [[Fernando Batista|AUTHOR Fernando Batista]]^^1^^, [[Vera Cabarrão|AUTHOR Vera Cabarrão]]^^1^^, [[Anna Pompili|AUTHOR Anna Pompili]]^^1^^, [[Ramon Fernandez Astudillo|AUTHOR Ramon Fernandez Astudillo]]^^1^^, [[Joana Campos|AUTHOR Joana Campos]]^^2^^, [[Ana Paiva|AUTHOR Ana Paiva]]^^2^^, [[Isabel Trancoso|AUTHOR Isabel Trancoso]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^INESC-ID Lisboa, Portugal; ^^2^^Universidade de Lisboa, Portugal</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 929–933
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic personality analysis has gained attention in the last years as a fundamental dimension in human-to-human and human-to-machine interaction. However, it still suffers from limited number and size of speech corpora for specific domains, such as the assessment of children’s personality. This paper investigates a semi-supervised training approach to tackle this scenario. We devise an experimental setup with age and language mismatch and two training sets: a small labeled training set from the Interspeech 2012 Personality Sub-challenge, containing French adult speech labeled with personality OCEAN traits, and a large unlabeled training set of Portuguese children’s speech. As test set, a corpus of Portuguese children’s speech labeled with OCEAN traits is used. Based on this setting, we investigate a weak supervision approach that iteratively refines an initial model trained with the labeled data-set using the unlabeled data-set. We also investigate knowledge-based features, which leverage expert knowledge in acoustic-prosodic cues and thus need no extra data. Results show that, despite the large mismatch imposed by language and age differences, it is possible to attain improvements with these techniques, pointing both to the benefits of using a weak supervision and expert-based acoustic-prosodic features across age and language.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rachael Tatman|AUTHOR Rachael Tatman]]^^1^^, [[Conner Kasten|AUTHOR Conner Kasten]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Washington, USA; ^^2^^Zonar Systems, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 934–938
</span></p></div>
<div class="cpabstractcardabstract"><p>This project compares the accuracy of two automatic speech recognition (ASR) systems — Bing Speech and YouTube’s automatic captions — across gender, race and four dialects of American English. The dialects included were chosen for their acoustic dissimilarity. Bing Speech had differences in word error rate (WER) between dialects and ethnicities, but they were not statistically reliable. YouTube’s automatic captions, however, did have statistically different WERs between dialects and races. The lowest average error rates were for General American and white talkers, respectively. Neither system had a reliably different WER between genders, which had been previously reported for YouTube’s automatic captions [1]. However, the higher error rate for non-white talkers is worrying, as it may reduce the utility of these systems for talkers of color.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Catherine Pelachaud|AUTHOR Catherine Pelachaud]]
</p><p class="cpabstractcardaffiliationlist">ISIR (UMR 7222), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2052
</span></p></div>
<div class="cpabstractcardabstract"><p>Our aim is to create virtual conversational partners. As such we have developed computational models to enrich virtual characters with socio-emotional capabilities that are communicated through multimodal behaviors. The approach we follow to build interactive and expressive interactants relies on theories from human and social sciences as well as data analysis and user-perception-based design. We have explored specific social signals such as smile and laughter, capturing their variation in production but also their different communicative functions and their impact in human-agent interaction. Lately we have been interested in modeling agents with social attitudes. Our aim is to model how social attitudes color the multimodal behaviors of the agents. We have gathered a corpus of dyads that was annotated along two layers: social attitudes and nonverbal behaviors. By applying sequence mining methods we have extracted behavior patterns involved in the change of perception of an attitude. We are particularly interested in capturing the behaviors that correspond to a change of perception of an attitude. In this talk I will present the GRETA/VIB platform where our research is implemented.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rosario Signorello|AUTHOR Rosario Signorello]]^^1^^, [[Sergio Hassid|AUTHOR Sergio Hassid]]^^2^^, [[Didier Demolin|AUTHOR Didier Demolin]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LPP (UMR 7018), France; ^^2^^Hôpital Erasme, Belgium</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2267–2271
</span></p></div>
<div class="cpabstractcardabstract"><p>The present research investigates the aerodynamic features of French fricative consonants using direct measurement of subglottal air pressure by tracheal puncture (Ps) synchronized with intraoral air pressure (Po), oral airflow (Oaf) and acoustic measurements. Data were collected from four Belgian French speakers’ productions of CVCV pseudowords including voiceless and voiced fricatives [f, v, s, z, ʃ, ʒ]. The goals of this study are: (i) to predict the starting, central, and releasing points of frication based on the measurements of Ps, Po, and Oaf; (ii) to compare voiceless and voiced fricatives and their places of articulation; and (iii) to provide reference values for the aerodynamic features of fricatives for further linguistic, clinical, physical and computational modeling research.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Antoine Serrurier|AUTHOR Antoine Serrurier]]^^1^^, [[Pierre Badin|AUTHOR Pierre Badin]]^^2^^, [[Louis-Jean Boë|AUTHOR Louis-Jean Boë]]^^2^^, [[Laurent Lamalle|AUTHOR Laurent Lamalle]]^^3^^, [[Christiane Neuschaefer-Rube|AUTHOR Christiane Neuschaefer-Rube]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Uniklinik RWTH Aachen, Germany; ^^2^^GIPSA, France; ^^3^^IRMaGe, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2272–2276
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech production can be analysed in terms of universal articulatory-acoustic phonemic units shared between speakers. However, morphological differences between speakers and idiosyncratic articulatory strategies lead to large inter-speaker articulatory variability. Relationships between strategy and morphology have already been pinpointed in the literature. This study aims thus at generalising existing results on a larger database for the entire vocal tract (VT) and at quantifying phoneme-specific inter-speaker articulatory invariants. Midsagittal MRI of 11 French speakers for 62 vowels and consonants were recorded and VT contours manually edited. A procedure of normalisation of VT contours between speakers, based on the use of mean VT contours, led to an overall reduction of inter-speaker VT contours variance of 88%. In contrast, the sagittal function (i.e. the transverse sagittal distance along the VT midline), which is the main determinant of the acoustic output, had an overall amplitude variance decrease of only 37%, suggesting that the speakers adapt their strategy to their morphology to achieve proper acoustic goals. Moreover, articulatory invariants were identified on the sagittal variance distribution along the VT as the regions with lower variability. These regions correspond to the classical places of articulation and are associated with higher acoustic sensitivity function levels.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nimisha Patil|AUTHOR Nimisha Patil]], [[Timothy Greer|AUTHOR Timothy Greer]], [[Reed Blaylock|AUTHOR Reed Blaylock]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]
</p><p class="cpabstractcardaffiliationlist">University of Southern California, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2277–2281
</span></p></div>
<div class="cpabstractcardabstract"><p>Real-time Magnetic Resonance Imaging (rtMRI) was used to examine mechanisms of sound production in five beatboxers. rtMRI was found to be an effective tool with which to study the articulatory dynamics of this form of human vocal production; it provides a dynamic view of the entire midsagittal vocal tract and at a frame rate (83 fps) sufficient to observe the movement and coordination of critical articulators. The artists’ repertoires included percussion elements generated using a wide range of articulatory and airstream mechanisms. Analysis of three common beatboxing sounds resulted in the finding that advanced beatboxers produce stronger ejectives and have greater control over different airstreams than novice beatboxers, to enhance the quality of their sounds. No difference in production mechanisms between males and females was observed. These data offer insights into the ways in which articulators can be trained and used to achieve specific acoustic goals.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Keyi Tang|AUTHOR Keyi Tang]]^^1^^, [[Negar M. Harandi|AUTHOR Negar M. Harandi]]^^1^^, [[Jonghye Woo|AUTHOR Jonghye Woo]]^^2^^, [[Georges El Fakhri|AUTHOR Georges El Fakhri]]^^2^^, [[Maureen Stone|AUTHOR Maureen Stone]]^^3^^, [[Sidney Fels|AUTHOR Sidney Fels]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of British Columbia, Canada; ^^2^^Massachusetts General Hospital, USA; ^^3^^University of Maryland, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2282–2286
</span></p></div>
<div class="cpabstractcardabstract"><p>We create two 3D biomechanical speaker models matched to medical image data of two healthy English speakers. We use a new, hybrid registration technique that morphs a generic 3D, biomechanical model to medical images. The generic model of the head and neck includes jaw, tongue, soft-palate, epiglottis, lips and face, and is capable of simulating upper-airway biomechanics. We use cine and tagged magnetic resonance (MR) images captured while our volunteers repeated a simple utterance (/ə-gis/) synchronized to a metronome. We simulate our models based on internal tongue tissue trajectories that we extract from tagged MR images, and use in an inverse solver. For areas without tracked data points, the registered generic model moves based on the computed muscle activations. Our modeling efforts include a wide range of speech organs illustrating the coupling complexity between the oral anatomy during simple speech utterances.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Reed Blaylock|AUTHOR Reed Blaylock]], [[Nimisha Patil|AUTHOR Nimisha Patil]], [[Timothy Greer|AUTHOR Timothy Greer]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]
</p><p class="cpabstractcardaffiliationlist">University of Southern California, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2287–2291
</span></p></div>
<div class="cpabstractcardabstract"><p>Previous research suggests that beatboxers only use sounds that exist in the world’s languages. This paper provides evidence to the contrary, showing that beatboxers use non-linguistic articulations and airstream mechanisms to produce many sound effects that have not been attested in any language. An analysis of real-time magnetic resonance videos of beatboxing reveals that beatboxers produce non-linguistic articulations such as ingressive retroflex trills and ingressive lateral bilabial trills. In addition, beatboxers can use both lingual egressive and pulmonic ingressive airstreams, neither of which have been reported in any language.
The results of this study affect our understanding of the limits of the human vocal tract, and address questions about the mental units that encode music and phonological grammar.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yasufumi Uezu|AUTHOR Yasufumi Uezu]], [[Tokihiko Kaburagi|AUTHOR Tokihiko Kaburagi]]
</p><p class="cpabstractcardaffiliationlist">Kyushu University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2292–2296
</span></p></div>
<div class="cpabstractcardabstract"><p>In the source-filter theory, the complete closure of the glottis is assumed as a glottal boundary condition. However, such assumption of glottal closure in the source-filter theory is not strictly satisfied in actual utterance. Therefore, it is considered that acoustic features of the glottis and the subglottal region may affect vocal tract formants. In this study, we investigated how differences in the glottal boundary conditions affect vocal tract formants by speech synthesis simulation using a speech production model. We synthesized five Japanese vowels using the speech production model in consideration of the source-filter interaction. This model consisted of the glottal area polynomial model and the acoustic tube model in the concatenation of the vocal tract, glottis, and the subglottis. From the results, it was found that the first formant frequency was affected more strongly by the boundary conditions, and also found that the open quotient may have a stronger effect on the formant than the maximum glottal width. In addition, formant frequencies were also affected more strongly by subglottal impedance when the maximum glottal area was wider.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jinyu Li|AUTHOR Jinyu Li]], [[Michael L. Seltzer|AUTHOR Michael L. Seltzer]], [[Xi Wang|AUTHOR Xi Wang]], [[Rui Zhao|AUTHOR Rui Zhao]], [[Yifan Gong|AUTHOR Yifan Gong]]
</p><p class="cpabstractcardaffiliationlist">Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2386–2390
</span></p></div>
<div class="cpabstractcardabstract"><p>High accuracy speech recognition requires a large amount of transcribed data for supervised training. In the absence of such data, domain adaptation of a well-trained acoustic model can be performed, but even here, high accuracy usually requires significant labeled data from the target domain. In this work, we propose an approach to domain adaptation that does not require transcriptions but instead uses a corpus of unlabeled parallel data, consisting of pairs of samples from the source domain of the well-trained model and the desired target domain. To perform adaptation, we employ teacher/student (T/S) learning, in which the posterior probabilities generated by the source-domain model can be used in lieu of labels to train the target-domain model. We evaluate the proposed approach in two scenarios, adapting a clean acoustic model to noisy speech and adapting an adults’ speech acoustic model to children’s speech. Significant improvements in accuracy are obtained, with reductions in word error rate of up to 44% over the original source model without the need for transcribed data in the target domain. Moreover, we show that increasing the amount of unlabeled data results in additional model robustness, which is particularly beneficial when using simulated training data in the target-domain.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[W. Ahmad|AUTHOR W. Ahmad]]^^1^^, [[S. Shahnawazuddin|AUTHOR S. Shahnawazuddin]]^^2^^, [[H.K. Kathania|AUTHOR H.K. Kathania]]^^1^^, [[Gayadhar Pradhan|AUTHOR Gayadhar Pradhan]]^^2^^, [[A.B. Samaddar|AUTHOR A.B. Samaddar]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NIT Sikkim, India; ^^2^^NIT Patna, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2391–2395
</span></p></div>
<div class="cpabstractcardabstract"><p>The task of transcribing children’s speech using statistical models trained on adults’ speech is very challenging. Large mismatch in the acoustic and linguistic attributes of the training and test data is reported to degrade the performance. In such speech recognition tasks, the difference in pitch (or fundamental frequency) between the two groups of speakers is one among several mismatch factors. To overcome the pitch mismatch, an existing pitch scaling technique based on iterative spectrogram inversion is explored in this work. Explicit pitch scaling is found to improve the recognition of children’s speech under mismatched setup. In addition to that, we have also studied the effect of discarding the phase information during spectrum reconstruction. This is motivated by the fact that the dominant acoustic feature extraction techniques make use of the magnitude spectrum only. On evaluating the effectiveness under mismatched testing scenario, the existing as well as the modified pitch scaling techniques result in very similar recognition performances. Furthermore, we have explored the role of pitch scaling on another speech recognition system which is trained on speech data from both adult and child speakers. Pitch scaling is noted to be effective for children’s speech recognition in this case as well.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xurong Xie|AUTHOR Xurong Xie]]^^1^^, [[Xunying Liu|AUTHOR Xunying Liu]]^^1^^, [[Tan Lee|AUTHOR Tan Lee]]^^2^^, [[Lan Wang|AUTHOR Lan Wang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Chinese Academy of Sciences, China; ^^2^^Chinese University of Hong Kong, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2396–2400
</span></p></div>
<div class="cpabstractcardabstract"><p>Model based deep neural network (DNN) adaptation approaches often require multi-pass decoding in test time. Input feature based DNN adaptation, for example, based on latent Dirichlet allocation (LDA) clustering, provides a more efficient alternative. In conventional LDA clustering, the transition and correlation between neighboring clusters are ignored. In order to address this issue, a recurrent neural network (RNN) based clustering scheme is proposed to learn both the standard LDA cluster labels and their natural correlation over time in this paper. In addition to directly using the resulting RNN-LDA as input features during DNN adaptation, a range of techniques were investigated to condition the DNN hidden layer parameters or activation outputs on the RNN-LDA features. On a DARPA Gale Mandarin Chinese broadcast speech transcription task, the proposed RNN-LDA cluster features adapted DNN system outperformed both the baseline un-adapted DNN system and conventional LDA features adapted DNN system by 8% relative on the most difficult Phoenix TV subset. Consistent improvements were also obtained after further combination with model based adaptation approaches.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Harish Arsikere|AUTHOR Harish Arsikere]], [[Sri Garimella|AUTHOR Sri Garimella]]
</p><p class="cpabstractcardaffiliationlist">Amazon.com, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2401–2405
</span></p></div>
<div class="cpabstractcardabstract"><p>Supplementing log filter-bank energies with i-vectors is a popular method for adaptive training of deep neural network acoustic models. While offline i-vectors (the target utterance or other relevant adaptation material is available for i-vector extraction prior to decoding) have been well studied, there is little analysis of online i-vectors and their robustness in multi-user scenarios where speaker changes can be frequent and unpredictable. The authors of [1] showed that online adaptation could be achieved through segmental i-vectors computed using the hidden Markov model (HMM) state alignments of utterances decoded in the recent past. While this approach works well in general, it could be rendered ineffective by speaker changes. In this paper, we study robust extensions of the ideas proposed in [1] by: (a) updating i-vectors on a per-frame basis based on the incoming target utterance, and (b) using lattice posteriors instead of one-best HMM state alignments. Experiments with different i-vector implementations show that: (a) when speaker changes occur, lattice-based frame-level i-vectors provide up to 6% word error rate reduction relative to the baseline [1], and (b) online i-vectors are more effective, in general, when the microphone characteristics of test utterances are not seen in training.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ajay Srinivasamurthy|AUTHOR Ajay Srinivasamurthy]]^^1^^, [[Petr Motlicek|AUTHOR Petr Motlicek]]^^1^^, [[Ivan Himawan|AUTHOR Ivan Himawan]]^^1^^, [[György Szaszák|AUTHOR György Szaszák]]^^2^^, [[Youssef Oualil|AUTHOR Youssef Oualil]]^^3^^, [[Hartmut Helmke|AUTHOR Hartmut Helmke]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Idiap Research Institute, Switzerland; ^^2^^Universität des Saarlandes, Germany; ^^3^^Universität des Saarlandes, Germany; ^^4^^DLR, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2406–2410
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic Speech Recognition (ASR) can introduce higher levels of automation into Air Traffic Control (ATC), where spoken language is still the predominant form of communication. While ATC uses standard phraseology and a limited vocabulary, we need to adapt the speech recognition systems to local acoustic conditions and vocabularies at each airport to reach optimal performance. Due to continuous operation of ATC systems, a large and increasing amount of untranscribed speech data is available, allowing for semi-supervised learning methods to build and adapt ASR models. In this paper, we first identify the challenges in building ASR systems for specific ATC areas and propose to utilize out-of-domain data to build baseline ASR models. Then we explore different methods of data selection for adapting baseline models by exploiting the continuously increasing untranscribed data. We develop a basic approach capable of exploiting semantic representations of ATC commands. We achieve relative improvement in both word error rate (23.5%) and concept error rates (7%) when adapting ASR models to different ATC conditions in a semi-supervised manner.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Taesup Kim|AUTHOR Taesup Kim]]^^1^^, [[Inchul Song|AUTHOR Inchul Song]]^^2^^, [[Yoshua Bengio|AUTHOR Yoshua Bengio]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Université de Montréal, Canada; ^^2^^SAIT, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2411–2415
</span></p></div>
<div class="cpabstractcardabstract"><p>Layer normalization is a recently introduced technique for normalizing the activities of neurons in deep neural networks to improve the training speed and stability. In this paper, we introduce a new layer normalization technique called Dynamic Layer Normalization (DLN) for adaptive neural acoustic modeling in speech recognition. By dynamically generating the scaling and shifting parameters in layer normalization, DLN adapts neural acoustic models to the acoustic variability arising from various factors such as speakers, channel noises, and environments. Unlike other adaptive acoustic models, our proposed approach does not require additional adaptation data or speaker information such as i-vectors. Moreover, the model size is fixed as it dynamically generates adaptation parameters. We apply our proposed DLN to deep bidirectional LSTM acoustic models and evaluate them on two benchmark datasets for large vocabulary ASR experiments: WSJ and TED-LIUM release 2. The experimental results show that our DLN improves neural acoustic models in terms of transcription accuracy by dynamically adapting to various speakers and environments.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[P. Gangamohan|AUTHOR P. Gangamohan]], [[B. Yegnanarayana|AUTHOR B. Yegnanarayana]]
</p><p class="cpabstractcardaffiliationlist">IIIT Hyderabad, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2297–2300
</span></p></div>
<div class="cpabstractcardabstract"><p>During production of voiced speech, there exist impulse-like excitations due to abrupt closure of vocal folds. These impulse-like excitations are often referred to as epochs or glottal closure instants (GCIs). The zero frequency filtering (ZFF) method exploits the properties of impulse-like excitation by passing a speech signal through the resonator whose pole pair is located at 0 Hz. As the resonator is unstable, the polynomial growth/decay is observed in the filtered signal, thus requiring a trend removal operation. It is observed that the length of the window for trend removal operation is critical in speech signals where there are more fluctuations in the fundamental frequency (F,,0,,). In this paper, a simple finite impulse response (FIR) implementation is proposed. The FIR filter is designed by placing a large number of zeros at f,,s,,/2 Hz (f,,s,, represents the sampling frequency), closer to the unit circle, in the z-plane. Experimental results show that the proposed method is robust and computationally less complex when compared to the ZFF method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kanru Hua|AUTHOR Kanru Hua]]
</p><p class="cpabstractcardaffiliationlist">University of Illinois at Urbana-Champaign, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2301–2305
</span></p></div>
<div class="cpabstractcardabstract"><p>We present improvements to the refinement stage of YANGsaf[1] (Yet ANother Glottal source analysis framework), a recently published F0 estimation algorithm by Kawahara et al., for noisy/breathy speech signals. The baseline system, based on time-warping and weighted average of multi-band instantaneous frequency estimates, is still sensitive to additive noise when none of the harmonic provide reliable frequency estimate at low SNR. We alleviate this problem by calibrating the weighted averaging process based on statistics gathered from a Monte-Carlo simulation, and applying Kalman filtering to refined F0 trajectory with time-varying measurement and process distributions. The improved algorithm, adYANGsaf (adaptive Yet ANother Glottal source analysis framework), achieves significantly higher accuracy and smoother F0 trajectory on noisy speech while retaining its accuracy on clean speech, with little computational overhead introduced.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jitendra Kumar Dhiman|AUTHOR Jitendra Kumar Dhiman]], [[Nagaraj Adiga|AUTHOR Nagaraj Adiga]], [[Chandra Sekhar Seelamantula|AUTHOR Chandra Sekhar Seelamantula]]
</p><p class="cpabstractcardaffiliationlist">Indian Institute of Science, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2306–2310
</span></p></div>
<div class="cpabstractcardabstract"><p>We consider a two-dimensional demodulation framework for spectro-temporal analysis of the speech signal. We construct narrowband (NB) speech spectrograms, and demodulate them using the Riesz transform, which is a two-dimensional extension of the Hilbert transform. The demodulation results in time-frequency envelope (amplitude modulation or AM) and time-frequency carrier (frequency modulation or FM). The AM corresponds to the vocal tract and is referred to as the vocal tract spectrogram. The FM corresponds to the underlying excitation and is referred to as the carrier spectrogram. The carrier spectrogram exhibits a high degree of time-frequency consistency for voiced sounds. For unvoiced sounds, such a structure is lacking. In addition, the carrier spectrogram reflects the fundamental frequency (F0) variation of the speech signal. We develop a technique to determine the F0 from the carrier spectrogram. The time-frequency consistency is used to determine which time-frequency regions correspond to voiced segments. Comparisons with the state-of-the-art F0 estimation algorithms show that the proposed F0 estimator has high accuracy for telephone channel speech and is robust to noise.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kenichiro Miwa|AUTHOR Kenichiro Miwa]], [[Masashi Unoki|AUTHOR Masashi Unoki]]
</p><p class="cpabstractcardaffiliationlist">JAIST, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2311–2315
</span></p></div>
<div class="cpabstractcardabstract"><p>Estimating the fundamental frequency (F,,0,,) of a target sound in noisy reverberant environments is a challenging issue in not only sound analysis/synthesis but also sound enhancement. This paper proposes a method for robustly and accurately estimating the F,,0,, of a time-variant complex tone on the basis of an amplitude modulation/demodulation technique. It is based on the mechanism of the pitch perception of amplitude modulated signal and the framework of power envelope restoration based on the concept of modulation transfer function. Computer simulations were carried out to discuss the feasibility of the accuracy and robustness of the proposed method for estimating the F,,0,, in heavy noisy reverberant environments. The comparative results revealed that the percentage correct rates of the estimated F,,0,,s using five recent methods (TEMPO2, YIN, PHIA, CmpCep, and SWIPE’) were drastically reduced as the SNR decreased and the reverberation time increased. The results also demonstrated that the proposed method robustly and accurately estimated the F,,0,, in both heavy noisy and reverberant environments.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Simon Graf|AUTHOR Simon Graf]]^^1^^, [[Tobias Herbig|AUTHOR Tobias Herbig]]^^1^^, [[Markus Buck|AUTHOR Markus Buck]]^^1^^, [[Gerhard Schmidt|AUTHOR Gerhard Schmidt]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Nuance Communications, Germany; ^^2^^Christian-Albrechts-Universität zu Kiel, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2316–2320
</span></p></div>
<div class="cpabstractcardabstract"><p>Detection of voiced speech and estimation of the pitch frequency are important tasks for many speech processing algorithms. Pitch information can be used, e.g., to reconstruct voiced speech corrupted by noise.
In automotive environments, driving noise especially affects voiced speech portions in the lower frequencies. Pitch estimation is therefore important, e.g., for in-car-communication systems. Such systems amplify the driver’s voice and allow for convenient conversations with backseat passengers. Low latency is required for this application, which requires the use of short window lengths and short frame shifts between consecutive frames. Conventional pitch estimation techniques, however, rely on long windows that exceed the pitch period of human speech. In particular, male speakers’ low pitch frequencies are difficult to resolve.
In this publication, we introduce a technique that approaches pitch estimation from a different perspective. The pitch information is extracted based on phase differences between multiple low-resolution spectra instead of a single long window. The technique benefits from the high temporal resolution provided by the short frame shift and is capable of dealing with the low spectral resolution caused by short window lengths. Using the new approach, even very low pitch frequencies can be estimated very efficiently.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Masanori Morise|AUTHOR Masanori Morise]]
</p><p class="cpabstractcardaffiliationlist">University of Yamanashi, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2321–2325
</span></p></div>
<div class="cpabstractcardabstract"><p>A fundamental frequency (F0) estimator named Harvest is described. The unique points of Harvest are that it can obtain a reliable F0 contour and reduce the error that the voiced section is wrongly identified as the unvoiced section. It consists of two steps: estimation of F0 candidates and generation of a reliable F0 contour on the basis of these candidates. In the first step, the algorithm uses fundamental component extraction by many band-pass filters with different center frequencies and obtains the basic F0 candidates from filtered signals. After that, basic F0 candidates are refined and scored by using the instantaneous frequency, and then several F0 candidates in each frame are estimated. Since the frame-by-frame processing based on the fundamental component extraction is not robust against temporally local noise, a connection algorithm using neighboring F0s is used in the second step. The connection takes advantage of the fact that the F0 contour does not precipitously change in a short interval. We carried out an evaluation using two speech databases with electroglottograph (EGG) signals to compare Harvest with several state-of-the-art algorithms. Results showed that Harvest achieved the best performance of all algorithms.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sabrina Stehwien|AUTHOR Sabrina Stehwien]], [[Ngoc Thang Vu|AUTHOR Ngoc Thang Vu]]
</p><p class="cpabstractcardaffiliationlist">Universität Stuttgart, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2326–2330
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper demonstrates the potential of convolutional neural networks (CNN) for detecting and classifying prosodic events on words, specifically pitch accents and phrase boundary tones, from frame-based acoustic features. Typical approaches use not only feature representations of the word in question but also its surrounding context. We show that adding position features indicating the current word benefits the CNN. In addition, this paper discusses the generalization from a speaker-dependent modelling approach to a speaker-independent setup. The proposed method is simple and efficient and yields strong results not only in speaker-dependent but also speaker-independent cases.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ramiro H. Gálvez|AUTHOR Ramiro H. Gálvez]]^^1^^, [[Štefan Beňuš|AUTHOR Štefan Beňuš]]^^2^^, [[Agustín Gravano|AUTHOR Agustín Gravano]]^^1^^, [[Marian Trnka|AUTHOR Marian Trnka]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universidad de Buenos Aires, Argentina; ^^2^^UKF, Slovak Republic; ^^3^^Slovak Academy of Sciences, Slovak Republic</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2331–2335
</span></p></div>
<div class="cpabstractcardabstract"><p>Two primary sources of information are provided in human speech. On the one hand, the verbal channel encodes linguistic content, while on the other hand, the vocal channel transmits paralinguistic information, mainly through prosody. In line with several studies that induce a conflict between these two channels to better understand the role of prosody, we conducted an experiment in which subjects had to listen to a series of statements synthesized with varying prosody and indicate if they believed them to be true or false. We find evidence suggesting that acoustic/prosodic (a/p) features of the synthesized statements affect response times (a well-known proxy for cognitive load). Our results suggest that prosody in synthesized speech may play a role of either facilitation or interference when subjects judge the truthfulness of a statement. Furthermore, we find that this pattern is amplified when the a/p features of the synthesized statements are analyzed relative to the subjects’ own a/p features. This suggests that the entrainment of TTS voices has serious implications in the perceived trustworthiness of the system’s skills.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Margaret Zellers|AUTHOR Margaret Zellers]], [[Antje Schweitzer|AUTHOR Antje Schweitzer]]
</p><p class="cpabstractcardaffiliationlist">Universität Stuttgart, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2336–2340
</span></p></div>
<div class="cpabstractcardabstract"><p>Speakers in conversations may adapt their turn pitch relative to that of preceding turns to signal alignment with their interlocutor. However, the reference frame for pitch matching across turns is still unclear. Researchers studying pitch in the context of conversation have argued for an initializing approach, in which turn pitch must be judged relative to pitch in preceding turns. However, perceptual studies have indicated that listeners are able to reliably identify the location of pitch values within an individual speaker’s range; that is, even without conversational context, they are able to normalize to speakers. This would imply that speakers might match normalized pitch instead of absolute pitch. Using a combined quantitative-qualitative approach, we investigate the relationship between pitch in adjacent turns in spontaneous German conversation. We use two different methods of evaluating pitch in adjacent turns, reflecting normalizing and initializing approaches respectively. We find that the results are well correlated with conversational participants’ evaluation of the conversation. Furthermore, evaluating locations with matched or mismatched pitch can help distinguish between blind and face-to-face conversational situations, as well as identifying locations where specific discourse strategies (such as tag questions) have been deployed.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sankar Mukherjee|AUTHOR Sankar Mukherjee]]^^1^^, [[Alessandro D’Ausilio|AUTHOR Alessandro D’Ausilio]]^^1^^, [[Noël Nguyen|AUTHOR Noël Nguyen]]^^2^^, [[Luciano Fadiga|AUTHOR Luciano Fadiga]]^^1^^, [[Leonardo Badino|AUTHOR Leonardo Badino]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Istituto Italiano di Tecnologia, Italy; ^^2^^LPL (UMR 7309), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2341–2345
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech accommodation happens when two people engage in verbal conversation. In this paper two types of accommodation are investigated — one dependent on cognitive, physiological, functional and social constraints (Convergence), the other dependent on linguistic and paralinguistic factors (Synchrony). Convergence refers to the situation when two speakers’ speech characteristics move towards a common point. Synchrony happens if speakers’ prosodic features become correlated over time. Here we analyze relations between the two phenomena at the single word level. Although calculation of Synchrony is fairly straightforward, measuring Convergence is even more problematic as proved by a long history of debates on how to define it. In this paper we consider Convergence as an emergent behavior and investigate it by developing a robust and automatic method based on Gaussian Mixture Model (GMM). Our results show that high Synchrony of F0 between two speakers leads to greater amount of Convergence. This provides robust support for the idea that Synchrony and Convergence are interrelated processes, particularly in female participants.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jordi Luque|AUTHOR Jordi Luque]], [[Carlos Segura|AUTHOR Carlos Segura]], [[Ariadna Sánchez|AUTHOR Ariadna Sánchez]], [[Martí Umbert|AUTHOR Martí Umbert]], [[Luis Angel Galindo|AUTHOR Luis Angel Galindo]]
</p><p class="cpabstractcardaffiliationlist">Telefónica I+D, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2346–2350
</span></p></div>
<div class="cpabstractcardabstract"><p>Call Centre data is typically collected by organizations and corporations in order to ensure the quality of service, supporting for example mining capabilities for monitoring customer satisfaction. In this work, we analyze the significance of various acoustic features extracted from customer-agents’ spoken interaction in predicting self-reported satisfaction by the customer. We also investigate whether speech prosodic features can deliver complementary information to speech transcriptions provided by an ASR. We explore the possibility of using a deep neural architecture to perform early feature fusion on both prosodic and linguistic information. Convolutional Neural Networks are trained on a combination of word embedding and acoustic features for the binary classification task of “low” and “high” satisfaction prediction. We conducted our experiments analysing real call-centre interactions of a large corporation in a Spanish-speaking country. Our experiments show that linguistic features can predict self-reported satisfaction more accurately than those based on prosodic and conversational descriptors. We also find that dialog turn-level conversational features generally outperform frame-level signal descriptors. Finally, the fusion of linguistic and prosodic features reports the best performance in our experiments, suggesting the complementarity of the information conveyed by each set of behavioral representation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Pablo Brusco|AUTHOR Pablo Brusco]], [[Juan Manuel Pérez|AUTHOR Juan Manuel Pérez]], [[Agustín Gravano|AUTHOR Agustín Gravano]]
</p><p class="cpabstractcardaffiliationlist">Universidad de Buenos Aires, Argentina</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2351–2355
</span></p></div>
<div class="cpabstractcardabstract"><p>We present the results of a series of machine learning experiments aimed at exploring the differences and similarities in the production of turn-taking cues in American English and Argentine Spanish. An analysis of prosodic features automatically extracted from 21 dyadic conversations (12 En, 9 Sp) revealed that, when signaling Holds, speakers of both languages tend to use roughly the same combination of cues, characterized by a sustained final intonation, a shorter duration of turn-final inter-pausal units, and a distinct voice quality. However, in speech preceding Smooth Switches or Backchannels, we observe the existence of the same set of prosodic turn-taking cues in both languages, although the ways in which these cues are combined together to form complex signals differ. Still, we find that these differences do not degrade below chance the performance of cross-linguistic systems for automatically detecting turn-taking signals. These results are relevant to the construction of multilingual spoken dialogue systems, which need to adapt not only their ASR modules but also the way prosodic turn-taking cues are synthesized and recognized.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Olga Egorow|AUTHOR Olga Egorow]], [[Andreas Wendemuth|AUTHOR Andreas Wendemuth]]
</p><p class="cpabstractcardaffiliationlist">Otto-von-Guericke-Universität Magdeburg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2356–2360
</span></p></div>
<div class="cpabstractcardabstract"><p>One interesting phenomenon of natural conversation is overlapping speech. Besides causing difficulties in automatic speech processing, such overlaps carry information on the state of the overlapper: competitive overlaps (i.e. “interruptions”) can signal disagreement or the feeling of being overlooked, and cooperative overlaps (i.e. supportive interjections) can signal agreement and interest. These hints can be used to improve human-machine interaction. In this paper we present an approach for automatic classification of competitive and cooperative overlaps using the emotional content of the speakers’ utterances before and after the overlap. For these experiments, we use real-world data from human-human interactions in call centres. We also compare our approach to standard acoustic classification on the same data and come to the conclusion that emotional features are clearly superior to acoustic features for this task, resulting in an unweighted average f-measure of 71.9%. But we also find that acoustic features should not be entirely neglected: using a late fusion procedure, we can further improve the unweighted average f-measure by 2.6%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chin-Po Chen|AUTHOR Chin-Po Chen]]^^1^^, [[Xian-Hong Tseng|AUTHOR Xian-Hong Tseng]]^^1^^, [[Susan Shur-Fen Gau|AUTHOR Susan Shur-Fen Gau]]^^2^^, [[Chi-Chun Lee|AUTHOR Chi-Chun Lee]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^National Tsing Hua University, Taiwan; ^^2^^National Taiwan University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2361–2365
</span></p></div>
<div class="cpabstractcardabstract"><p>Autism spectrum disorder (ASD) is a highly-prevalent neural developmental disorder often characterized by social communicative deficits and restricted repetitive interest. The heterogeneous nature of ASD in its behavior manifestations encompasses broad syndromes such as, Classical Autism (AD), High-functioning Autism (HFA), and Asperger syndrome (AS). In this work, we compute a variety of multimodal behavior features, including body movements, acoustic characteristics, and turn-taking events dynamics, of the participant, the investigator and the interaction between the two directly from audio-video recordings by leveraging the Autism Diagnostic Observational Schedule (ADOS) as a clinically-valid behavior data elicitation technique. Several of these signal-derived behavioral measures show statistically significant differences among the three syndromes. Our analyses indicate that these features may be pointing to the underlying differences in the behavior characterizations of social functioning between AD, AS, and HFA — corroborating some of the previous literature. Further, our signal-derived behavior measures achieve competitive, sometimes exceeding, recognition accuracies in discriminating between the three syndromes of ASD when compared to investigator’s clinical-rating on participant’s social and communicative behaviors during ADOS.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yun-Shao Lin|AUTHOR Yun-Shao Lin]], [[Chi-Chun Lee|AUTHOR Chi-Chun Lee]]
</p><p class="cpabstractcardaffiliationlist">National Tsing Hua University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2366–2370
</span></p></div>
<div class="cpabstractcardabstract"><p>The overall interaction atmosphere is often a result of complex interplay between individual interlocutor’s behavior expressions and joint manifestation of dyadic interaction dynamics. There is very limited work, if any, that has computationally analyzed a human interaction at the dyad-level. Hence, in this work, we propose to compute an extensive novel set of features representing multi-faceted aspects of a dyadic interaction. These features are grouped into two broad categories: expressive and structural behavior dynamics, where each captures information about within-speaker behavior manifestation, inter-speaker behavior dynamics, durational and transitional statistics providing holistic behavior quantifications at the dyad-level. We carry out an experiment of recognizing targeted affective atmosphere using the proposed expressive and structural behavior dynamics features derived from audio and video modalities. Our experiment shows that the inclusion of both expressive and structural behavior dynamics is essential in achieving promising recognition accuracies across six different classes (72.5%), where structural-based features improve the recognition rates on classes of sad and surprise. Further analyses reveal important aspects of multimodal behavior dynamics within dyadic interactions that are related to the affective atmospheric scene.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Raymond Brueckner|AUTHOR Raymond Brueckner]]^^1^^, [[Maximilian Schmitt|AUTHOR Maximilian Schmitt]]^^2^^, [[Maja Pantic|AUTHOR Maja Pantic]]^^3^^, [[Björn Schuller|AUTHOR Björn Schuller]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Technische Universität München, Germany; ^^2^^Universität Passau, Germany; ^^3^^Imperial College London, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2371–2375
</span></p></div>
<div class="cpabstractcardabstract"><p>The automatic detection and classification of social signals is an important task, given the fundamental role nonverbal behavioral cues play in human communication. We present the first cross-lingual study on the detection of laughter and fillers in conversational and spontaneous speech collected ‘in the wild’ over IP (internet protocol). Further, this is the first comparison of LSTM and GRU networks to shed light on their performance differences. We report frame-based results in terms of the unweighted-average area-under-the-curve (UAAUC) measure and will shortly discuss its suitability for this task. In the mono-lingual setup our best deep BLSTM system achieves 87.0% and 86.3% UAAUC for English and German, respectively. Interestingly, the cross-lingual results are only slightly lower, yielding 83.7% for a system trained on English, but tested on German, and 85.0% in the opposite case. We show that LSTM and GRU architectures are valid alternatives for e. g., on-line and compute-sensitive applications, since their application incurs a relative UAAUC decrease of only approximately 5% with respect to our best systems. Finally, we apply additional smoothing to correct for erroneous spikes and drops in the posterior trajectories to obtain an additional gain in all setups.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gábor Gosztolya|AUTHOR Gábor Gosztolya]]
</p><p class="cpabstractcardaffiliationlist">MTA-SZTE RGAI, Hungary</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2376–2380
</span></p></div>
<div class="cpabstractcardabstract"><p>Social signal detection, that is, the task of identifying vocalizations like laughter and filler events is a popular task within computational paralinguistics. Recent studies have shown that besides applying state-of-the-art machine learning methods, it is worth making use of the contextual information and adjusting the frame-level scores based on the local neighbourhood. In this study we apply a weighted average time series smoothing filter for laughter and filler event identification, and set the weights using a state-of-the-art optimization method, namely the Covariance Matrix Adaptation Evolution Strategy (CMA-ES). Our results indicate that this is a viable way of improving the Area Under the Curve (AUC) scores: our resulting scores are much better than the accuracy scores of the raw likelihoods produced by Deep Neural Networks trained on three different feature sets, and we also significantly outperform standard time series filters as well as DNNs used for smoothing. Our score achieved on the test set of a public English database containing spontaneous mobile phone conversations is the highest one published so far that was realized by feed-forward techniques.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Fasih Haider|AUTHOR Fasih Haider]]^^1^^, [[Fahim A. Salim|AUTHOR Fahim A. Salim]]^^1^^, [[Saturnino Luz|AUTHOR Saturnino Luz]]^^2^^, [[Carl Vogel|AUTHOR Carl Vogel]]^^1^^, [[Owen Conlan|AUTHOR Owen Conlan]]^^1^^, [[Nick Campbell|AUTHOR Nick Campbell]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Trinity College Dublin, Ireland; ^^2^^University of Edinburgh, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2381–2385
</span></p></div>
<div class="cpabstractcardabstract"><p>There is an enormous amount of audio-visual content available on-line in the form of talks and presentations. The prospective users of the content face difficulties in finding the right content for them. However, automatic detection of interesting (engaging vs. non-engaging) content can help users to find the videos according to their preferences. It can also be helpful for a recommendation and personalised video segmentation system. This paper presents a study of engagement based on TED talks (1338 videos) which are rated by on-line viewers (users). It proposes novel models to predict the user’s (on-line viewers) engagement using high-level visual features (camera angles), the audience’s laughter and applause, and the presenter’s speech expressions. The results show that these features contribute towards the prediction of user engagement in these talks. However, finding the engaging speech expressions can also help a system in making summaries of TED Talks (video summarization) and creating feedback to presenters about their speech expressions during talks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hans Rutger Bosker|AUTHOR Hans Rutger Bosker]], [[Anne Kösem|AUTHOR Anne Kösem]]
</p><p class="cpabstractcardaffiliationlist">MPI for Psycholinguistics, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2416–2420
</span></p></div>
<div class="cpabstractcardabstract"><p>Brain oscillations have been shown to track the slow amplitude fluctuations in speech during comprehension. Moreover, there is evidence that these stimulus-induced cortical rhythms may persist even after the driving stimulus has ceased. However, how exactly this neural entrainment shapes speech perception remains debated. This behavioral study investigated whether and how the frequency and phase of an entrained rhythm would influence the temporal sampling of subsequent speech.
In two behavioral experiments, participants were presented with slow and fast isochronous tone sequences, followed by Dutch target words ambiguous between as /ɑs/ “ash” (with a short vowel) and aas /a:s/ “bait” (with a long vowel). Target words were presented at various phases of the entrained rhythm. Both experiments revealed effects of the frequency of the tone sequence on target word perception: fast sequences biased listeners to more long /a:s/ responses. However, no evidence for phase effects could be discerned.
These findings show that an entrained rhythm’s frequency, but not phase, influences the temporal sampling of subsequent speech. These outcomes are compatible with theories suggesting that sensory timing is evaluated relative to entrained frequency. Furthermore, they suggest that phase tracking of (syllabic) rhythms by theta oscillations plays a limited role in speech parsing.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xiao Wang|AUTHOR Xiao Wang]]^^1^^, [[Yanhui Zhang|AUTHOR Yanhui Zhang]]^^1^^, [[Gang Peng|AUTHOR Gang Peng]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Chinese University of Hong Kong, China; ^^2^^Hong Kong Polytechnic University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2421–2425
</span></p></div>
<div class="cpabstractcardabstract"><p>It is still a question of debate whether the N1-P2 complex is an index of low-level auditory processes or whether it can capture higher-order information encoded in the immediate context. To address this issue, the current study examined the morphology of the N1-P2 complex as a function of context regularities instantiated at the sublexical level. We presented two types of speech targets in isolation and in contexts comprising sequences of Cantonese words sharing either the entire rime units or just the rime segments (thus lacking lexical tone consistency). Results revealed a pervasive yet unequal attenuation of the N1 and P2 components: The degree of N1 attenuation tended to decrease while that of P2 increased due to enhanced detectability of more regular speech patterns, as well as their enhanced predictability in the immediate context. The distinct behaviors of N1 and P2 event-related potentials could be explained by the influence of perceptual experience and the hierarchical encoding of context regularities.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sakshi Verma|AUTHOR Sakshi Verma]]^^1^^, [[K.L. Prateek|AUTHOR K.L. Prateek]]^^1^^, [[Karthik Pandia|AUTHOR Karthik Pandia]]^^1^^, [[Nauman Dawalatabad|AUTHOR Nauman Dawalatabad]]^^1^^, [[Rogier Landman|AUTHOR Rogier Landman]]^^2^^, [[Jitendra Sharma|AUTHOR Jitendra Sharma]]^^2^^, [[Mriganka Sur|AUTHOR Mriganka Sur]]^^2^^, [[Hema A. Murthy|AUTHOR Hema A. Murthy]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IIT Madras, India; ^^2^^MIT, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2426–2430
</span></p></div>
<div class="cpabstractcardabstract"><p>Various studies suggest that marmosets (Callithrix jacchus) show behavior similar to that of humans in many aspects. Analyzing their calls would not only enable us to better understand these species but would also give insights into the evolution of human languages and vocal tract. This paper describes a technique to discover the patterns in marmoset vocalization in an unsupervised fashion. The proposed unsupervised clustering approach operates in two stages. Initially, voice activity detection (VAD) is applied to remove silences and non-voiced regions from the audio. This is followed by a group-delay based segmentation on the voiced regions to obtain smaller segments. In the second stage, a two-tier clustering is performed on the segments obtained. Individual hidden Markov models (HMMs) are built for each of the segments using a multiple frame size and multiple frame rate. The HMMs are then clustered until each cluster is made up of a large number of segments. Once all the clusters contain a sufficient number of segments, one Gaussian mixture model (GMM) is built for each of the clusters. These clusters are then merged using Kullback-Leibler (KL) divergence. The algorithm converges to the total number of distinct sounds in the audio, as evidenced by listening tests.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hiroki Watanabe|AUTHOR Hiroki Watanabe]], [[Hiroki Tanaka|AUTHOR Hiroki Tanaka]], [[Sakriani Sakti|AUTHOR Sakriani Sakti]], [[Satoshi Nakamura|AUTHOR Satoshi Nakamura]]
</p><p class="cpabstractcardaffiliationlist">NAIST, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2431–2435
</span></p></div>
<div class="cpabstractcardabstract"><p>Recent speech perception models propose that neural oscillations in theta band show phase locking to speech envelope to extract syllabic information and rapid temporal information is processed by the corresponding higher frequency band (e.g., low gamma). It is suggested that phase-locked responses to acoustic features show consistent patterns across subjects. Previous magnetoencephalographic (MEG) experiment showed that subject-dependent template matching classification by theta phase patterns could discriminate three English spoken sentences. In this paper, we apply electroencephalography (EEG) to spoken sentence discrimination in Japanese, and we investigate the performances in various different settings by using: (1) template matching and support vector machine (SVM) classifiers; (2) subject dependent and independent models; (3) multiple frequency bands including theta, alpha, beta, low gamma, and the combination of all frequency bands. The performances in almost all settings were higher than the chance level. While performances of SVM and template matching did not differ, the performance with combination of multiple frequency bands outperformed the one that trained only on single frequency bands. Best accuracies in subject dependent and independent models achieved 55.2% by SVM on the combination of all frequency bands and 44.0% by template matching on the combination of all frequency bands, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Noémie te Rietmolen|AUTHOR Noémie te Rietmolen]]^^1^^, [[Radouane El Yagoubi|AUTHOR Radouane El Yagoubi]]^^2^^, [[Alain Ghio|AUTHOR Alain Ghio]]^^3^^, [[Corine Astésano|AUTHOR Corine Astésano]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^URI OCTOGONE-LORDAT (EA 4156), France; ^^2^^CLLE (UMR 5263), France; ^^3^^LPL (UMR 7309), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2436–2440
</span></p></div>
<div class="cpabstractcardabstract"><p>French accentuation is held to belong to the level of the phrase. Consequently French is considered ‘a language without accent’ with speakers that are ‘deaf to stress’. Recent ERP-studies investigating the French initial accent (IA) however demonstrate listeners not only discriminate between different stress patterns, but also prefer words to be marked with IA early in the process of speech comprehension. Still, as words were presented in isolation, it remains unclear whether the preference applied to the lexical or to the phrasal level. In the current ERP-study, we address this ambiguity and manipulate IA on words embedded in a sentence. Furthermore, we orthogonally manipulate semantic congruity to investigate the interplay between accentuation and later speech processing stages. Preliminary results on 14 participants reveal a significant interaction effect: the centro-frontally located N400 was larger for words without IA, with a bigger effect for semantically incongruent sentences. This indicates that IA is encoded at a lexical level and facilitates semantic processing. Furthermore, as participants attended to the semantic content of the sentences, the finding underlines the automaticity of stress processing. In sum, we demonstrate accentuation plays an important role in French speech comprehension and call for the traditional view to be reconsidered.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bin Zhao|AUTHOR Bin Zhao]], [[Jianwu Dang|AUTHOR Jianwu Dang]], [[Gaoyan Zhang|AUTHOR Gaoyan Zhang]]
</p><p class="cpabstractcardaffiliationlist">Tianjin University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2441–2445
</span></p></div>
<div class="cpabstractcardabstract"><p>The somatotopic activation in the sensorimotor cortex during speech comprehension has been redundantly documented and largely explained by the notion of embodied semantics, which suggests that processing auditory words referring to body movements recruits the same somatotopic regions for that action execution. For this issue, the motor theory of speech perception provided another explanation, suggesting that the perception of speech sounds produced by a specific articulator movement may recruit the motor representation of that articulator in the precentral gyrus. To examine the latter theory, we used a set of Chinese synonyms with different articulatory features, involving lip gestures (LipR) or not (LipN), and recorded the electroencephalographic (EEG) signals while subjects passively listened to them. It was found that at about 200 ms post-onset, the event-related potential of LipR and LipN showed a significant polarity reversal near the precentral lip motor areas. EEG source reconstruction results also showed more obvious somatotopic activation in the lip region for the LipR than the LipN. Our results provide a positive support for the effect of articulatory simulation on speech comprehension and basically agree with the motor theory of speech perception.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[G. Gelly|AUTHOR G. Gelly]], [[J.L. Gauvain|AUTHOR J.L. Gauvain]]
</p><p class="cpabstractcardaffiliationlist">LIMSI, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2566–2570
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes the design of an acoustic language identification (LID) system based on LSTMs that directly maps a sequence of acoustic features to a vector in a vector space where the angular proximity corresponds to a measure of language/dialect similarity. A specific architecture for the LSTM-based language vector extractor is introduced along with the angular proximity loss function to train it. This new LSTM-based LID system is quicker to train than a standard RNN topology using stacked layers trained with the cross-entropy loss function and obtains significantly lower language error rates. Experiments compare this approach to our previous developments on the subject, as well as to two widely used LID techniques: a phonotactic system using DNN acoustic models and an i-vector system. Results are reported on two different data sets: the 14 languages of NIST LRE07 and the 20 closely related languages and dialects of NIST LRE15. In addition to reporting the NIST Cavg metric which served as the primary metric for the LRE07 and LRE15 evaluations, the average LER is provided.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ma Jin|AUTHOR Ma Jin]]^^1^^, [[Yan Song|AUTHOR Yan Song]]^^1^^, [[Ian McLoughlin|AUTHOR Ian McLoughlin]]^^2^^, [[Wu Guo|AUTHOR Wu Guo]]^^1^^, [[Li-Rong Dai|AUTHOR Li-Rong Dai]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^USTC, China; ^^2^^University of Kent, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2571–2575
</span></p></div>
<div class="cpabstractcardabstract"><p>A key problem in spoken language identification (LID) is how to design effective representations which are specific to language information. Recent advances in deep neural networks have led to significant improvements in results, with deep end-to-end methods proving effective. This paper proposes a novel network which aims to model an effective representation for high (first and second)-order statistics of LID-senones, defined as being LID analogues of senones in speech recognition. The high-order information extracted through bilinear pooling is robust to speakers, channels and background noise. Evaluation with NIST LRE 2009 shows improved performance compared to current state-of-the-art DBF/i-vector systems, achieving over 33% and 20% relative equal error rate (EER) improvement for 3s and 10s utterances and over 40% relative C,,avg,, improvement for all durations.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Qian Zhang|AUTHOR Qian Zhang]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]]
</p><p class="cpabstractcardaffiliationlist">University of Texas at Dallas, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2576–2580
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, bottleneck features (BNF) with an i-Vector strategy has been used for state-of-the-art language/dialect identification. However, traditional bottleneck extraction requires an additional transcribed corpus which is used for acoustic modeling. Alternatively, an unsupervised BNF extraction diagram is proposed in our study, which is derived from the traditional structure but trained with an estimated phonetic label. The proposed method is evaluated on a 4-way Chinese dialect dataset and a 5-way closely spaced Pan-Arabic corpus. Compared to a baseline i-Vector system based on acoustic features MFCCs, the proposed unsupervised BNF consistently achieves better performance across two corpora. Specifically, the EER and overall performance C,,avg * 100,, are improved by a relative +48% and +52%, respectively. Even under the condition with limited training data, the proposed feature still achieves up to 24% relative improvement compared to baseline, all without the need of a secondary transcribed corpus.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Saad Irtza|AUTHOR Saad Irtza]]^^1^^, [[Vidhyasaharan Sethu|AUTHOR Vidhyasaharan Sethu]]^^1^^, [[Eliathamby Ambikairajah|AUTHOR Eliathamby Ambikairajah]]^^1^^, [[Haizhou Li|AUTHOR Haizhou Li]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of New South Wales, Australia; ^^2^^NUS, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2581–2585
</span></p></div>
<div class="cpabstractcardabstract"><p>State-of-the-art language identification (LID) systems are not easily scalable to accommodate new languages. Specifically, as the number of target languages grows the error rate of these LID systems increases rapidly. This paper addresses such a challenge by adopting a hierarchical language identification (HLID) framework. We demonstrate the superior scalability of the HLID framework. In particular, HLID only requires the training of relevant nodes in a hierarchical structure instead of re-training the entire tree. Experiments conducted on a dataset that combined languages from the NIST LRE 2007, 2009, 2011 and 2015 databases show that as the number of target languages grows from 28 to 42, the performance of a single level (non-hierarchical) system deteriorates by around 11% while that of the hierarchical system only deteriorates by about 3.4% in terms of C,,avg,,. Finally, experiments also suggest that SVM based systems are more scalable than GPLDA based systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yao Qian|AUTHOR Yao Qian]]^^1^^, [[Keelan Evanini|AUTHOR Keelan Evanini]]^^1^^, [[Xinhao Wang|AUTHOR Xinhao Wang]]^^1^^, [[David Suendermann-Oeft|AUTHOR David Suendermann-Oeft]]^^1^^, [[Robert A. Pugh|AUTHOR Robert A. Pugh]]^^1^^, [[Patrick L. Lange|AUTHOR Patrick L. Lange]]^^1^^, [[Hillary R. Molloy|AUTHOR Hillary R. Molloy]]^^1^^, [[Frank K. Soong|AUTHOR Frank K. Soong]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Educational Testing Service, USA; ^^2^^Microsoft, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2586–2590
</span></p></div>
<div class="cpabstractcardabstract"><p>Identifying a speaker’s native language with his speech in a second language is useful for many human-machine voice interface applications. In this paper, we use a sub-phone-based i-vector approach to identify non-native English speakers’ native languages by their English speech input. Time delay deep neural networks (TDNN) are trained on LVCSR corpora for improving the alignment of speech utterances with their corresponding sub-phonemic “senone” sequences. The phonetic variability caused by a speaker’s native language can be better modeled with the sub-phone models than the conventional phone model based approach. Experimental results on the database released for the 2016 Interspeech ComParE Native Language challenge with 11 different L1s show that our system outperforms the best system by a large margin (87.2% UAR compared to 81.3% UAR for the best system from the 2016 ComParE challenge).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sameer Khurana|AUTHOR Sameer Khurana]]^^1^^, [[Maryam Najafian|AUTHOR Maryam Najafian]]^^2^^, [[Ahmed Ali|AUTHOR Ahmed Ali]]^^1^^, [[Tuka Al Hanai|AUTHOR Tuka Al Hanai]]^^2^^, [[Yonatan Belinkov|AUTHOR Yonatan Belinkov]]^^2^^, [[James Glass|AUTHOR James Glass]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^HBKU, Qatar; ^^2^^MIT, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2591–2595
</span></p></div>
<div class="cpabstractcardabstract"><p>As a continuation of our efforts towards tackling the problem of spoken Dialect Identification (DID) for Arabic languages, we present the QCRI-MIT Advanced Dialect Identification System (QMDIS). QMDIS is an automatic spoken DID system for Dialectal Arabic (DA). In this paper, we report a comprehensive study of the three main components used in the spoken DID task: phonotactic, lexical and acoustic. We use Support Vector Machines (SVMs), Logistic Regression (LR) and Convolutional Neural Networks (CNNs) as backend classifiers throughout the study. We perform all our experiments on a publicly available dataset and present new state-of-the-art results. QMDIS discriminates between the five most widely used dialects of Arabic: namely Egyptian, Gulf, Levantine, North African, and Modern Standard Arabic (MSA). We report ~73% accuracy for system combination. All the data and the code used in our experiments are publicly available for research.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Purvi Agrawal|AUTHOR Purvi Agrawal]], [[Sriram Ganapathy|AUTHOR Sriram Ganapathy]]
</p><p class="cpabstractcardaffiliationlist">Indian Institute of Science, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2446–2450
</span></p></div>
<div class="cpabstractcardabstract"><p>The performance of an automatic speech recognition (ASR) system degrades severely in noisy and reverberant environments in part due to the lack of robustness in the underlying representations used in the ASR system. On the other hand, the auditory processing studies have shown the importance of modulation filtered spectrogram representations in robust human speech recognition. Inspired by these evidences, we propose a speech representation learning paradigm using data-driven 2-D spectro-temporal modulation filter learning. In particular, multiple representations are derived using the convolutional restricted Boltzmann machine (CRBM) model in an unsupervised manner from the input speech spectrogram. A filter selection criteria based on average number of active hidden units is also employed to select the representations for ASR. The experiments are performed on Wall Street Journal (WSJ) Aurora-4 database with clean and multi condition training setup. In these experiments, the ASR results obtained from the proposed modulation filtering approach shows significant robustness to noise and channel distortions compared to other feature extraction methods (average relative improvements of 19% over baseline features in clean training). Furthermore, the ASR experiments performed on reverberant speech data from the REVERB challenge corpus highlight the benefits of the proposed representation learning scheme for far field speech recognition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Masato Mimura|AUTHOR Masato Mimura]], [[Yoshiaki Bando|AUTHOR Yoshiaki Bando]], [[Kazuki Shimada|AUTHOR Kazuki Shimada]], [[Shinsuke Sakai|AUTHOR Shinsuke Sakai]], [[Kazuyoshi Yoshii|AUTHOR Kazuyoshi Yoshii]], [[Tatsuya Kawahara|AUTHOR Tatsuya Kawahara]]
</p><p class="cpabstractcardaffiliationlist">Kyoto University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2451–2455
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a novel acoustic beamforming method using blind source separation (BSS) techniques based on non-negative matrix factorization (NMF). In conventional mask-based approaches, hard or soft masks are estimated and beamforming is performed using speech and noise spatial covariance matrices calculated from masked noisy observations, but the phase information of the target speech is not adequately preserved. In the proposed method, we perform complex-domain source separation based on multi-channel NMF with rank-1 spatial model (rank-1 MNMF) to obtain a speech spatial covariance matrix for estimating a steering vector for the target speech utilizing the separated speech observation in each time-frequency bin. This accurate steering vector estimation is effectively combined with our novel noise mask prediction method using multi-channel robust NMF (MRNMF) to construct a Maximum Likelihood (ML) beamformer that achieved a better speech recognition performance than a state-of-the-art DNN-based beamformer with no environment-specific training. Superiority of the phase preserving source separation to real-valued masks in beamforming is also confirmed through ASR experiments.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Dong Yu|AUTHOR Dong Yu]]^^1^^, [[Xuankai Chang|AUTHOR Xuankai Chang]]^^2^^, [[Yanmin Qian|AUTHOR Yanmin Qian]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tencent AI Lab, USA; ^^2^^Shanghai Jiao Tong University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2456–2460
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose a novel technique for direct recognition of multiple speech streams given the single channel of mixed speech, without first separating them. Our technique is based on permutation invariant training (PIT) for automatic speech recognition (ASR). In PIT-ASR, we compute the average cross entropy (CE) over all frames in the whole utterance for each possible output-target assignment, pick the one with the minimum CE, and optimize for that assignment. PIT-ASR forces all the frames of the same speaker to be aligned with the same output layer. This strategy elegantly solves the label permutation problem and speaker tracing problem in one shot. Our experiments on artificially mixed AMI data showed that the proposed approach is very promising.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuuki Tachioka|AUTHOR Yuuki Tachioka]]^^1^^, [[Tomohiro Narita|AUTHOR Tomohiro Narita]]^^1^^, [[Iori Miura|AUTHOR Iori Miura]]^^2^^, [[Takanobu Uramoto|AUTHOR Takanobu Uramoto]]^^2^^, [[Natsuki Monta|AUTHOR Natsuki Monta]]^^2^^, [[Shingo Uenohara|AUTHOR Shingo Uenohara]]^^2^^, [[Ken’ichi Furuya|AUTHOR Ken’ichi Furuya]]^^2^^, [[Shinji Watanabe|AUTHOR Shinji Watanabe]]^^3^^, [[Jonathan Le Roux|AUTHOR Jonathan Le Roux]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Mitsubishi Electric, Japan; ^^2^^Oita University, Japan; ^^3^^MERL, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2461–2465
</span></p></div>
<div class="cpabstractcardabstract"><p>Multi-channel non-negative matrix factorization (MNMF) is a multi-channel extension of NMF and often outperforms NMF because it can deal with spatial and spectral information simultaneously. On the other hand, MNMF has a larger number of parameters and its performance heavily depends on the initial values. MNMF factorizes an observation matrix into four matrices: spatial correlation, basis, cluster-indicator latent variables, and activation matrices. This paper proposes effective initialization methods for these matrices. First, the spatial correlation matrix, which shows the largest initial value dependencies, is initialized using the cross-spectrum method from enhanced speech by binary masking. Second, when the target is speech, constructing bases from phonemes existing in an utterance can improve the performance: this paper proposes a speech bases selection by using automatic speech recognition (ASR). Third, we also propose an initialization method for the cluster-indicator latent variables that couple the spatial and spectral information, which can achieve the simultaneous optimization of above two matrices. Experiments on a noisy ASR task show that the proposed initialization significantly improves the performance of MNMF by reducing the initial value dependencies.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Erfan Loweimi|AUTHOR Erfan Loweimi]], [[Jon Barker|AUTHOR Jon Barker]], [[Thomas Hain|AUTHOR Thomas Hain]]
</p><p class="cpabstractcardaffiliationlist">University of Sheffield, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2466–2470
</span></p></div>
<div class="cpabstractcardabstract"><p>Vector Taylor Series (VTS) is a powerful technique for robust ASR but, in its standard form, it can only be applied to log-filter bank and MFCC features. In earlier work, we presented a generalised VTS (gVTS) that extends the applicability of VTS to front-ends which employ a power transformation non-linearity. gVTS was shown to provide performance improvements in both clean and additive noise conditions. This paper makes two novel contributions. Firstly, while the previous gVTS formulation assumed that noise was purely additive, we now derive gVTS formulae for the case of speech in the presence of both additive noise and channel distortion. Second, we propose a novel iterative method for estimating the channel distortion which utilises gVTS itself and converges after a few iterations. Since the new gVTS blindly assumes the existence of both additive noise and channel effects, it is important not to introduce extra distortion when either are absent. Experimental results conducted on LVCSR Aurora-4 database show that the new formulation passes this test. In the presence of channel noise only, it provides relative WER reductions of up to 30% and 26%, compared with previous gVTS and multi-style training with cepstral mean normalisation, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Brian King|AUTHOR Brian King]]^^1^^, [[I-Fan Chen|AUTHOR I-Fan Chen]]^^1^^, [[Yonatan Vaizman|AUTHOR Yonatan Vaizman]]^^2^^, [[Yuzong Liu|AUTHOR Yuzong Liu]]^^1^^, [[Roland Maas|AUTHOR Roland Maas]]^^1^^, [[Sree Hari Krishnan Parthasarathi|AUTHOR Sree Hari Krishnan Parthasarathi]]^^1^^, [[Björn Hoffmeister|AUTHOR Björn Hoffmeister]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Amazon.com, USA; ^^2^^University of California at San Diego, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2471–2475
</span></p></div>
<div class="cpabstractcardabstract"><p>A challenge for speech recognition for voice-controlled household devices, like the Amazon Echo or Google Home, is robustness against interfering background speech. Formulated as a far-field speech recognition problem, another person or media device in proximity can produce background speech that can interfere with the device-directed speech. We expand on our previous work on device-directed speech detection in the far-field speech setting and introduce two approaches for robust acoustic modeling. Both methods are based on the idea of using an anchor word taken from the device directed speech. Our first method employs a simple yet effective normalization of the acoustic features by subtracting the mean derived over the anchor word. The second method utilizes an encoder network projecting the anchor word onto a fixed-size embedding, which serves as an additional input to the acoustic model. The encoder network and acoustic model are jointly trained. Results on an in-house dataset reveal that, in the presence of background speech, the proposed approaches can achieve up to 35% relative word error rate reduction.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ankur Bapna|AUTHOR Ankur Bapna]], [[Gokhan Tür|AUTHOR Gokhan Tür]], [[Dilek Hakkani-Tür|AUTHOR Dilek Hakkani-Tür]], [[Larry Heck|AUTHOR Larry Heck]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2476–2480
</span></p></div>
<div class="cpabstractcardabstract"><p>State-of-the-art slot filling models for goal-oriented human/machine conversational language understanding systems rely on deep learning methods. While multi-task training of such models alleviates the need for large in-domain annotated datasets, bootstrapping a semantic parsing model for a new domain using only the semantic frame, such as the back-end API or knowledge graph schema, is still one of the holy grail tasks of language understanding for dialogue systems. This paper proposes a deep learning based approach that can utilize only the slot description in context without the need for any labeled or unlabeled in-domain examples, to quickly bootstrap a new domain. The main idea of this paper is to leverage the encoding of the slot names and descriptions within a multi-task deep learned slot filling model, to implicitly align slots across domains. The proposed approach is promising for solving the domain scaling problem and eliminating the need for any manually annotated data or explicit schema alignment. Furthermore, our experiments on multiple domains show that this approach results in significantly better slot-filling performance when compared to using only in-domain data, especially in the low data regime.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Despoina Georgiadou|AUTHOR Despoina Georgiadou]], [[Vassilios Diakoloukas|AUTHOR Vassilios Diakoloukas]], [[Vassilios Tsiaras|AUTHOR Vassilios Tsiaras]], [[Vassilios Digalakis|AUTHOR Vassilios Digalakis]]
</p><p class="cpabstractcardaffiliationlist">Technical University of Crete, Greece</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2481–2485
</span></p></div>
<div class="cpabstractcardabstract"><p>A prevalent and challenging task in spoken language understanding is slot filling. Currently, the best approaches in this domain are based on recurrent neural networks (RNNs). However, in their simplest form, RNNs cannot learn long-term dependencies in the data. In this paper, we propose the use of ClockWork recurrent neural network (CW-RNN) architectures in the slot-filling domain. CW-RNN is a multi-timescale implementation of the simple RNN architecture, which has proven to be powerful since it maintains relatively small model complexity. In addition, CW-RNN exhibits a great ability to model long-term memory inherently. In our experiments on the ATIS benchmark data set, we also evaluate several novel variants of CW-RNN and we find that they significantly outperform simple RNNs and they achieve results among the state-of-the-art, while retaining smaller complexity.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mohamed Ameur Ben Jannet|AUTHOR Mohamed Ameur Ben Jannet]]^^1^^, [[Olivier Galibert|AUTHOR Olivier Galibert]]^^2^^, [[Martine Adda-Decker|AUTHOR Martine Adda-Decker]]^^3^^, [[Sophie Rosset|AUTHOR Sophie Rosset]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LIMSI, France; ^^2^^LNE, France; ^^3^^LPP (UMR 7018), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2486–2490
</span></p></div>
<div class="cpabstractcardabstract"><p>Information retrieval from speech is a key technology for many applications, as it allows access to large amounts of audio data. This technology requires two major components: an automatic speech recognizer (ASR) and a text-based information retrieval module such as a key word extractor or a named entity recognizer (NER). When combining the two components, the resulting final application needs to be globally optimized. However, ASR and information retrieval are usually developed and optimized separately. The ASR tends to be optimized to reduce the word error rate (WER), a metric which does not take into account the contextual and syntactic roles of the words, which are valuable information for information retrieval systems. In this paper we investigate different ways to tune the ASR for a speech-based NER system. In an end-to-end configuration we also tested several ASR metrics, including WER, NE-WER and ATENE, as well as the use of an oracle during the development step. Our results show that using a NER oracle to tune the system reduces the named entity recognition error rate by more than 1% absolute, and using the ATENE metric allows us to reduce it by more than 0.75%. We also show that these optimization approaches favor a higher ASR language model weight which entails an overall gain in NER performance, despite a local increase of the WER.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Marco Dinarelli|AUTHOR Marco Dinarelli]]^^1^^, [[Vedran Vukotic|AUTHOR Vedran Vukotic]]^^2^^, [[Christian Raymond|AUTHOR Christian Raymond]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Lattice (UMR 8094), France; ^^2^^INSA, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2491–2495
</span></p></div>
<div class="cpabstractcardabstract"><p>Modeling target label dependencies is important for sequence labeling tasks. This may become crucial in the case of Spoken Language Understanding (SLU) applications, especially for the slot-filling task where models have to deal often with a high number of target labels. Conditional Random Fields (CRF) were previously considered as the most efficient algorithm in these conditions. More recently, different architectures of Recurrent Neural Networks (RNNs) have been proposed for the SLU slot-filling task. Most of them, however, have been successfully evaluated on the simple ATIS database, on which it is difficult to draw significant conclusions. In this paper we propose new variants of RNNs able to learn efficiently and effectively label dependencies by integrating label embeddings. We show first that modeling label dependencies is useless on the (simple) ATIS database and unstructured models can produce state-of-the-art results on this benchmark. On ATIS our new variants achieve the same results as state-of-the-art models, while being much simpler. On the other hand, on the MEDIA benchmark, we show that the modification introduced in the proposed RNN outperforms traditional RNNs and CRF models.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhong Meng|AUTHOR Zhong Meng]], [[Biing-Hwang Juang|AUTHOR Biing-Hwang Juang]]
</p><p class="cpabstractcardaffiliationlist">Georgia Institute of Technology, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2496–2500
</span></p></div>
<div class="cpabstractcardabstract"><p>The topic spotting performance on spontaneous conversational speech can be significantly improved by operating a support vector machine with a latent semantic rational kernel (LSRK) on the decoded word lattices (i.e., weighted finite-state transducers) of the speech [1]. In this work, we propose the minimum semantic error cost (MSEC) training of a deep bidirectional long short-term memory (BLSTM)-hidden Markov model acoustic model for generating lattices that are semantically accurate and are better suited for topic spotting with LSRK. With the MSEC training, the expected semantic error cost of all possible word sequences on the lattices is minimized given the reference. The word-word semantic error cost is first computed from either the latent semantic analysis or distributed vector-space word representations learned from the recurrent neural networks and is then accumulated to form the expected semantic error cost of the hypothesized word sequences. The proposed method achieves 3.5%–4.5% absolute topic classification accuracy improvement over the baseline BLSTM trained with cross-entropy on Switchboard-1 Release 2 dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chunxi Liu|AUTHOR Chunxi Liu]], [[Jan Trmal|AUTHOR Jan Trmal]], [[Matthew Wiesner|AUTHOR Matthew Wiesner]], [[Craig Harman|AUTHOR Craig Harman]], [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]]
</p><p class="cpabstractcardaffiliationlist">Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2501–2505
</span></p></div>
<div class="cpabstractcardabstract"><p>Modern topic identification (topic ID) systems for speech use automatic speech recognition (ASR) to produce speech transcripts, and perform supervised classification on such ASR outputs. However, under resource-limited conditions, the manually transcribed speech required to develop standard ASR systems can be severely limited or unavailable. In this paper, we investigate alternative unsupervised solutions to obtaining tokenizations of speech in terms of a vocabulary of automatically discovered word-like or phoneme-like units, without depending on the supervised training of ASR systems. Moreover, using automatic phoneme-like tokenizations, we demonstrate that a convolutional neural network based framework for learning spoken document representations provides competitive performance compared to a standard bag-of-words representation, as evidenced by comprehensive topic ID evaluations on both single-label and multi-label classification tasks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bing Liu|AUTHOR Bing Liu]], [[Ian Lane|AUTHOR Ian Lane]]
</p><p class="cpabstractcardaffiliationlist">Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2506–2510
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a novel end-to-end trainable neural network model for task-oriented dialog systems. The model is able to track dialog state, issue API calls to knowledge base (KB), and incorporate structured KB query results into system responses to successfully complete task-oriented dialogs. The proposed model produces well-structured system responses by jointly learning belief tracking and KB result processing conditioning on the dialog history. We evaluate the model in a restaurant search domain using a dataset that is converted from the second Dialog State Tracking Challenge (DSTC2) corpus. Experiment results show that the proposed model can robustly track dialog state given the dialog history. Moreover, our model demonstrates promising results in producing appropriate system responses, outperforming prior end-to-end trainable neural network models using per-response accuracy evaluation metrics.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Heriberto Cuayáhuitl|AUTHOR Heriberto Cuayáhuitl]]^^1^^, [[Seunghak Yu|AUTHOR Seunghak Yu]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Lincoln, UK; ^^2^^Samsung Electronics, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2511–2515
</span></p></div>
<div class="cpabstractcardabstract"><p>Deep reinforcement learning dialogue systems are attractive because they can jointly learn their feature representations and policies without manual feature engineering. But its application is challenging due to slow learning. We propose a two-stage method for accelerating the induction of single or multi-domain dialogue policies. While the first stage reduces the amount of weight updates over time, the second stage uses very limited minibatches (of as much as two learning experiences) sampled from experience replay memories. The former frequently updates the weights of the neural nets at early stages of training, and decreases the amount of updates as training progresses by performing updates during exploration and by skipping updates during exploitation. The learning process is thus accelerated through less weight updates in both stages. An empirical evaluation in three domains (restaurants, hotels and tv guide) confirms that the proposed method trains policies 5 times faster than a baseline without the proposed method. Our findings are useful for training larger-scale neural-based spoken dialogue systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ali Orkan Bayer|AUTHOR Ali Orkan Bayer]], [[Evgeny A. Stepanov|AUTHOR Evgeny A. Stepanov]], [[Giuseppe Riccardi|AUTHOR Giuseppe Riccardi]]
</p><p class="cpabstractcardaffiliationlist">Università di Trento, Italy</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2516–2520
</span></p></div>
<div class="cpabstractcardabstract"><p>Training task-oriented dialogue systems requires significant amount of manual effort and integration of many independently built components; moreover, the pipeline is prone to error-propagation. End-to-end training has been proposed to overcome these problems by training the whole system over the utterances of both dialogue parties. In this paper we present an end-to-end spoken dialogue system architecture that is based on turn embeddings. Turn embeddings encode a robust representation of user turns with a local dialogue history and they are trained using sequence-to-sequence models. Turn embeddings are trained by generating the previous and the next turns of the dialogue and additionally perform spoken language understanding. The end-to-end spoken dialogue system is trained using the pre-trained turn embeddings in a stateful architecture that considers the whole dialogue history. We observe that the proposed spoken dialogue system architecture outperforms the models based on local-only dialogue history and it is robust to automatic speech recognition errors.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Oleg Akhtiamov|AUTHOR Oleg Akhtiamov]]^^1^^, [[Maxim Sidorov|AUTHOR Maxim Sidorov]]^^1^^, [[Alexey A. Karpov|AUTHOR Alexey A. Karpov]]^^2^^, [[Wolfgang Minker|AUTHOR Wolfgang Minker]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität Ulm, Germany; ^^2^^ITMO University, Russia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2521–2525
</span></p></div>
<div class="cpabstractcardabstract"><p>The necessity of addressee detection arises in multiparty spoken dialogue systems which deal with human-human-computer interaction. In order to cope with this kind of interaction, such a system is supposed to determine whether the user is addressing the system or another human. The present study is focused on multimodal addressee detection and describes three levels of speech and text analysis: acoustical, syntactical, and lexical. We define the connection between different levels of analysis and the classification performance for different categories of speech and determine the dependence of addressee detection performance on speech recognition accuracy. We also compare the obtained results with the results of the original research performed by the authors of the Smart Video Corpus which we use in our computations. Our most effective meta-classifier working with acoustical, syntactical, and lexical features reaches an unweighted average recall equal to 0.917 showing almost a nine percent advantage over the best baseline model, though this baseline classifier additionally uses head orientation data. We also propose a universal meta-model based on acoustical and syntactical analysis, which may theoretically be applied in different domains.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Vikram Ramanarayanan|AUTHOR Vikram Ramanarayanan]], [[Chee Wee Leong|AUTHOR Chee Wee Leong]], [[David Suendermann-Oeft|AUTHOR David Suendermann-Oeft]]
</p><p class="cpabstractcardaffiliationlist">Educational Testing Service, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2526–2530
</span></p></div>
<div class="cpabstractcardabstract"><p>We analyze the efficacy of a small crowd of naïve human raters in rating engagement during human–machine dialog interactions. Each rater viewed multiple 10 second, thin-slice videos of non-native English speakers interacting with a computer-assisted language learning (CALL) system and rated how engaged and disengaged those callers were while interacting with the automated agent. We observe how the crowd’s ratings compared to callers’ self ratings of engagement, and further study how the distribution of these rating assignments vary as a function of whether the automated system or the caller was speaking. Finally, we discuss the potential applications and pitfalls of such a crowdsourced paradigm in designing, developing and analyzing engagement-aware dialog systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ivan Kraljevski|AUTHOR Ivan Kraljevski]], [[Diane Hirschfeld|AUTHOR Diane Hirschfeld]]
</p><p class="cpabstractcardaffiliationlist">voice INTER connect, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2531–2535
</span></p></div>
<div class="cpabstractcardabstract"><p>This present paper aims at answering the question whether there are distinctive cross-linguistic differences associated with hyperarticulated speech in correction dialogue acts. The objective is to assess the effort for adaptation of a multilingual dialogue system in 9 different languages, regarding the recovery strategies, particularly corrections. If the presence of hyperarticulation significantly differs across languages, it will have a significant impact on the dialogue design and recovery strategies.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Benjamin Milde|AUTHOR Benjamin Milde]], [[Christoph Schmidt|AUTHOR Christoph Schmidt]], [[Joachim Köhler|AUTHOR Joachim Köhler]]
</p><p class="cpabstractcardaffiliationlist">Fraunhofer IAIS, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2536–2540
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, neural sequence-to-sequence (Seq2Seq) models have been applied to the problem of grapheme-to-phoneme (G2P) conversion. These models offer a straightforward way of modeling the conversion by jointly learning the alignment and translation of input to output tokens in an end-to-end fashion. However, until now this approach did not show improved error rates on its own compared to traditional joint-sequence based n-gram models for G2P. In this paper, we investigate how multitask learning can improve the performance of Seq2Seq G2P models. A single Seq2Seq model is trained on multiple phoneme lexicon datasets containing multiple languages and phonetic alphabets. Although multi-language learning does not show improved error rates, combining standard datasets and crawled data with different phonetic alphabets of the same language shows promising error reductions on English and German Seq2Seq G2P conversion. Finally, combining Seq2Seq G2P models with standard n-grams based models yields significant improvements over using either model alone.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xiaohui Zhang|AUTHOR Xiaohui Zhang]], [[Vimal Manohar|AUTHOR Vimal Manohar]], [[Daniel Povey|AUTHOR Daniel Povey]], [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]]
</p><p class="cpabstractcardaffiliationlist">Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2541–2545
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech recognition systems for irregularly-spelled languages like English normally require hand-written pronunciations. In this paper, we describe a system for automatically obtaining pronunciations of words for which pronunciations are not available, but for which transcribed data exists. Our method integrates information from the letter sequence and from the acoustic evidence. The novel aspect of the problem that we address is the problem of how to prune entries from such a lexicon (since, empirically, lexicons with too many entries do not tend to be good for ASR performance). Experiments on various ASR tasks show that, with the proposed framework, starting with an initial lexicon of several thousand words, we are able to learn a lexicon which performs close to a full expert lexicon in terms of WER performance on test data, and is better than lexicons built using G2P alone or with a pruning criterion based on pronunciation probability.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Takahiro Shinozaki|AUTHOR Takahiro Shinozaki]]^^1^^, [[Shinji Watanabe|AUTHOR Shinji Watanabe]]^^2^^, [[Daichi Mochihashi|AUTHOR Daichi Mochihashi]]^^3^^, [[Graham Neubig|AUTHOR Graham Neubig]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tokyo Institute of Technology, Japan; ^^2^^MERL, USA; ^^3^^ISM, Japan; ^^4^^Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2546–2550
</span></p></div>
<div class="cpabstractcardabstract"><p>While the performance of automatic speech recognition systems has recently approached human levels in some tasks, the application is still limited to specific domains. This is because system development relies on extensive supervised training and expert tuning in the target domain. To solve this problem, systems must become more self-sufficient, having the ability to learn directly from speech and adapt to new tasks. One open question in this area is how to learn a pronunciation dictionary containing the appropriate vocabulary. Humans can recognize words, even ones they have never heard before, by reading text and understanding the context in which a word is used. However, this ability is missing in current speech recognition systems. In this work, we propose a new framework that automatically expands an initial pronunciation dictionary using independently sampled acoustic and textual data. While the task is very challenging and in its initial stage, we demonstrate that a model based on Bayesian learning of Dirichlet processes can acquire word pronunciations from phone transcripts and text of the WSJ data set.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Peter Smit|AUTHOR Peter Smit]], [[Sami Virpioja|AUTHOR Sami Virpioja]], [[Mikko Kurimo|AUTHOR Mikko Kurimo]]
</p><p class="cpabstractcardaffiliationlist">Aalto University, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2551–2555
</span></p></div>
<div class="cpabstractcardabstract"><p>Because in agglutinative languages the number of observed word forms is very high, subword units are often utilized in speech recognition. However, the proper use of subword units requires careful consideration of details such as silence modeling, position-dependent phones, and combination of the units. In this paper, we implement subword modeling in the Kaldi toolkit by creating modified lexicon by finite-state transducers to represent the subword units correctly. We experiment with multiple types of word boundary markers and achieve the best results by adding a marker to the left or right side of a subword unit whenever it is not preceded or followed by a word boundary, respectively. We also compare three different toolkits that provide data-driven subword segmentations. In our experiments on a variety of Finnish and Estonian datasets, the best subword models do outperform word-based models and naive subword implementations. The largest relative reduction in WER is 23% over word-based models for a Finnish read speech dataset. The results are also better than any previously published ones for the same datasets, and the improvement on all datasets is more than 5%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Antoine Bruguier|AUTHOR Antoine Bruguier]], [[Danushen Gnanapragasam|AUTHOR Danushen Gnanapragasam]], [[Leif Johnson|AUTHOR Leif Johnson]], [[Kanishka Rao|AUTHOR Kanishka Rao]], [[Françoise Beaufays|AUTHOR Françoise Beaufays]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2556–2560
</span></p></div>
<div class="cpabstractcardabstract"><p>Most speech recognition systems rely on pronunciation dictionaries to provide accurate transcriptions. Typically, some pronunciations are carved manually, but many are produced using pronunciation learning algorithms. Successful algorithms must have the ability to generate rich pronunciation variants, e.g. to accommodate words of foreign origin, while being robust to artifacts of the training data, e.g. noise in the acoustic segments from which the pronunciations are learned if the method uses acoustic signals. We propose a general finite-state transducer (FST) framework to describe such algorithms. This representation is flexible enough to accommodate a wide variety of pronunciation learning algorithms, including approaches that rely on the availability of acoustic data, and methods that only rely on the spelling of the target words. In particular, we show that the pronunciation FST can be built from a recurrent neural network (RNN) and tuned to provide rich yet constrained pronunciations. This new approach reduces the number of incorrect pronunciations learned from Google Voice traffic by up to 25% relative.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Einat Naaman|AUTHOR Einat Naaman]], [[Yossi Adi|AUTHOR Yossi Adi]], [[Joseph Keshet|AUTHOR Joseph Keshet]]
</p><p class="cpabstractcardaffiliationlist">Bar-Ilan University, Israel</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2561–2565
</span></p></div>
<div class="cpabstractcardabstract"><p>A significant source of errors in Automatic Speech Recognition (ASR) systems is due to pronunciation variations which occur in spontaneous and conversational speech. Usually ASR systems use a finite lexicon that provides one or more pronunciations for each word. In this paper, we focus on learning a similarity function between two pronunciations. The pronunciations can be the canonical and the surface pronunciations of the same word or they can be two surface pronunciations of different words. This task generalizes problems such as lexical access (the problem of learning the mapping between words and their possible pronunciations), and defining word neighborhoods. It can also be used to dynamically increase the size of the pronunciation lexicon, or in predicting ASR errors. We propose two methods, which are based on recurrent neural networks, to learn the similarity function. The first is based on binary classification, and the second is based on learning the ranking of the pronunciations. We demonstrate the efficiency of our approach on the task of lexical access using a subset of the Switchboard conversational speech corpus. Results suggest that on this task our methods are superior to previous methods which are based on graphical Bayesian methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[K.N.R.K. Raju Alluri|AUTHOR K.N.R.K. Raju Alluri]], [[Sivanand Achanta|AUTHOR Sivanand Achanta]], [[Sudarsana Reddy Kadiri|AUTHOR Sudarsana Reddy Kadiri]], [[Suryakanth V. Gangashetty|AUTHOR Suryakanth V. Gangashetty]], [[Anil Kumar Vuppala|AUTHOR Anil Kumar Vuppala]]
</p><p class="cpabstractcardaffiliationlist">IIIT Hyderabad, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2596–2600
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic speaker verification systems are vulnerable to spoofing attacks. Recently, various countermeasures have been developed for detecting high technology attacks such as speech synthesis and voice conversion. However, there is a wide gap in dealing with replay attacks. In this paper, we propose a new feature for replay attack detection based on single frequency filtering (SFF), which provides high temporal and spectral resolution at each instant. Single frequency filtering cepstral coefficients (SFFCC) with Gaussian mixture model classifier are used for the experimentation on the standard BTAS-2016 corpus. The previously reported best result, which is based on constant Q cepstral coefficients (CQCC) achieved a half total error rate of 0.67% on this data-set. Our proposed method outperforms the state of the art (CQCC) with a half total error rate of 0.0002%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hardik B. Sailor|AUTHOR Hardik B. Sailor]], [[Madhu R. Kamble|AUTHOR Madhu R. Kamble]], [[Hemant A. Patil|AUTHOR Hemant A. Patil]]
</p><p class="cpabstractcardaffiliationlist">DA-IICT, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2601–2605
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech Synthesis (SS) and Voice Conversion (VC) present a genuine risk of attacks for Automatic Speaker Verification (ASV) technology. In this paper, we use our recently proposed unsupervised filterbank learning technique using Convolutional Restricted Boltzmann Machine (ConvRBM) as a front-end feature representation. ConvRBM is trained on training subset of ASV spoof 2015 challenge database. Analyzing the filterbank trained on this dataset shows that ConvRBM learned more low-frequency subband filters compared to training on natural speech database such as TIMIT. The spoofing detection experiments were performed using Gaussian Mixture Models (GMM) as a back-end classifier. ConvRBM-based cepstral coefficients (ConvRBM-CC) perform better than hand crafted Mel Frequency Cepstral Coefficients (MFCC). On the evaluation set, ConvRBM-CC features give an absolute reduction of 4.76% in Equal Error Rate (EER) compared to MFCC features. Specifically, ConvRBM-CC features significantly perform better in both known attacks (1.93%) and unknown attacks (5.87%) compared to MFCC features.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gajan Suthokumar|AUTHOR Gajan Suthokumar]], [[Kaavya Sriskandaraja|AUTHOR Kaavya Sriskandaraja]], [[Vidhyasaharan Sethu|AUTHOR Vidhyasaharan Sethu]], [[Chamith Wijenayake|AUTHOR Chamith Wijenayake]], [[Eliathamby Ambikairajah|AUTHOR Eliathamby Ambikairajah]]
</p><p class="cpabstractcardaffiliationlist">University of New South Wales, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2606–2610
</span></p></div>
<div class="cpabstractcardabstract"><p>Spoofing detection systems for automatic speaker verification have moved from only modelling voiced frames to modelling all speech frames. Unvoiced speech has been shown to carry information about spoofing attacks and anti-spoofing systems may further benefit by treating voiced and unvoiced speech differently. In this paper, we separate speech into low and high energy frames and independently model the distributions of both to form two spoofing detection systems that are then fused at the score level. Experiments conducted on the ASVspoof 2015, BTAS 2016 and Spoofing and Anti-Spoofing (SAS) corpora demonstrate that the proposed approach of fusing two independent high and low energy spoofing detection systems consistently outperforms the standard approach that does not distinguish between high and low energy frames.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Achintya Kr. Sarkar|AUTHOR Achintya Kr. Sarkar]]^^1^^, [[Md. Sahidullah|AUTHOR Md. Sahidullah]]^^2^^, [[Zheng-Hua Tan|AUTHOR Zheng-Hua Tan]]^^1^^, [[Tomi Kinnunen|AUTHOR Tomi Kinnunen]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Aalborg University, Denmark; ^^2^^University of Eastern Finland, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2611–2615
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic speaker verification (ASV) systems are vulnerable to spoofing attacks using speech generated by voice conversion and speech synthesis techniques. Commonly, a countermeasure (CM) system is integrated with an ASV system for improved protection against spoofing attacks. But integration of the two systems is challenging and often leads to increased false rejection rates. Furthermore, the performance of CM severely degrades if in-domain development data are unavailable. In this study, therefore, we propose a solution that uses two separate background models — one from human speech and another from spoofed data. During test, the ASV score for an input utterance is computed as the difference of the log-likelihood against the target model and the combination of the log-likelihoods against two background models. Evaluation experiments are conducted using the joint ASV and CM protocol of ASVspoof 2015 corpus consisting of text-independent ASV tasks with short utterances. Our proposed system reduces error rates in the presence of spoofing attacks by using out-of-domain spoofed data for system development, while maintaining the performance for zero-effort imposter attacks compared to the baseline system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Arsha Nagrani|AUTHOR Arsha Nagrani]], [[Joon Son Chung|AUTHOR Joon Son Chung]], [[Andrew Zisserman|AUTHOR Andrew Zisserman]]
</p><p class="cpabstractcardaffiliationlist">University of Oxford, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2616–2620
</span></p></div>
<div class="cpabstractcardabstract"><p>Most existing datasets for speaker identification contain samples obtained under quite constrained conditions, and are usually hand-annotated, hence limited in size. The goal of this paper is to generate a large scale text-independent speaker identification dataset collected ‘in the wild’.
We make two contributions. First, we propose a fully automated pipeline based on computer vision techniques to create the dataset from open-source media. Our pipeline involves obtaining videos from YouTube; performing active speaker verification using a two-stream synchronization Convolutional Neural Network (CNN), and confirming the identity of the speaker using CNN based facial recognition. We use this pipeline to curate ‘VoxCeleb’ which contains hundreds of thousands of ‘real world’ utterances for over 1,000 celebrities.
Our second contribution is to apply and compare various state of the art speaker identification techniques on our dataset to establish baseline performance. We show that a CNN based architecture obtains the best performance for both identification and verification.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Karen Jones|AUTHOR Karen Jones]], [[Stephanie Strassel|AUTHOR Stephanie Strassel]], [[Kevin Walker|AUTHOR Kevin Walker]], [[David Graff|AUTHOR David Graff]], [[Jonathan Wright|AUTHOR Jonathan Wright]]
</p><p class="cpabstractcardaffiliationlist">University of Pennsylvania, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2621–2624
</span></p></div>
<div class="cpabstractcardabstract"><p>The Call My Net 2015 (CMN15) corpus presents a new resource for Speaker Recognition Evaluation and related technologies. The corpus includes conversational telephone speech recordings for a total of 220 speakers spanning 4 languages: Tagalog, Cantonese, Mandarin and Cebuano. The corpus includes 10 calls per speaker made under a variety of noise conditions. Calls were manually audited for language, speaker identity and overall quality. The resulting data has been used in the NIST 2016 SRE Evaluation and will be published in the Linguistic Data Consortium catalog. We describe the goals of the CMN15 corpus, including details of the collection protocol and auditing procedure and discussion of the unique properties of this corpus compared to prior NIST SRE evaluation corpora.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Fadi Biadsy|AUTHOR Fadi Biadsy]], [[Mohammadreza Ghodsi|AUTHOR Mohammadreza Ghodsi]], [[Diamantino Caseiro|AUTHOR Diamantino Caseiro]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2710–2714
</span></p></div>
<div class="cpabstractcardabstract"><p>Maximum Entropy (MaxEnt) language models are powerful models that can incorporate linguistic and non-linguistic contextual signals in a unified framework with a convex loss. MaxEnt models also have the advantage of scaling to large model and training data sizes. We present the following two contributions to MaxEnt training: (1) By leveraging smaller amounts of transcribed data, we demonstrate that a MaxEnt LM trained on various types of corpora can be easily adapted to better match the test distribution of Automatic Speech Recognition (ASR); (2) A novel adaptive-training approach that efficiently models multiple types of non-linguistic features in a universal model. We evaluate the impact of these approaches on Google’s state-of-the-art ASR for the task of voice-search transcription and dictation. Training 10B parameter models utilizing a corpus of up to 1T words, we show large reductions in word error rate from adaptation across multiple languages. Also, human evaluations show significant improvements on a wide range of domains from using non-linguistic features. For example, adapting to geographical domains (e.g., US States and cities) affects about 4% of test utterances, with a 2:1 win to loss ratio.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Salil Deena|AUTHOR Salil Deena]], [[Raymond W.M. Ng|AUTHOR Raymond W.M. Ng]], [[Pranava Madhyastha|AUTHOR Pranava Madhyastha]], [[Lucia Specia|AUTHOR Lucia Specia]], [[Thomas Hain|AUTHOR Thomas Hain]]
</p><p class="cpabstractcardaffiliationlist">University of Sheffield, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2715–2719
</span></p></div>
<div class="cpabstractcardabstract"><p>Recurrent neural network language models (RNNLMs) can be augmented with auxiliary features, which can provide an extra modality on top of the words. It has been found that RNNLMs perform best when trained on a large corpus of generic text and then fine-tuned on text corresponding to the sub-domain for which it is to be applied. However, in many cases the auxiliary features are available for the sub-domain text but not for the generic text. In such cases, semi-supervised techniques can be used to infer such features for the generic text data such that the RNNLM can be trained and then fine-tuned on the available in-domain data with corresponding auxiliary features.
In this paper, several novel approaches are investigated for dealing with the semi-supervised adaptation of RNNLMs with auxiliary features as input. These approaches include: using zero features during training to mask the weights of the feature sub-network; adding the feature sub-network only at the time of fine-tuning; deriving the features using a parametric model and; back-propagating to infer the features on the generic text. These approaches are investigated and results are reported both in terms of PPL and WER on a multi-genre broadcast ASR task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mittul Singh|AUTHOR Mittul Singh]], [[Youssef Oualil|AUTHOR Youssef Oualil]], [[Dietrich Klakow|AUTHOR Dietrich Klakow]]
</p><p class="cpabstractcardaffiliationlist">Universität des Saarlandes, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2720–2724
</span></p></div>
<div class="cpabstractcardabstract"><p>Traditionally, short-range Language Models (LMs) like the conventional n-gram models have been used for language model adaptation. Recent work has improved performance for such tasks using adapted long-span models like Recurrent Neural Network LMs (RNNLMs). With the first pass performed using a large background n-gram LM, the adapted RNNLMs are mostly used to rescore lattices or N-best lists, as a second step in the decoding process. Ideally, these adapted RNNLMs should be applied for first-pass decoding. Thus, we introduce two ways of applying adapted long-short-term-memory (LSTM) based RNNLMs for first-pass decoding. Using available techniques to convert LSTMs to approximated versions for first-pass decoding, we compare approximated LSTMs adapted in a Fast Marginal Adaptation framework (FMA) and an approximated version of architecture-based-adaptation of LSTM. On a conversational speech recognition task, these differently approximated and adapted LSTMs combined with a trigram LM outperform other adapted and unadapted LMs. Here, the architecture-adapted LSTM combination obtains a 35.9% word error rate (WER) and is outperformed by FMA-based LSTM combination obtaining the overall lowest WER of 34.4%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ciprian Chelba|AUTHOR Ciprian Chelba]], [[Diamantino Caseiro|AUTHOR Diamantino Caseiro]], [[Fadi Biadsy|AUTHOR Fadi Biadsy]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2725–2729
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a new method for estimating the sparse non-negative model (SNM) by using a small amount of held-out data and the multinomial loss that is natural for language modeling; we validate it experimentally against the previous estimation method which uses leave-one-out on training data and a binary loss function and show that it performs equally well. Being able to train on held-out data is very important in practical situations where training data is mismatched from held-out/test data. We find that fairly small amounts of held-out data (on the order of 30–70 thousand words) are sufficient for training the adjustment model, which is the only model component estimated using gradient descent; the bulk of model parameters are relative frequencies counted on training data.
A second contribution is a comparison between SNM and the related class of Maximum Entropy language models. While much cheaper computationally, we show that SNM achieves slightly better perplexity results for the same feature set and same speech recognition accuracy on voice search and short message dictation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Manoj Kumar|AUTHOR Manoj Kumar]], [[Daniel Bone|AUTHOR Daniel Bone]], [[Kelly McWilliams|AUTHOR Kelly McWilliams]], [[Shanna Williams|AUTHOR Shanna Williams]], [[Thomas D. Lyon|AUTHOR Thomas D. Lyon]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]
</p><p class="cpabstractcardaffiliationlist">University of Southern California, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2730–2734
</span></p></div>
<div class="cpabstractcardabstract"><p>The mutual influence of participant behavior in a dyadic interaction has been studied for different modalities and quantified by computational models. In this paper, we consider the task of automatic recognition for children’s speech, in the context of child-adult spoken interactions during interviews of children suspected to have been maltreated. Our long-term goal is to provide insights within this immensely important, sensitive domain through large-scale lexical and paralinguistic analysis. We demonstrate improvement in child speech recognition accuracy by conditioning on both the domain and the interlocutor’s (adult) speech. Specifically, we use information from the automatic speech recognizer outputs of the adult’s speech, for which we have more reliable estimates, to modify the recognition system of child’s speech in an unsupervised manner. By learning first at session level, and then at the utterance level, we demonstrate an absolute improvement of up to 28% WER and 55% perplexity over the baseline results. We also report results of a parallel human speech recognition (HSR) experiment where annotators are asked to transcribe child’s speech under two conditions: with and without contextual speech information. Demonstrated ASR improvements and the HSR experiment illustrate the importance of context in aiding child speech recognition, whether by humans or computers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Weiwu Zhu|AUTHOR Weiwu Zhu]]
</p><p class="cpabstractcardaffiliationlist">Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2735–2738
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper demonstrates how Knowledge Graph (KG) and Search Query Click Logs (SQCL) can be leveraged in statistical language models to improve named entity recognition for online speech recognition systems. Due to being missing from the training data, some named entities may be recognized as other common words that have a similar pronunciation. KG and SQCL cover comprehensive and fresh named entities and queries that can be used to mitigate the wrong recognition. First, all the entities located in the same area in KG are clustered together, and the queries that contain the entity names are selected from SQCL as the training data of a geographical statistical language model for each entity cluster. These geographical language models make the unseen named entities less likely to occur during the model training, and can be dynamically switched according to the user location in the recognition phase. Second, if any named entities are identified in the previous utterances within a conversational dialog, the probability of the n-best word sequence paths that contain their related entities will be increased for the current utterance by utilizing the entity relationships from KG and SQCL. In this way, the long-term contexts within the dialog can be leveraged. Experiments for the proposed approach on voice queries from a spoken dialog system yielded a 12.5% relative perplexity reduction in the language model measurement, and a 1.1% absolute word error rate reduction in the speech recognition measurement.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ron J. Weiss|AUTHOR Ron J. Weiss]]^^1^^, [[Jan Chorowski|AUTHOR Jan Chorowski]]^^1^^, [[Navdeep Jaitly|AUTHOR Navdeep Jaitly]]^^2^^, [[Yonghui Wu|AUTHOR Yonghui Wu]]^^1^^, [[Zhifeng Chen|AUTHOR Zhifeng Chen]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Google, USA; ^^2^^NVIDIA, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2625–2629
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a recurrent encoder-decoder deep neural network architecture that directly translates speech in one language into text in another. The model does not explicitly transcribe the speech into text in the source language, nor does it require supervision from the ground truth source language transcription during training. We apply a slightly modified sequence-to-sequence with attention architecture that has previously been used for speech recognition and show that it can be repurposed for this more complex task, illustrating the power of attention-based models.
A single model trained end-to-end obtains state-of-the-art performance on the Fisher Callhome Spanish-English speech translation task, outperforming a cascade of independently trained sequence-to-sequence speech recognition and machine translation models by 1.8 BLEU points on the Fisher test set. In addition, we find that making use of the training data in both languages by multi-task training sequence-to-sequence speech translation and recognition models with a shared encoder network can improve performance by a further 1.4 BLEU points.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Takatomo Kano|AUTHOR Takatomo Kano]], [[Sakriani Sakti|AUTHOR Sakriani Sakti]], [[Satoshi Nakamura|AUTHOR Satoshi Nakamura]]
</p><p class="cpabstractcardaffiliationlist">NAIST, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2630–2634
</span></p></div>
<div class="cpabstractcardabstract"><p>Sequence-to-sequence attentional-based neural network architectures have been shown to provide a powerful model for machine translation and speech recognition. Recently, several works have attempted to extend the models for end-to-end speech translation task. However, the usefulness of these models were only investigated on language pairs with similar syntax and word order (e.g., English-French or English-Spanish). In this work, we focus on end-to-end speech translation tasks on syntactically distant language pairs (e.g., English-Japanese) that require distant word reordering. To guide the encoder-decoder attentional model to learn this difficult problem, we propose a structured-based curriculum learning strategy. Unlike conventional curriculum learning that gradually emphasizes difficult data examples, we formalize learning strategies from easier network structures to more difficult network structures. Here, we start the training with end-to-end encoder-decoder for speech recognition or text-based machine translation task then gradually move to end-to-end speech translation task. The experiment results show that the proposed approach could provide significant improvements in comparison with the one without curriculum learning.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nicholas Ruiz|AUTHOR Nicholas Ruiz]], [[Mattia Antonino Di Gangi|AUTHOR Mattia Antonino Di Gangi]], [[Nicola Bertoldi|AUTHOR Nicola Bertoldi]], [[Marcello Federico|AUTHOR Marcello Federico]]
</p><p class="cpabstractcardaffiliationlist">FBK, Italy</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2635–2639
</span></p></div>
<div class="cpabstractcardabstract"><p>Machine translation systems are conventionally trained on textual resources that do not model phenomena that occur in spoken language. While the evaluation of neural machine translation systems on textual inputs is actively researched in the literature, little has been discovered about the complexities of translating spoken language data with neural models. We introduce and motivate interesting problems one faces when considering the translation of automatic speech recognition (ASR) outputs on neural machine translation (NMT) systems. We test the robustness of sentence encoding approaches for NMT encoder-decoder modeling, focusing on word-based over byte-pair encoding. We compare the translation of utterances containing ASR errors in state-of-the-art NMT encoder-decoder systems against a strong phrase-based machine translation baseline in order to better understand which phenomena present in ASR outputs are better represented under the NMT framework than approaches that represent translation as a linear model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Quoc Truong Do|AUTHOR Quoc Truong Do]], [[Sakriani Sakti|AUTHOR Sakriani Sakti]], [[Satoshi Nakamura|AUTHOR Satoshi Nakamura]]
</p><p class="cpabstractcardaffiliationlist">NAIST, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2640–2644
</span></p></div>
<div class="cpabstractcardabstract"><p>Emphasis is an important piece of paralinguistic information that is used to express different intentions, attitudes, or convey emotion. Recent works have tried to translate emphasis by developing additional emphasis estimation and translation components apart from an existing speech-to-speech translation (S2ST) system. Although these approaches can preserve emphasis, they introduce more complexity to the translation pipeline. The emphasis translation component has to wait for the target language sentence and word alignments derived from a machine translation system, resulting in a significant translation delay. In this paper, we proposed an approach that jointly trains and predicts words and emphasis in a unified architecture based on sequence-to-sequence models. The proposed model not only speeds up the translation pipeline but also allows us to perform joint training. Our experiments on the emphasis and word translation tasks showed that we could achieve comparable performance for both tasks compared with previous approaches while eliminating complex dependencies.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Eunah Cho|AUTHOR Eunah Cho]], [[Jan Niehues|AUTHOR Jan Niehues]], [[Alex Waibel|AUTHOR Alex Waibel]]
</p><p class="cpabstractcardaffiliationlist">KIT, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2645–2649
</span></p></div>
<div class="cpabstractcardabstract"><p>Insertion of proper segmentation and punctuation into an ASR transcript is crucial not only for the performance of subsequent applications but also for the readability of the text. In a simultaneous spoken language translation system, the segmentation model has to fulfill real-time constraints and minimize latency as well.
In this paper, we show the successful integration of an attentional encoder-decoder-based segmentation and punctuation insertion model into a real-time spoken language translation system. The proposed technique can be easily integrated into the real-time framework and improve the punctuation performance on reference transcripts as well as on ASR outputs. Compared to the conventional language model and prosody-based model, our experiments on end-to-end spoken language translation show that translation performance is improved by 1.3 BLEU points by adopting the NMT-based punctuation model, maintaining low-latency.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lukas Drude|AUTHOR Lukas Drude]], [[Reinhold Haeb-Umbach|AUTHOR Reinhold Haeb-Umbach]]
</p><p class="cpabstractcardaffiliationlist">Universität Paderborn, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2650–2654
</span></p></div>
<div class="cpabstractcardabstract"><p>Recent advances in discriminatively trained mask estimation networks to extract a single source utilizing beamforming techniques demonstrate that the integration of statistical models and deep neural networks (DNNs) is a promising approach for robust automatic speech recognition (ASR) applications. In this contribution we demonstrate how discriminatively trained embeddings on spectral features can be tightly integrated into statistical model-based source separation to separate and transcribe overlapping speech. Good generalization to unseen spatial configurations is achieved by estimating a statistical model at test time, while still leveraging discriminative training of deep clustering embeddings on a separate training set. We formulate an expectation maximization (EM) algorithm which jointly estimates a model for deep clustering embeddings and complex-valued spatial observations in the short time Fourier transform (STFT) domain at test time. Extensive simulations confirm that the integrated model outperforms (a) a deep clustering model with a subsequent beamforming step and (b) an EM-based model with a beamforming step alone in terms of signal to distortion ratio (SDR) and perceptually motivated metric (PESQ) gains. ASR results on a reverberated dataset further show that the aforementioned gains translate to reduced word error rates (WERs) even in reverberant environments.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kateřina Žmolíková|AUTHOR Kateřina Žmolíková]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]], [[Takuya Higuchi|AUTHOR Takuya Higuchi]], [[Atsunori Ogawa|AUTHOR Atsunori Ogawa]], [[Tomohiro Nakatani|AUTHOR Tomohiro Nakatani]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2655–2659
</span></p></div>
<div class="cpabstractcardabstract"><p>In this work, we address the problem of extracting one target speaker from a multichannel mixture of speech. We use a neural network to estimate masks to extract the target speaker and derive beamformer filters using these masks, in a similar way as the recently proposed approach for extraction of speech in presence of noise. To overcome the permutation ambiguity of neural network mask estimation, which arises in presence of multiple speakers, we propose to inform the neural network about the target speaker so that it learns to follow the speaker characteristics through the utterance. We investigate and compare different methods of passing the speaker information to the network such as making one layer of the network dependent on speaker characteristics. Experiments on mixture of two speakers demonstrate that the proposed scheme can track and extract a target speaker for both closed and open speaker set cases.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lukas Pfeifenberger|AUTHOR Lukas Pfeifenberger]], [[Matthias Zöhrer|AUTHOR Matthias Zöhrer]], [[Franz Pernkopf|AUTHOR Franz Pernkopf]]
</p><p class="cpabstractcardaffiliationlist">Technische Universität Graz, Austria</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2660–2664
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we use a logistic regression to learn a speech mask from the dominant eigenvector of the Power Spectral Density (PSD) matrix of a multi-channel speech signal corrupted by ambient noise. We employ this speech mask to construct the Generalized Eigenvalue (GEV) beamformer and a Wiener postfilter. Further, we extend the beamformer to compensate for speech distortions. We do not make any assumptions about the array geometry or the characteristics of the speech and noise sources. Those parameters are learned from training data. Our assumptions are that the speaker may move slowly in the near-field of the array, and that the noise is in the far-field. We compare our speech enhancement system against recent contributions using the CHiME4 corpus. We show that our approach yields superior results, both in terms of perceptual speech quality and speech mask estimation error.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sean U.N. Wood|AUTHOR Sean U.N. Wood]], [[Jean Rouat|AUTHOR Jean Rouat]]
</p><p class="cpabstractcardaffiliationlist">Université de Sherbrooke, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2665–2669
</span></p></div>
<div class="cpabstractcardabstract"><p>We develop an online variant of the GCC-NMF blind speech enhancement algorithm and study its performance on two-channel mixtures of speech and real-world noise from the SiSEC separation challenge. While GCC-NMF performs enhancement independently for each time frame, the NMF dictionary, its activation coefficients, and the target TDOA are derived using the entire mixture signal, thus precluding its use online. Pre-learning the NMF dictionary using the CHiME dataset and inferring its activation coefficients online yields similar overall PEASS scores to the mixture-learned method, thus generalizing to new speakers, acoustic environments, and noise conditions. Surprisingly, if we forgo coefficient inference altogether, this approach outperforms both the mixture-learned method and most algorithms from the SiSEC challenge to date. Furthermore, the trade-off between interference suppression and target fidelity may be controlled online by adjusting the target TDOA window width. Finally, integrating online target localization with max-pooled GCC-PHAT yields only somewhat decreased performance compared to offline localization. We test a real-time implementation of the online GCC-NMF blind speech enhancement system on a variety of hardware platforms, with performance made to degrade smoothly with decreasing computational power using smaller pre-learned dictionaries.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Youna Ji|AUTHOR Youna Ji]], [[Jun Byun|AUTHOR Jun Byun]], [[Young-cheol Park|AUTHOR Young-cheol Park]]
</p><p class="cpabstractcardaffiliationlist">Yonsei University, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2670–2674
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, a coherence-based noise reduction algorithm is proposed for a dual-channel speech enhancement system operating in a complex noise environment. The spatial coherence between two omnidirectional microphones is one of the crucial information for the dual-channel speech enhancement system. In this paper, we introduce a new model of coherence function for the complex noise environment in which a target speech coexists with a coherent interference and diffuse noise around. From the coherence model, three numerical methods of computing the normalized signal to interference plus diffuse noise ratio (SINR), which is related to the Wiener filter gain, are derived. Objective parameters measured from the enhanced speech demonstrate superior performance of the proposed algorithm in terms of speech quality and intelligibility, over the conventional coherence-based noise reduction algorithm.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yang Zhang|AUTHOR Yang Zhang]]^^1^^, [[Dinei Florêncio|AUTHOR Dinei Florêncio]]^^2^^, [[Mark Hasegawa-Johnson|AUTHOR Mark Hasegawa-Johnson]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Illinois at Urbana-Champaign, USA; ^^2^^Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2675–2679
</span></p></div>
<div class="cpabstractcardabstract"><p>We are interested in the task of speech beamforming in conference room meetings, with microphones built in the electronic devices brought and casually placed by meeting participants. This task is challenging because of the inaccuracy in position and interference calibration due to random microphone configuration, variance of microphone quality, reverberation etc. As a result, not many beamforming algorithms perform better than simply picking the closest microphone in this setting. We propose a beamforming algorithm called Glottal Residual Assisted Beamforming (GRAB). It does not rely on any position or interference calibration. Instead, it incorporates a source-filter speech model and minimizes the energy that cannot be accounted for by the model. Objective and subjective evaluations on both simulation and real-world data show that GRAB is able to suppress noise effectively while keeping the speech natural and dry. Further analyses reveal that GRAB can distinguish contaminated or reverberant channels and take appropriate action accordingly.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuanyuan Liu|AUTHOR Yuanyuan Liu]], [[Tan Lee|AUTHOR Tan Lee]], [[P.C. Ching|AUTHOR P.C. Ching]], [[Thomas K.T. Law|AUTHOR Thomas K.T. Law]], [[Kathy Y.S. Lee|AUTHOR Kathy Y.S. Lee]]
</p><p class="cpabstractcardaffiliationlist">Chinese University of Hong Kong, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2680–2684
</span></p></div>
<div class="cpabstractcardabstract"><p>Most previous studies on acoustic assessment of disordered voice were focused on extracting perturbation features from isolated vowels produced with steady-state phonation. Natural speech, however, is considered to be more preferable in the aspects of flexibility, effectiveness and reliability for clinical practice. This paper presents an investigation on applying automatic speech recognition (ASR) technology to disordered voice assessment of Cantonese speakers. A DNN-based ASR system is trained using phonetically-rich continuous utterances from normal speakers. It was found that frame-level phone posteriors obtained from the ASR system are strongly correlated with the severity level of voice disorder. Phone posteriors in utterances with severe disorder exhibit significantly larger variation than those with mild disorder. A set of utterance-level posterior features are computed to quantify such variation for pattern recognition purpose. An SVM based classifier is used to classify an input utterance into the categories of mild, moderate and severe disorder. The two-class classification accuracy for mild and severe disorders is 90.3%, and significant confusion between mild and moderate disorders is observed. For some of the subjects with severe voice disorder, the classification results are highly inconsistent among individual utterances. Furthermore, short utterances tend to have more classification errors.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Emre Yılmaz|AUTHOR Emre Yılmaz]], [[Mario Ganzeboom|AUTHOR Mario Ganzeboom]], [[Catia Cucchiarini|AUTHOR Catia Cucchiarini]], [[Helmer Strik|AUTHOR Helmer Strik]]
</p><p class="cpabstractcardaffiliationlist">Radboud Universiteit Nijmegen, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2685–2689
</span></p></div>
<div class="cpabstractcardabstract"><p>Incorporating automatic speech recognition (ASR) in individualized speech training applications is becoming more viable thanks to the improved generalization capabilities of neural network-based acoustic models. The main problem in developing applications for dysarthric speech is the relative in-domain data scarcity. Collecting representative amounts of dysarthric speech data is difficult due to rigorous ethical and medical permission requirements, problems in accessing patients who are generally vulnerable and often subject to altering health conditions and, last but not least, the high variability in speech resulting from different pathological conditions. Developing such applications is even more challenging for languages which in general have fewer resources, fewer speakers and, consequently, also fewer patients than English, as in the case of a mid-sized language like Dutch. In this paper, we investigate a multi-stage deep neural network (DNN) training scheme aimed at obtaining better modeling of dysarthric speech by using only a small amount of in-domain training data. The results show that the system employing the proposed training scheme considerably improves the recognition of Dutch dysarthric speech compared to a baseline system with single-stage training only on a large amount of normal speech or a small amount of in-domain data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Daniel Smith|AUTHOR Daniel Smith]]^^1^^, [[Alex Sneddon|AUTHOR Alex Sneddon]]^^2^^, [[Lauren Ward|AUTHOR Lauren Ward]]^^3^^, [[Andreas Duenser|AUTHOR Andreas Duenser]]^^1^^, [[Jill Freyne|AUTHOR Jill Freyne]]^^1^^, [[David Silvera-Tawil|AUTHOR David Silvera-Tawil]]^^1^^, [[Angela Morgan|AUTHOR Angela Morgan]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CSIRO, Australia; ^^2^^University of Sydney, Australia; ^^3^^University of Salford, UK; ^^4^^MCRI, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2690–2694
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes the continued development of a system to provide early assessment of speech development issues in children and better triaging to professional services. Whilst corpora of children’s speech are increasingly available, recognition of disordered children’s speech is still a data-scarce task. Transfer learning methods have been shown to be effective at leveraging out-of-domain data to improve ASR performance in similar data-scarce applications. This paper combines transfer learning with previously developed methods for constrained decoding based on expert speech pathology knowledge and knowledge of the target text. Results of this study show that transfer learning with out-of-domain adult speech can improve phoneme recognition for disordered children’s speech. Specifically, a Deep Neural Network (DNN) trained on adult speech and fine-tuned on a corpus of disordered children’s speech reduced the phoneme error rate (PER) of a DNN trained on a children’s corpus from 16.3% to 14.2%. Furthermore, this fine-tuned DNN also improved the performance of a Hierarchical Neural Network based acoustic model previously used by the system with a PER of 19.3%. We close with a discussion of our planned future developments of the system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Neethu Mariam Joy|AUTHOR Neethu Mariam Joy]], [[S. Umesh|AUTHOR S. Umesh]], [[Basil Abraham|AUTHOR Basil Abraham]]
</p><p class="cpabstractcardaffiliationlist">IIT Madras, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2695–2699
</span></p></div>
<div class="cpabstractcardabstract"><p>Assistive technologies based on speech have been shown to improve the quality of life of people affected with dysarthria, a motor speech disorder. Multiple ways to improve Gaussian mixture model-hidden Markov model (GMM-HMM) and deep neural network (DNN) based automatic speech recognition (ASR) systems for TORGO database for dysarthric speech are explored in this paper. Past attempts in developing ASR systems for TORGO database were limited to training just monophone models and doing speaker adaptation over them. Although a recent work attempted training triphone and neural network models, parameters like the number of context dependent states, dimensionality of the principal component features etc. were not properly tuned. This paper develops speaker-specific ASR models for each dysarthric speaker in TORGO database by tuning parameters of GMM-HMM model, number of layers and hidden nodes in DNN. Employing dropout scheme and sequence discriminative training in DNN also gave significant gains. Speaker adapted features like feature-space maximum likelihood linear regression (FMLLR) are used to pass the speaker information to DNNs. To the best of our knowledge, this paper presents the best recognition accuracies for TORGO database till date.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Olympia Simantiraki|AUTHOR Olympia Simantiraki]]^^1^^, [[Paulos Charonyktakis|AUTHOR Paulos Charonyktakis]]^^2^^, [[Anastasia Pampouchidou|AUTHOR Anastasia Pampouchidou]]^^3^^, [[Manolis Tsiknakis|AUTHOR Manolis Tsiknakis]]^^4^^, [[Martin Cooke|AUTHOR Martin Cooke]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universidad del País Vasco, Spain; ^^2^^Gnosis Data Analysis, Greece; ^^3^^Le2i, France; ^^4^^TEI Crete, Greece</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2700–2704
</span></p></div>
<div class="cpabstractcardabstract"><p>Depression is one of the most prominent mental disorders, with an increasing rate that makes it the fourth cause of disability worldwide. The field of automated depression assessment has emerged to aid clinicians in the form of a decision support system. Such a system could assist as a pre-screening tool, or even for monitoring high risk populations. Related work most commonly involves multimodal approaches, typically combining audio and visual signals to identify depression presence and/or severity. The current study explores categorical assessment of depression using audio features alone. Specifically, since depression-related vocal characteristics impact the glottal source signal, we examine Phase Distortion Deviation which has previously been applied to the recognition of voice qualities such as hoarseness, breathiness and creakiness, some of which are thought to be features of depressed speech. The proposed method uses as features DCT-coefficients of the Phase Distortion Deviation for each frequency band. An automated machine learning tool, Just Add Data, is used to classify speech samples. The method is evaluated on a benchmark dataset (AVEC2014), in two conditions: read-speech and spontaneous-speech. Our findings indicate that Phase Distortion Deviation is a promising audio-only feature for automated detection and assessment of depressed speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Roozbeh Sadeghian|AUTHOR Roozbeh Sadeghian]]^^1^^, [[J. David Schaffer|AUTHOR J. David Schaffer]]^^2^^, [[Stephen A. Zahorian|AUTHOR Stephen A. Zahorian]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Harrisburg University of Science & Technology, USA; ^^2^^Binghamton University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2705–2709
</span></p></div>
<div class="cpabstractcardabstract"><p>The clinical diagnosis of Alzheimer’s disease and other dementias is very challenging, especially in the early stages. Our hypothesis is that any disease that affects particular brain regions involved in speech production and processing will also leave detectable finger prints in the speech. Computerized analysis of speech signals and computational linguistics have progressed to the point where an automatic speech analysis system is a promising approach for a low-cost non-invasive diagnostic tool for early detection of Alzheimer’s disease.
We present empirical evidence that strong discrimination between subjects with a diagnosis of probable Alzheimer’s versus matched normal controls can be achieved with a combination of acoustic features from speech, linguistic features extracted from an automatically determined transcription of the speech including punctuation, and results of a mini mental state exam (MMSE). We also show that discrimination is nearly as strong even if the MMSE is not used, which implies that a fully automated system is feasible. Since commercial automatic speech recognition (ASR) tools were unable to provide transcripts for about half of our speech samples, a customized ASR system was developed.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Dimitrios Dimitriadis|AUTHOR Dimitrios Dimitriadis]]^^1^^, [[Petr Fousek|AUTHOR Petr Fousek]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IBM, USA; ^^2^^IBM, Czech Republic</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2739–2743
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper we describe the process of converting a research prototype system for Speaker Diarization into a fully deployed product running in real time and with low latency. The deployment is a part of the IBM Cloud Speech-to-Text (STT) Service. First, the prototype system is described and the requirements for the on-line, deployable system are introduced. Then we describe the technical approaches we took to satisfy these requirements and discuss some of the challenges we have faced. In particular, we present novel ideas for speeding up the system by using Automatic Speech Recognition (ASR) transcripts as an input to diarization, we introduce a concept of active window to keep the computational complexity linear, we improve the speaker model using a new speaker-clustering algorithm, we automatically keep track of the number of active speakers and we enable the users to set an operating point on a continuous scale between low latency and optimal accuracy. The deployed system has been tuned on real-life data reaching average Speaker Error Rates around 3% and improving over the prototype system by about 10% relative.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ernest Pusateri|AUTHOR Ernest Pusateri]]^^1^^, [[Bharat Ram Ambati|AUTHOR Bharat Ram Ambati]]^^1^^, [[Elizabeth Brooks|AUTHOR Elizabeth Brooks]]^^1^^, [[Ondrej Platek|AUTHOR Ondrej Platek]]^^2^^, [[Donald McAllaster|AUTHOR Donald McAllaster]]^^1^^, [[Venki Nagesha|AUTHOR Venki Nagesha]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Apple, USA; ^^2^^Charles University, Czech Republic</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2784–2788
</span></p></div>
<div class="cpabstractcardabstract"><p>For an automatic speech recognition system to produce sensibly formatted, readable output, the spoken-form token sequence produced by the core speech recognizer must be converted to a written-form string. This process is known as inverse text normalization (ITN). Here we present a mostly data-driven ITN system that leverages a set of simple rules and a few hand-crafted grammars to cast ITN as a labeling problem. To this labeling problem, we apply a compact bi-directional LSTM. We show that the approach performs well using practical amounts of training data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wenda Chen|AUTHOR Wenda Chen]]^^1^^, [[Mark Hasegawa-Johnson|AUTHOR Mark Hasegawa-Johnson]]^^1^^, [[Nancy F. Chen|AUTHOR Nancy F. Chen]]^^2^^, [[Boon Pang Lim|AUTHOR Boon Pang Lim]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Illinois at Urbana-Champaign, USA; ^^2^^A*STAR, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2789–2793
</span></p></div>
<div class="cpabstractcardabstract"><p>It is extremely challenging to create training labels for building acoustic models of zero-resourced languages, in which conventional resources required for model training — lexicons, transcribed audio, or in extreme cases even orthographic system or a viable phone set design for the language — are unavailable. Here, language mismatched transcripts, in which audio is transcribed in the orthographic system of a completely different language by possibly non-speakers of the target language, may play a vital role. Such mismatched transcripts have recently been successfully obtained through crowdsourcing and shown to be beneficial to ASR performance. This paper further studies this problem of using mismatched crowdsourced transcripts in a tonal language for which we have no standard orthography, and in which we may not even know the phoneme inventory. It proposes methods to project the multilingual mismatched transcriptions of a tonal language to the target phone segments. The results tested on Cantonese and Singapore Hokkien have shown that the reconstructed phone sequences’ accuracies have an absolute increment of more than 3% from those of previously proposed monolingual probabilistic transcription methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[William Gale|AUTHOR William Gale]]^^1^^, [[Sarangarajan Parthasarathy|AUTHOR Sarangarajan Parthasarathy]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Adelaide, Australia; ^^2^^Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2794–2798
</span></p></div>
<div class="cpabstractcardabstract"><p>We explore character-level neural network models for inferring punctuation from text-only input. Punctuation inference is treated as a sequence tagging problem where the input is a sequence of un-punctuated characters, and the output is a corresponding sequence of punctuation tags. We experiment with six architectures, all of which use a long short-term memory (LSTM) network for sequence modeling. They differ in the way the context and lookahead for a given character is derived: from simple character embedding and delayed output to enable lookahead, to complex convolutional neural networks (CNN) to capture context. We demonstrate that the accuracy of the proposed character-level models is competitive with the accuracy of a state-of-the-art word-level Conditional Random Field (CRF) baseline with carefully crafted features.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lakshmish Kaushik|AUTHOR Lakshmish Kaushik]], [[Abhijeet Sangwan|AUTHOR Abhijeet Sangwan]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]]
</p><p class="cpabstractcardaffiliationlist">University of Texas at Dallas, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2799–2803
</span></p></div>
<div class="cpabstractcardabstract"><p>NASA’s Apollo program is a great achievement of mankind in the 20th century. Previously we had introduced UTD-CRSS Apollo data digitization initiative where we proposed to digitize Apollo mission speech data (~100,000 hours) and develop Spoken Language Technology based algorithms to analyze and understand various aspects of conversational speech [1]. A new 30 track analog audio decoder is designed to decode 30 track Apollo analog tapes and is mounted on to the NASA Soundscriber analog audio decoder (in place of single channel decoder). Using the new decoder all 30 channels of data can be decoded simultaneously thereby reducing the digitization time significantly. We have digitized 19,000 hours of data from Apollo missions (including entire Apollo-11, most of Apollo-13, Apollo-1, and Gemini-8 missions). Each audio track corresponds to a specific personnel/position in NASA mission control room or astronauts in space. Since many of the planned Apollo related spoken language technology approaches need transcripts we have developed an Apollo mission specific custom Deep Neural Networks (DNN) based Automatic Speech Recognition (ASR) system. Apollo specific language models are developed. Most audio channels are degraded due to high channel noise, system noise, attenuated signal bandwidth, transmission noise, cosmic noise, analog tape static noise, noise due to tape aging, etc. In this paper we propose a novel method to improve the transcript quality by using Signal-to-Noise ratio of channels and N-Gram sentence similarity metrics across data channels. The proposed method shows significant improvement in transcript quality of noisy channels. The Word Error Rate (WER) analysis of transcripts across channels shows significant reduction.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shreyas Seshadri|AUTHOR Shreyas Seshadri]]^^1^^, [[Ulpu Remes|AUTHOR Ulpu Remes]]^^2^^, [[Okko Räsänen|AUTHOR Okko Räsänen]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Aalto University, Finland; ^^2^^University of Helsinki, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2744–2748
</span></p></div>
<div class="cpabstractcardabstract"><p>Zero-resource speech processing (ZS) systems aim to learn structural representations of speech without access to labeled data. A starting point for these systems is the extraction of syllable tokens utilizing the rhythmic structure of a speech signal. Several recent ZS systems have therefore focused on clustering such syllable tokens into linguistically meaningful units. These systems have so far used a heuristically set number of clusters, which can, however, be highly dataset dependent and cannot be optimized in actual unsupervised settings. This paper focuses on improving the flexibility of ZS systems using Bayesian non-parametric (BNP) mixture models that are capable of simultaneously learning the cluster models as well as their number based on the properties of the dataset. We also compare different model design choices, namely priors over the weights and the cluster component models, as the impact of these choices is rarely reported in the previous studies. Experiments are conducted using conversational speech from several languages. The models are first evaluated in a separate syllable clustering task and then as a part of a full ZS system in order to examine the potential of BNP methods and illuminate the relative importance of different model design choices.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jorge Proença|AUTHOR Jorge Proença]]^^1^^, [[Carla Lopes|AUTHOR Carla Lopes]]^^1^^, [[Michael Tjalve|AUTHOR Michael Tjalve]]^^2^^, [[Andreas Stolcke|AUTHOR Andreas Stolcke]]^^2^^, [[Sara Candeias|AUTHOR Sara Candeias]]^^3^^, [[Fernando Perdigão|AUTHOR Fernando Perdigão]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Instituto de Telecomunicações, Portugal; ^^2^^Microsoft, USA; ^^3^^Microsoft, Portugal</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2749–2753
</span></p></div>
<div class="cpabstractcardabstract"><p>Reading aloud performance in children is typically assessed by teachers on an individual basis, manually marking reading time and incorrectly read words. A computational tool that assists with recording reading tasks, automatically analyzing them and providing performance metrics could be a significant help. Towards that goal, this work presents an approach to automatically predicting the overall reading aloud ability of primary school children (6–10 years old), based on the reading of sentences and pseudowords. The opinions of primary school teachers were gathered as ground truth of performance, who provided 0–5 scores closely related to the expectations at the end of each grade. To predict these scores automatically, features based on reading speed and number of disfluencies were extracted, after an automatic disfluency detection. Various regression models were trained, with Gaussian process regression giving best results for automatic features. Feature selection from both sentence and pseudoword reading tasks gave the closest predictions, with a correlation of 0.944. Compared to the use of manual annotation with the best correlation being 0.952, automatic annotation was only 0.8% worse. Furthermore, the error rate of predicted scores relative to ground truth was found to be smaller than the deviation of evaluators’ opinion per child.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Su-Youn Yoon|AUTHOR Su-Youn Yoon]], [[Chong Min Lee|AUTHOR Chong Min Lee]], [[Ikkyu Choi|AUTHOR Ikkyu Choi]], [[Xinhao Wang|AUTHOR Xinhao Wang]], [[Matthew Mulholland|AUTHOR Matthew Mulholland]], [[Keelan Evanini|AUTHOR Keelan Evanini]]
</p><p class="cpabstractcardaffiliationlist">Educational Testing Service, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2754–2758
</span></p></div>
<div class="cpabstractcardabstract"><p>In this study, we developed an automated off-topic response detection system as a supplementary module for an automated proficiency scoring system for non-native English speakers’ spontaneous speech. Given a spoken response, the system first generates an automated transcription using an ASR system trained on non-native speech, and then generates a set of features to assess similarity to the question. In contrast to previous studies which required a large set of training responses for each question, the proposed system only requires the question text, thus increasing the practical impact of the system, since new questions can be added to a test dynamically. However, questions are typically short and the traditional approach based on exact word matching does not perform well. In order to address this issue, a set of features based on neural embeddings and a convolutional neural network (CNN) were used. A system based on the combination of all features achieved an accuracy of 87% on a balanced dataset, which was substantially higher than the accuracy of a baseline system using question-based vector space models (49%). Additionally, this system almost reached the accuracy of vector space based model using a large set of responses to test questions (93%).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wei Li|AUTHOR Wei Li]]^^1^^, [[Nancy F. Chen|AUTHOR Nancy F. Chen]]^^2^^, [[Sabato Marco Siniscalchi|AUTHOR Sabato Marco Siniscalchi]]^^1^^, [[Chin-Hui Lee|AUTHOR Chin-Hui Lee]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Georgia Institute of Technology, USA; ^^2^^A*STAR, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2759–2763
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we utilize manner and place of articulation features and deep neural network models (DNNs) with long short-term memory (LSTM) to improve the detection performance of phonetic mispronunciations produced by second language learners. First, we show that speech attribute scores are complementary to conventional phone scores, so they can be concatenated as features to improve a baseline system based only on phone information. Next, pronunciation representation, usually calculated by frame-level averaging in a DNN, is now learned by LSTM, which directly uses sequential context information to embed a sequence of pronunciation scores into a pronunciation vector to improve the performance of subsequent mispronunciation detectors. Finally, when both proposed techniques are incorporated into the baseline phone-based GOP (goodness of pronunciation) classifier system trained on the same data, the integrated system reduces the false acceptance rate (FAR) and false rejection rate (FRR) by 37.90% and 38.44% (relative), respectively, from the baseline system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shoko Tsujimura|AUTHOR Shoko Tsujimura]]^^1^^, [[Kazumasa Yamamoto|AUTHOR Kazumasa Yamamoto]]^^2^^, [[Seiichi Nakagawa|AUTHOR Seiichi Nakagawa]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Toyohashi University of Technology, Japan; ^^2^^Chubu University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2764–2768
</span></p></div>
<div class="cpabstractcardabstract"><p>Because of the spread of the Internet in recent years, e-learning, which is a form of learning through the Internet, has been used in school education. Many lecture videos delivered at The Open University of Japan show lecturers and lecture slides alternately. In such video style, it is hard to understand where on the slide the lecturer is explaining. In this paper, we examined methods to automatically estimate spots where the lecturer explains on the slide using lecture speech and slide data. This technology is expected to help learners to study the lectures. For itemized text slides, using DTW with word embedding based distance, we obtained higher estimation accuracy than a previous work. For slides containing figures, we estimated explanation spots using image classification results and text in the charts. In addition, we modified the lecture browsing system to indicate estimation results on slides, and investigated the usefulness of indicating explanation spots by subjective evaluation with the system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Myungjong Kim|AUTHOR Myungjong Kim]]^^1^^, [[Beiming Cao|AUTHOR Beiming Cao]]^^1^^, [[Ted Mau|AUTHOR Ted Mau]]^^2^^, [[Jun Wang|AUTHOR Jun Wang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Texas at Dallas, USA; ^^2^^UT Southwestern, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2769–2773
</span></p></div>
<div class="cpabstractcardabstract"><p>Silent speech recognition (SSR) converts non-audio information such as articulatory (tongue and lip) movements to text. Articulatory movements generally have less information than acoustic features for speech recognition, and therefore, the performance of SSR may be limited. Multiview representation learning, which can learn better representations by analyzing multiple information sources simultaneously, has been recently successfully used in speech processing and acoustic speech recognition. However, it has rarely been used in SSR. In this paper, we investigate SSR based on multiview representation learning via canonical correlation analysis (CCA). When both acoustic and articulatory data are available during training, it is possible to effectively learn a representation of articulatory movements from the multiview data with CCA. To further represent the complex structure of the multiview data, we apply deep CCA, where the functional form of the feature mapping is a deep neural network. This approach was evaluated in a speaker-independent SSR task using a data set collected from seven English speakers using an electromagnetic articulograph (EMA). Experimental results showed the effectiveness of the multiview representation learning via deep CCA over the CCA-based multiview approach as well as baseline articulatory movement data on Gaussian mixture model and deep neural network-based SSR systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[K.M. Knill|AUTHOR K.M. Knill]], [[Mark J.F. Gales|AUTHOR Mark J.F. Gales]], [[K. Kyriakopoulos|AUTHOR K. Kyriakopoulos]], [[A. Ragni|AUTHOR A. Ragni]], [[Y. Wang|AUTHOR Y. Wang]]
</p><p class="cpabstractcardaffiliationlist">University of Cambridge, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2774–2778
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic systems for practice and exams are essential to support the growing worldwide demand for learning English as an additional language. Assessment of spontaneous spoken English is, however, currently limited in scope due to the difficulty of achieving sufficient automatic speech recognition (ASR) accuracy. “Off-the-shelf” English ASR systems cannot model the exceptionally wide variety of accents, pronunciations and recording conditions found in non-native learner data. Limited training data for different first languages (L1s), across all proficiency levels, often with (at most) crowd-sourced transcriptions, limits the performance of ASR systems trained on non-native English learner speech. This paper investigates whether the effect of one source of error in the system, lexical modelling, can be mitigated by using graphemic lexicons in place of phonetic lexicons based on native speaker pronunciations. Graphemic-based English ASR is typically worse than phonetic-based due to the irregularity of English spelling-to-pronunciation but here lower word error rates are consistently observed with the graphemic ASR. The effect of using graphemes on automatic assessment is assessed on different grader feature sets: audio and fluency derived features, including some phonetic level features; and phone/grapheme distance features which capture a measure of pronunciation ability.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jiangyan Yi|AUTHOR Jiangyan Yi]], [[Jianhua Tao|AUTHOR Jianhua Tao]], [[Zhengqi Wen|AUTHOR Zhengqi Wen]], [[Ya Li|AUTHOR Ya Li]]
</p><p class="cpabstractcardaffiliationlist">Chinese Academy of Sciences, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2779–2783
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes an approach to distill knowledge from an ensemble of models to a single deep neural network (DNN) student model for punctuation prediction. This approach makes the DNN student model mimic the behavior of the ensemble. The ensemble consists of three single models. Kullback-Leibler (KL) divergence is used to minimize the difference between the output distribution of the DNN student model and the behavior of the ensemble. Experimental results on English IWSLT2011 dataset show that the ensemble outperforms the previous state-of-the-art model by up to 4.0% absolute in overall F₁-score. The DNN student model also achieves up to 13.4% absolute overall F₁-score improvement over the conventionally-trained baseline models.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mitchell McLaren|AUTHOR Mitchell McLaren]]^^1^^, [[Luciana Ferrer|AUTHOR Luciana Ferrer]]^^2^^, [[Diego Castan|AUTHOR Diego Castan]]^^1^^, [[Aaron Lawson|AUTHOR Aaron Lawson]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^SRI International, USA; ^^2^^Universidad de Buenos Aires, Argentina</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2804–2808
</span></p></div>
<div class="cpabstractcardabstract"><p>To date, automatic spoken language detection research has largely been based on a closed-set paradigm, in which the languages to be detected are known prior to system application. In actual practice, such systems may face previously unseen languages (out-of-set (OOS) languages) which should be rejected, a common problem that has received limited attention from the research community. In this paper, we focus on situations in which either (1) the system-modeled languages are not observed during use or (2) the test data contains OOS languages that are unseen during modeling or calibration. In these situations, the common multi-class objective function for calibration of language-detection scores is problematic. We describe how the assumptions of multi-class calibration are not always fulfilled in a practical sense and explore applying global and language-dependent binary objective functions to relax system constraints. We contrast the benefits and sensitivities of the calibration approaches on practical scenarios by presenting results using both LRE09 data and 14 languages from the BABEL dataset. We show that the global binary approach is less sensitive to the characteristics of the training data and that OOS modeling with individual detectors is the best option when OOS test languages are not known to the system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yosef A. Solewicz|AUTHOR Yosef A. Solewicz]]^^1^^, [[Michael Jessen|AUTHOR Michael Jessen]]^^2^^, [[David van der Vloed|AUTHOR David van der Vloed]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^National Police, Israel; ^^2^^Bundeskriminalamt, Germany; ^^3^^Netherlands Forensic Institute, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2849–2853
</span></p></div>
<div class="cpabstractcardabstract"><p>A new method named Null-Hypothesis LLR (H₀LLR) is proposed for forensic automatic speaker recognition. The method takes into account the fact that forensically realistic data are difficult to collect and that inter-individual variation is generally better represented than intra-individual variation. According to the proposal, intra-individual variation is modeled as a projection from case-customized inter-individual variation. Calibrated log Likelihood Ratios (LLR) that are calculated on the basis of the H₀LLR method were tested on two corpora of forensically-founded telephone interception test sets, German-based GFS 2.0 and Dutch-based NFI-FRITS. Five automatic speaker recognition systems were tested based on the scores or the LLRs provided by these systems which form the input to H₀LLR. Speaker-discrimination and calibration performance of H₀LLR is comparable to the performance indices of the system-internal LLR calculation methods. This shows that external data and strategies that work with data outside the forensic domain and without case customization are not necessary. It is also shown that H₀LLR leads to a reduction in the diversity of LLR output patterns of different automatic systems. This is important for the credibility of the Likelihood Ratio framework in forensics, and its application in forensic automatic speaker recognition in particular.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gang Liu|AUTHOR Gang Liu]]^^1^^, [[Qi Qian|AUTHOR Qi Qian]]^^1^^, [[Zhibin Wang|AUTHOR Zhibin Wang]]^^1^^, [[Qingen Zhao|AUTHOR Qingen Zhao]]^^1^^, [[Tianzhou Wang|AUTHOR Tianzhou Wang]]^^1^^, [[Hao Li|AUTHOR Hao Li]]^^1^^, [[Jian Xue|AUTHOR Jian Xue]]^^1^^, [[Shenghuo Zhu|AUTHOR Shenghuo Zhu]]^^1^^, [[Rong Jin|AUTHOR Rong Jin]]^^1^^, [[Tuo Zhao|AUTHOR Tuo Zhao]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Alibaba Group, USA; ^^2^^University of Missouri, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2854–2858
</span></p></div>
<div class="cpabstractcardabstract"><p>The last two decades have witnessed significant progress in speaker recognition, as evidenced by the improving performance in the speaker recognition evaluations (SRE) hosted by NIST. Despite the progress, only a little research has focused on speaker recognition with short duration and language mismatch conditions, which often lead to poor recognition performance. In NIST SRE2016, these concerns were first systematically investigated by the speaker recognition community. In this study, we address these challenges from the viewpoint of feature extraction and modeling. In particular, we improve the robustness of features by combining GMM and DNN based iVector extraction approaches, and improve the reliability of the back-end model by exploiting symmetric SVM that can effectively leverage the unlabeled data. Finally, we introduce distance metric learning to improve the generalization capacity of the development data that is usually of limited size. Then a fusion strategy is adopted to collectively boost the performance. The effectiveness of the proposed scheme for speaker recognition is demonstrated on SRE2016 evaluation data: compared with the DNN-iVector PLDA baseline system, our method yields 25.6% relative improvement in terms of min_Cprimary.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nagendra Kumar|AUTHOR Nagendra Kumar]]^^1^^, [[Rohan Kumar Das|AUTHOR Rohan Kumar Das]]^^1^^, [[Sarfaraz Jelil|AUTHOR Sarfaraz Jelil]]^^1^^, [[Dhanush B.K.|AUTHOR Dhanush B.K.]]^^2^^, [[H. Kashyap|AUTHOR H. Kashyap]]^^2^^, [[K. Sri Rama Murty|AUTHOR K. Sri Rama Murty]]^^3^^, [[Sriram Ganapathy|AUTHOR Sriram Ganapathy]]^^2^^, [[Rohit Sinha|AUTHOR Rohit Sinha]]^^1^^, [[S.R. Mahadeva Prasanna|AUTHOR S.R. Mahadeva Prasanna]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IIT Guwahati, India; ^^2^^Indian Institute of Science, India; ^^3^^IIT Hyderabad, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2859–2863
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes the speaker verification (SV) system submitted to the NIST 2016 speaker recognition evaluation (SRE) challenge by Indian Institute of Technology Guwahati (IITG) under the fixed training condition task. Various SV systems are developed following the idea-level collaboration with two other Indian institutions. Unlike the previous SREs, this time the focus was on developing an SV system using non-target language speech data and a small amount of unlabeled data from the target language/dialects. For addressing these novel challenges, we tried exploring the fusion of systems created using different features, data conditioning, and classifiers. On NIST 2016 SRE evaluation data, the presented fused system resulted in an actual detection cost function (actDCF) and equal error rate (EER) of 0.81 and 12.91%, respectively. Post-evaluation, we explored a recently proposed pairwise support vector machine classifier and applied adaptive S-norm to the decision scores before fusion. With these changes, the final system achieves an actDCF and EER of 0.67 and 11.63%, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Abhinav Misra|AUTHOR Abhinav Misra]], [[Shivesh Ranjan|AUTHOR Shivesh Ranjan]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]]
</p><p class="cpabstractcardaffiliationlist">University of Texas at Dallas, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2864–2868
</span></p></div>
<div class="cpabstractcardabstract"><p>Channel compensation is an integral part for any state-of-the-art speaker recognition system. Typically, Linear Discriminant Analysis (LDA) is used to suppress directions containing channel information. LDA assumes a unimodal Gaussian distribution of the speaker samples to maximize the ratio of the between-speaker variance to within-speaker variance. However, when speaker samples have multi-modal non-Gaussian distributions due to channel or noise distortions, LDA fails to provide optimal performance. In this study, we propose Locally Weighted Linear Discriminant Analysis (LWLDA). LWLDA computes the within-speaker scatter in a pairwise manner and then scales it by an affinity matrix so as to preserve the within-class local structure. This is in contrast to another recently proposed non-parametric discriminant analysis method called NDA. We show that LWLDA not only performs better than NDA but also is computationally much less expensive. Experiments are performed using the DARPA Robust Automatic Transcription of Speech (RATS) corpus. Results indicate that LWLDA consistently outperforms both LDA and NDA on all trial conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Suwon Shon|AUTHOR Suwon Shon]], [[Seongkyu Mun|AUTHOR Seongkyu Mun]], [[Hanseok Ko|AUTHOR Hanseok Ko]]
</p><p class="cpabstractcardaffiliationlist">Korea University, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2869–2873
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently in speaker recognition, performance degradation due to the channel domain mismatched condition has been actively addressed. However, the mismatches arising from language are yet to be sufficiently addressed. This paper proposes an approach which employs recursive whitening transformation to mitigate the language mismatched condition. The proposed method is based on the multiple whitening transformation, which is intended to remove un-whitened residual components in the dataset associated with i-vector length normalization. The experiments were conducted on the Speaker Recognition Evaluation 2016 trials of which the task is non-English speaker recognition using a development dataset consisting of both a large scale out-of-domain (English) dataset and an extremely low-quantity in-domain (non-English) dataset. For performance comparison, we develop a state-of-the-art system using deep neural network and bottleneck feature, which is based on a phonetically aware model. From the experimental results, along with other prior studies, the effectiveness of the proposed method on the language mismatched condition is validated.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sarith Fernando|AUTHOR Sarith Fernando]], [[Vidhyasaharan Sethu|AUTHOR Vidhyasaharan Sethu]], [[Eliathamby Ambikairajah|AUTHOR Eliathamby Ambikairajah]], [[Julien Epps|AUTHOR Julien Epps]]
</p><p class="cpabstractcardaffiliationlist">University of New South Wales, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2809–2813
</span></p></div>
<div class="cpabstractcardabstract"><p>Language identification (LID) systems typically employ i-vectors as fixed length representations of utterances. However, it may not be possible to reliably estimate i-vectors from short utterances, which in turn could lead to reduced language identification accuracy. Recently, Long Short Term Memory networks (LSTMs) have been shown to better model short utterances in the context of language identification. This paper explores the use of bidirectional LSTMs for language identification with the aim of modelling temporal dependencies between past and future frame based features in short utterances. Specifically, an end-to-end system for short duration language identification employing bidirectional LSTM models of utterances is proposed. Evaluations on both NIST 2007 and 2015 LRE show state-of-the-art performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Peng Shen|AUTHOR Peng Shen]], [[Xugang Lu|AUTHOR Xugang Lu]], [[Sheng Li|AUTHOR Sheng Li]], [[Hisashi Kawai|AUTHOR Hisashi Kawai]]
</p><p class="cpabstractcardaffiliationlist">NICT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2814–2818
</span></p></div>
<div class="cpabstractcardabstract"><p>The i-vector technique using deep neural network has been successfully applied in spoken language identification systems. Neural network modeling showed its effectiveness as both discriminant feature transformation and classification in many tasks, in particular with a large training data set. However, on a small data set, neural networks suffer from the overfitting problem which degrades the performance. Many strategies have been investigated and used to improve the regularization for deep neural networks, for example, weight decay, dropout, and data augmentation. In this paper, we study and use conditional generative adversarial nets as a classifier for the spoken language identification task. Unlike the previous works on GAN for image generation, our purpose is to focus on improving regularization of the neural network by jointly optimizing the “Real/Fake” objective function and the categorical objective function. Compared with dropout and data augmentation methods, the proposed method obtained 29.7% and 31.8% relative improvement on NIST 2015 i-vector challenge data set for spoken language identification.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Antonio Miguel|AUTHOR Antonio Miguel]], [[Jorge Llombart|AUTHOR Jorge Llombart]], [[Alfonso Ortega|AUTHOR Alfonso Ortega]], [[Eduardo Lleida|AUTHOR Eduardo Lleida]]
</p><p class="cpabstractcardaffiliationlist">Universidad de Zaragoza, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2819–2823
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper we propose a method to model speaker and session variability that is able to generate likelihood ratios using neural networks in an end-to-end phrase dependent speaker verification system. As in Joint Factor Analysis, the model uses tied hidden variables to model speaker and session variability and a MAP adaptation of some of the parameters of the model. In the training procedure our method jointly estimates the network parameters and the values of the speaker and channel hidden variables. This is done in a two-step backpropagation algorithm, first the network weights and factor loading matrices are updated and then the hidden variables, whose gradients are calculated by aggregating the corresponding speaker or session frames, since these hidden variables are tied. The last layer of the network is defined as a linear regression probabilistic model whose inputs are the previous layer outputs. This choice has the advantage that it produces likelihoods and additionally it can be adapted during the enrolment using MAP without the need of a gradient optimization. The decisions are made based on the ratio of the output likelihoods of two neural network models, speaker adapted and universal background model. The method was evaluated on the RSR2015 database.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sungrack Yun|AUTHOR Sungrack Yun]], [[Hye Jin Jang|AUTHOR Hye Jin Jang]], [[Taesu Kim|AUTHOR Taesu Kim]]
</p><p class="cpabstractcardaffiliationlist">Qualcomm, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2824–2828
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a speaker clustering framework by iteratively performing two stages: a discriminative feature space is obtained given a cluster label set, and the cluster label set is updated using a clustering algorithm given the feature space. In the iterations of the two stages, the cluster labels may be different from the true labels, and thus the obtained feature space based on the labels may be inaccurately discriminated. However, by iteratively performing the above two stages, more accurate cluster labels and a more discriminative feature space can be obtained, and finally they converge. In this research, the linear discriminant analysis is used for discriminating the i-vector feature space, and the variational Bayesian expectation-maximization on Gaussian mixture model is used for clustering the i-vectors. Our iterative clustering framework was evaluated using the database of keyword utterances and compared with the recently-published approaches. In all experiments, the results show that our framework outperforms the other approaches and converges in a few iterations.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ignacio Viñals|AUTHOR Ignacio Viñals]]^^1^^, [[Alfonso Ortega|AUTHOR Alfonso Ortega]]^^1^^, [[Jesús Villalba|AUTHOR Jesús Villalba]]^^2^^, [[Antonio Miguel|AUTHOR Antonio Miguel]]^^1^^, [[Eduardo Lleida|AUTHOR Eduardo Lleida]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universidad de Zaragoza, Spain; ^^2^^Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2829–2833
</span></p></div>
<div class="cpabstractcardabstract"><p>This work presents a new strategy to perform diarization dealing with high variability data, such as multimedia information in broadcast. This variability is highly noticeable among domains (inter-domain variability among chapters, shows, genres, etc.). Therefore, each domain requires its own specific model to obtain the optimal results. We propose to adapt the PLDA models of our diarization system with in-domain unlabeled data. To do it, we estimate pseudo-speaker labels by unsupervised speaker clustering. This new method has been included in a PLDA-based diarization system and evaluated on the Multi-Genre Broadcast 2015 Challenge data. Given an audio, the system computes short-time i-vectors and clusters them using a variational Bayesian PLDA model with hidden labels. The proposed method improves 25.41% relative w.r.t. the system without PLDA adaptation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Miquel India|AUTHOR Miquel India]], [[José A.R. Fonollosa|AUTHOR José A.R. Fonollosa]], [[Javier Hernando|AUTHOR Javier Hernando]]
</p><p class="cpabstractcardaffiliationlist">Universitat Politècnica de Catalunya, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2834–2838
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a new speaker change detection system based on Long Short-Term Memory (LSTM) neural networks using acoustic data and linguistic content. Language modelling is combined with two different Joint Factor Analysis (JFA) acoustic approaches: i-vectors and speaker factors. Both of them are compared with a baseline algorithm that uses cosine distance to detect speaker turn changes. LSTM neural networks with both linguistic and acoustic features have been able to produce a robust speaker segmentation. The experimental results show that our proposal clearly outperforms the baseline system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Adrien Gresse|AUTHOR Adrien Gresse]], [[Mickael Rouvier|AUTHOR Mickael Rouvier]], [[Richard Dufour|AUTHOR Richard Dufour]], [[Vincent Labatut|AUTHOR Vincent Labatut]], [[Jean-François Bonastre|AUTHOR Jean-François Bonastre]]
</p><p class="cpabstractcardaffiliationlist">LIA (EA 4128), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2839–2843
</span></p></div>
<div class="cpabstractcardabstract"><p>The aim of this research work is the development of an automatic voice recommendation system for assisted voice casting. In this article, we propose preliminary work on acoustic pairing of original and dubbed voices. The voice segments are taken from a video game released in two different languages. The paired voice segments come from different languages but belong to the same video game character. Our wish is to exploit the relationship between a set of paired segments in order to model the perceptual aspects of a given character depending on the target language. We use a state-of-the-art approach in speaker recognition (i.e. based on the i-vector/PLDA paradigm). We first evaluate pairs of i-vectors using two different acoustic spaces, one for each of the targeted languages. Secondly, we perform a transformation in order to project the source-language i-vector into the target language. The results showed that this latter approach is able to significantly improve the accuracy. Finally, we challenge the system’s ability to model the latent information that holds the video-game character independently of the speaker, the linguistic content and the language.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Moez Ajili|AUTHOR Moez Ajili]]^^1^^, [[Jean-François Bonastre|AUTHOR Jean-François Bonastre]]^^1^^, [[Waad Ben Kheder|AUTHOR Waad Ben Kheder]]^^1^^, [[Solange Rossato|AUTHOR Solange Rossato]]^^2^^, [[Juliette Kahn|AUTHOR Juliette Kahn]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LIA (EA 4128), France; ^^2^^LIG (UMR 5217), France; ^^3^^LNE, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2844–2848
</span></p></div>
<div class="cpabstractcardabstract"><p>It is common to see mobile recordings being presented as a forensic trace in a court. In such cases, a forensic expert is asked to analyze both suspect and criminal’s voice samples in order to determine the strength-of-evidence. This process is known as Forensic Voice Comparison (FVC). The Likelihood ratio (LR) framework is commonly used by the experts and quite often required by the expert’s associations “best practice guides”. Nevertheless, the LR suffers from some practical limitations due both to intrinsic aspects of its estimation process and the information used during the FVC process. These aspects are embedded in a more general one, the lack of knowledge on FVC reliability. The question of reliability remains a major challenge, particularly for FVC systems where numerous variation factors like duration, noise, linguistic content or within-speaker variability are not taken into account. Recently, we proposed an information theory-based criterion able to estimate one of these factors, the homogeneity of information between the two sides of a FVC trial. Thanks to this new criterion, we wish to explore new aspects of homogeneity in this article. We wish to question the impact of homogeneity on reliability separately on target and non-target trials. The study is performed using FABIOLE, a publicly available database dedicated to this kind of study with a large number of recordings per target speaker. Our experiments report large differences of homogeneity impact between FVC genuine and impostor trials. These results show clearly the importance of intra-speaker variability effects in FVC reliability estimation. This study confirms also the interest of homogeneity measure for FVC reliability.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shane Settle|AUTHOR Shane Settle]]^^1^^, [[Keith Levin|AUTHOR Keith Levin]]^^2^^, [[Herman Kamper|AUTHOR Herman Kamper]]^^1^^, [[Karen Livescu|AUTHOR Karen Livescu]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^TTIC, USA; ^^2^^Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2874–2878
</span></p></div>
<div class="cpabstractcardabstract"><p>Query-by-example search often uses dynamic time warping (DTW) for comparing queries and proposed matching segments. Recent work has shown that comparing speech segments by representing them as fixed-dimensional vectors — acoustic word embeddings — and measuring their vector distance (e.g., cosine distance) can discriminate between words more accurately than DTW-based approaches. We consider an approach to query-by-example search that embeds both the query and database segments according to a neural model, followed by nearest-neighbor search to find the matching segments. Earlier work on embedding-based query-by-example, using template-based acoustic word embeddings, achieved competitive performance. We find that our embeddings, based on recurrent neural networks trained to optimize word discrimination, achieve substantial improvements in performance and run-time efficiency over the previous approaches.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Emiru Tsunoo|AUTHOR Emiru Tsunoo]], [[Peter Bell|AUTHOR Peter Bell]], [[Steve Renals|AUTHOR Steve Renals]]
</p><p class="cpabstractcardaffiliationlist">University of Edinburgh, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2919–2923
</span></p></div>
<div class="cpabstractcardabstract"><p>A broadcast news stream consists of a number of stories and each story consists of several sentences. We capture this structure using a hierarchical model based on a word-level Recurrent Neural Network (RNN) sentence modeling layer and a sentence-level bidirectional Long Short-Term Memory (LSTM) topic modeling layer. First, the word-level RNN layer extracts a vector embedding the sentence information from the given transcribed lexical tokens of each sentence. These sentence embedding vectors are fed into a bidirectional LSTM that models the sentence and topic transitions. A topic posterior for each sentence is estimated discriminatively and a Hidden Markov model (HMM) follows to decode the story sequence and identify story boundaries. Experiments on the topic detection and tracking (TDT2) task indicate that the hierarchical RNN topic modeling achieves the best story segmentation performance with a higher F1-measure compared to conventional state-of-the-art methods. We also compare variations of our model to infer the optimal structure for the story segmentation task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Abdessalam Bouchekif|AUTHOR Abdessalam Bouchekif]]^^1^^, [[Delphine Charlet|AUTHOR Delphine Charlet]]^^2^^, [[Géraldine Damnati|AUTHOR Géraldine Damnati]]^^2^^, [[Nathalie Camelin|AUTHOR Nathalie Camelin]]^^1^^, [[Yannick Estève|AUTHOR Yannick Estève]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LIUM (EA 4023), France; ^^2^^Orange Labs, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2924–2928
</span></p></div>
<div class="cpabstractcardabstract"><p>Several evaluation metrics have been proposed for topic segmentation. Most of them rely on the paradigm that segmentation is mainly a task that detects boundaries, and thus are oriented on boundary detection evaluation. Nevertheless, this paradigm is not appropriate to get homogeneous chapters, which is one of the major applications of topic segmentation. For instance on Broadcast News, topic segmentation enables users to watch a chapter independently of the others.
We propose to consider segmentation as a task that detects homogeneous segments, and we propose evaluation metrics oriented on segment retrieval. The proposed metrics are experimented on various TV shows from different channels. Results are analysed and discussed, highlighting their relevance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jeong-Uk Bang|AUTHOR Jeong-Uk Bang]]^^1^^, [[Mu-Yeol Choi|AUTHOR Mu-Yeol Choi]]^^2^^, [[Sang-Hun Kim|AUTHOR Sang-Hun Kim]]^^2^^, [[Oh-Wook Kwon|AUTHOR Oh-Wook Kwon]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Chungbuk National University, Korea; ^^2^^ETRI, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2929–2933
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes an automatic method to refine broadcast data collected every week for efficient acoustic model training. For training acoustic models, we use only audio signals, subtitle texts, and subtitle timestamps accompanied by recorded broadcast programs. However, the subtitle timestamps are often inaccurate due to inherent characteristics of closed captioning. In the proposed method, we remove subtitle texts with low subtitle quality index, concatenate adjacent subtitle texts into a merged subtitle text, and correct the timestamp of the merged subtitle text by adding a margin. Then, a speech recognizer is used to obtain a hypothesis text from the speech segment corresponding to the merged subtitle text. Finally, the refined speech segments to be used for acoustic model training, are generated by selecting the subparts of the merged subtitle text that matches the hypothesis text. It is shown that the acoustic models trained by using refined broadcast data give significantly higher speech recognition accuracy than those trained by using raw broadcast data. Consequently, the proposed method can efficiently refine a large amount of broadcast data with inaccurate timestamps taking about half of the time, compared with the previous approaches.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jan Švec|AUTHOR Jan Švec]]^^1^^, [[Josef V. Psutka|AUTHOR Josef V. Psutka]]^^1^^, [[Luboš Šmídl|AUTHOR Luboš Šmídl]]^^1^^, [[Jan Trmal|AUTHOR Jan Trmal]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of West Bohemia, Czech Republic; ^^2^^Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2934–2938
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we present a novel method for term score estimation. The method is primarily designed for scoring the out-of-vocabulary terms, however it could also estimate scores for in-vocabulary results. The term score is computed as a cosine distance of two pronunciation embeddings. The first one is generated from the grapheme representation of the searched term, while the second one is computed from the recognized phoneme confusion network. The embeddings are generated by specifically trained recurrent neural network built on the idea of Siamese neural networks. The RNN is trained from recognition results on word- and phone-level in an unsupervised fashion without need of any hand-labeled data. The method is evaluated on the MALACH data in two languages, English and Czech. The results are compared with two baseline methods for OOV term detection.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Daisuke Kaneko|AUTHOR Daisuke Kaneko]]^^1^^, [[Ryota Konno|AUTHOR Ryota Konno]]^^1^^, [[Kazunori Kojima|AUTHOR Kazunori Kojima]]^^1^^, [[Kazuyo Tanaka|AUTHOR Kazuyo Tanaka]]^^2^^, [[Shi-wook Lee|AUTHOR Shi-wook Lee]]^^3^^, [[Yoshiaki Itoh|AUTHOR Yoshiaki Itoh]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Iwate Prefectural University, Japan; ^^2^^University of Tsukuba, Japan; ^^3^^AIST, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2879–2883
</span></p></div>
<div class="cpabstractcardabstract"><p>The detection of out-of-vocabulary (OOV) query terms is a crucial problem in spoken term detection (STD), because OOV query terms are likely to occur. To enable search of OOV query terms in STD systems, a query subword sequence is compared with subword sequences generated using an automatic speech recognizer against spoken documents. When comparing two subword sequences, the edit distance is a typical distance between any two subwords. We previously proposed an acoustic distance defined from statistics between states of the hidden Markov model (HMM) and showed its effectiveness in STD [4]. This paper proposes an acoustic distance between subwords and HMM states where the posterior probabilities output by a deep neural network are used to improve the STD accuracy for OOV query terms. Experiments are conducted to evaluate the performance of the proposed method, using the open test collections for the “SpokenDoc” tasks of the NTCIR-9 [13] and NTCIR-10 [14] workshops. The proposed method shows improvements in mean average precision.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuri Khokhlov|AUTHOR Yuri Khokhlov]], [[Natalia Tomashenko|AUTHOR Natalia Tomashenko]], [[Ivan Medennikov|AUTHOR Ivan Medennikov]], [[Aleksei Romanenko|AUTHOR Aleksei Romanenko]]
</p><p class="cpabstractcardaffiliationlist">STC-innovations, Russia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2884–2888
</span></p></div>
<div class="cpabstractcardabstract"><p>This work proposes a novel approach to out-of-vocabulary (OOV) keyword search (KWS) task. The proposed approach is based on using high-level features from an automatic speech recognition (ASR) system, so-called phoneme posterior based (PPB) features, for decoding. These features are obtained by calculating time-dependent phoneme posterior probabilities from word lattices, followed by their smoothing. For the PPB features we developed a novel, very fast, simple and efficient OOV decoder. Experimental results are presented on the Georgian language from the IARPA Babel Program, which was the test language in the OpenKWS 2016 evaluation campaign. The results show that in terms of maximum term weighted value (MTWV) metric and computational speed, for single ASR systems, the proposed approach significantly outperforms the state-of-the-art approach based on using in-vocabulary proxies for OOV keywords in the indexed database. The comparison of the two OOV KWS approaches on the fusion results of the nine different ASR systems demonstrates that the proposed OOV decoder outperforms the proxy-based approach in terms of MTWV metric given the comparable processing speed. Other important advantages of the OOV decoder include extremely low memory consumption and simplicity of its implementation and parameter optimization.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ying-Wen Chen|AUTHOR Ying-Wen Chen]]^^1^^, [[Kuan-Yu Chen|AUTHOR Kuan-Yu Chen]]^^2^^, [[Hsin-Min Wang|AUTHOR Hsin-Min Wang]]^^2^^, [[Berlin Chen|AUTHOR Berlin Chen]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^National Taiwan Normal University, Taiwan; ^^2^^Academia Sinica, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2889–2893
</span></p></div>
<div class="cpabstractcardabstract"><p>Owing to the rapid global access to tremendous amounts of multimedia associated with speech information on the Internet, spoken document retrieval (SDR) has become an emerging application recently. Apart from much effort devoted to developing robust indexing and modeling techniques for spoken documents, a recent line of research targets at enriching and reformulating query representations in an attempt to enhance retrieval effectiveness. In practice, pseudo-relevance feedback is by far the most prevalent paradigm for query reformulation, which assumes that top-ranked feedback documents obtained from the initial round of retrieval are potentially relevant and can be exploited to reformulate the original query. Continuing this line of research, the paper presents a novel modeling framework, which aims at discovering significant words occurring in the feedback documents, to infer an enhanced query language model for SDR. Formally, the proposed framework targets at extracting the essential words representing a common notion of relevance (i.e., the significant words which occur in almost all of the feedback documents), so as to deduce a new query language model that captures these significant words and meanwhile modulates the influence of both highly frequent words and too specific words. Experiments conducted on a benchmark SDR task demonstrate the performance merits of our proposed framework.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hiroto Tasaki|AUTHOR Hiroto Tasaki]], [[Tomoyosi Akiba|AUTHOR Tomoyosi Akiba]]
</p><p class="cpabstractcardaffiliationlist">Toyohashi University of Technology, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2894–2898
</span></p></div>
<div class="cpabstractcardabstract"><p>A speech-driven information retrieval system is expected to be useful for gathering information with greater ease. In a conventional system, users have to decide on the contents of their utterance before speaking, which takes quite a long time when their request is complicated. To overcome that problem, it is required for the retrieval system to handle a spontaneously spoken query directly. In this work, we propose an extension technique of spoken content retrieval (SCR) for effectively using spontaneously spoken queries. Acoustic features of meaningful terms in the retrieval may have prominence compared to other terms. Also, those terms will have linguistic specificity. From this assumption, we predict the contribution of terms included in spontaneously spoken queries using acoustic and linguistic features, and incorporate it in the query likelihood model (QLM) which is a probabilistic retrieval model. We verified the effectiveness of the proposed method through experiments. Our proposed method was successful in improving retrieval performance under various conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bo-Ru Lu|AUTHOR Bo-Ru Lu]], [[Frank Shyu|AUTHOR Frank Shyu]], [[Yun-Nung Chen|AUTHOR Yun-Nung Chen]], [[Hung-Yi Lee|AUTHOR Hung-Yi Lee]], [[Lin-Shan Lee|AUTHOR Lin-Shan Lee]]
</p><p class="cpabstractcardaffiliationlist">National Taiwan University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2899–2903
</span></p></div>
<div class="cpabstractcardabstract"><p>Connectionist temporal classification (CTC) is a powerful approach for sequence-to-sequence learning, and has been popularly used in speech recognition. The central ideas of CTC include adding a label “blank” during training. With this mechanism, CTC eliminates the need of segment alignment, and hence has been applied to various sequence-to-sequence learning problems. In this work, we applied CTC to abstractive summarization for spoken content. The “blank” in this case implies the corresponding input data are less important or noisy; thus it can be ignored. This approach was shown to outperform the existing methods in terms of ROUGE scores over Chinese Giga-word and MATBN corpora. This approach also has the nice property that the ordering of words or characters in the input documents can be better preserved in the generated summaries.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Masatoshi Tsuchiya|AUTHOR Masatoshi Tsuchiya]], [[Ryo Minamiguchi|AUTHOR Ryo Minamiguchi]]
</p><p class="cpabstractcardaffiliationlist">Toyohashi University of Technology, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2904–2908
</span></p></div>
<div class="cpabstractcardabstract"><p>Multimodal alignment between classroom lecture utterances and lecture slide components is one of the crucial problems to realize a multimodal e-Learning application. This paper proposes a new method for the automatic alignment, and formulates the alignment as the integer linear programming (ILP) problem to maximize the score function which consists of three factors: the similarity score between utterances and slide components, the consistency of the explanation order, and the explanation coverage of slide components. The experimental result on the Corpus of Japanese classroom Lecture Contents (CJLC) shows that the automatic alignment information acquired by the proposed method is effective to improve the performance of the automatic extraction of important utterances.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Paula Lopez-Otero|AUTHOR Paula Lopez-Otero]], [[Laura Docio-Fernandez|AUTHOR Laura Docio-Fernandez]], [[Carmen Garcia-Mateo|AUTHOR Carmen Garcia-Mateo]]
</p><p class="cpabstractcardaffiliationlist">Universidade de Vigo, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2909–2913
</span></p></div>
<div class="cpabstractcardabstract"><p>The huge amount of available spoken documents has raised the need for tools to perform automatic searches within large audio databases. These collections usually consist of documents with a great variability regarding speaker, language or recording channel, among others. Reducing this variability would boost the performance of query-by-example search on speech systems, especially in zero-resource systems that use acoustic features for audio representation. Hence, in this work, a technique to compensate the variability caused by speaker gender is proposed. Given a data collection composed of documents spoken by both male and female voices, every time a spoken query has to be searched, an alternative version of the query on its opposite gender is generated using voice conversion. After that, the female version of the query is used to search within documents spoken by females and vice versa. Experimental validation of the proposed strategy shows an improvement of search on speech performance caused by the reduction of gender variability.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Anjishnu Kumar|AUTHOR Anjishnu Kumar]]^^1^^, [[Pavankumar Reddy Muddireddy|AUTHOR Pavankumar Reddy Muddireddy]]^^2^^, [[Markus Dreyer|AUTHOR Markus Dreyer]]^^1^^, [[Björn Hoffmeister|AUTHOR Björn Hoffmeister]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Amazon.com, USA; ^^2^^University of Illinois at Urbana-Champaign, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2914–2918
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a zero-shot learning approach for text classification, predicting which natural language understanding domain can handle a given utterance. Our approach can predict domains at runtime that did not exist at training time. We achieve this extensibility by learning to project utterances and domains into the same embedding space while generating each domain-specific embedding from a set of attributes that characterize the domain. Our model is a neural network trained via ranking loss. We evaluate the performance of this zero-shot approach on a subset of a virtual assistant’s third-party domains and show the effectiveness of the technique on new domains not observed during training. We compare to generative baselines and show that our approach requires less storage and performs better on new domains.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Laura Fernández Gallardo|AUTHOR Laura Fernández Gallardo]]^^1^^, [[Sebastian Möller|AUTHOR Sebastian Möller]]^^1^^, [[John Beerends|AUTHOR John Beerends]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^T-Labs, Germany; ^^2^^TNO, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2939–2943
</span></p></div>
<div class="cpabstractcardabstract"><p>The performance of automatic speech recognition based on coded-decoded speech heavily depends on the quality of the transmitted signals, determined by channel impairments. This paper examines relationships between speech recognition performance and measurements of speech quality and intelligibility over transmission channels. Different to previous studies, the effects of super-wideband transmissions are analyzed and compared to those of wideband and narrowband channels. Furthermore, intelligibility scores, gathered by conducting a listening test based on logatomes, are also considered for the prediction of automatic speech recognition results. The modern instrumental measurement techniques POLQA and POLQA-based intelligibility have been respectively applied to estimate the quality and the intelligibility of transmitted speech. Based on our results, polynomial models are proposed that permit the prediction of speech recognition accuracy from the subjective and instrumental measures, involving a number of channel distortions in the three bandwidths. This approach can save the costs of performing automatic speech recognition experiments and can be seen as a first step towards a useful tool for communication channel designers. </p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Cassia Valentini Botinhao|AUTHOR Cassia Valentini Botinhao]]^^1^^, [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Edinburgh, UK; ^^2^^University of Edinburgh, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2944–2948
</span></p></div>
<div class="cpabstractcardabstract"><p>Intelligibility of speech in noise becomes lower as the listener’s age increases, even when no apparent hearing impairment is present. The losses are, however, different depending on the nature of the noise and the characteristics of the voice. In this paper we investigate the effect that age, noise type and speaking style have on the intelligibility of speech reproduced by car loudspeakers. Using a binaural mannequin we recorded a variety of voices and speaking styles played from the audio system of a car while driving in different conditions. We used this material to create a listening test where participants were asked to transcribe what they could hear and recruited groups of young and older adults to take part in it. We found that intelligibility scores of older participants were lower for the competing speaker and background music conditions. Results also indicate that clear and Lombard speech was more intelligible than plain speech for both age groups. A mixed effect model revealed that the largest effect was the noise condition, followed by sentence type, speaking style, voice, age group and pure tone average.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Katsuhiko Yamamoto|AUTHOR Katsuhiko Yamamoto]]^^1^^, [[Toshio Irino|AUTHOR Toshio Irino]]^^1^^, [[Toshie Matsui|AUTHOR Toshie Matsui]]^^1^^, [[Shoko Araki|AUTHOR Shoko Araki]]^^2^^, [[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]]^^2^^, [[Tomohiro Nakatani|AUTHOR Tomohiro Nakatani]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Wakayama University, Japan; ^^2^^NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2949–2953
</span></p></div>
<div class="cpabstractcardabstract"><p>A new intelligibility prediction measure, called “Gammachirp Envelope Distortion Index (GEDI)” is proposed for the evaluation of speech enhancement algorithms. This model calculates the signal-to-distortion ratio (SDR) in envelope responses SDRenv derived from the gammachirp filterbank outputs of clean and enhanced speech, and is an extension of the speech based envelope power spectrum model (sEPSM) to improve prediction and usability. An evaluation was performed by comparing human subjective results and model predictions for the speech intelligibility of noise-reduced sounds processed by spectral subtraction and a recent Wiener filtering technique. The proposed GEDI predicted the subjective results of the Wiener filtering better than those predicted by the original sEPSM and well-known conventional measures, i.e., STOI, CSII, and HASPI.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yafan Chen|AUTHOR Yafan Chen]], [[Yong Xu|AUTHOR Yong Xu]], [[Jun Yang|AUTHOR Jun Yang]]
</p><p class="cpabstractcardaffiliationlist">Chinese Academy of Sciences, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2954–2957
</span></p></div>
<div class="cpabstractcardabstract"><p>The speech intelligibility of Mandarin Chinese sentences of various spectral regions, regarding band-stop conditions (one or two “holes” in the spectrum), was investigated through subjective listening tests. Results demonstrated significant effects on Mandarin Chinese sentence intelligibilities when a single or a pair of spectral holes was introduced. Meanwhile, it revealed the importance of the first and second formant (F1, F2) frequencies for the comprehension of Mandarin sentences. More importantly, the first formant frequencies played a more primary role than those of the second formants. Sentence intelligibilities declined evidently with the lack of F1 frequencies, but the effect became small when the spectrum holes covered more than 50% of F1 frequencies, and F2 frequencies came to play a major role in the intelligibility of Mandarin sentences.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lauren Ward|AUTHOR Lauren Ward]], [[Ben Shirley|AUTHOR Ben Shirley]], [[Yan Tang|AUTHOR Yan Tang]], [[William J. Davies|AUTHOR William J. Davies]]
</p><p class="cpabstractcardaffiliationlist">University of Salford, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2958–2962
</span></p></div>
<div class="cpabstractcardabstract"><p>In everyday life, speech is often accompanied by a situation-specific acoustic cue; a hungry bark as you ask ‘Has anyone fed the dog?’. This paper investigates the effect such cues have on speech intelligibility in noise and evaluates their interaction with the established effect of situation-specific semantic cues. This work is motivated by the introduction of new object-based broadcast formats, which have the potential to optimise intelligibility by controlling the level of individual broadcast audio elements, at point of service. Results of this study show that situation-specific acoustic cues alone can improve word recognition in multi-talker babble by 69.5%, a similar amount to semantic cues. The combination of both semantic and acoustic cues provide further improvement of 106.0% compared with no cues, and 18.7% compared with semantic cues only. Interestingly, whilst increasing subjective intelligibility of the target word, the presence of acoustic cues degraded the objective intelligibility of the speech-based semantic cues by 47.0% (equivalent to reducing the speech level by 4.5 dB). This paper discusses the interactions between the two types of cues and the implications that these results have for assessing and improving the intelligibility of broadcast speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Asger Heidemann Andersen|AUTHOR Asger Heidemann Andersen]]^^1^^, [[Jan Mark de Haan|AUTHOR Jan Mark de Haan]]^^2^^, [[Zheng-Hua Tan|AUTHOR Zheng-Hua Tan]]^^1^^, [[Jesper Jensen|AUTHOR Jesper Jensen]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Aalborg University, Denmark; ^^2^^Oticon, Denmark</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2963–2967
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech intelligibility prediction methods are popular tools within the speech processing community for objective evaluation of speech intelligibility of e.g. enhanced speech. The Short-Time Objective Intelligibility (STOI) measure has become highly used due to its simplicity and high prediction accuracy. In this paper we investigate the use of Band Importance Functions (BIFs) in the STOI measure, i.e. of unequally weighting the contribution of speech information from each frequency band. We do so by fitting BIFs to several datasets of measured intelligibility, and cross evaluating the prediction performance. Our findings indicate that it is possible to improve prediction performance in specific situations. However, it has not been possible to find BIFs which systematically improve prediction performance beyond the data used for fitting. In other words, we find no evidence that the performance of the STOI measure can be improved considerably by extending it with a non-uniform BIF.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Constantin Spille|AUTHOR Constantin Spille]], [[Bernd T. Meyer|AUTHOR Bernd T. Meyer]]
</p><p class="cpabstractcardaffiliationlist">Carl von Ossietzky Universität Oldenburg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2968–2972
</span></p></div>
<div class="cpabstractcardabstract"><p>In recent years, automatic speech recognition (ASR) systems gradually decreased (and for some tasks closed) the gap between human and automatic speech recognition. However, it is unclear if similar performance implies humans and ASR systems to rely on similar signal cues. In the current study, ASR and HSR are compared using speech material from a matrix sentence test mixed with either a stationary speech-shaped noise (SSN) or amplitude-modulated SSN. Recognition performance of HSR and ASR is measured in term of the speech recognition threshold (SRT), i.e., the signal-to-noise ratio with 50% recognition rate and by comparing psychometric functions. ASR results are obtained with matched-trained DNN-based systems that use FBank features as input and compared to results obtained from eight normal-hearing listeners and two established models of speech intelligibility. For both maskers, HSR and ASR achieve similar SRTs with an average deviation of only 0.4 dB. A relevance propagation algorithm is applied to identify features relevant for ASR. The analysis shows that relevant features coincide either with spectral peaks of the speech signal or with dips of the noise masker, indicating that similar cues are important in HSR and ASR.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kosuke Sugai|AUTHOR Kosuke Sugai]]
</p><p class="cpabstractcardaffiliationlist">Kindai University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2973–2977
</span></p></div>
<div class="cpabstractcardabstract"><p>Japanese is one of the typical languages in which vowel quantity plays a key role. In Japanese, a phonological structure called “mora” is a fundamental rhythmic unit, and theoretically, each mora is supposed to have a similar duration (isochronicity). The rhythm of a native language has great importance for spoken language processing, including second language speaking; therefore, in order to get a clear picture of bottom-up speech processing, it is crucial to discern how morae are mentally represented. Various studies have been conducted to understand the nature of speech processing as a cognitive construct; however, most of this research was conducted with the target stimuli embedded in words or carrier sentences to clarify specifically the relative duration of morae. In this study, two reaction-time experiments were conducted to investigate whether morae are mentally represented and how long the duration is. The isolated vowels /i/, /e/, /a/, /o/, /u/, and syllable /tan/ were chosen as target stimuli, and the first morae were digitally manipulated into 15 durations with 20 ms variations in length, from 150 ms to 330 ms. The results revealed the existence of a durational threshold between one and two morae, ranging around 250 ms.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Markus Jochim|AUTHOR Markus Jochim]], [[Felicitas Kleber|AUTHOR Felicitas Kleber]]
</p><p class="cpabstractcardaffiliationlist">LMU München, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3018–3022
</span></p></div>
<div class="cpabstractcardabstract"><p>The aim of this study was to investigate vowel and consonant quantity in Finnish, a typical quantity language, and to set up a reference corpus for a large-scale project studying the diachronic development of quantity contrasts in German varieties. Although German is not considered a quantity language, both tense and lax vowels and voiced and voiceless stops are differentiated by vowel and closure duration, respectively. The role of these cues, however, has undergone different diachronic changes in various German varieties. To understand the conditions for such prosodic changes, the present study investigates the stability of quantity relations in an undisputed quantity language. To this end, recordings of words differing in vowel and stop length were obtained from seven older and six younger L1 Finnish speakers, both in a normal and a loud voice. We then measured vowel and stop duration and calculated the vowel to vowel-plus-consonant ratio (a measure known to differentiate German VC sequences) as well as the geminate-to-singleton ratio. Results show stability across age groups but variability across speech styles. Moreover, VC ratios were similar for Finnish and Bavarian German speakers. We discuss our findings against the background of a typology of vowel and consonant quantity.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bhanu Teja Nellore|AUTHOR Bhanu Teja Nellore]], [[RaviShankar Prasad|AUTHOR RaviShankar Prasad]], [[Sudarsana Reddy Kadiri|AUTHOR Sudarsana Reddy Kadiri]], [[Suryakanth V. Gangashetty|AUTHOR Suryakanth V. Gangashetty]], [[B. Yegnanarayana|AUTHOR B. Yegnanarayana]]
</p><p class="cpabstractcardaffiliationlist">IIIT Hyderabad, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3023–3027
</span></p></div>
<div class="cpabstractcardabstract"><p>Bursts are produced by closing the oral tract at a place of articulation and suddenly releasing the acoustic energy built-up behind the closure in the tract. The release of energy is an impulse-like behavior, and it is followed by a short duration of frication. The burst release is short and mostly weak in nature (compared to sonorant sounds), thus making it difficult to detect its presence in continuous speech. This paper attempts to identify burst onsets based on parameters derived from single frequency filtering (SFF) analysis of speech signals. The SFF envelope and phase information give good spectral and temporal resolutions of certain features of the signal. Signal reconstructed from the SFF phase information is shown to be useful in locating burst onsets. Entropy and spectral distance parameters from the SFF spectral envelopes are used to refine the burst onset candidate set. The identified burst onset locations are compared with manual annotations in the TIMIT database.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hongwei Ding|AUTHOR Hongwei Ding]]^^1^^, [[Yuanyuan Zhang|AUTHOR Yuanyuan Zhang]]^^1^^, [[Hongchao Liu|AUTHOR Hongchao Liu]]^^2^^, [[Chu-Ren Huang|AUTHOR Chu-Ren Huang]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Shanghai Jiao Tong University, China; ^^2^^Hong Kong Polytechnic University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3028–3032
</span></p></div>
<div class="cpabstractcardabstract"><p>Chinese words written partly or fully in roman letters have gained popularity in Mandarin Chinese in the last few decades and an appendix of such Mandarin Alphabetical Words (MAWs) is included in the authoritative dictionary of Standard Mandarin. However, no transcription of MAWs has been provided because it is not clear whether we should keep the original English pronunciation or transcribe MAWs with Mandarin Pinyin system. This study aims to investigate the phonetic adaptation of several most frequent MAWs extracted from the corpus. We recruited eight students from Shanghai, 18 students from Shandong Province, and one student from the USA. All the subjects were asked to read both 24 Chinese sentences embedding the MAWs and all 26 letters of the English alphabet. The results showed that Letters A O N T were predominantly pronounced in Tone 1; H was often produced with vowel epenthesis after the final consonant; and B was usually produced in Tone 2 by Shanghai speakers and in Tone 4 by Shandong speakers. We conclude that the phonetic adaptation of MAWs is influenced by the dialects of the speakers, tones of other Chinese characters in the MAWs, as well as individual preferences.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Thomas Schatz|AUTHOR Thomas Schatz]]^^1^^, [[Rory Turnbull|AUTHOR Rory Turnbull]]^^1^^, [[Francis Bach|AUTHOR Francis Bach]]^^2^^, [[Emmanuel Dupoux|AUTHOR Emmanuel Dupoux]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LSCP (UMR 8554), France; ^^2^^DI ENS (UMR 8548), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3033–3037
</span></p></div>
<div class="cpabstractcardabstract"><p>Acoustic realizations of a given phonetic segment are typically affected by coarticulation with the preceding and following phonetic context. While coarticulation has been extensively studied using descriptive phonetic measurements, little is known about the functional impact of coarticulation for speech processing. Here, we use DTW-based similarity defined on raw acoustic features and ABX scores to derive a measure of the effect of coarticulation on phonetic discriminability. This measure does not rely on defining segment-specific phonetic cues (formants, duration, etc.) and can be applied systematically and automatically to any segment in large scale corpora. We illustrate our method using stimuli in English and Japanese. We confirm some expected trends, i.e., stronger anticipatory than perseveratory coarticulation and stronger coarticulation for lax/short vowels than for tense/long vowels. We then quantify for the first time the impact of coarticulation across different segment types (like vowels and consonants). We discuss how our metric and its possible extensions can help addressing current challenges in the systematic study of coarticulation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jia Ying|AUTHOR Jia Ying]]^^1^^, [[Christopher Carignan|AUTHOR Christopher Carignan]]^^1^^, [[Jason A. Shaw|AUTHOR Jason A. Shaw]]^^2^^, [[Michael Proctor|AUTHOR Michael Proctor]]^^3^^, [[Donald Derrick|AUTHOR Donald Derrick]]^^4^^, [[Catherine T. Best|AUTHOR Catherine T. Best]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Western Sydney University, Australia; ^^2^^Yale University, USA; ^^3^^University of Canterbury, New Zealand; ^^4^^Macquarie University, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2978–2982
</span></p></div>
<div class="cpabstractcardabstract"><p>This study investigated the dynamics of lateral channel formation of /l/ in Australian-accented English (AusE) using 3D electromagnetic articulography (EMA). Coils were placed on the tongue both mid-sagittally and para-sagittally. We varied the vowel preceding /l/ between /ɪ/ and /æ/, e.g., filbert vs. talbot, and the syllable position of /l/, e.g., /’tæl.bət/ vs. /’tæb.lət/. The articulatory analyses of lateral /l/ show that: (1) the mid-sagittal delay (from the tongue tip gesture to the tongue middle/tongue back gesture) changes across different syllable positions and vowel contexts; (2) the para-sagittal lateralization duration remains the same across syllable positions and vowel contexts; (3) the lateral formation reaches its peak earlier than the mid-sagittal gesture peak; (4) the magnitude of tongue asymmetrical lateralization is greater than the magnitude of tongue curvature in the coronal plane. We discuss these results in light of the temporal dynamics of lateral channel formation. We interpret our results as evidence that the formation of the lateral channel is the primary goal of /l/ production.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nicola Klingler|AUTHOR Nicola Klingler]]^^1^^, [[Sylvia Moosmüller|AUTHOR Sylvia Moosmüller]]^^1^^, [[Hannes Scheutz|AUTHOR Hannes Scheutz]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^ÖAW, Austria; ^^2^^Universität Salzburg, Austria</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2983–2987
</span></p></div>
<div class="cpabstractcardabstract"><p>In 1913, Anton Pfalz described a specific relation of vowel and consonant sequences for East Middle Bavarian dialects, located in the eastern parts of Austria. According to his observations, a long vowel is always followed by a lenis consonant, and a short vowel is always followed by a fortis consonant. Consequently, vowel duration depends on the quality of the following consonant. Phonetic examinations of what came to be known as Pfalz’s Law yielded different results. Specifically, the occurrence of a third category, namely a long vowel followed by a fortis consonant, seems to be firmly embedded in East Middle Bavarian.
Up till now, phonetic examinations concentrated on CVCV sequences. The analysis of monosyllables and of sequences including consonant clusters has been largely neglected so far. In the present contribution, we analyse the impact of initial and final consonant clusters in monosyllables on the assumed relationship of vowel + consonant sequences. Thus, we included 18 speakers from three Bavarian varieties. The results show that in all examined varieties long vowel + fortis consonants occur and that the cluster complexity has no influence on the absolute vowel duration, contradicting Pfalz’s Law.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Amel Issa|AUTHOR Amel Issa]]
</p><p class="cpabstractcardaffiliationlist">University of Leeds, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2988–2992
</span></p></div>
<div class="cpabstractcardabstract"><p>This study examines the acoustic correlates of the singleton and geminate consonants in Tripolitanian Libyan Arabic (TLA). Several measurements were obtained including target segment duration, preceding vowel duration, RMS amplitude for the singleton and geminate consonants, and F1, F2 and F3 for the target consonants. The results confirm that the primary acoustic correlate that distinguishes singletons from geminates in TLA is duration regardless of sound type with the ratio of C to CC being 1 to 2.42. The duration of the preceding vowels is suggestive and may be considered as another cue to the distinction between them. There was no evidence of differences in RMS amplitude between singleton and geminate consonants of any type. F1, F2 and F3 frequencies are found to show similar patterns for singleton and geminate consonants for all sound types, suggesting no gestural effects of gemination in TLA. Preliminary results from the phonetic cues investigated here suggest that the acoustic distinction between singleton and geminate consonants in TLA is dependent mainly on durational correlates.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Erika Brandt|AUTHOR Erika Brandt]], [[Frank Zimmerer|AUTHOR Frank Zimmerer]], [[Bistra Andreeva|AUTHOR Bistra Andreeva]], [[Bernd Möbius|AUTHOR Bernd Möbius]]
</p><p class="cpabstractcardaffiliationlist">Universität des Saarlandes, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2993–2997
</span></p></div>
<div class="cpabstractcardabstract"><p>This study investigated whether German vowels differ significantly from each other in mel-cepstral distortion (MCD) when they stand in different information density (ID) contexts. We hypothesized that vowels in the same ID contexts are more similar to each other than vowels that stand in different ID conditions. Read speech material from PhonDat2 of 16 German natives (m = 10, f = 6) was analyzed. Bi-phone and word language models were calculated based on DeWaC. To account for additional variability in the data, prosodic factors, as well as corpus-specific frequency values were also entered into the statistical models. Results showed that vowels in different ID conditions were significantly different in their MCD values. Unigram word probability and corpus-specific word frequency showed the expected effect on vowel similarity with a hierarchy between non-contrasting and contrasting conditions. However, these did not form a homogeneous group since there were group-internal significant differences. The largest distance can be found between vowels produced at fast speech rate, and between unstressed vowels.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tomáš Bořil|AUTHOR Tomáš Bořil]], [[Pavel Šturm|AUTHOR Pavel Šturm]], [[Radek Skarnitzl|AUTHOR Radek Skarnitzl]], [[Jan Volín|AUTHOR Jan Volín]]
</p><p class="cpabstractcardaffiliationlist">Charles University, Czech Republic</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2998–3002
</span></p></div>
<div class="cpabstractcardabstract"><p>Unit selection systems of speech synthesis offer good overall quality, but this may be countervailed by a sporadic and unpredictable occurrence of audible artifacts, such as discontinuities in F0 and the spectrum. Informal observations suggested that such breaks may have an effect on perceived vowel duration. This study therefore investigates the effect of F0 and formant discontinuities on the perceived duration of vowels in Czech synthetic speech. Ten manipulations of F0, F1 and F2 were performed on target vowels in short synthesized phrases creating abrupt breaks in the contours at the midpoint of the vowels. Listeners decided in a 2AFC task in which phrase the last syllable was longer. The results showed that despite identical duration of the compared stimuli, vowels which were manipulated in the second part towards centralized values (i.e., less peripheral) were systematically considered to be shorter by the listeners than stimuli without such discontinuities, and vice versa. However, the influence seems to be distinct from an overall formant change (without a discontinuity) since a control stimulus in which the manipulation was performed within the entire vowel was not perceived as significantly shorter or longer. No effect of F0 manipulations was observed.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Marija Tabain|AUTHOR Marija Tabain]]^^1^^, [[Richard Beare|AUTHOR Richard Beare]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^La Trobe University, Australia; ^^2^^Monash University, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3003–3007
</span></p></div>
<div class="cpabstractcardabstract"><p>This study presents ultrasound data from six female speakers of the Central Australian language Arrernte. We focus on the apical stop contrast, alveolar /t/ versus retroflex /ʈ/, which may be considered phonemically marginal. We compare these sounds in stressed and unstressed position. Consistent with previous results on this apical contrast, we show that there are minimal differences between the retroflex and the alveolar at stop offset; however, at stop onset, the retroflex has a higher front portion of the tongue, and often a more forward posterior portion of the tongue. This difference between the alveolar and the retroflex is particularly marked in the unstressed prosodic context. This result confirms our previous EPG and EMA results from two of the speakers in the present study, which showed that the most prototypical retroflex consonant occurs in the unstressed prosodic position.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Christer Gobl|AUTHOR Christer Gobl]]
</p><p class="cpabstractcardaffiliationlist">Trinity College Dublin, Ireland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3008–3012
</span></p></div>
<div class="cpabstractcardabstract"><p>Precise specification of the voice source would facilitate better modelling of expressive nuances in human spoken interaction. This paper focuses on the transformed version of the widely used LF voice source model, and proposes an algorithm which makes it possible to use the waveshape parameter R,,d,, to directly control the LF pulse, for more effective analysis and synthesis of voice modulations. The R,,d,, parameter, capturing much of the natural covariation between glottal parameters, is central to the transformed LF model. It is used to predict the standard R-parameters, which in turn are used to synthesise the LF waveform. However, the LF pulse that results from these predictions may have an R,,d,, value noticeably different from the specified R,,d,,, yielding undesirable artefacts, particularly when the model is used for detailed analysis and synthesis of non-modal voice. A further limitation is that only a subset of possible R,,d,, values can be used, to avoid conflicting LF parameter settings. To eliminate these problems, a new iterative algorithm was developed based on the Newton-Raphson method for two variables, but modified to include constraints. This ensures that the correct R,,d,, is always obtained and that the algorithm converges for effectively all permissible R,,d,, values.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Štefan Beňuš|AUTHOR Štefan Beňuš]]^^1^^, [[Juraj Šimko|AUTHOR Juraj Šimko]]^^2^^, [[Mona Lehtinen|AUTHOR Mona Lehtinen]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^UKF, Slovak Republic; ^^2^^University of Helsinki, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3013–3017
</span></p></div>
<div class="cpabstractcardabstract"><p>Human spoken interactions are embodied and situated. Better understanding of the restrictions and affordances this embodiment and situational awareness has on human speech informs the quest for more natural models of human-machine spoken interactions. Here we examine the articulatory realization of communicative meanings expressed through f0 falling and rising prosodic boundaries in quiet and noisy conditions. Our data show that 1) the effect of environmental noise is more robustly present in the post-boundary than the pre-boundary movements, 2) f0 falls and rises are only weakly differentiated in supra-laryngeal articulation and differ minimally in their response to noise, 3) individual speakers find different solutions for achieving the communicative goals, and 4) lip movements are affected by noise and boundary type more than the tongue movements.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kin Wah Edward Lin|AUTHOR Kin Wah Edward Lin]]^^1^^, [[Hans Anderson|AUTHOR Hans Anderson]]^^1^^, [[Clifford So|AUTHOR Clifford So]]^^2^^, [[Simon Lui|AUTHOR Simon Lui]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^SUTD, Singapore; ^^2^^Chinese University of Hong Kong, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3038–3042
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a simple heuristic-based Sinusoidal Partial Tracking (PT) algorithm for singing analysis. Our PT algorithm uses a heuristic of minimal frequency and magnitude difference to track sinusoidal partials in the popular music. An Ideal Binary Mask (IBM), which is created from the ground truth of the singing voice and the music accompaniment, is used to identify the sound source of the partials. In this justifiable way, we are able to assess the quality of the partials identified from the PT algorithm. Using the iKala dataset along with the IBM and BSS Eval 3.0 as a new method of quantifying the partials quality, the comparative results show that our PT algorithm can achieve 0.8746 ~ 1.7029 dB GNSDR gain, compared to two common benchmarks, namely the MQ algorithm and the SMS-PT algorithm. Thus, our PT algorithm can be considered as a new benchmark of the PT algorithm used in singing analysis.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yong Xu|AUTHOR Yong Xu]], [[Qiuqiang Kong|AUTHOR Qiuqiang Kong]], [[Qiang Huang|AUTHOR Qiang Huang]], [[Wenwu Wang|AUTHOR Wenwu Wang]], [[Mark D. Plumbley|AUTHOR Mark D. Plumbley]]
</p><p class="cpabstractcardaffiliationlist">University of Surrey, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3083–3087
</span></p></div>
<div class="cpabstractcardabstract"><p>Audio tagging aims to perform multi-label classification on audio chunks and it is a newly proposed task in the Detection and Classification of Acoustic Scenes and Events 2016 (DCASE 2016) challenge. This task encourages research efforts to better analyze and understand the content of the huge amounts of audio data on the web. The difficulty in audio tagging is that it only has a chunk-level label without a frame-level label. This paper presents a weakly supervised method to not only predict the tags but also indicate the temporal locations of the occurred acoustic events. The attention scheme is found to be effective in identifying the important frames while ignoring the unrelated frames. The proposed framework is a deep convolutional recurrent model with two auxiliary modules: an attention module and a localization module. The proposed algorithm was evaluated on the Task 4 of DCASE 2016 challenge. State-of-the-art performance was achieved on the evaluation set with equal error rate (EER) reduced from 0.13 to 0.11, compared with the convolutional recurrent baseline system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jing Pan|AUTHOR Jing Pan]]^^1^^, [[Ming Li|AUTHOR Ming Li]]^^1^^, [[Zhanmei Song|AUTHOR Zhanmei Song]]^^2^^, [[Xin Li|AUTHOR Xin Li]]^^2^^, [[Xiaolin Liu|AUTHOR Xiaolin Liu]]^^2^^, [[Hua Yi|AUTHOR Hua Yi]]^^2^^, [[Manman Zhu|AUTHOR Manman Zhu]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Sun Yat-sen University, China; ^^2^^Shandong Yingcai University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3088–3092
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose an annotated piano performance evaluation dataset with 185 audio pieces and a method to evaluate the performance of piano beginners based on their audio recordings. The proposed framework includes three parts: piano key posterior probability extraction, Dynamic Time Warping (DTW) based matching and performance score regression. First, a deep neural network model is trained to extract 88 dimensional piano key features from Constant-Q Transform (CQT) spectrum. The proposed acoustic model shows high robustness to the recording environments. Second, we employ the DTW algorithm on the high-level piano key feature sequences to align the input with the template. Upon the alignment, we extract multiple global matching features that could reflect the similarity between the input and the template. Finally, we apply linear regression upon these matching features with the scores annotated by expertise in training data to estimate performance scores for test audio. Experimental results show that our automatic evaluation method achieves 2.64 average absolute score error in score range from 0 to 100, and 0.73 average correlation coefficient on our in-house collected YCU-MPPE-II dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shreyan Chowdhury|AUTHOR Shreyan Chowdhury]], [[Tanaya Guha|AUTHOR Tanaya Guha]], [[Rajesh M. Hegde|AUTHOR Rajesh M. Hegde]]
</p><p class="cpabstractcardaffiliationlist">IIT Kanpur, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3093–3096
</span></p></div>
<div class="cpabstractcardabstract"><p>Tempo estimation aims at estimating the pace of a musical piece measured in beats per minute. This paper presents a new tempo estimation method that utilizes coherent energy changes across multiple frequency sub-bands to identify the onsets. A new measure, called the sub-band synchrony, is proposed to detect and quantify the coherent amplitude changes across multiple sub-bands. Given a musical piece, our method first detects the onsets using the sub-band synchrony measure. The periodicity of the resulting onset curve, measured using the autocorrelation function, is used to estimate the tempo value. The performance of the sub-band synchrony based tempo estimation method is evaluated on two music databases. Experimental results indicate a reasonable improvement in performance when compared to conventional methods of tempo estimation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yun Wang|AUTHOR Yun Wang]], [[Florian Metze|AUTHOR Florian Metze]]
</p><p class="cpabstractcardaffiliationlist">Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3097–3101
</span></p></div>
<div class="cpabstractcardabstract"><p>Sound event detection is the task of detecting the type, onset time, and offset time of sound events in audio streams. The mainstream solution is recurrent neural networks (RNNs), which usually predict the probability of each sound event at every time step. Connectionist temporal classification (CTC) has been applied in order to relax the need for exact annotations of onset and offset times; the CTC output layer is expected to generate a peak for each event boundary where the acoustic signal is most salient. However, with limited training data, the CTC network has been found to train slowly, and generalize poorly to new data.
In this paper, we try to introduce knowledge learned from a much larger corpus into the CTC network. We train two variants of SoundNet, a deep convolutional network that takes the audio tracks of videos as input, and tries to approximate the visual information extracted by an image recognition network. A lower part of SoundNet or its variants is then used as a feature extractor for the CTC network to perform sound event detection. We show that the new feature extractor greatly accelerates the convergence of the CTC network, and slightly improves the generalization.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Naziba Mostafa|AUTHOR Naziba Mostafa]], [[Pascale Fung|AUTHOR Pascale Fung]]
</p><p class="cpabstractcardaffiliationlist">HKUST, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3102–3106
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose a note-based query by humming (QBH) system with Hidden Markov Model (HMM) and Convolutional Neural Network (CNN) since note-based systems are much more efficient than the traditional frame-based systems. A note-based QBH system has two main components: humming transcription and candidate melody retrieval.
For humming transcription, we are the first to use a hybrid model using HMM and CNN. We use CNN for its ability to learn the features directly from raw audio data and for being able to model the locality and variability often present in a note and we use HMM for handling the variability across the time-axis.
For candidate melody retrieval, we use locality sensitive hashing to narrow down the candidates for retrieval and dynamic time warping and earth mover’s distance for the final ranking of the selected candidates.
We show that our HMM-CNN humming transcription system outperforms other state of the art humming transcription systems by ~2% using the transcription evaluation framework by Molina et al. and our overall query by humming system has a Mean Reciprocal Rank of 0.92 using the standard MIREX dataset, which is higher than other state of the art note-based query by humming systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hardik B. Sailor|AUTHOR Hardik B. Sailor]], [[Dharmesh M. Agrawal|AUTHOR Dharmesh M. Agrawal]], [[Hemant A. Patil|AUTHOR Hemant A. Patil]]
</p><p class="cpabstractcardaffiliationlist">DA-IICT, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3107–3111
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose to use Convolutional Restricted Boltzmann Machine (ConvRBM) to learn filterbank from the raw audio signals. ConvRBM is a generative model trained in an unsupervised way to model the audio signals of arbitrary lengths. ConvRBM is trained using annealed dropout technique and parameters are optimized using Adam optimization. The subband filters of ConvRBM learned from the ESC-50 database resemble Fourier basis in the mid-frequency range while some of the low-frequency subband filters resemble Gammatone basis. The auditory-like filterbank scale is nonlinear w.r.t. the center frequencies of the subband filters and follows the standard auditory scales. We have used our proposed model as a front-end for the Environmental Sound Classification (ESC) task with supervised Convolutional Neural Network (CNN) as a back-end. Using CNN classifier, the ConvRBM filterbank (ConvRBM-BANK) and its score-level fusion with the Mel filterbank energies (FBEs) gave an absolute improvement of 10.65%, and 18.70% in the classification accuracy, respectively, over FBEs alone on the ESC-50 database. This shows that the proposed ConvRBM filterbank also contains highly complementary information over the Mel filterbank, which is helpful in the ESC task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Meet H. Soni|AUTHOR Meet H. Soni]], [[Rishabh Tak|AUTHOR Rishabh Tak]], [[Hemant A. Patil|AUTHOR Hemant A. Patil]]
</p><p class="cpabstractcardaffiliationlist">DA-IICT, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3112–3116
</span></p></div>
<div class="cpabstractcardabstract"><p>Retrieval of the phase of a signal is one of the major problems in signal processing. For an exact signal reconstruction, both the magnitude and phase spectrum of the signal are required. In many speech-based applications, only the magnitude spectrum is processed and the phase is ignored, which leads to degradation in the performance. Here, we propose a novel technique that enables the reconstruction of the speech signal from magnitude spectrum only. We consider the even-odd part decomposition of a causal sequence and process only on the real part of the DTFT of the signal. We propose the shifting of the real part of DTFT of the sequence to make it non-negative. By adding a constant of sufficient value to the real part of the DTFT, the exact signal reconstruction is possible from the magnitude or power spectrum alone. Moreover, we have compared our proposed approach with recently proposed phase retrieval method from magnitude spectrum of the Causal Delta Dominant (CDD) signal. We found that the method of phase retrieval from CDD signal and proposed method are identical under certain approximation. However, proposed method involves the less computational cost for the exact processing of the signal.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Huy Phan|AUTHOR Huy Phan]], [[Philipp Koch|AUTHOR Philipp Koch]], [[Fabrice Katzberg|AUTHOR Fabrice Katzberg]], [[Marco Maass|AUTHOR Marco Maass]], [[Radoslaw Mazur|AUTHOR Radoslaw Mazur]], [[Alfred Mertins|AUTHOR Alfred Mertins]]
</p><p class="cpabstractcardaffiliationlist">Universität zu Lübeck, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3043–3047
</span></p></div>
<div class="cpabstractcardabstract"><p>We introduce in this work an efficient approach for audio scene classification using deep recurrent neural networks. An audio scene is firstly transformed into a sequence of high-level label tree embedding feature vectors. The vector sequence is then divided into multiple subsequences on which a deep GRU-based recurrent neural network is trained for sequence-to-label classification. The global predicted label for the entire sequence is finally obtained via aggregation of subsequence classification outputs. We will show that our approach obtains an F1-score of 97.7% on the LITIS Rouen dataset, which is the largest dataset publicly available for the task. Compared to the best previously reported result on the dataset, our approach is able to reduce the relative classification error by 35.3%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Maria Sandsten|AUTHOR Maria Sandsten]], [[Isabella Reinhold|AUTHOR Isabella Reinhold]], [[Josefin Starkhammar|AUTHOR Josefin Starkhammar]]
</p><p class="cpabstractcardaffiliationlist">Lund University, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3048–3052
</span></p></div>
<div class="cpabstractcardabstract"><p>High-resolution time-frequency (TF) images of multi-component signals are of great interest for visualization, feature extraction and estimation. The matched Gaussian multitaper spectrogram has been proposed to optimally resolve multi-component transient functions of Gaussian shape. Hermite functions are used as multitapers and the weights of the different spectrogram functions are optimized. For a fixed number of multitapers, the optimization gives the approximate Wigner distribution of the Gaussian shaped function. Increasing the number of multitapers gives a better approximation, i.e. a better resolution, but the cross-terms also become more prominent for close TF components. In this submission, we evaluate a number of different concentration measures to automatically estimate the number of multitapers resulting in the optimal spectrogram for TF images of dolphin echolocation signals. The measures are evaluated for different multi-component signals and noise levels and a suggestion of an automatic procedure for optimal TF analysis is given. The results are compared to other well known TF estimation algorithms and examples of real data measurements of echolocation signals from a beluga whale (Delphinapterus leucas) are presented.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jindřich Matoušek|AUTHOR Jindřich Matoušek]], [[Daniel Tihelka|AUTHOR Daniel Tihelka]]
</p><p class="cpabstractcardaffiliationlist">University of West Bohemia, Czech Republic</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3053–3057
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper a classification-based method for the automatic detection of glottal closure instants (GCIs) from the speech signal is proposed. Peaks in the speech waveforms are taken as candidates for GCI placements. A classification framework is used to train a classification model and to classify whether or not a peak corresponds to the GCI. We show that the detection accuracy in terms of F1 score is 97.27%. In addition, despite using the speech signal only, the proposed method behaves comparably to a method utilizing the glottal signal. The method is also compared with three existing GCI detection algorithms on publicly available databases.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xiaoke Qi|AUTHOR Xiaoke Qi]], [[Jianhua Tao|AUTHOR Jianhua Tao]]
</p><p class="cpabstractcardaffiliationlist">Chinese Academy of Sciences, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3058–3062
</span></p></div>
<div class="cpabstractcardabstract"><p>Many methods have been proposed for modeling head-related transfer functions (HRTFs) and yield a good performance level in terms of log-spectral distortion (LSD). However, most of them utilize linear weighting to reconstruct or interpolate HRTFs, but not consider the inherent nonlinearity relationship between the basis function and HRTFs. Motivated by this, a domain knowledge-assisted nonlinear modeling method is proposed based on bottleneck features. Domain knowledge is used in two aspects. One is to generate the input features derived from the solution to sound wave propagation equation at the physical level, and the other is to design the loss function for model training based on the knowledge of objective evaluation criterion, i.e., LSD. Furthermore, with utilizing the strong representation ability of the bottleneck features, the nonlinear model has the potential to achieve a more accurate mapping. The objective and subjective experimental results show that the proposed method gains less LSD when compared with linear model, and the interpolated HRTFs can generate a similar perception to those of the database.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Luis M.T. Jesus|AUTHOR Luis M.T. Jesus]], [[Bruno Rocha|AUTHOR Bruno Rocha]], [[Andreia Hall|AUTHOR Andreia Hall]]
</p><p class="cpabstractcardaffiliationlist">Universidade de Aveiro, Portugal</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3063–3067
</span></p></div>
<div class="cpabstractcardabstract"><p>Music teacher’s reports suggest that the respiratory function and laryngeal control in wind instruments, stimulate muscular tension of the involved anatomical structure. However, the physiology and acoustics of the larynx during trumpet playing have seldom been studied. Therefore, the current paper describes the laryngeal articulation during trumpet performance with biomedical signals and auditory perception. The activation of laryngeal musculature of six professional trumpeters when playing a standard musical passage was analysed using audio, electroglottography (EGG), oxygen saturation and heart rate signals. Two University trumpet teachers listened to the audio recordings, to evaluate the participants’ laryngeal effort (answers on a 100 mm Visual-Analogue-Scale (VAS): 0 “no perceived effort”; 100 “extreme effort”). Correlations between parameters extracted from the EGG data and the perception of the audio stimuli by the teachers were explored. Two hundred and fifty laryngeal articulations, where raising of the larynx and muscular effort were observed, were annotated and analysed. No correlation between the EGG data and the auditory evaluation was observed. However, both teachers perceived the laryngeal effort (VAS mean scores = 61±14). Our findings show that EGG and auditory perception data can provide new insights into laryngeal articulation and breathing control that are key to low muscular tension.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jian Guan|AUTHOR Jian Guan]]^^1^^, [[Xuan Wang|AUTHOR Xuan Wang]]^^1^^, [[Pengming Feng|AUTHOR Pengming Feng]]^^2^^, [[Jing Dong|AUTHOR Jing Dong]]^^3^^, [[Wenwu Wang|AUTHOR Wenwu Wang]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Harbin Institute of Technology, China; ^^2^^Newcastle University, UK; ^^3^^Nanjing Tech University, China; ^^4^^University of Surrey, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3068–3072
</span></p></div>
<div class="cpabstractcardabstract"><p>We study the problem of dictionary learning for signals that can be represented as polynomials or polynomial matrices, such as convolutive signals with time delays or acoustic impulse responses. Recently, we developed a method for polynomial dictionary learning based on the fact that a polynomial matrix can be expressed as a polynomial with matrix coefficients, where the coefficient of the polynomial at each time lag is a scalar matrix. However, a polynomial matrix can be also equally represented as a matrix with polynomial elements. In this paper, we develop an alternative method for learning a polynomial dictionary and a sparse representation method for polynomial signal reconstruction based on this model. The proposed methods can be used directly to operate on the polynomial matrix without having to access its coefficients matrices. We demonstrate the performance of the proposed method for acoustic impulse response modeling.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rakib Hyder|AUTHOR Rakib Hyder]]^^1^^, [[Shabnam Ghaffarzadegan|AUTHOR Shabnam Ghaffarzadegan]]^^2^^, [[Zhe Feng|AUTHOR Zhe Feng]]^^2^^, [[John H.L. Hansen|AUTHOR John H.L. Hansen]]^^3^^, [[Taufiq Hasan|AUTHOR Taufiq Hasan]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^BUET, Bangladesh; ^^2^^Robert Bosch, USA; ^^3^^University of Texas at Dallas, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3073–3077
</span></p></div>
<div class="cpabstractcardabstract"><p>Enabling smart devices to infer about the environment using audio signals has been one of the several long-standing challenges in machine listening. The availability of public-domain datasets, e.g., Detection and Classification of Acoustic Scenes and Events (DCASE) 2016, enabled researchers to compare various algorithms on standard predefined tasks. Most of the current best performing individual acoustic scene classification systems utilize different spectrogram image based features with a Convolutional Neural Network (CNN) architecture. In this study, we first analyze the performance of a state-of-the-art CNN system for different auditory image and spectrogram features, including Mel-scaled, logarithmically scaled, linearly scaled filterbank spectrograms, and Stabilized Auditory Image (SAI) features. Next, we benchmark an MFCC based Gaussian Mixture Model (GMM) SuperVector (SV) system for acoustic scene classification. Finally, we utilize the activations from the final layer of the CNN to form a SuperVector (SV) and use them as feature vectors for a Probabilistic Linear Discriminative Analysis (PLDA) classifier. Experimental evaluation on the DCASE 2016 database demonstrates the effectiveness of the proposed CNN-SV approach compared to conventional CNNs with a fully connected softmax output layer. Score fusion of individual systems provides up to 7% relative improvement in overall accuracy compared to the CNN baseline system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xue Feng|AUTHOR Xue Feng]]^^1^^, [[Brigitte Richardson|AUTHOR Brigitte Richardson]]^^2^^, [[Scott Amman|AUTHOR Scott Amman]]^^2^^, [[James Glass|AUTHOR James Glass]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^MIT, USA; ^^2^^Ford, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3078–3082
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper we investigate environment feature representations, which we refer to as e-vectors, that can be used for environment adaption in automatic speech recognition (ASR), and for environment identification. Inspired by the fact that i-vectors in the total variability space capture both speaker and channel environment variability, our proposed e-vectors are extracted from i-vectors. Two extraction methods are proposed: one is via linear discriminant analysis (LDA) projection, and the other via a bottleneck deep neural network (BN-DNN). Our evaluations show that by augmenting DNN-HMM ASR systems with the proposed e-vectors for environment adaptation, ASR performance is significantly improved. We also demonstrate that the proposed e-vector yields promising results on environment identification.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jochen Weiner|AUTHOR Jochen Weiner]], [[Mathis Engelbart|AUTHOR Mathis Engelbart]], [[Tanja Schultz|AUTHOR Tanja Schultz]]
</p><p class="cpabstractcardaffiliationlist">Universität Bremen, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3117–3121
</span></p></div>
<div class="cpabstractcardabstract"><p>As the population in developed countries is aging, larger numbers of people are at risk of developing dementia. In the near future there will be a need for time- and cost-efficient screening methods. Speech can be recorded and analyzed in this manner, and as speech and language are affected early on in the course of dementia, automatic speech processing can provide valuable support for such screening methods.
We present two pipelines of feature extraction for dementia detection: the manual pipeline uses manual transcriptions while the fully automatic pipeline uses transcriptions created by automatic speech recognition (ASR). The acoustic and linguistic features that we extract need no language specific tools other than the ASR system. Using these two different feature extraction pipelines we automatically detect dementia. Our results show that the ASR system’s transcription quality is a good single feature and that the features extracted from automatic transcriptions perform similarly to or slightly better than the features extracted from the manual transcriptions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sebastian Wankerl|AUTHOR Sebastian Wankerl]], [[Elmar Nöth|AUTHOR Elmar Nöth]], [[Stefan Evert|AUTHOR Stefan Evert]]
</p><p class="cpabstractcardaffiliationlist">FAU Erlangen-Nürnberg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3162–3166
</span></p></div>
<div class="cpabstractcardabstract"><p>Alzheimer’s disease (AD) is the most common cause of dementia and affects wide parts of the elderly population. Since there exists no cure for this illness, it is of particular interest to develop reliable and easy-to-use diagnostic methods to alleviate its effects. Speech can be a useful indicator to reach this goal. We propose a purely statistical approach towards the automatic diagnosis of AD which is solely based on n-gram models with subsequent evaluation of the perplexity and does not incorporate any further linguistic features. Hence, it works independently of a concrete language. We evaluate our approach on the DementiaBank which contains spontaneous speech of test subjects describing a picture. Using the Equal-Error-Rate as classification threshold, we achieve an accuracy of 77.1%. In addition to that, we studied the correlation between the calculated perplexities and the Mini-Mental State Examination (MMSE) scores of the test subjects. While there is little correlation for the healthy control group, a higher correlation could be found when considering the demented speakers. This makes it reasonable to conclude that our approach reveals some of the cognitive limitations of AD patients and can help to better diagnose the disease based on speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Karel Mundnich|AUTHOR Karel Mundnich]], [[Md. Nasir|AUTHOR Md. Nasir]], [[Panayiotis Georgiou|AUTHOR Panayiotis Georgiou]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]
</p><p class="cpabstractcardaffiliationlist">University of Southern California, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3167–3171
</span></p></div>
<div class="cpabstractcardabstract"><p>Behavioral and mental health research and its clinical applications widely rely on quantifying human behavioral expressions. This often requires human-derived behavioral annotations, which tend to be noisy, especially when the psychological objects of interest are latent and subjective in nature. This paper focuses on exploiting multiple human annotations toward improving reliability of the ensemble decision, by creating a ranking of the evaluated objects. To create this ranking, we employ an adapted version of Copeland’s counting method, which results in robust inter-annotator rankings and agreement. We use a simple mapping between the ranked objects and the scale of evaluation, which preserves the original distribution of ratings, based on maximum likelihood estimation. We apply the algorithm to ratings that lack a ground truth. Therefore, we assess our algorithm in two ways: (1) by corrupting the annotations with different distributions of noise, and computing the inter-annotator agreement between the ensemble estimates derived from the original and corrupted data using Krippendorff’s α; and (2) by replacing one annotator at a time with the ensemble estimate. Our results suggest that the proposed method provides a robust alternative that suffers less from individual annotator preferences/biases and scale misuse.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Massimo Pettorino|AUTHOR Massimo Pettorino]]^^1^^, [[Wentao Gu|AUTHOR Wentao Gu]]^^2^^, [[Paweł Półrola|AUTHOR Paweł Półrola]]^^3^^, [[Ping Fan|AUTHOR Ping Fan]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Università di Napoli “L’Orientale”, Italy; ^^2^^Nanjing Normal University, China; ^^3^^UJK, Poland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3172–3176
</span></p></div>
<div class="cpabstractcardabstract"><p>Previous studies on Italian speech showed that the percentage of vocalic portion in the utterance (%V) and the duration of the interval between two consecutive vowel onset points (VtoV) were larger for parkinsonian (PD) than for healthy controls (HC). Especially, the values of %V were distinctly separated between PD and HC. The present study aimed to further test the finding on Mandarin and Polish. Twenty-five Mandarin speakers (13 PD and 12 HC matched on age) and thirty-one Polish speakers (18 PD and 13 HC matched on age) read aloud a passage of a story. The recorded speeches were segmented into vocalic and consonantal intervals, and then %V and VtoV were calculated. For both languages, VtoV overlapped between HC and PD. For Polish, %V was distinctly higher in PD than in HC, while for Mandarin there was no significant difference. It suggests that %V could be used for automatic diagnosis of PD for Italian and Polish, but not for Mandarin. The effectiveness of the rhythmic metric appears to be language-dependent, varying with the rhythmic typology of the language.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rahul Gupta|AUTHOR Rahul Gupta]]^^1^^, [[Saurabh Sahu|AUTHOR Saurabh Sahu]]^^2^^, [[Carol Espy-Wilson|AUTHOR Carol Espy-Wilson]]^^2^^, [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Amazon.com, USA; ^^2^^University of Maryland, USA; ^^3^^University of Southern California, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3122–3126
</span></p></div>
<div class="cpabstractcardabstract"><p>Humans use emotional expressions to communicate their internal affective states. These behavioral expressions are often multi-modal (e.g. facial expression, voice and gestures) and researchers have proposed several schemes to predict the latent affective states based on these expressions. The relationship between the latent affective states and their expression is hypothesized to be affected by several factors; depression disorder being one of them. Despite a wide interest in affect prediction, and several studies linking the effect of depression on affective expressions, only a limited number of affect prediction models account for the depression severity. In this work, we present a novel scheme that incorporates depression severity as a parameter in Deep Neural Networks (DNNs). In order to predict affective dimensions for an individual at hand, our scheme alters the DNN activation function based on the subject’s depression severity. We perform experiments on affect prediction in two different sessions of the Audio-Visual Depressive language Corpus, which involves patients with varying degree of depression. Our results show improvements in arousal and valence prediction on both the sessions using the proposed DNN modeling. We also present analysis of the impact of such an alteration in DNNs during training and testing.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Stephanie Gillespie|AUTHOR Stephanie Gillespie]]^^1^^, [[Yash-Yee Logan|AUTHOR Yash-Yee Logan]]^^1^^, [[Elliot Moore|AUTHOR Elliot Moore]]^^1^^, [[Jacqueline Laures-Gore|AUTHOR Jacqueline Laures-Gore]]^^2^^, [[Scott Russell|AUTHOR Scott Russell]]^^3^^, [[Rupal Patel|AUTHOR Rupal Patel]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Georgia Institute of Technology, USA; ^^2^^Georgia State University, USA; ^^3^^Grady Memorial Hospital, USA; ^^4^^Northeastern University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3127–3131
</span></p></div>
<div class="cpabstractcardabstract"><p>Dysarthria is a motor speech disorder that impacts verbal articulation and co-ordination, resulting in slow, slurred and imprecise speech. Automated classification of dysarthria subtypes and severities could provide a useful clinical tool in assessing the onset and progress in treatment. This study represents a pilot project to train models to detect the presence of dysarthria in continuous speech. Subsets of the Universal Access Research Dataset (UA-Speech) and the Atlanta Motor Speech Disorders Corpus (AMSDC) database were utilized in a cross-database training strategy (training on UA-Speech / testing on AMSDC) to distinguish speech with and without dysarthria. In addition to traditional spectral and prosodic features, the current study also includes features based on the Teager Energy Operator (TEO) and the glottal waveform. Baseline results on the UA-Speech dataset maximize word- and participant-level accuracies at 75.3% and 92.9% using prosodic features. However, the cross-training of UA-Speech tested on the AMSDC maximize word- and participant-level accuracies at 71.3% and 90% based on a TEO feature. The results of this pilot study reinforce consideration of dysarthria subtypes in cross-dataset training as well as highlight additional features that may be sensitive to the presence of dysarthria in continuous speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[M. Novotný|AUTHOR M. Novotný]]^^1^^, [[Jan Rusz|AUTHOR Jan Rusz]]^^1^^, [[K. Spálenka|AUTHOR K. Spálenka]]^^1^^, [[Jiří Klempíř|AUTHOR Jiří Klempíř]]^^2^^, [[D. Horáková|AUTHOR D. Horáková]]^^2^^, [[Evžen Růžička|AUTHOR Evžen Růžička]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CTU, Czech Republic; ^^2^^Charles University, Czech Republic</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3132–3136
</span></p></div>
<div class="cpabstractcardabstract"><p>Although previous studies have reported the occurrence of velopharyngeal incompetence connected with ataxic dysarthria, there is a lack of evidence related to nasality assessment in cerebellar disorders. This is partly due to the limited reliability of challenging analyses and partly due to nasality being a less pronounced manifestation of ataxic dysarthria. Therefore, we employed 1/3-octave spectra analysis as an objective measurement of nasality disturbances. We analyzed 20 subjects with multiple system atrophy (MSA), 13 subjects with cerebellar ataxia (CA), 20 subjects with multiple sclerosis (MS) and 20 healthy (HC) speakers. Although we did not detect the presence of hypernasality, our results showed increased nasality fluctuation in 65% of MSA, 43% of CA and 30% of MS subjects compared to 15% of HC speakers, suggesting inconsistent velopharyngeal motor control. Furthermore, we found a statistically significant difference between MSA and HC participants (p<0.001), and significant correlation between the natural history cerebellar subscore and neuroprotection in Parkinson plus syndromes — Parkinson plus scale and nasality fluctuations in MSA (r=0.51, p<0.05). In conclusion, acoustic analysis showed an increased presence of abnormal nasality fluctuations in all ataxic groups and revealed that nasality fluctuation is associated with distortion of cerebellar functions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Simone Hantke|AUTHOR Simone Hantke]], [[Hesam Sagha|AUTHOR Hesam Sagha]], [[Nicholas Cummins|AUTHOR Nicholas Cummins]], [[Björn Schuller|AUTHOR Björn Schuller]]
</p><p class="cpabstractcardaffiliationlist">Universität Passau, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3137–3141
</span></p></div>
<div class="cpabstractcardabstract"><p>The automatic recognition of emotion from speech is a mature research field with a large number of publicly available corpora. However, to the best of the authors’ knowledge, none of these datasets consist solely of emotional speech samples from individuals with mental, neurological and/or physical disabilities. Yet, such individuals could benefit from speech-based assistive technologies to enhance their communication with their environment and to manage their daily work process. With the aim of advancing these technologies, we fill this void in emotional speech resources by introducing the EmotAsS (Emotional Sensitivity Assistance System for People with Disabilities) corpus consisting of spontaneous emotional German speech data recorded from 17 mentally, neurologically and/or physically disabled participants in their daily work environment, resulting in just under 11 hours of total speech time and featuring approximately 12.7 k utterances after segmentation. Transcription was performed and labelling was carried out in seven emotional categories, as well as for the intelligibility of the speaker. We present a set of baseline results, based on using standard acoustic and linguistic features, for arousal and valence emotion recognition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Carla Agurto|AUTHOR Carla Agurto]]^^1^^, [[Raquel Norel|AUTHOR Raquel Norel]]^^1^^, [[Rachel Ostrand|AUTHOR Rachel Ostrand]]^^1^^, [[Gillinder Bedi|AUTHOR Gillinder Bedi]]^^2^^, [[Harriet de Wit|AUTHOR Harriet de Wit]]^^2^^, [[Matthew J. Baggott|AUTHOR Matthew J. Baggott]]^^2^^, [[Matthew G. Kirkpatrick|AUTHOR Matthew G. Kirkpatrick]]^^3^^, [[Margaret Wardle|AUTHOR Margaret Wardle]]^^4^^, [[Guillermo A. Cecchi|AUTHOR Guillermo A. Cecchi]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IBM, USA; ^^2^^University of Chicago, USA; ^^3^^University of Southern California, USA; ^^4^^UTHealth, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3142–3146
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech data has the potential to become a powerful tool to provide quantitative information about emotion beyond that achieved by subjective assessments. Based on this concept, we investigate the use of speech to identify effects in subjects under the influence of two different drugs: Oxytocin (OT) and 3,4-methylenedioxymethamphetamine (MDMA), also known as ecstasy. We extract a set of informative phonological features that can characterize emotion. Then, we perform classification to detect if the subject is under the influence of a drug. Our best results show low error rates of 13% and 17% for the subject classification of OT and MDMA vs. placebo, respectively. We also analyze the performance of the features to differentiate the two levels of MDMA doses, obtaining an error rate of 19%. The results indicate that subtle emotional changes can be detected in the context of drug use.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bahman Mirheidari|AUTHOR Bahman Mirheidari]]^^1^^, [[Daniel Blackburn|AUTHOR Daniel Blackburn]]^^1^^, [[Kirsty Harkness|AUTHOR Kirsty Harkness]]^^2^^, [[Traci Walker|AUTHOR Traci Walker]]^^1^^, [[Annalena Venneri|AUTHOR Annalena Venneri]]^^1^^, [[Markus Reuber|AUTHOR Markus Reuber]]^^2^^, [[Heidi Christensen|AUTHOR Heidi Christensen]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Sheffield, UK; ^^2^^Royal Hallamshire Hospital, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3147–3151
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents work on developing an automatic dementia screening test based on patients’ ability to interact and communicate — a highly cognitively demanding process where early signs of dementia can often be detected. Such a test would help general practitioners, with no specialist knowledge, make better diagnostic decisions as current tests lack specificity and sensitivity. We investigate the feasibility of basing the test on conversations between a ‘talking head’ (avatar) and a patient and we present a system for analysing such conversations for signs of dementia in the patient’s speech and language. Previously we proposed a semi-automatic system that transcribed conversations between patients and neurologists and extracted conversation analysis style features in order to differentiate between patients with progressive neurodegenerative dementia (ND) and functional memory disorders (FMD). Determining who talks when in the conversations was performed manually. In this study, we investigate a fully automatic system including speaker diarisation, and the use of additional acoustic and lexical features. Initial results from a pilot study are presented which shows that the avatar conversations can successfully classify ND/FMD with around 91% accuracy, which is in line with previous results for conversations that were led by a neurologist.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yue Zhang|AUTHOR Yue Zhang]]^^1^^, [[Felix Weninger|AUTHOR Felix Weninger]]^^2^^, [[Björn Schuller|AUTHOR Björn Schuller]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Imperial College London, UK; ^^2^^Nuance Communications, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3152–3156
</span></p></div>
<div class="cpabstractcardabstract"><p>In this work, we study the drowsy state of a speaker, induced by alcohol intoxication or sleep deprivation. In particular, we investigate the coherence between the two pivotal causes of drowsiness, as featured in the Intoxication and Sleepiness tasks of the INTERSPEECH Speaker State Challenge. In this way, we aim to exploit the interrelations between these different, yet highly correlated speaker states, which need to be reliably recognised in safety and security critical environments. To this end, we perform cross-domain classification of alcohol intoxication and sleepiness, thus leveraging the acoustic similarities of these speech phenomena for transfer learning. Further, we conducted in-depth feature analysis to quantitatively assess the task relatedness and to determine the most relevant features for both tasks. To test our methods in realistic contexts, we use the Alcohol Language Corpus and the Sleepy Language Corpus containing in total 60 hours of genuine intoxicated and sleepy speech. In the result, cross-domain classification combined with feature selection yields up to 60.3% unweighted average recall, which is significantly above-chance (50%) and highly notable given the mismatch in the training and validation data. Finally, we show that an effective, general drowsiness classifier can be obtained by aggregating the training data from both domains.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Paula Lopez-Otero|AUTHOR Paula Lopez-Otero]]^^1^^, [[Laura Docio-Fernandez|AUTHOR Laura Docio-Fernandez]]^^1^^, [[Alberto Abad|AUTHOR Alberto Abad]]^^2^^, [[Carmen Garcia-Mateo|AUTHOR Carmen Garcia-Mateo]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universidade de Vigo, Spain; ^^2^^INESC-ID Lisboa, Portugal</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3157–3161
</span></p></div>
<div class="cpabstractcardabstract"><p>Depression is a mood disorder that is usually addressed by outpatient treatments in order to favour patient’s inclusion in society. This leads to a need for novel automatic tools exploiting speech processing approaches that can help to monitor the emotional state of patients via telephone or the Internet. However, the transmission, processing and subsequent storage of such sensitive data raises several privacy concerns. Speech de-identification can be used to protect the patients’ identity. Nevertheless, these techniques modify the speech signal, eventually affecting the performance of depression detection approaches based on either speech characteristics or automatic transcriptions. This paper presents a study on the influence of speech de-identification when using transcription-based approaches for depression detection. To this effect, a system based on the global vectors method for natural language processing is proposed. In contrast to previous works, two main sources of nuisance have been considered: the de-identification process itself and the transcription errors introduced by the automatic recognition of the patients’ speech. Experimental validation on the DAIC-WOZ corpus reveals very promising results, obtaining only a slight performance degradation with respect to the use of manual transcriptions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jung-Yueh Tu|AUTHOR Jung-Yueh Tu]]^^1^^, [[Janice Wing-Sze Wong|AUTHOR Janice Wing-Sze Wong]]^^2^^, [[Jih-Ho Cha|AUTHOR Jih-Ho Cha]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Shanghai Jiao Tong University, China; ^^2^^Hong Kong Baptist University, China; ^^3^^National Tsing Hua University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3177–3180
</span></p></div>
<div class="cpabstractcardabstract"><p>The third tone sandhi in Mandarin is a well-studied rule, where a Tone 3 followed by another Tone 3 is changed as a rising tone, similar to Tone 2. This Tone 3 sandhi rule is straightforward in disyllabic words, which is phonetically driven for the ease of production. In three or more than three syllables with Tone 3, however, the Tone 3 sandhi application is more complicated and involves both the prosodic and morph-syntactic domains, which makes it difficult for L2 learners. This study aims to understand how L2 learners with another tone language experience could master the Mandarin Tone 3 sandhi rule. Specifically, the study investigates the production of Tone 3 sandhi in trisyllabic Mandarin words by Cantonese speakers. In the current study, 30 Cantonese speakers were requested to produce 15 trisyllabic words (“1+[2+3]” and “[1+2]+3” sandhi patterns) and 5 hexasyllabic sentences with Tone 3 in sequences. The analyses of results center on three major types of error patterns: overgeneralization, under-application, and combination. The findings are discussed with regard to the phono-syntactic interactions of Tone 3 sandhi at the lexical and phrasal levels as well as the influence of the Cantonese tonal system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Radek Skarnitzl|AUTHOR Radek Skarnitzl]]^^1^^, [[Anders Eriksson|AUTHOR Anders Eriksson]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Charles University, Czech Republic; ^^2^^Stockholm University, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3221–3225
</span></p></div>
<div class="cpabstractcardabstract"><p>The study is part of a series of studies which examine the acoustic correlates of lexical stress in several typologically different languages, in three speech styles: spontaneous speech, phrase reading, and wordlist reading. This study focuses on Czech, a language with stress fixed on the first syllable of a prosodic word, with no contrastive function at the level of individual words. The acoustic parameters examined here are F0-level, F0-variation, Duration, Sound Pressure Level, and Spectral Emphasis. Values for over 6,000 vowels were analyzed.
Unlike the other languages examined so far, lexical stress in Czech is not manifested by clear prominence markings on the first, stressed syllable: the stressed syllable is neither higher, realized with greater F0 variation, longer; nor does it have a higher SPL or higher Spectral Emphasis. There are slight, but insignificant tendencies pointing to a delayed rise, that is, to higher values of some of the acoustic parameters on the second, post-stressed syllable. Since lexical stress does not serve a contrastive function in Czech, the absence of acoustic marking on the stressed syllable is not surprising.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Petra Wagner|AUTHOR Petra Wagner]], [[Nataliya Bryhadyr|AUTHOR Nataliya Bryhadyr]]
</p><p class="cpabstractcardaffiliationlist">Universität Bielefeld, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3226–3230
</span></p></div>
<div class="cpabstractcardabstract"><p>We investigated the expression of prosodic prominence related to unpredictability and relevance in spontaneous dyadic interactions in which interlocutors could or could not see each other. Interactions between visibility and prominence were analyzed in a verbal version of the game TicTacToe. This setting allows for disentangling different types of information structure: early moves tend to be unpredictable, but are typically irrelevant for the immediate outcome of the game, while late moves tend to be predictable but relevant, as they usually prevent an opponent’s winning move or constitute a winning move by themselves.
Our analyses on German reveal that prominence expression is affected globally by visibility conditions: speech becomes overall softer and faster when interlocutors can see each other. However, speakers differentiate unpredictability and relevance-related accents rather consistently using intensity cues both under visibility and invisibility conditions. We also find that pitch excursions related to prosodic information structure are not affected by visibility. Our findings support effort-optimization models of speech production, but also models that regard speech production as an integrated bimodal process with a high degree of congruency across domains.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yu-Yin Hsu|AUTHOR Yu-Yin Hsu]], [[Anqi Xu|AUTHOR Anqi Xu]]
</p><p class="cpabstractcardaffiliationlist">Hong Kong Polytechnic University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3231–3235
</span></p></div>
<div class="cpabstractcardabstract"><p>In addition to deciding what to say, interlocutors have to decide how to say it. One of the important tasks of linguists is then to model how differences in acoustic patterns influence the interpretation of a sentence. In light of previous studies on how prosodic structure conveys discourse-level information in a sentence, this study makes use of a speech production experiment to investigate how expressions related to different information packaging, such as information focus, corrective focus, and old information, are prosodically realized within a complex nominal. Special attention was paid to the sequence of “numeral-classifier-noun” in Mandarin, which consists of closely related sub-syntactic units internally, and provides a phonetically controlled environment comparable to previous phonetic studies on focus prominence at the sentential level. The result shows that a multi-dimensional strategy is used in focus-marking, and that focus prosody is sensitive to the size of focus domain and is observable in various lexical tonal environments in Mandarin.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Malin Svensson Lundmark|AUTHOR Malin Svensson Lundmark]], [[Gilbert Ambrazaitis|AUTHOR Gilbert Ambrazaitis]], [[Otto Ewald|AUTHOR Otto Ewald]]
</p><p class="cpabstractcardaffiliationlist">Lund University, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3236–3240
</span></p></div>
<div class="cpabstractcardabstract"><p>This study investigates acoustic and articulatory correlates of South Swedish word accents (Accent 1 vs. 2) — a tonal distinction traditionally associated with F0 timing. The study is motivated by previous findings on (i) the acoustic complexity of tonal prosody and (ii) tonal-articulatory interplay in other languages.
Acoustic and articulatory (EMA) data from two controlled experiments are reported (14 speakers in total; pilot EMA recordings with 2 speakers). Apart from the well-established F0 timing pattern, results of Experiment 1 reveal a longer duration of a post-stress consonant in Accent 2 than in Accent 1, a higher degree of creaky voice in Accent 1, as well as a deviant (two-peak) pitch pattern in Accent 2 for one of eight discourse conditions used in the experiment. Experiment 2 reveals an effect of word accent on vowel articulation, as the tongue body gesture target is reached earlier in Accent 2. It also suggests slight but (marginally) significant word-accent effects on word-initial gestural coordination, taking slightly different forms in the two speakers, as well as corresponding differences in word-initial formant patterns. Results are discussed concerning their potential perceptual relevance, as well as with reference to the c-center effect discussed within Articulatory Phonology.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Karin Puga|AUTHOR Karin Puga]]^^1^^, [[Robert Fuchs|AUTHOR Robert Fuchs]]^^2^^, [[Jane Setter|AUTHOR Jane Setter]]^^3^^, [[Peggy Mok|AUTHOR Peggy Mok]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^JLU Gießen, Germany; ^^2^^Hong Kong Baptist University, China; ^^3^^University of Reading, UK; ^^4^^Chinese University of Hong Kong, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3241–3245
</span></p></div>
<div class="cpabstractcardabstract"><p>Previous research suggests that intonation is a particularly challenging aspect of L2 speech learning. While most research focuses on speech production, we widen the focus and study the perception of intonation by L2 learners. We investigate whether advanced German learners of English have knowledge of the appropriate English intonation patterns in a narrative context with different sentence types (e.g. statements, questions). The results of a tonal pattern selection task indicate that learners (n=20) performed similar to British English controls (n=25) for some sentence types (e.g. statements, yes/no-questions), but performed significantly worse than the control group in the case of open and closed tag questions and the expression of sarcasm. The results can be explained by the fact that tag questions are the only sentence type investigated that does not exist in the learners’ L1, and sarcasm is not represented syntactically. This suggests that L1 influence can partly account for why some intonation patterns are more challenging than others, and that contextualized knowledge of the intonation patterns of the target language rather than knowledge of intonation patterns in isolation is crucial for the successful L2 learning of intonation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Heete Sahkai|AUTHOR Heete Sahkai]], [[Meelis Mihkla|AUTHOR Meelis Mihkla]]
</p><p class="cpabstractcardaffiliationlist">Institute of the Estonian Language, Estonia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3181–3185
</span></p></div>
<div class="cpabstractcardabstract"><p>Contrastive topic is an information structural category that is usually associated with a specific intonation, which tends to be similar across languages (a rising pitch accent). The aim of the present study is to examine whether this is also true of Estonian. Three potential prosodic correlates of contrastive topics are examined: marking with a particular pitch accent type, an emphatic realization of the pitch accent, and a following prosodic boundary. With respect to pitch accent types, it is found that only two subjects out of eight distinguish sentences with a contrastive topic from other types of information structure; the contour bears resemblance to contrastive topic intonation in other languages (consisting of an H* accent on the contrastive topic and an HL* accent on the focus), but is not restricted to sentences with contrastive topics. A more consistent correlate turns out to be an emphatic realization of the pitch accent carried by the contrastive topic constituent. No evidence is found of a tendency to produce contrastive topics as separate prosodic phrases.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lixia Hao|AUTHOR Lixia Hao]], [[Wei Zhang|AUTHOR Wei Zhang]], [[Yanlu Xie|AUTHOR Yanlu Xie]], [[Jinsong Zhang|AUTHOR Jinsong Zhang]]
</p><p class="cpabstractcardaffiliationlist">BLCU, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3186–3190
</span></p></div>
<div class="cpabstractcardabstract"><p>In Mandarin, Fundamental Frequency (F0) peak delay has been reported to occur frequently in the rising (R) tone or high (H) tone succeeded by a low (L) tone. Its occurrence was ascribed to articulatory constraints within a conflicting tonal context: a high offset target followed by a low onset target. To further examine the underlying mechanism of the phenomenon, the current study tests the possibility that valley delay, as opposed to peak delay, may occur in an L+H tonal context; and peak or valley delay may also occur within a compatible tonal context where adjacent tonal values are identical or similar. An experiment was done on Annotated Speech Corpus of Chinese Discourse to investigate the frequency of occurrence and amount of peak and valley delay. The results indicated that: F0 peak and valley delay frequently occurred in both conflicting and compatible tonal contexts; the phenomenon was found extensively in R tone and F (falling) tone, but barely in H tone and L tone. The findings suggest that while peak or valley delay is partially due to articulatory constraints in certain tonal contexts, the speakers’ active effort-distribution strategy based on economical principle is also behind the phenomenon.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Amandine Michelas|AUTHOR Amandine Michelas]], [[Cecile Cau|AUTHOR Cecile Cau]], [[Maud Champagne-Lavau|AUTHOR Maud Champagne-Lavau]]
</p><p class="cpabstractcardaffiliationlist">LPL (UMR 7309), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3191–3195
</span></p></div>
<div class="cpabstractcardabstract"><p>We examine the hypothesis that modelling the addressee in spoken interaction affects the production of prosodic forms by the speaker. This question was tested in an interactive paradigm that enabled us to measure prosodic variations at two levels: the global/acoustic level and the phonological one. We used a semi-spontaneous task in which French speakers gave instructions to addressees about where to place a cross between different objects (e.g., Tu mets la croix entre la souris bordeau et la maison bordeau; ‘You put the cross between the red mouse and the red house’). Each trial was composed of two noun-adjective fragments and the target was the second fragment. We manipulated (i) whether the two interlocutors shared or didn’t share the same objects and (ii) the informational status of targets to obtain variations in abstract prosodic phrasing. We found that the absence of shared knowledge between interlocutors affected the speaker’s production of prosodic forms at the global/acoustic level (i.e., pitch range and speech rate) but not at the phonological one (i.e., prosodic phrasing). These results are consistent with a mechanism in which global prosodic variations are influenced by audience design because they reflect the way that speakers help addressees to understand speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Michael Wagner|AUTHOR Michael Wagner]], [[Michael McAuliffe|AUTHOR Michael McAuliffe]]
</p><p class="cpabstractcardaffiliationlist">McGill University, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3196–3200
</span></p></div>
<div class="cpabstractcardabstract"><p>Prosody simultaneously encodes different kinds of information, including the type of speech act of an utterance (e.g., falling declarative vs. rising interrogative intonational tunes), the location of semantic focus (via prosodic prominence), and syntactic constituent structure (via prosodic phrasing). The syntactic/semantic functional dimensions (speech act, focus, constituency) are orthogonal to each other, but to which extent their prosodic correlates (tune, prominence, phrasing) are remains controversial. This paper takes a ‘bottom up’ approach to test for interactions, and reports evidence that contrary to many current theories of sentence intonation, the cues to the three dimensions are often orthogonal where interactions are predicted.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Janine Kleinhans|AUTHOR Janine Kleinhans]]^^1^^, [[Mireia Farrús|AUTHOR Mireia Farrús]]^^1^^, [[Agustín Gravano|AUTHOR Agustín Gravano]]^^2^^, [[Juan Manuel Pérez|AUTHOR Juan Manuel Pérez]]^^2^^, [[Catherine Lai|AUTHOR Catherine Lai]]^^3^^, [[Leo Wanner|AUTHOR Leo Wanner]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universitat Pompeu Fabra, Spain; ^^2^^Universidad de Buenos Aires, Argentina; ^^3^^University of Edinburgh, UK; ^^4^^Universitat Pompeu Fabra, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3201–3205
</span></p></div>
<div class="cpabstractcardabstract"><p>This work aims to explore the correlation between the discourse structure of a spoken monologue and its prosody by predicting discourse relations from different prosodic attributes. For this purpose, a corpus of semi-spontaneous monologues in English has been automatically annotated according to the Rhetorical Structure Theory, which models coherence in text via rhetorical relations. From corresponding audio files, prosodic features such as pitch, intensity, and speech rate have been extracted from different contexts of a relation. Supervised classification tasks using Support Vector Machines have been performed to find relationships between prosodic features and rhetorical relations. Preliminary results show that intensity combined with other features extracted from intra- and intersegmental environments is the feature with the highest predictability for a discourse relation. The prediction of rhetorical relations from prosodic features and their combinations is straightforwardly applicable to several tasks such as speech understanding or generation. Moreover, the knowledge of how rhetorical relations should be marked in terms of prosody will serve as a basis to improve speech synthesis applications and make voices sound more natural and expressive.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Elizabeth Godoy|AUTHOR Elizabeth Godoy]], [[James R. Williamson|AUTHOR James R. Williamson]], [[Thomas F. Quatieri|AUTHOR Thomas F. Quatieri]]
</p><p class="cpabstractcardaffiliationlist">MIT Lincoln Laboratory, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3206–3210
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech prosody encodes information about language and communicative intent as well as speaker identity and state. Consequently, a host of speech technologies could benefit from increased understanding of prosodic phenomena and corresponding acoustics. A recently developed comprehensive prosodic transcription system called RaP (Rhythm-and-Pitch) annotates both perceived rhythmic prominences and pitch tones in speech. Using RaP-annotated speech corpora, the present work analyzes relationships between perceived prosodic events and acoustic features including syllable duration and novel measures of intensity and fundamental frequency. Canonical Correlation Analysis (CCA) reveals two dominant prosodic dimensions relating the acoustic features and RaP annotations. The first captures perceived prosodic emphasis of syllables indicated by strong metrical beats and significant pitch variability (i.e. presence of either high or low pitch tones). Acoustically, this dimension is described most by syllable duration followed by the mean intensity and fundamental frequency measures. The second CCA dimension then primarily discriminates pitch tone level (high versus low), indicated mainly by the mean fundamental frequency measure. Finally, within a leave-one-out cross-validation framework, RaP prosodic events are well-predicted from acoustic features (AUC between 0.78 and 0.84). Future work will exploit automated RaP labelling in contexts ranging from language learning to neurological disorder recognition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sofoklis Kakouros|AUTHOR Sofoklis Kakouros]], [[Okko Räsänen|AUTHOR Okko Räsänen]], [[Paavo Alku|AUTHOR Paavo Alku]]
</p><p class="cpabstractcardaffiliationlist">Aalto University, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3211–3215
</span></p></div>
<div class="cpabstractcardabstract"><p>Spectral tilt has been suggested to be a correlate of prominence in speech, although several studies have not replicated this empirically. This may be partially due to the lack of a standard method for tilt estimation from speech, rendering interpretations and comparisons between studies difficult. In addition, little is known about the performance of tilt estimators for prominence detection in the presence of noise. In this work, we investigate and compare several standard tilt measures on quantifying prominence in spoken Dutch and under different levels of additive noise. We also compare these measures with other acoustic correlates of prominence, namely, energy, F0, and duration. Our results provide further empirical support for the finding that tilt is a systematic correlate of prominence, at least in Dutch, even though energy, F0, and duration appear still to be more robust features for the task. In addition, our results show that there are notable differences between different tilt estimators in their ability to discriminate prominent words from non-prominent ones in different levels of noise.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jianjing Kuang|AUTHOR Jianjing Kuang]]
</p><p class="cpabstractcardaffiliationlist">University of Pennsylvania, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3216–3220
</span></p></div>
<div class="cpabstractcardabstract"><p>This study looks into the distribution of creaky voice in Mandarin in continuous speech. A creaky voice detector was used to automatically detect the appearance of creaky voice in a large-scale Mandarin corpus (Sinica COSPRO corpus). As the prosodic information has been annotated in the corpus, we were able to look at the distribution of creaky voice as a function of the interaction between tone and prosodic structures. As expected, among the five tonal categories (four lexical tones and one neutral tone), creaky voice is most likely to occur with Tone 3 and the neutral tone, followed by Tone 2 and Tone 4. Prosodic boundaries also play important roles, as the likelihood of creak increases when the prosodic boundaries are larger, regardless of the tonal categories. It is also confirmed that the pitch range for the occurrence of creaky voice is 110 Hz for male speakers and 170 Hz for female speakers, consistent with previous small-scale studies. Finally, male speakers have a higher overall rate of creaky voice than female speakers. Altogether, this study validates the hypotheses from previous studies, and provides a better understanding of voice-source variation in different prosodic conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Emilia Parada-Cabaleiro|AUTHOR Emilia Parada-Cabaleiro]], [[Alice Baird|AUTHOR Alice Baird]], [[Anton Batliner|AUTHOR Anton Batliner]], [[Nicholas Cummins|AUTHOR Nicholas Cummins]], [[Simone Hantke|AUTHOR Simone Hantke]], [[Björn Schuller|AUTHOR Björn Schuller]]
</p><p class="cpabstractcardaffiliationlist">Universität Passau, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3246–3250
</span></p></div>
<div class="cpabstractcardabstract"><p>Noise pollution is part of our daily life, affecting millions of people, particularly those living in urban environments. Noise alters our perception and decreases our ability to understand others. Considering this, speech perception in background noise has been extensively studied, showing that especially white noise can damage listener perception. However, the perception of emotions in noisified speech has not been explored with as much depth. In the present study, we use artificial background noise conditions, by applying noise to a subset of the GEMEP corpus (emotions expressed in nonsense speech). Noises were at varying intensities and ‘colours’; white, pink, and brownian. The categorical and dimensional perceptual test was completed by 26 listeners. The results indicate that background noise conditions influence the perception of emotion in speech — pink noise most, brownian least. Worsened perception invokes higher confusion, especially with sadness, an emotion with less pronounced prosodic characteristics. Yet, all this does not lead to a break-down of the ‘cognitive-emotional space’ in a Non-metric MultiDimensional Scaling representation. The gender of speakers and the cultural background of listeners do not seem to play a role.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shao-Yen Tseng|AUTHOR Shao-Yen Tseng]]^^1^^, [[Brian Baucom|AUTHOR Brian Baucom]]^^2^^, [[Panayiotis Georgiou|AUTHOR Panayiotis Georgiou]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Southern California, USA; ^^2^^University of Utah, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3291–3295
</span></p></div>
<div class="cpabstractcardabstract"><p>Identifying complex behavior in human interactions for observational studies often involves the tedious process of transcribing and annotating large amounts of data. While there is significant work towards accurate transcription in Automatic Speech Recognition, automatic Natural Language Understanding of high-level human behaviors from the transcribed text is still at an early stage of development. In this paper we present a novel approach for modeling human behavior using sentence embeddings and propose an automatic behavior annotation framework. We explore unsupervised methods of extracting semantic information, using seq2seq models, into deep sentence embeddings and demonstrate that these embeddings capture behaviorally meaningful information. Our proposed framework utilizes LSTM Recurrent Neural Networks to estimate behavior trajectories from these sentence embeddings. Finally, we employ fusion to compare our high-resolution behavioral trajectories with the coarse, session-level behavioral ratings of human annotators in Couples Therapy. Our experiments show that behavior annotation using this framework achieves better results than prior methods and approaches or exceeds human performance in terms of annotator agreement.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Md. Nasir|AUTHOR Md. Nasir]]^^1^^, [[Brian Baucom|AUTHOR Brian Baucom]]^^2^^, [[Craig J. Bryan|AUTHOR Craig J. Bryan]]^^2^^, [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]^^1^^, [[Panayiotis Georgiou|AUTHOR Panayiotis Georgiou]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Southern California, USA; ^^2^^University of Utah, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3296–3300
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we analyze a 53-hour speech corpus of interactions of soldiers who had recently attempted suicide or had strong suicidal ideation conversing with their therapists. In particular, we study the complexity in therapist-patient speech as a marker of their emotional bond. Emotional bond is the extent to which the patient feels understood by and connected to the therapist. First, we extract speech features from audio recordings of their interactions. Then, we consider the nonlinear time series representation of those features and compute complexity measures based on the Lyapunov coefficient and correlation dimension. For the majority of the subjects, we observe that speech complexity in therapist-patient pairs is higher for the interview sessions, when compared to that of the rest of their interactions (intervention and post-interview follow-up). This indicates that entrainment (adapting to each other’s speech) between the patient and the therapist is lower during the interview than regular interactions. This observation is consistent with prior studies in clinical psychology, considering that assessment interviews typically involve the therapist asking routine questions to enquire about the patient’s suicidal thoughts and feelings. In addition, we find that complexity is negatively correlated with the patient’s perceived emotional bond with the therapist.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhaocheng Huang|AUTHOR Zhaocheng Huang]], [[Julien Epps|AUTHOR Julien Epps]]
</p><p class="cpabstractcardaffiliationlist">University of New South Wales, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3301–3305
</span></p></div>
<div class="cpabstractcardabstract"><p>Despite recent interest in continuous prediction of dimensional emotions, the dynamical aspect of emotions has received less attention in automated systems. This paper investigates how emotion change can be effectively incorporated to improve continuous prediction of arousal and valence from speech. Significant correlations were found between emotion ratings and their dynamics during investigations on the RECOLA database, and here we examine how to best exploit them using a Kalman filter. In particular, we investigate the correlation between predicted arousal and valence dynamics with arousal and valence ground truth; the Kalman filter internal delay for estimating the state transition matrix; the use of emotion dynamics as a measurement input to a Kalman filter; and how multiple probabilistic Kalman filter outputs can be effectively fused. Evaluation results show that correct dynamics estimation and internal delay settings allow up to 5% and 58% relative improvement in arousal and valence prediction respectively over existing Kalman filter implementations. Fusion based on probabilistic Kalman filter outputs yields further gains.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[James Gibson|AUTHOR James Gibson]]^^1^^, [[Doğan Can|AUTHOR Doğan Can]]^^1^^, [[Panayiotis Georgiou|AUTHOR Panayiotis Georgiou]]^^1^^, [[David C. Atkins|AUTHOR David C. Atkins]]^^2^^, [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Southern California, USA; ^^2^^University of Washington, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3251–3255
</span></p></div>
<div class="cpabstractcardabstract"><p>In psychotherapy interactions there are several desirable and undesirable behaviors that give insight into the efficacy of the counselor and the progress of the client. It is important to be able to identify when these target behaviors occur and what aspects of the interaction signal their occurrence. Manual observation and annotation of these behaviors is costly and time intensive. In this paper, we use long short term memory networks equipped with an attention mechanism to process transcripts of addiction counseling sessions and predict prominent counselor and client behaviors. We demonstrate that this approach gives competitive performance while also providing additional interpretability.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Torsten Wörtwein|AUTHOR Torsten Wörtwein]]^^1^^, [[Tadas Baltrušaitis|AUTHOR Tadas Baltrušaitis]]^^2^^, [[Eugene Laksana|AUTHOR Eugene Laksana]]^^3^^, [[Luciana Pennant|AUTHOR Luciana Pennant]]^^4^^, [[Elizabeth S. Liebson|AUTHOR Elizabeth S. Liebson]]^^4^^, [[Dost Öngür|AUTHOR Dost Öngür]]^^4^^, [[Justin T. Baker|AUTHOR Justin T. Baker]]^^4^^, [[Louis-Philippe Morency|AUTHOR Louis-Philippe Morency]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^KIT, Germany; ^^2^^Carnegie Mellon University, USA; ^^3^^University of Southern California, USA; ^^4^^McLean Hospital, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3256–3260
</span></p></div>
<div class="cpabstractcardabstract"><p>Various forms of psychotic disorders, including schizophrenia, can influence how we speak. Therefore, clinicians assess speech and language behaviors of their patients. While it is difficult for humans to quantify speech behaviors precisely, acoustic descriptors, such as tenseness of voice and speech rate, can be quantified automatically. In this work, we identify previously unstudied acoustic descriptors related to the severity of psychotic symptoms within a clinical population (N=29). Our dataset consists of semi-structured interviews between patients and clinicians. Psychotic disorders are often characterized by two groups of symptoms: negative and positive. While negative symptoms are also prevalent in disorders such as depression, positive symptoms in psychotic disorders have rarely been studied from an acoustic and computational perspective. Our experiments show relationships between psychotic symptoms and acoustic descriptors related to voice quality consistency, variation of speech rate and volume, vowel space, and a parameter of glottal flow. Further, we show that certain acoustic descriptors can track a patient’s state from admission to discharge. Finally, we demonstrate that measures from the Brief Psychiatric Rating Scale (BPRS) can be estimated with acoustic descriptors.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ya-Tse Wu|AUTHOR Ya-Tse Wu]]^^1^^, [[Hsuan-Yu Chen|AUTHOR Hsuan-Yu Chen]]^^1^^, [[Yu-Hsien Liao|AUTHOR Yu-Hsien Liao]]^^1^^, [[Li-Wei Kuo|AUTHOR Li-Wei Kuo]]^^2^^, [[Chi-Chun Lee|AUTHOR Chi-Chun Lee]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^National Tsing Hua University, Taiwan; ^^2^^National Health Research Institute, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3261–3265
</span></p></div>
<div class="cpabstractcardabstract"><p>Developing automatic emotion recognition by modeling expressive behaviors is becoming crucial in enabling the next generation design of human-machine interface. Also, with the availability of functional magnetic resonance imaging (fMRI), researchers have also conducted studies into quantitative understanding of vocal emotion perception mechanism. In this work, our aim is twofold: 1) investigating whether the neural-responses can be used to automatically decode the emotion labels of vocal stimuli, and 2) combining acoustic and fMRI features to improve the speech emotion recognition accuracies. We introduce a novel framework of lobe-dependent convolutional neural network (LD-CNN) to provide better modeling of perceivers’ neural-responses on vocal emotion. Furthermore, by fusing LD-CNN with acoustic features, we demonstrate an overall 63.17% accuracy in a four-class emotion recognition task (9.89% and 14.42% relative improvement compared to the acoustic-only and the fMRI-only features). Our analysis further shows that the temporal lobe possesses the most information in decoding emotion labels; the fMRI and the acoustic information are complementary to each other, where neural-responses and acoustic features are better at discriminating along the valence and activation dimensions, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bogdan Vlasenko|AUTHOR Bogdan Vlasenko]], [[Hesam Sagha|AUTHOR Hesam Sagha]], [[Nicholas Cummins|AUTHOR Nicholas Cummins]], [[Björn Schuller|AUTHOR Björn Schuller]]
</p><p class="cpabstractcardaffiliationlist">Universität Passau, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3266–3270
</span></p></div>
<div class="cpabstractcardabstract"><p>Whilst studies on emotion recognition show that gender-dependent analysis can improve emotion classification performance, the potential differences in the manifestation of depression between male and female speech have yet to be fully explored. This paper presents a qualitative analysis of phonetically aligned acoustic features to highlight differences in the manifestation of depression. Gender-dependent analysis with phonetically aligned gender-dependent features are used for speech-based depression recognition. The presented experimental study reveals gender differences in the effect of depression on vowel-level features. Considering the experimental study, we also show that a small set of knowledge-driven gender-dependent vowel-level features can outperform state-of-the-art turn-level acoustic features when performing a binary depressed speech recognition task. A combination of these preselected gender-dependent vowel-level features with turn-level standardised openSMILE features results in additional improvement for depression recognition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Farhad Bin Siddique|AUTHOR Farhad Bin Siddique]], [[Pascale Fung|AUTHOR Pascale Fung]]
</p><p class="cpabstractcardaffiliationlist">HKUST, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3271–3275
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a multilingual personality classifier that uses text data from social media and Youtube Vlog transcriptions, and maps them into Big Five personality traits using a Convolutional Neural Network (CNN). We first train unsupervised bilingual word embeddings from an English-Chinese parallel corpus, and use these trained word representations as input to our CNN. This enables our model to yield relatively high cross-lingual and multilingual performance on Chinese texts, after training on the English dataset for example. We also train monolingual Chinese embeddings from a large Chinese text corpus and then train our CNN model on a Chinese dataset consisting of conversational dialogue labeled with personality. We achieve an average F-score of 66.1 in our multilingual task compared to 63.3 F-score in cross-lingual, and 63.2 F-score in the monolingual performance. </p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yoshiko Arimoto|AUTHOR Yoshiko Arimoto]]^^1^^, [[Hiroki Mori|AUTHOR Hiroki Mori]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^RIKEN, Japan; ^^2^^Utsunomiya University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3276–3280
</span></p></div>
<div class="cpabstractcardabstract"><p>The psychological classification of emotion has two main approaches. One is emotion category, in which emotions are classified into discrete and fundamental groups; the other is emotion dimension, in which emotions are characterized by multiple continuous scales. The cognitive classification of emotion by humans perceived from speech is not sufficiently established. Although there have been several studies on such classification, they did not discuss it deeply. Moreover, the relationship between emotion category and emotion dimension perceived from speech is not well studied. Aiming to establish common emotion labels for emotional speech, this study elucidated the relationship between the emotion category and the emotion dimension perceived by speech by conducting an experiment of cross-corpus emotion labeling with two different Japanese dialogue corpora (Online Gaming Voice Chat Corpus with Emotional Label (OGVC) and Utsunomiya University Spoken Dialogue Database for Paralinguistic Information Studies (UUDB)). A likelihood ratio test was conducted to assess the independency of one emotion category from the others in three-dimensional emotional space. This experiment revealed that many emotion categories exhibited independency from the other emotion categories. Only the neutral states did not exhibit independency from the three emotions of sadness, disgust, and surprise.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Cedric Fayet|AUTHOR Cedric Fayet]], [[Arnaud Delhay|AUTHOR Arnaud Delhay]], [[Damien Lolive|AUTHOR Damien Lolive]], [[Pierre-François Marteau|AUTHOR Pierre-François Marteau]]
</p><p class="cpabstractcardaffiliationlist">IRISA, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3281–3285
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents an attempt to evaluate three different sets of features extracted from prosodic descriptors and Big Five traits for building an anomaly detector. The Big Five model enables to capture personality information. Big Five traits are extracted from a manual annotation while Prosodic features are extracted directly from the speech signal. Two different anomaly detection methods are evaluated: Gaussian Mixture Model (GMM) and One-Class SVM (OC-SVM), each one combined with a threshold classification to decide the “normality” of a sample. The different combinations of models and feature sets are evaluated on the SSPNET-Personality corpus which has already been used in several experiments, including a previous work on separating two types of personality profiles in a supervised way. In this work, we propose the above mentioned unsupervised or semi-supervised methods, and discuss their performance, to detect particular audio-clips produced by a speaker with an abnormal personality. Results show that using automatically extracted prosodic features competes with the Big Five traits. The overall detection performance achieved by the best model is around 0.8 (F1-measure).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hayakawa Akira|AUTHOR Hayakawa Akira]]^^1^^, [[Carl Vogel|AUTHOR Carl Vogel]]^^1^^, [[Saturnino Luz|AUTHOR Saturnino Luz]]^^2^^, [[Nick Campbell|AUTHOR Nick Campbell]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Trinity College Dublin, Ireland; ^^2^^University of Edinburgh, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3286–3290
</span></p></div>
<div class="cpabstractcardabstract"><p>This study focuses on the adaptation of subjects in Human-to-Human (H2H) communication in spontaneous dialogues in two different settings. The speech rate of sixteen dialogues from the HCRC Map Task corpus has been analyzed as direct H2H communication, while fifteen dialogues from the ILMT-s2s corpus have been analyzed as a Speech-to-Speech Machine Translation (S2S-MT) mediated H2H communication comparison. The analysis shows that while the mean speech rate of the subjects in the two task oriented corpora differ, in both corpora the role of the subject causes a significant difference in the speech rate with the Information Giver using a slower speech rate than the Information Follower. Also the different settings of the dialogue recordings (with or without eye contact in the HCRC corpus and with or without live video streaming in the ILMT-s2s corpus) only show a negligible difference in the speech rate. However, the gender of the subjects has provided an interesting difference, with the female subjects of the ILMT-s2s corpus using a slower speech rate than the male subjects, while gender does not show any difference in the HCRC corpus. This indicates that the difference is not from performing the map task, but a result of their adaptation strategy to the S2S-MT system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kugatsu Sadamitsu|AUTHOR Kugatsu Sadamitsu]], [[Yukinori Homma|AUTHOR Yukinori Homma]], [[Ryuichiro Higashinaka|AUTHOR Ryuichiro Higashinaka]], [[Yoshihiro Matsuo|AUTHOR Yoshihiro Matsuo]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3306–3310
</span></p></div>
<div class="cpabstractcardabstract"><p>Natural language understanding (NLU) is an important module of spoken dialogue systems. One of the difficulties when it comes to adapting NLU to new domains is the high cost of constructing new training data for each domain. To reduce this cost, we propose a zero-shot learning of NLU that takes into account the sequential structures of sentences together with general question types across different domains. Experimental results show that our methods achieve higher accuracy than baseline methods in two completely different domains (insurance and sightseeing).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Carlos-D. Martínez-Hinarejos|AUTHOR Carlos-D. Martínez-Hinarejos]]^^1^^, [[Zuzanna Parcheta|AUTHOR Zuzanna Parcheta]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universidad Politécnica de Valencia, Spain; ^^2^^Sciling, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3349–3353
</span></p></div>
<div class="cpabstractcardabstract"><p>Natural language recognition techniques can be applied not only to speech signals, but to other signals that represent natural language units (e.g., words and sentences). This is the case of sign language recognition, which is usually employed by deaf people to communicate. The use of recognition techniques may allow users of this language to communicate more independently with non-signal users. Several works have been done for different variants of sign languages, but in most cases their vocabulary is quite limited and they only recognise gestures corresponding to isolated words. In this work, we propose gesture recognisers which make use of typical Continuous Density Hidden Markov Models. They solve not only the isolated word problem, but also the recognition of basic sentences using the Spanish Sign Language with a higher vocabulary than in other approximations. Different topologies and Gaussian mixtures are studied. Results show that our proposal provides promising results that are the first step to obtain a general automatic recognition of Spanish Sign Language.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Michelle Renee Morales|AUTHOR Michelle Renee Morales]]^^1^^, [[Stefan Scherer|AUTHOR Stefan Scherer]]^^2^^, [[Rivka Levitan|AUTHOR Rivka Levitan]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CUNY Graduate Center, USA; ^^2^^University of Southern California, USA; ^^3^^CUNY Brooklyn College, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3354–3358
</span></p></div>
<div class="cpabstractcardabstract"><p>The primary use of speech is in face-to-face interactions and situational context and human behavior therefore intrinsically shape and affect communication. In order to usefully model situational awareness, machines must have access to the same streams of information humans have access to. In other words, we need to provide machines with features that represent each communicative modality: face and gesture, voice and speech, and language. This paper presents OpenMM: an open-source multimodal feature extraction tool. We build upon existing open-source repositories to present the first publicly available tool for multimodal feature extraction. The tool provides a pipeline for researchers to easily extract visual and acoustic features. In addition, the tool also performs automatic speech recognition (ASR) and then uses the transcripts to extract linguistic features. We evaluate the OpenMM’s multimodal feature set on deception, depression and sentiment classification tasks and show its performance is very promising. This tool provides researchers with a simple way of extracting multimodal features and consequently a richer and more robust feature representation for machine learning tasks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuyun Huang|AUTHOR Yuyun Huang]], [[Emer Gilmartin|AUTHOR Emer Gilmartin]], [[Nick Campbell|AUTHOR Nick Campbell]]
</p><p class="cpabstractcardaffiliationlist">Trinity College Dublin, Ireland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3359–3363
</span></p></div>
<div class="cpabstractcardabstract"><p>Conversational engagement is a multimodal phenomenon and an essential cue to assess both human-human and human-robot communication. Speaker-dependent and speaker-independent scenarios were addressed in our engagement study. Handcrafted audio-visual features were used. Fixed window sizes for feature fusion method were analysed. Novel dynamic window size selection and multimodal bi-directional long short term memory (Multimodal BLSTM) approaches were proposed and evaluated for engagement level recognition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Naoki Sawada|AUTHOR Naoki Sawada]]^^1^^, [[Ryo Masumura|AUTHOR Ryo Masumura]]^^1^^, [[Hiromitsu Nishizaki|AUTHOR Hiromitsu Nishizaki]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NTT, Japan; ^^2^^University of Yamanashi, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3311–3315
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes a novel classification method for multi-stream conversational documents. Documents of contact center dialogues or meetings are often composed of multiple source documents that are transcriptions of the recordings of each speaker’s channel. To enhance the classification performance of such multi-stream conversational documents, three main advances over the previous method are introduced. The first is a parallel hierarchical attention network (PHAN) for multi-stream conversational document modeling. PHAN can precisely capture word and sentence structures of individual source documents and efficiently integrate them. The second is a shared memory reader that can yield a shared attention mechanism. The shared memory reader highlights common important information in a conversation. Our experiments on a call category classification in contact center dialogues show that PHAN together with the shared memory reader outperforms the single document modeling method and previous multi-stream document modeling method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mohamed Morchid|AUTHOR Mohamed Morchid]]
</p><p class="cpabstractcardaffiliationlist">LIA (EA 4128), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3316–3319
</span></p></div>
<div class="cpabstractcardabstract"><p>Long Short-Term Memory (LSTM) Recurrent Neural Networks (RNN) require 4 gates to learn short- and long-term dependencies for a given sequence of basic elements. Recently, “Gated Recurrent Unit” (GRU) has been introduced and requires fewer gates than LSTM (reset and update gates), to code short- and long-term dependencies and reaches equivalent performances to LSTM, with less processing time during the learning. The “Leaky integration Unit” (LU) is a GRU with a single gate (update) that codes mostly long-term dependencies quicker than LSTM or GRU (small number of operations for learning). This paper proposes a novel RNN that takes advantage of LSTM, GRU (short- and long-term dependencies) and the LU (fast learning) called “Internal Memory Gate” (IMG). The effectiveness and the robustness of the proposed IMG-RNN is evaluated during a classification task of a small corpus of spoken dialogues from the DECODA project that allows us to evaluate the capability of each RNN to code short-term dependencies. The experiments show that IMG-RNNs reach better accuracies with a gain of 0.4 points compared to LSTM- and GRU-RNNs and 0.7 points compared to the LU-RNN. Moreover, IMG-RNN requires less processing time than GRU or LSTM with a gain of 19% and 50% respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mandy Korpusik|AUTHOR Mandy Korpusik]], [[Zachary Collins|AUTHOR Zachary Collins]], [[James Glass|AUTHOR James Glass]]
</p><p class="cpabstractcardaffiliationlist">MIT, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3320–3324
</span></p></div>
<div class="cpabstractcardabstract"><p>Character-based embedding models provide robustness for handling misspellings and typos in natural language. In this paper, we explore convolutional neural network based embedding models for handling out-of-vocabulary words in a meal description food ranking task. We demonstrate that character-based models combined with a standard word-based model improves the top-5 recall of USDA database food items from 26.3% to 30.3% on a test set of all USDA foods with typos simulated in 10% of the data. We also propose a new reranking strategy for predicting the top USDA food matches given a meal description, which significantly outperforms our prior method of n-best decoding with a finite state transducer, improving the top-5 recall on the all USDA foods task from 20.7% to 63.8%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Titouan Parcollet|AUTHOR Titouan Parcollet]], [[Mohamed Morchid|AUTHOR Mohamed Morchid]], [[Georges Linarès|AUTHOR Georges Linarès]]
</p><p class="cpabstractcardaffiliationlist">LIA (EA 4128), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3325–3328
</span></p></div>
<div class="cpabstractcardabstract"><p>In the last decades, encoder-decoders or autoencoders (AE) have received a great interest from researchers due to their capability to construct robust representations of documents in a low dimensional subspace. Nonetheless, autoencoders reveal little in way of spoken document internal structure by only considering words or topics contained in the document as an isolate basic element, and tend to overfit with small corpus of documents. Therefore, Quaternion Multi-layer Perceptrons (QMLP) have been introduced to capture such internal latent dependencies, whereas denoising autoencoders (DAE) are composed with different stochastic noises to better process small set of documents. This paper presents a novel autoencoder based on both hitherto-proposed DAE (to manage small corpus) and the QMLP (to consider internal latent structures) called “Quaternion denoising encoder-decoder” (QDAE). Moreover, the paper defines an original angular Gaussian noise adapted to the specificity of hyper-complex algebra. The experiments, conducted on a theme identification task of spoken dialogues from the DECODA framework, show that the QDAE obtains promising gains of 3% and 1.5% compared to the standard real valued denoising autoencoder and the QMLP respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Edwin Simonnet|AUTHOR Edwin Simonnet]]^^1^^, [[Sahar Ghannay|AUTHOR Sahar Ghannay]]^^1^^, [[Nathalie Camelin|AUTHOR Nathalie Camelin]]^^1^^, [[Yannick Estève|AUTHOR Yannick Estève]]^^1^^, [[Renato De Mori|AUTHOR Renato De Mori]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LIUM (EA 4023), France; ^^2^^LIA (EA 4128), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3329–3333
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper addresses the problem of automatic speech recognition (ASR) error detection and its use for improving spoken language understanding (SLU) systems. In this study, the SLU task consists in automatically extracting, from ASR transcriptions, semantic concepts and concept/value pairs in, e.g., a touristic information system. An approach is proposed for enriching the set of semantic labels with error specific labels and by using a recently proposed neural approach based on word embeddings to compute well calibrated ASR confidence measures. Experimental results are reported showing that it is possible to decrease significantly the Concept/Value Error Rate with a state of the art system, outperforming previously published performance on the same experimental data. It is also shown that by combining an SLU approach based on conditional random fields with a neural encoder/decoder attention based architecture, it is possible to effectively identify confidence islands and uncertain semantic output segments useful for deciding appropriate error handling actions by the dialogue manager strategy.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mingbo Ma|AUTHOR Mingbo Ma]]^^1^^, [[Kai Zhao|AUTHOR Kai Zhao]]^^1^^, [[Liang Huang|AUTHOR Liang Huang]]^^1^^, [[Bing Xiang|AUTHOR Bing Xiang]]^^2^^, [[Bowen Zhou|AUTHOR Bowen Zhou]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Oregon State University, USA; ^^2^^IBM, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3334–3338
</span></p></div>
<div class="cpabstractcardabstract"><p>Sentence-level classification and sequential labeling are two fundamental tasks in language understanding. While these two tasks are usually modeled separately, in reality, they are often correlated, for example in intent classification and slot filling, or in topic classification and named-entity recognition. In order to utilize the potential benefits from their correlations, we propose a jointly trained model for learning the two tasks simultaneously via Long Short-Term Memory (LSTM) networks. This model predicts the sentence-level category and the word-level label sequence from the stepwise output hidden representations of LSTM. We also introduce a novel mechanism of “sparse attention” to weigh words differently based on their semantic relevance to sentence-level classification. The proposed method outperforms baseline models on ATIS and TREC datasets.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Neha Nayak|AUTHOR Neha Nayak]], [[Dilek Hakkani-Tür|AUTHOR Dilek Hakkani-Tür]], [[Marilyn Walker|AUTHOR Marilyn Walker]], [[Larry Heck|AUTHOR Larry Heck]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3339–3343
</span></p></div>
<div class="cpabstractcardabstract"><p>Natural language generation for task-oriented dialogue systems aims to effectively realize system dialogue actions. All natural language generators (NLGs) must realize grammatical, natural and appropriate output, but in addition, generators for task-oriented dialogue must faithfully perform a specific dialogue act that conveys specific semantic information, as dictated by the dialogue policy of the system dialogue manager. Most previous work on deep learning methods for task-oriented NLG assumes that generation output can be an utterance skeleton. Utterances are delexicalized, with variable names for slots, which are then replaced with actual values as part of post-processing. However, the values of slots do, in fact, influence the lexical selection in the surrounding context as well as the overall sentence plan. To model this effect, we investigate sequence-to-sequence (seq2seq) models in which slot values are included as part of the input sequence and the output surface form. Furthermore, we study whether a separate sentence planning module that decides on grouping of slot value mentions as input to the seq2seq model results in more natural sentences than a seq2seq model that aims to jointly learn the plan and the surface realization.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Matthieu Riou|AUTHOR Matthieu Riou]], [[Bassam Jabaian|AUTHOR Bassam Jabaian]], [[Stéphane Huet|AUTHOR Stéphane Huet]], [[Fabrice Lefèvre|AUTHOR Fabrice Lefèvre]]
</p><p class="cpabstractcardaffiliationlist">LIA (EA 4128), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3344–3348
</span></p></div>
<div class="cpabstractcardabstract"><p>Following some recent propositions to handle natural language generation in spoken dialog systems with long short-term memory recurrent neural network models [1] we first investigate a variant thereof with the objective of a better integration of the attention subnetwork. Then our main objective is to propose and evaluate a framework to adapt the NLG module online through direct interactions with the users. When doing so the basic way is to ask the user to utter an alternative sentence to express a particular dialog act. But then the system has to decide between using an automatic transcription or to ask for a manual transcription. To do so a reinforcement learning approach based on an adversarial bandit scheme is retained. We show that by defining appropriately the rewards as a linear combination of expected payoffs and costs of acquiring the new data provided by the user, a system design can balance between improving the system’s performance towards a better match with the user’s preferences and the burden associated with it.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chin-Cheng Hsu|AUTHOR Chin-Cheng Hsu]], [[Hsin-Te Hwang|AUTHOR Hsin-Te Hwang]], [[Yi-Chiao Wu|AUTHOR Yi-Chiao Wu]], [[Yu Tsao|AUTHOR Yu Tsao]], [[Hsin-Min Wang|AUTHOR Hsin-Min Wang]]
</p><p class="cpabstractcardaffiliationlist">Academia Sinica, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3364–3368
</span></p></div>
<div class="cpabstractcardabstract"><p>Building a voice conversion (VC) system from non-parallel speech corpora is challenging but highly valuable in real application scenarios. In most situations, the source and the target speakers do not repeat the same texts or they may even speak different languages. In this case, one possible, although indirect, solution is to build a generative model for speech. Generative models focus on explaining the observations with latent variables instead of learning a pairwise transformation function, thereby bypassing the requirement of speech frame alignment. In this paper, we propose a non-parallel VC framework with a variational autoencoding Wasserstein generative adversarial network (VAW-GAN) that explicitly considers a VC objective when building the speech model. Experimental results corroborate the capability of our framework for building a VC system from unaligned data, and demonstrate improved conversion quality.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Runnan Li|AUTHOR Runnan Li]]^^1^^, [[Zhiyong Wu|AUTHOR Zhiyong Wu]]^^1^^, [[Yishuang Ning|AUTHOR Yishuang Ning]]^^1^^, [[Lifa Sun|AUTHOR Lifa Sun]]^^2^^, [[Helen Meng|AUTHOR Helen Meng]]^^1^^, [[Lianhong Cai|AUTHOR Lianhong Cai]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tsinghua University, China; ^^2^^Chinese University of Hong Kong, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3409–3413
</span></p></div>
<div class="cpabstractcardabstract"><p>From speech, speaker identity can be mostly characterized by the spectro-temporal structures of spectrum. Although recent researches have demonstrated the effectiveness of employing long short-term memory (LSTM) recurrent neural network (RNN) in voice conversion, traditional LSTM-RNN based approaches usually focus on temporal evolutions of speech features only. In this paper, we improve the conventional LSTM-RNN method for voice conversion by employing the two-dimensional time-frequency LSTM (TFLSTM) to model spectro-temporal warping along both time and frequency axes. A multi-task learned structured output layer (SOL) is afterward adopted to capture the dependencies between spectral and pitch parameters for further improvement, where spectral parameter targets are conditioned upon pitch parameters prediction. Experimental results show the proposed approach outperforms conventional systems in speech quality and speaker similarity.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Miguel Varela Ramos|AUTHOR Miguel Varela Ramos]]^^1^^, [[Alan W. Black|AUTHOR Alan W. Black]]^^2^^, [[Ramon Fernandez Astudillo|AUTHOR Ramon Fernandez Astudillo]]^^1^^, [[Isabel Trancoso|AUTHOR Isabel Trancoso]]^^1^^, [[Nuno Fonseca|AUTHOR Nuno Fonseca]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^INESC-ID Lisboa, Portugal; ^^2^^Carnegie Mellon University, USA; ^^3^^Politécnico de Leiria, Portugal</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3414–3418
</span></p></div>
<div class="cpabstractcardabstract"><p>Voice conversion techniques aim to modify a subject’s voice characteristics in order to mimic those of another person. Due to the difference in utterance length between source and target speaker, state of the art voice conversion systems often rely on a frame alignment pre-processing step. This step aligns the entire utterances with algorithms such as dynamic time warping (DTW) that introduce errors, hindering system performance. In this paper we present a new technique that avoids the alignment of entire utterances at frame level, while keeping the local context during training. For this purpose, we combine an RNN model with the use of phoneme or syllable-level information, obtained from a speech recognition system. This system segments the utterances into segments which then can be grouped into overlapping windows, providing the needed context for the model to learn the temporal dependencies. We show that with this approach, notable improvements can be attained over a state of the art RNN voice conversion system on the CMU ARCTIC database. It is also worth noting that with this technique it is possible to halve the training data size and still outperform the baseline.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Toru Nakashika|AUTHOR Toru Nakashika]]
</p><p class="cpabstractcardaffiliationlist">University of Electro-Communications, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3369–3373
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, a new energy-based probabilistic model, called CAB (Cluster Adaptive restricted Boltzmann machine), is proposed for voice conversion (VC) that does not require parallel data during the training and requires only a small amount of speech data during the adaptation. Most of the existing VC methods require parallel data for training. Recently, VC methods that do not require parallel data (called non-parallel VCs) have been also proposed and are attracting much attention because they do not require prepared or recorded parallel speech data, unlike conventional approaches. The proposed CAB model is aimed at statistical non-parallel VC based on cluster adaptive training (CAT). This extends the VC method used in our previous model, ARBM (adaptive restricted Boltzmann machine). The ARBM approach assumes that any speech signals can be decomposed into speaker-invariant phonetic information and speaker-identity information using the ARBM adaptation matrices of each speaker. VC is achieved by switching the source speaker’s identity to that of the target speaker while retaining the phonetic information obtained by decomposition of the source speaker’s speech. In contrast, CAB speaker identities are represented as cluster vectors that determine the adaptation matrices. As the number of clusters is generally smaller than the number of speakers, the number of model parameters can be reduced compared to ARBM, which enables rapid adaptation of a new speaker. Our experimental results show that the proposed method performed better than the ARBM approach, particularly in adaptation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ryo Aihara|AUTHOR Ryo Aihara]]^^1^^, [[Tetsuya Takiguchi|AUTHOR Tetsuya Takiguchi]]^^2^^, [[Yasuo Ariki|AUTHOR Yasuo Ariki]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Mitsubishi Electric, Japan; ^^2^^Kobe University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3374–3378
</span></p></div>
<div class="cpabstractcardabstract"><p>We present in this paper a Voice Conversion (VC) method for a person with dysarthria resulting from athetoid cerebral palsy. VC is being widely researched in the field of speech processing because of increased interest in using such processing in applications such as personalized Text-To-Speech systems. A Gaussian Mixture Model (GMM)-based VC method has been widely researched and Partial Least Square (PLS)-based VC has been proposed to prevent the over-fitting problems associated with the GMM-based VC method. In this paper, we present phoneme-discriminative features, which are associated with PLS-based VC. Conventional VC methods do not consider the phonetic structure of spectral features although phonetic structures are important for speech analysis. Especially for dysarthric speech, their phonetic structures are difficult to discriminate and discriminative learning will improve the conversion accuracy. This paper employs discriminative manifold learning. Spectral features are projected into a subspace in which a near point with the same phoneme label is close to another and a near point with a different phoneme label is apart. Our proposed method was evaluated on dysarthric speaker conversion task which converts dysarthric voice into non-dysarthric speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jie Wu|AUTHOR Jie Wu]]^^1^^, [[D.-Y. Huang|AUTHOR D.-Y. Huang]]^^2^^, [[Lei Xie|AUTHOR Lei Xie]]^^1^^, [[Haizhou Li|AUTHOR Haizhou Li]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Northwestern Polytechnical University, China; ^^2^^A*STAR, Singapore; ^^3^^A*STAR, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3379–3383
</span></p></div>
<div class="cpabstractcardabstract"><p>The paper studies the post processing in deep bidirectional Long Short-Term Memory (DBLSTM) based voice conversion, where the statistical parameters are optimized to generate speech that exhibits similar properties to target speech. However, there always exists residual error between converted speech and target one. We reformulate the residual error problem as speech restoration, which aims to recover the target speech samples from the converted ones. Specifically, we propose a denoising recurrent neural network (DeRNN) by introducing regularization during training to shape the distribution of the converted data in latent space. We compare the proposed approach with global variance (GV), modulation spectrum (MS) and recurrent neural network (RNN) based postfilters, which serve a similar purpose. The subjective test results show that the proposed approach significantly outperforms these conventional approaches in terms of quality and similarity.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kei Tanaka|AUTHOR Kei Tanaka]], [[Sunao Hara|AUTHOR Sunao Hara]], [[Masanobu Abe|AUTHOR Masanobu Abe]], [[Masaaki Sato|AUTHOR Masaaki Sato]], [[Shogo Minagi|AUTHOR Shogo Minagi]]
</p><p class="cpabstractcardaffiliationlist">Okayama University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3384–3388
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, using GMM-based voice conversion algorithm, we propose to generate speaker-dependent mapping functions to improve the intelligibility of speech uttered by patients with a wide glossectomy. The speaker-dependent approach makes it possible to generate the mapping functions that reconstruct missing spectrum features of speech uttered by a patient without having influences of a speaker’s factor. The proposed idea is simple, i.e., to collect speech uttered by a patient before and after the glossectomy, but in practice it is hard to ask patients to utter speech just for developing algorithms. To confirm the performance of the proposed approach, in this paper, in order to simulate glossectomy patients, we fabricated an intraoral appliance which covers lower dental arch and tongue surface to restrain tongue movements. In terms of the Mel-frequency cepstrum (MFC) distance, by applying the voice conversion, the distances were reduced by 25% and 42% for speaker-dependent case and speaker-independent case, respectively. In terms of phoneme intelligibility, dictation tests revealed that speech reconstructed by speaker-dependent approach almost always showed better performance than the original speech uttered by simulated patients, while speaker-independent approach did not.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Takuhiro Kaneko|AUTHOR Takuhiro Kaneko]]^^1^^, [[Shinji Takaki|AUTHOR Shinji Takaki]]^^2^^, [[Hirokazu Kameoka|AUTHOR Hirokazu Kameoka]]^^1^^, [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NTT, Japan; ^^2^^NII, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3389–3393
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a learning-based postfilter to reconstruct the high-fidelity spectral texture in short-term Fourier transform (STFT) spectrograms. In speech-processing systems, such as speech synthesis, conversion, enhancement, separation, and coding, STFT spectrograms have been widely used as key acoustic representations. In these tasks, we normally need to precisely generate or predict the representations from inputs; however, generated spectra typically lack the fine structures that are close to those of the true data. To overcome these limitations and reconstruct spectra having finer structures, we propose a generative adversarial network (GAN)-based postfilter that is implicitly optimized to match the true feature distribution in adversarial learning. The challenge with this postfilter is that a GAN cannot be easily trained for very high-dimensional data such as STFT spectra. We take a simple divide-and-concatenate strategy. Namely, we first divide the spectrograms into multiple frequency bands with overlap, reconstruct the individual bands using the GAN-based postfilter trained for each band, and finally connect the bands with overlap. We tested our proposed postfilter on a deep neural network-based text-to-speech task and confirmed that it was able to reduce the gap between synthesized and target spectra, even in the high-dimensional STFT domain.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bajibabu Bollepalli|AUTHOR Bajibabu Bollepalli]], [[Lauri Juvela|AUTHOR Lauri Juvela]], [[Paavo Alku|AUTHOR Paavo Alku]]
</p><p class="cpabstractcardaffiliationlist">Aalto University, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3394–3398
<a href="./IS2017/MEDIA/1288" class="externallinkbutton" target="_blank">{{$:/causal/Multimedia Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>Recent studies have shown that text-to-speech synthesis quality can be improved by using glottal vocoding. This refers to vocoders that parameterize speech into two parts, the glottal excitation and vocal tract, that occur in the human speech production apparatus. Current glottal vocoders generate the glottal excitation waveform by using deep neural networks (DNNs). However, the squared error-based training of the present glottal excitation models is limited to generating conditional average waveforms, which fails to capture the stochastic variation of the waveforms. As a result, shaped noise is added as post-processing. In this study, we propose a new method for predicting glottal waveforms by generative adversarial networks (GANs). GANs are generative models that aim to embed the data distribution in a latent space, enabling generation of new instances very similar to the original by randomly sampling the latent distribution. The glottal pulses generated by GANs show a stochastic component similar to natural glottal pulses. In our experiments, we compare synthetic speech generated using glottal waveforms produced by both DNNs and GANs. The results show that the newly proposed GANs achieve synthesis quality comparable to that of widely-used DNNs, without using an additive noise component.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhaojie Luo|AUTHOR Zhaojie Luo]], [[Jinhui Chen|AUTHOR Jinhui Chen]], [[Tetsuya Takiguchi|AUTHOR Tetsuya Takiguchi]], [[Yasuo Ariki|AUTHOR Yasuo Ariki]]
</p><p class="cpabstractcardaffiliationlist">Kobe University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3399–3403
</span></p></div>
<div class="cpabstractcardabstract"><p>Deep learning techniques have been successfully applied to speech processing. Typically, neural networks (NNs) are very effective in processing nonlinear features, such as mel cepstral coefficients (MCC), which represent the spectrum features in voice conversion (VC) tasks. Despite these successes, the approach is restricted to problems with moderate dimension and sufficient data. Thus, in emotional VC tasks, it is hard to deal with a simple representation of fundamental frequency (F0), which is the most important feature in emotional voice representation. Another problem is that there are insufficient emotional data for training. To deal with these two problems, in this paper, we propose the adaptive scales continuous wavelet transform (AS-CWT) method to systematically capture the F0 features of different temporal scales, which can represent different prosodic levels ranging from micro-prosody to sentence levels. Meanwhile, we also use the pre-trained conversion functions obtained from other emotional datasets to synthesize new emotional data as additional training samples for target emotional voice conversion. Experimental results indicate that our proposed method achieves the best performance in both objective and subjective evaluations.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rama Doddipatla|AUTHOR Rama Doddipatla]]^^1^^, [[Norbert Braunschweiler|AUTHOR Norbert Braunschweiler]]^^1^^, [[Ranniery Maia|AUTHOR Ranniery Maia]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Toshiba Research Europe, UK; ^^2^^Universidade Federal de Santa Catarina, Brazil</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3404–3408
</span></p></div>
<div class="cpabstractcardabstract"><p>The paper presents a mechanism to perform speaker adaptation in speech synthesis based on deep neural networks (DNNs). The mechanism extracts speaker identification vectors, so-called d-vectors, from the training speakers and uses them jointly with the linguistic features to train a multi-speaker DNN-based text-to-speech synthesizer (DNN-TTS). The d-vectors are derived by applying principal component analysis (PCA) on the bottle-neck features of a speaker classifier network. At the adaptation stage, three variants are explored: (1) d-vectors calculated using data from the target speaker, or (2) d-vectors calculated as a weighted sum of d-vectors from training speakers, or (3) d-vectors calculated as an average of the above two approaches. The proposed method of unsupervised adaptation using the d-vector is compared with the commonly used i-vector based approach for speaker adaptation. Listening tests show that: (1) for speech quality, the d-vector based approach is significantly preferred over the i-vector based approach. All the d-vector variants perform similar for speech quality; (2) for speaker similarity, both d-vector and i-vector based adaptation were found to perform similar, except a small significant preference for the d-vector calculated as an average over the i-vector.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Roger K. Moore|AUTHOR Roger K. Moore]], [[Ben Mitchinson|AUTHOR Ben Mitchinson]]
</p><p class="cpabstractcardaffiliationlist">University of Sheffield, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3419–3420
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper introduces MiRo — the world’s first commercial biomimetic robot — and describes how its vocal system was designed using a real-time parametric general-purpose mammalian vocal synthesiser tailored to the specific physical characteristics of the robot. MiRo’s capabilities will be demonstrated live during the hands-on interactive ‘Show & Tell’ session at INTERSPEECH-2017.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mónica Domínguez|AUTHOR Mónica Domínguez]]^^1^^, [[Mireia Farrús|AUTHOR Mireia Farrús]]^^1^^, [[Leo Wanner|AUTHOR Leo Wanner]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universitat Pompeu Fabra, Spain; ^^2^^ICREA, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3421–3422
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a demonstration of a stochastic prosody tool for enrichment of synthesized speech using SSML prosody tags applied over hierarchical thematicity spans in the context of a CTS application. The motivation for using hierarchical thematicity is exemplified, together with the capabilities of the module to generate a variety of SSML prosody tags within a controlled range of values depending on the input thematicity label.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Martin Grůber|AUTHOR Martin Grůber]], [[Jindřich Matoušek|AUTHOR Jindřich Matoušek]], [[Zdeněk Hanzlíček|AUTHOR Zdeněk Hanzlíček]], [[Jakub Vít|AUTHOR Jakub Vít]], [[Daniel Tihelka|AUTHOR Daniel Tihelka]]
</p><p class="cpabstractcardaffiliationlist">University of West Bohemia, Czech Republic</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3423–3424
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper deals with a presentation of an experimental system (called WebSubDub) for creating a high-quality alternative audio track for TV broadcasting. The system is used to create subtitles for TV shows in such a format which makes it possible to automatically generate an alternative audio track with multiple voices employing a specially adapted TTS system. This alternative audio track is intended for televiewers with slight hearing impairments, i.e. for a group of televiewers who encounter issues when perceiving the original audio track — especially dialogues with background music, background noise or emotional speech. The system was developed in cooperation with Czech television, the public service broadcaster in the Czech Republic.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Markéta Jůzová|AUTHOR Markéta Jůzová]], [[Daniel Tihelka|AUTHOR Daniel Tihelka]], [[Jindřich Matoušek|AUTHOR Jindřich Matoušek]], [[Zdeněk Hanzlíček|AUTHOR Zdeněk Hanzlíček]]
</p><p class="cpabstractcardaffiliationlist">University of West Bohemia, Czech Republic</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3425–3426
</span></p></div>
<div class="cpabstractcardabstract"><p>The presented paper is focused on the building of personalized text-to-speech (TTS) synthesis for people who are losing their voices due to fatal diseases. The special conditions of this issue make the process different from preparing professional synthetic voices for commercial TTS systems and make it also more difficult. The whole process is described in this paper and the first results of the personalized voice building are presented here as well.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Atish Shankar Ghone|AUTHOR Atish Shankar Ghone]]^^1^^, [[Rachana Nerpagar|AUTHOR Rachana Nerpagar]]^^1^^, [[Pranaw Kumar|AUTHOR Pranaw Kumar]]^^1^^, [[Arun Baby|AUTHOR Arun Baby]]^^2^^, [[Aswin Shanmugam|AUTHOR Aswin Shanmugam]]^^2^^, [[Sasikumar M.|AUTHOR Sasikumar M.]]^^1^^, [[Hema A. Murthy|AUTHOR Hema A. Murthy]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^C-DAC, India; ^^2^^IIT Madras, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3427–3428
</span></p></div>
<div class="cpabstractcardabstract"><p>With the development of high quality TTS systems, application area of synthetic speech is increasing rapidly. Beyond the communication aids for the visually impaired and vocally handicapped, TTS voices are being used in various educational, telecommunication and multimedia applications. All around the world people are trying to build TTS voice for their regional languages. TTS voice building requires a number of steps to follow and involves use of multiple tools, which makes it time consuming, tedious and perplexing to a user. This paper describes a Toolkit developed for HMM-based TTS voice building that makes the process much easier and handy. The toolkit uses all required tools, viz. HTS, Festival, Festvox, Hybrid Segmentation Tool, etc. and handles each and every step starting from phone set creation, then prompt generation, hybrid segmentation, F0 range finding, voice building, and finally putting the built voice into Synthesis framework. Wherever possible it does parallel processing to reduce time. It saves manual effort and time to a large extent and enables a person to build TTS voice very easily. This toolkit is made available under Open Source license.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Reima Karhila|AUTHOR Reima Karhila]]^^1^^, [[Sari Ylinen|AUTHOR Sari Ylinen]]^^2^^, [[Seppo Enarvi|AUTHOR Seppo Enarvi]]^^1^^, [[Kalle Palomäki|AUTHOR Kalle Palomäki]]^^1^^, [[Aleksander Nikulin|AUTHOR Aleksander Nikulin]]^^1^^, [[Olli Rantula|AUTHOR Olli Rantula]]^^1^^, [[Vertti Viitanen|AUTHOR Vertti Viitanen]]^^1^^, [[Krupakar Dhinakaran|AUTHOR Krupakar Dhinakaran]]^^1^^, [[Anna-Riikka Smolander|AUTHOR Anna-Riikka Smolander]]^^2^^, [[Heini Kallio|AUTHOR Heini Kallio]]^^2^^, [[Katja Junttila|AUTHOR Katja Junttila]]^^2^^, [[Maria Uther|AUTHOR Maria Uther]]^^3^^, [[Perttu Hämäläinen|AUTHOR Perttu Hämäläinen]]^^1^^, [[Mikko Kurimo|AUTHOR Mikko Kurimo]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Aalto University, Finland; ^^2^^University of Helsinki, Finland; ^^3^^University of Winchester, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3429–3430
</span></p></div>
<div class="cpabstractcardabstract"><p>We introduce a digital game for children’s foreign-language learning that uses automatic speech recognition (ASR) for evaluating children’s utterances. Our first prototype focuses on the learning of English words and their pronunciation. The game connects to a network server, which handles the recognition and pronunciation grading of children’s foreign-language speech. The server is reusable for different applications. Given suitable acoustic models, it can be used for grading pronunciations in any language.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Staffan Larsson|AUTHOR Staffan Larsson]]^^1^^, [[Alex Berman|AUTHOR Alex Berman]]^^2^^, [[Andreas Krona|AUTHOR Andreas Krona]]^^2^^, [[Fredrik Kronlid|AUTHOR Fredrik Kronlid]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Gothenburg, Sweden; ^^2^^Talkamatic, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3431–3432
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes the integration of Amazon Alexa with the Talkamatic Dialogue Manager (TDM), and shows how flexible dialogue skills and rapid prototyping of dialogue apps can be brought to the Alexa platform.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Farhia Ahmed|AUTHOR Farhia Ahmed]]^^1^^, [[Pierrette Bouillon|AUTHOR Pierrette Bouillon]]^^2^^, [[Chelle Destefano|AUTHOR Chelle Destefano]]^^3^^, [[Johanna Gerlach|AUTHOR Johanna Gerlach]]^^2^^, [[Sonia Halimi|AUTHOR Sonia Halimi]]^^2^^, [[Angela Hooper|AUTHOR Angela Hooper]]^^4^^, [[Manny Rayner|AUTHOR Manny Rayner]]^^2^^, [[Hervé Spechbach|AUTHOR Hervé Spechbach]]^^5^^, [[Irene Strasly|AUTHOR Irene Strasly]]^^2^^, [[Nikos Tsourakis|AUTHOR Nikos Tsourakis]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Association Genevoise des Malentendants, Switzerland; ^^2^^Université de Genève, Switzerland; ^^3^^Gypsysnail Arts, Australia; ^^4^^NABS Interpreting Services, Australia; ^^5^^Hôpitaux Universitaires de Genève, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3433–3434
</span></p></div>
<div class="cpabstractcardabstract"><p>We present BabelDr, a web-enabled spoken-input phraselator for medical domains, which has been developed at Geneva University in a collaboration between a human language technology group and a group at the University hospital. The current production version of the system translates French into Arabic, using exclusively rule-based methods, and has performed credibly in simulated triaging tests with standardised patients. We also present an experimental version which combines large-vocabulary recognition with the main rule-based recogniser; offline tests on unseen data suggest that the new architecture adds robustness while more than halving the 2-best semantic error rate. The experimental version translates from spoken English into spoken French and also two sign languages.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Frank Duckhorn|AUTHOR Frank Duckhorn]]^^1^^, [[Markus Huber|AUTHOR Markus Huber]]^^2^^, [[Werner Meyer|AUTHOR Werner Meyer]]^^3^^, [[Oliver Jokisch|AUTHOR Oliver Jokisch]]^^4^^, [[Constanze Tschöpe|AUTHOR Constanze Tschöpe]]^^1^^, [[Matthias Wolff|AUTHOR Matthias Wolff]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Fraunhofer IKTS, Germany; ^^2^^InnoTec21, Germany; ^^3^^Brandenburgische Technische Universität, Germany; ^^4^^Hochschule für Telekommunikation Leipzig, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3435–3436
</span></p></div>
<div class="cpabstractcardabstract"><p>With this paper we present an overview of an autarkic embedded cognitive user interface. It is realized in the form of an integrated device able to communicate with the user over speech & gesture recognition, speech synthesis and a touch display. Semantic processing and cognitive behaviour control support intuitive interaction and help control arbitrary electronic devices. To ensure user privacy and to operate autonomously of network access all information processing is done on the device.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Genta Indra Winata|AUTHOR Genta Indra Winata]]^^1^^, [[Onno Kampman|AUTHOR Onno Kampman]]^^1^^, [[Yang Yang|AUTHOR Yang Yang]]^^1^^, [[Anik Dey|AUTHOR Anik Dey]]^^2^^, [[Pascale Fung|AUTHOR Pascale Fung]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^HKUST, China; ^^2^^EMOS Technologies, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3437–3438
</span></p></div>
<div class="cpabstractcardabstract"><p>Nora is a new dialog system that mimics a conversation with a psychologist by screening for stress, anxiety, and depression. She understands, empathizes, and adapts to users using emotional intelligence modules trained via statistical modelling such as Convolutional Neural Networks. These modules also enable her to personalize the content of each conversation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hassan Alam|AUTHOR Hassan Alam]], [[Aman Kumar|AUTHOR Aman Kumar]], [[Manan Vyas|AUTHOR Manan Vyas]], [[Tina Werner|AUTHOR Tina Werner]], [[Rachmat Hartono|AUTHOR Rachmat Hartono]]
</p><p class="cpabstractcardaffiliationlist">BCL Technologies, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3439–3440
</span></p></div>
<div class="cpabstractcardabstract"><p>In this proof-of-concept study we build a tool that modifies the grammar and the dictionary of an Automatic Speech Recognition (ASR) engine. We evaluated our tool using Amazon’s Alexa ASR engine. The experiments show that with our grammar and dictionary modification algorithms in the military domain, the accuracy of the modified ASR went up significantly — from 20/100 correct to 80/100 correct.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kathleen Currie Hall|AUTHOR Kathleen Currie Hall]], [[Scott Mackie|AUTHOR Scott Mackie]], [[Michael Fry|AUTHOR Michael Fry]], [[Oksana Tkachman|AUTHOR Oksana Tkachman]]
</p><p class="cpabstractcardaffiliationlist">University of British Columbia, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2083–2087
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper introduces a new resource for building phonetically transcribed corpora of signed languages. The free, open-source software tool, SLPAnnotator, is designed to facilitate the transcription of hand configurations using a slightly modified version of the Sign Language Phonetic Annotation (SLPA) system ([1], [2], [3], [4]; see also [5]).
While the SLPA system is extremely phonetically detailed, it can be seen as cumbersome and, perhaps, harder for humans to use and interpret than other transcription systems (e.g. Prosodic Model Handshape Coding, [6]). SLPAnnotator is designed to bridge the gap between such systems by automating some of the transcription process, providing users with informative references about possible configurations as they are coding, giving continuously updatable access to a visual model of the transcribed handshape, and allowing users to verify that transcribed handshapes are both phonologically and anatomically plausible. Finally, SLPAnnotator is designed to interface with other analysis tools, such as Phonological CorpusTools ([7], [8]), to allow for subsequent phonological analysis of the resulting sign language corpora.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Iris-Corinna Schwarz|AUTHOR Iris-Corinna Schwarz]]^^1^^, [[Noor Botros|AUTHOR Noor Botros]]^^2^^, [[Alekzandra Lord|AUTHOR Alekzandra Lord]]^^2^^, [[Amelie Marcusson|AUTHOR Amelie Marcusson]]^^2^^, [[Henrik Tidelius|AUTHOR Henrik Tidelius]]^^2^^, [[Ellen Marklund|AUTHOR Ellen Marklund]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Stockholm University, Sweden; ^^2^^Karolinska Institute, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2088–2092
</span></p></div>
<div class="cpabstractcardabstract"><p>The Language Environment Analysis system LENA is used to capture day-long recordings of children’s natural audio environment. The system performs automated segmentation of the recordings and provides estimates for various measures. One of those measures is Adult Word Count (AWC), an approximation of the number of words spoken by adults in close proximity to the child. The LENA system was developed for and trained on American English, but it has also been evaluated on its performance when applied to Spanish, Mandarin and French. The present study is the first evaluation of the LENA system applied to Swedish, and focuses on the AWC estimate. Twelve five-minute segments were selected at random from each of four day-long recordings of 30-month-old children. Each of these 48 segments was transcribed by two transcribers, and both number of words and number of vowels were calculated (inter-transcriber reliability for words: r = .95, vowels: r = .93). Both counts correlated with the LENA system’s AWC estimate for the same segments (words: r = .67, vowels: r = .66). The reliability of the AWC as estimated by the LENA system when applied to Swedish is therefore comparable to its reliability for Spanish, Mandarin and French.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Marisa Casillas|AUTHOR Marisa Casillas]]^^1^^, [[Andrei Amatuni|AUTHOR Andrei Amatuni]]^^2^^, [[Amanda Seidl|AUTHOR Amanda Seidl]]^^3^^, [[Melanie Soderstrom|AUTHOR Melanie Soderstrom]]^^4^^, [[Anne S. Warlaumont|AUTHOR Anne S. Warlaumont]]^^5^^, [[Elika Bergelson|AUTHOR Elika Bergelson]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^MPI for Psycholinguistics, The Netherlands; ^^2^^Duke University, USA; ^^3^^Purdue University, USA; ^^4^^University of Manitoba, Canada; ^^5^^University of California at Merced, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2093–2097
</span></p></div>
<div class="cpabstractcardabstract"><p>Child-directed speech is argued to facilitate language development, and is found cross-linguistically and cross-culturally to varying degrees. However, previous research has generally focused on short samples of child-caregiver interaction, often in the lab or with experimenters present. We test the generalizability of this phenomenon with an initial descriptive analysis of the speech heard by young children in a large, unique collection of naturalistic, daylong home recordings. Trained annotators coded automatically-detected adult speech ‘utterances’ from 61 homes across 4 North American cities, gathered from children (age 2–24 months) wearing audio recorders during a typical day. Coders marked the speaker gender (male/female) and intended addressee (child/adult), yielding 10,886 addressee and gender tags from 2,523 minutes of audio (cf. HB-CHAAC Interspeech ComParE challenge; Schuller et al., in press). Automated speaker-diarization (LENA) incorrectly gender-tagged 30% of male adult utterances, compared to manually-coded consensus. Furthermore, we find effects of SES and gender on child-directed and overall speech, increasing child-directed speech with child age, and interactions of speaker gender, child gender, and child age: female caretakers increased their child-directed speech more with age than male caretakers did, but only for male infants. Implications for language acquisition and existing classification algorithms are discussed.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Marisa Casillas|AUTHOR Marisa Casillas]]^^1^^, [[Elika Bergelson|AUTHOR Elika Bergelson]]^^2^^, [[Anne S. Warlaumont|AUTHOR Anne S. Warlaumont]]^^3^^, [[Alejandrina Cristia|AUTHOR Alejandrina Cristia]]^^4^^, [[Melanie Soderstrom|AUTHOR Melanie Soderstrom]]^^5^^, [[Mark VanDam|AUTHOR Mark VanDam]]^^6^^, [[Han Sloetjes|AUTHOR Han Sloetjes]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^MPI for Psycholinguistics, The Netherlands; ^^2^^Duke University, USA; ^^3^^University of California at Merced, USA; ^^4^^LSCP (UMR 8554), France; ^^5^^University of Manitoba, Canada; ^^6^^Washington State University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2098–2102
</span></p></div>
<div class="cpabstractcardabstract"><p>Interoperable annotation formats are fundamental to the utility, expansion, and sustainability of collective data repositories. In language development research, shared annotation schemes have been critical to facilitating the transition from raw acoustic data to searchable, structured corpora. Current schemes typically require comprehensive and manual annotation of utterance boundaries and orthographic speech content, with an additional, optional range of tags of interest. These schemes have been enormously successful for datasets on the scale of dozens of recording hours but are untenable for long-format recording corpora, which routinely contain hundreds to thousands of audio hours. Long-format corpora would benefit greatly from (semi-)automated analyses, both on the earliest steps of annotation — voice activity detection, utterance segmentation, and speaker diarization — as well as later steps — e.g., classification-based codes such as child-vs-adult-directed speech, and speech recognition to produce phonetic/orthographic representations. We present an annotation workflow specifically designed for long-format corpora which can be tailored by individual researchers and which interfaces with the current dominant scheme for short-format recordings. The workflow allows semi-automated annotation and analyses at higher linguistic levels. We give one example of how the workflow has been successfully implemented in a large cross-database project.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Christina Bergmann|AUTHOR Christina Bergmann]], [[Sho Tsuji|AUTHOR Sho Tsuji]], [[Alejandrina Cristia|AUTHOR Alejandrina Cristia]]
</p><p class="cpabstractcardaffiliationlist">LSCP (UMR 8554), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2103–2107
</span></p></div>
<div class="cpabstractcardabstract"><p>Recent work has made available a number of standardized meta-analyses bearing on various aspects of infant language processing. We utilize data from two such meta-analyses (discrimination of vowel contrasts and word segmentation, i.e., recognition of word forms extracted from running speech) to assess whether the published body of empirical evidence supports a bottom-up versus a top-down theory of early phonological development by leveling the power of results from thousands of infants. We predicted that if infants can rely purely on auditory experience to develop their phonological categories, then vowel discrimination and word segmentation should develop in parallel, with the latter being potentially lagged compared to the former. However, if infants crucially rely on word form information to build their phonological categories, then development at the word level must precede the acquisition of native sound categories. Our results do not support the latter prediction. We discuss potential implications and limitations, most saliently that word forms are only one top-down level proposed to affect phonological development, with other proposals suggesting that top-down pressures emerge from lexical (i.e., word-meaning pairs) development. This investigation also highlights general procedures by which standardized meta-analyses may be reused to answer theoretical questions spanning across phenomena.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sho Tsuji|AUTHOR Sho Tsuji]]^^1^^, [[Alejandrina Cristia|AUTHOR Alejandrina Cristia]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Pennsylvania, USA; ^^2^^LSCP (UMR 8554), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2108–2112
</span></p></div>
<div class="cpabstractcardabstract"><p>A key research question in early language acquisition concerns the development of infants’ ability to discriminate sounds, and the factors structuring discrimination abilities. Vowel discrimination, in particular, has been studied using a range of tasks, experimental paradigms, and stimuli over the past 40 years, work recently compiled in a meta-analysis. We use this meta-analysis to assess whether there is statistical evidence for the following factors affecting effect sizes across studies: (1) the order in which the two vowel stimuli are presented; and (2) the distance between the vowels, measured acoustically in terms of spectral and quantity differences. The magnitude of effect sizes analysis revealed order effects consistent with the Natural Referent Vowels framework, with greater effect sizes when the second vowel was more peripheral than the first. Additionally, we find that spectral acoustic distinctiveness is a consistent predictor of studies’ effect sizes, while temporal distinctiveness did not predict effect size magnitude. None of these factors interacted significantly with age. We discuss implications of these results for language acquisition, and more generally developmental psychology, research.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Pavlos Papadopoulos|AUTHOR Pavlos Papadopoulos]]^^1^^, [[Ruchir Travadi|AUTHOR Ruchir Travadi]]^^1^^, [[Colin Vaz|AUTHOR Colin Vaz]]^^1^^, [[Nikolaos Malandrakis|AUTHOR Nikolaos Malandrakis]]^^1^^, [[Ulf Hermjakob|AUTHOR Ulf Hermjakob]]^^1^^, [[Nima Pourdamghani|AUTHOR Nima Pourdamghani]]^^1^^, [[Michael Pust|AUTHOR Michael Pust]]^^1^^, [[Boliang Zhang|AUTHOR Boliang Zhang]]^^2^^, [[Xiaoman Pan|AUTHOR Xiaoman Pan]]^^2^^, [[Di Lu|AUTHOR Di Lu]]^^2^^, [[Ying Lin|AUTHOR Ying Lin]]^^2^^, [[Ondřej Glembek|AUTHOR Ondřej Glembek]]^^3^^, [[Murali Karthick Baskar|AUTHOR Murali Karthick Baskar]]^^3^^, [[Martin Karafiát|AUTHOR Martin Karafiát]]^^3^^, [[Lukáš Burget|AUTHOR Lukáš Burget]]^^3^^, [[Mark Hasegawa-Johnson|AUTHOR Mark Hasegawa-Johnson]]^^4^^, [[Heng Ji|AUTHOR Heng Ji]]^^2^^, [[Jonathan May|AUTHOR Jonathan May]]^^1^^, [[Kevin Knight|AUTHOR Kevin Knight]]^^1^^, [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Southern California, USA; ^^2^^Rensselaer Polytechnic Institute, USA; ^^3^^Brno University of Technology, Czech Republic; ^^4^^University of Illinois at Urbana-Champaign, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2053–2057
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we describe the system designed and developed by team ELISA for DARPA’s LORELEI (Low Resource Languages for Emergent Incidents) pilot speech evaluation. The goal of the LORELEI program is to guide rapid resource deployment for humanitarian relief (e.g. for natural disasters), with a focus on “low-resource” language locations, where the cost of developing technologies for automated human language tools can be prohibitive both in monetary terms and timewise. In this phase of the program, the speech evaluation consisted of three separate tasks: detecting presence of an incident, classifying incident type, and classifying incident type along with identifying the location where it occurs. The performance metric was area under curve of precision-recall curves. Team ELISA competed against five other teams and won all the subtasks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Péter Mihajlik|AUTHOR Péter Mihajlik]]^^1^^, [[Lili Szabó|AUTHOR Lili Szabó]]^^2^^, [[Balázs Tarján|AUTHOR Balázs Tarján]]^^1^^, [[András Balog|AUTHOR András Balog]]^^2^^, [[Krisztina Rábai|AUTHOR Krisztina Rábai]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^BME, Hungary; ^^2^^THINKTech Research Center, Hungary; ^^3^^University of Hradec Králové, Czech Republic</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2058–2062
</span></p></div>
<div class="cpabstractcardabstract"><p>Latin had served as an official language across Europe from the Roman Empire until the 19^^th^^ century. As a result, vast amount of Latin language historical documents (charters, account books) survived from the Middle Ages, waiting for recovery. In the digitization process, tremendous human efforts are needed for the transliteration of textual content, as the applicability of optical character recognition techniques is often limited. In the era of Digital Humanities our aim is to accelerate the transcription by using automatic speech recognition technology. We introduce the challenges and our initial results in developing a real-time, medieval Latin language LVCSR dictation system for East-Central Europe (ECE). In this region, the pronunciation and usage of medieval Latin is considered to be roughly uniform. At this phase of the research, therefore, Latin speech data was not collected for acoustic model training but only for test purposes — from a selection of ECE countries. Our experimental results, however, suggest that ECE Latin varies significantly depending on the primary national language on both acoustic-phonetic and grammatical levels. On the other hand, unexpectedly low word error rates are obtained for several speakers whose native language is completely uncovered by the applied training data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[C.I. Watson|AUTHOR C.I. Watson]]^^1^^, [[P.J. Keegan|AUTHOR P.J. Keegan]]^^1^^, [[M.A. Maclagan|AUTHOR M.A. Maclagan]]^^2^^, [[R. Harlow|AUTHOR R. Harlow]]^^3^^, [[J. King|AUTHOR J. King]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Auckland, New Zealand; ^^2^^University of Canterbury, New Zealand; ^^3^^University of Waikato, New Zealand</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2063–2067
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper outlines the motivation and development of a pronunciation aid (MPAi) for the Māori language, the language of the indigenous people of New Zealand. Māori is threatened and after a break in transmission the language is currently undergoing revitalization. The data for the aid has come from a corpus of 60 speakers (men and women). The language aid allows users to model their speech against exemplars from young speakers or older speakers of Māori. This is important, because of the status of the elders in the Māori speaking community, but it also recognizes that Māori is undergoing substantial vowel change. The pronunciation aid gives feedback on vowel production via formant analysis, and selected words via speech recognition. The evaluation of the aid by 22 language teachers is presented and the resulting changes are discussed.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Siyuan Feng|AUTHOR Siyuan Feng]], [[Tan Lee|AUTHOR Tan Lee]]
</p><p class="cpabstractcardaffiliationlist">Chinese University of Hong Kong, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2068–2072
</span></p></div>
<div class="cpabstractcardabstract"><p>Unsupervised acoustic modeling is an important and challenging problem in spoken language technology development for low-resource languages. It aims at automatically learning a set of speech units from un-transcribed data. These learned units are expected to be related to fundamental linguistic units that constitute the concerned language. Formulated as a clustering problem, unsupervised acoustic modeling methods are often evaluated in terms of average purity or similar types of performance measures. They do not provide detailed insights on the fitness of individual learned units and the relation between them. This paper presents an investigation on the linguistic relevance of learned speech units based on Kullback-Leibler (KL) divergence. A symmetric KL divergence metric is used to measure the distance between each pair of learned unit and ground-truth phoneme of the target language. Experimental analysis on a multilingual database shows that KL divergence is consistent with purity in evaluating clustering results. The deviation between a learned unit and its closest ground-truth phoneme is comparable to the inherent variability of the phoneme. The learned speech units have a good coverage of linguistically defined phonemes. However, there are certain phonemes that cannot be covered, for example, the retroflex final /er/ in Mandarin.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Amit Das|AUTHOR Amit Das]]^^1^^, [[Mark Hasegawa-Johnson|AUTHOR Mark Hasegawa-Johnson]]^^1^^, [[Karel Veselý|AUTHOR Karel Veselý]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Illinois at Urbana-Champaign, USA; ^^2^^Brno University of Technology, Czech Republic</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2073–2077
</span></p></div>
<div class="cpabstractcardabstract"><p>We examine a scenario where we have no access to native transcribers in the target language. This is typical of language communities that are under-resourced. However, turkers (online crowd workers) available in online marketplaces can serve as valuable alternative resources for providing transcripts in the target language. We assume that the turkers neither speak nor have any familiarity with the target language. Thus, they are unable to distinguish all phone pairs in the target language; their transcripts therefore specify, at best, a probability distribution called a probabilistic transcript (PT). Standard deep neural network (DNN) training using PTs does not necessarily improve error rates. Previously reported results have demonstrated some success by adopting the multi-task learning (MTL) approach. In this study, we report further improvements by introducing a deep auto-encoder based MTL. This method leverages large amounts of untranscribed data in the target language in addition to the PTs obtained from turkers. Furthermore, to encourage transfer learning in the feature space, we also examine the effect of using monophones from transcripts in well-resourced languages. We report consistent improvement in phone error rates (PER) for Swahili, Amharic, Dinka, and Mandarin.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alexander Gutkin|AUTHOR Alexander Gutkin]]^^1^^, [[Richard Sproat|AUTHOR Richard Sproat]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Google, UK; ^^2^^Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2078–2082
</span></p></div>
<div class="cpabstractcardabstract"><p>We introduce phylogenetic and areal language features to the domain of multilingual text-to-speech synthesis. Intuitively, enriching the existing universal phonetic features with cross-lingual shared representations should benefit the multilingual acoustic models and help to address issues like data scarcity for low-resource languages. We investigate these representations using the acoustic models based on long short-term memory recurrent neural networks. Subjective evaluations conducted on eight languages from diverse language families show that sometimes phylogenetic and areal representations lead to significant multilingual synthesis quality improvements. To help better leverage these novel features, improving the baseline phonetic representation may be necessary.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ailbhe Ní Chasaide|AUTHOR Ailbhe Ní Chasaide]], [[Neasa Ní Chiaráin|AUTHOR Neasa Ní Chiaráin]], [[Christoph Wendler|AUTHOR Christoph Wendler]], [[Harald Berthelsen|AUTHOR Harald Berthelsen]], [[Andy Murphy|AUTHOR Andy Murphy]], [[Christer Gobl|AUTHOR Christer Gobl]]
</p><p class="cpabstractcardaffiliationlist">Trinity College Dublin, Ireland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2113–2117
</span></p></div>
<div class="cpabstractcardabstract"><p>The processes of language demise take hold when a language ceases to belong to the mainstream of life’s activities. Digital communication technology increasingly pervades all aspects of modern life. Languages not digitally ‘available’ are ever more marginalised, whereas a digital presence often yields unexpected opportunities to integrate the language into the mainstream. The ABAIR initiative embraces three central aspects of speech technology development for Irish (Gaelic): the provision of technology-oriented linguistic-phonetic resources; the building and perfecting of core speech technologies; and the development of technology applications, which exploit both the technologies and the linguistic resources. The latter enable the public, learners, and those with disabilities to integrate Irish into their day-to-day usage. This paper outlines some of the specific linguistic and sociolinguistic challenges and the approaches adopted to address them. Although machine-learning approaches are helping to speed up the process of technology provision, the ABAIR experience highlights how phonetic-linguistic resources are also crucial to the development process. For the endangered language, linguistic resources are central to many applications that impact on language usage. The sociolinguistic context and the needs of potential end users should be central considerations in setting research priorities and deciding on methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Basil Abraham|AUTHOR Basil Abraham]], [[Tejaswi Seeram|AUTHOR Tejaswi Seeram]], [[S. Umesh|AUTHOR S. Umesh]]
</p><p class="cpabstractcardaffiliationlist">IIT Madras, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2158–2162
</span></p></div>
<div class="cpabstractcardabstract"><p>Deep neural networks (DNN) require large amount of training data to build robust acoustic models for speech recognition tasks. Our work is intended to improve the low-resource language acoustic model to reach a performance comparable to that of a high-resource scenario with the help of data/model parameters from other high-resource languages. We explore transfer learning and distillation methods, where a complex high resource model guides or supervises the training of low resource model. The techniques include (i) multi-lingual framework of borrowing data from high-resource language while training the low-resource acoustic model. The KL divergence based constraints are added to make the model biased towards low-resource language, (ii) distilling knowledge from the complex high-resource model to improve the low-resource acoustic model. The experiments were performed on three Indian languages namely Hindi, Tamil and Kannada. All the techniques gave improved performance, with the multi-lingual framework with KL divergence regularization giving the best results. In all the three languages a performance close to or better than the high-resource scenario was obtained.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Inga Rún Helgadóttir|AUTHOR Inga Rún Helgadóttir]], [[Róbert Kjaran|AUTHOR Róbert Kjaran]], [[Anna Björk Nikulásdóttir|AUTHOR Anna Björk Nikulásdóttir]], [[Jón Guðnason|AUTHOR Jón Guðnason]]
</p><p class="cpabstractcardaffiliationlist">Reykjavik University, Iceland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2163–2167
</span></p></div>
<div class="cpabstractcardabstract"><p>Acoustic data acquisition for under-resourced languages is an important and challenging task. In the Icelandic parliament, Althingi, all performed speeches are transcribed manually and published as text on Althingi’s web page. To reduce the manual work involved, an automatic speech recognition system is being developed for Althingi. In this paper the development of a speech corpus suitable for the training of a parliamentary ASR system is described. Text and audio data of manually transcribed speeches were processed to build an aligned, segmented corpus, whereby language specific tasks had to be developed specially for Icelandic. The resulting corpus of 542 hours of speech is freely available on http://www.malfong.is. First experiments with an ASR system trained on the Althingi corpus have been conducted, showing promising results. Word error rate of 16.38% was obtained using time-delay deep neural network (TD-DNN) and 14.76% was obtained using long-short term memory recurrent neural network (LSTM-RNN) architecture. The Althingi corpus is to our knowledge the largest speech corpus currently available in Icelandic. The corpus as well as the developed methods for corpus creation constitute a valuable resource for further developments within Icelandic language technology.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tanel Alumäe|AUTHOR Tanel Alumäe]], [[Andrus Paats|AUTHOR Andrus Paats]], [[Ivo Fridolin|AUTHOR Ivo Fridolin]], [[Einar Meister|AUTHOR Einar Meister]]
</p><p class="cpabstractcardaffiliationlist">Tallinn University of Technology, Estonia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2168–2172
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech recognition has become increasingly popular in radiology reporting in the last decade. However, developing a speech recognition system for a new language in a highly specific domain requires a lot of resources, expert knowledge and skills. Therefore, commercial vendors do not offer ready-made radiology speech recognition systems for less-resourced languages.
This paper describes the implementation of a radiology speech recognition system for Estonian, a language with less than one million native speakers. The system was developed in partnership with a hospital that provided a corpus of written reports for language modeling purposes. Rewrite rules for pre-processing training texts and postprocessing recognition results were created manually based on a small parallel corpus created by the hospital’s radiologists, using the Thrax toolkit. Deep neural network based acoustic models were trained based on 216 hours of out-of-domain data and adapted on 14 hours of spoken radiology data, using the Kaldi toolkit. The current word error rate of the system is 5.4%. The system is in active use in real clinical environment.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jón Guðnason|AUTHOR Jón Guðnason]], [[Matthías Pétursson|AUTHOR Matthías Pétursson]], [[Róbert Kjaran|AUTHOR Róbert Kjaran]], [[Simon Klüpfel|AUTHOR Simon Klüpfel]], [[Anna Björk Nikulásdóttir|AUTHOR Anna Björk Nikulásdóttir]]
</p><p class="cpabstractcardaffiliationlist">Reykjavik University, Iceland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2173–2177
</span></p></div>
<div class="cpabstractcardabstract"><p>Building acoustic databases for speech recognition is very important for under-resourced languages. To build a speech recognition system, a large amount of speech data from a considerable number of participants needs to be collected. Eyra is a toolkit that can be used to gather acoustic data from a large number of participants in a relatively straightforward fashion. Predetermined prompts are downloaded onto a client, typically run on a smartphone, where the participant reads them aloud so that the recording and its corresponding prompt can be uploaded. This paper presents the Eyra toolkit, its quality control routines and annotation mechanism. The quality control relies on a forced-alignment module, which gives feedback to the participant, and an annotation module which allows data collectors to rate the read prompts after they are uploaded to the system. The paper presents an analysis of the performance of the quality control and describes two data collections for Icelandic and Javanese.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Daniel van Niekerk|AUTHOR Daniel van Niekerk]]^^1^^, [[Charl van Heerden|AUTHOR Charl van Heerden]]^^1^^, [[Marelie Davel|AUTHOR Marelie Davel]]^^1^^, [[Neil Kleynhans|AUTHOR Neil Kleynhans]]^^1^^, [[Oddur Kjartansson|AUTHOR Oddur Kjartansson]]^^2^^, [[Martin Jansche|AUTHOR Martin Jansche]]^^2^^, [[Linne Ha|AUTHOR Linne Ha]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^North-West University, South Africa; ^^2^^Google, UK; ^^3^^Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2178–2182
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes the development of text-to-speech corpora for four South African languages. The approach followed investigated the possibility of using low-cost methods including informal recording environments and untrained volunteer speakers. This objective and the additional future goal of expanding the corpus to increase coverage of South Africa’s 11 official languages necessitated experimenting with multi-speaker and code-switched data. The process and relevant observations are detailed throughout. The latest version of the corpora are available for download under an open-source licence and will likely see further development and refinement in future.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alexander Gutkin|AUTHOR Alexander Gutkin]]
</p><p class="cpabstractcardaffiliationlist">Google, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2183–2187
</span></p></div>
<div class="cpabstractcardabstract"><p>Acquiring data for text-to-speech (TTS) systems is expensive. This typically requires large amounts of training data, which is not available for low-resourced languages. Sometimes small amounts of data can be collected, while often no data may be available at all. This paper presents an acoustic modeling approach utilizing long short-term memory (LSTM) recurrent neural networks (RNN) aimed at partially addressing the language data scarcity problem. Unlike speaker-adaptation systems that aim to preserve speaker similarity across languages, the salient feature of the proposed approach is that, once constructed, the resulting system does not need retraining to cope with the previously unseen languages. This is due to language and speaker-agnostic model topology and universal linguistic feature set. Experiments on twelve languages show that the system is able to produce intelligible and sometimes natural output when a language is unseen. We also show that, when small amounts of training data are available, pooling the data sometimes improves the overall intelligibility and naturalness. Finally, we show that sometimes having a multilingual system with no prior exposure to the language is better than building single-speaker system from small amounts of data for that language.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Joseph Mendelson|AUTHOR Joseph Mendelson]]^^1^^, [[Pilar Oplustil|AUTHOR Pilar Oplustil]]^^2^^, [[Oliver Watts|AUTHOR Oliver Watts]]^^2^^, [[Simon King|AUTHOR Simon King]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^KTH, Sweden; ^^2^^University of Edinburgh, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2188–2192
</span></p></div>
<div class="cpabstractcardabstract"><p>When a text-to-speech (TTS) system is required to speak world news, a large fraction of the words to be spoken will be proper names originating in a wide variety of languages. Phonetization of these names based on target language letter-to-sound rules will typically be inadequate. This is detrimental not only during synthesis, when inappropriate phone sequences are produced, but also during training, if the system is trained on data from the same domain. This is because poor phonetization during forced alignment based on hidden Markov models can pollute the whole model set, resulting in degraded alignment even of normal target-language words. This paper presents four techniques designed to address this issue in the context of a Swahili TTS system: automatic transcription of proper names based on a lexicon from a better-resourced language; the addition of a parallel phone set and special part-of-speech tag exclusively dedicated to proper names; a manually-crafted phone mapping which allows substitutions for potentially more accurate phones in proper names during forced alignment; the addition in proper names of a grapheme-derived frame-level feature, supplementing the standard phonetic inputs to the acoustic model. We present results from objective and subjective evaluations of systems built using these four techniques.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Armin Saeb|AUTHOR Armin Saeb]]^^1^^, [[Raghav Menon|AUTHOR Raghav Menon]]^^1^^, [[Hugh Cameron|AUTHOR Hugh Cameron]]^^2^^, [[William Kibira|AUTHOR William Kibira]]^^2^^, [[John Quinn|AUTHOR John Quinn]]^^2^^, [[Thomas Niesler|AUTHOR Thomas Niesler]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Stellenbosch University, South Africa; ^^2^^UN Global Pulse, Uganda</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2118–2122
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a radio browsing system developed on a very small corpus of annotated speech by using semi-supervised training of multilingual DNN/HMM acoustic models. This system is intended to support relief and developmental programmes by the United Nations (UN) in parts of Africa where the spoken languages are extremely under-resourced. We assume the availability of 12 minutes of annotated speech in the target language, and show how this can best be used to develop an acoustic model. First, a multilingual DNN/HMM is trained using Acholi as the target language and Luganda, Ugandan English and South African English as source languages. We show that the lowest word error rates are achieved by using this model to label further untranscribed target language data and then developing an SGMM acoustic model from the extended dataset. The performance of an ASR system trained in this way is sufficient for keyword detection that yields useful and actionable near real-time information to developmental organisations.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nikolaos Malandrakis|AUTHOR Nikolaos Malandrakis]]^^1^^, [[Ondřej Glembek|AUTHOR Ondřej Glembek]]^^2^^, [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Southern California, USA; ^^2^^Brno University of Technology, Czech Republic</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2123–2127
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes the first evaluation framework for the extraction of Situation Frames — structures describing humanitarian assistance needs — from non-English speech audio, conducted for the DARPA LORELEI (Low Resource Languages for Emergent Incidents) program. Participants in LORELEI had to process audio from a variety of sources, in non-English languages, and extract the information required to populate Situation Frames describing whether any need is mentioned, the type of need present and where the need exists. The evaluation was conducted over a period of 10 days and attracted submissions from 6 teams, each team spanning multiple organizations. Performance was evaluated using precision-recall curves. The results are encouraging, with most teams showing some capability to detect the type of situation discussed, but more work will be required to connect needs to specific locations.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Daniil Kocharov|AUTHOR Daniil Kocharov]], [[Tatiana Kachkovskaia|AUTHOR Tatiana Kachkovskaia]], [[Pavel Skrelin|AUTHOR Pavel Skrelin]]
</p><p class="cpabstractcardaffiliationlist">Saint Petersburg State University, Russia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2128–2132
</span></p></div>
<div class="cpabstractcardabstract"><p>Elicitation of information structure from speech is a crucial step in automatic speech understanding. In terms of both production and perception, we consider intonational phrase to be the basic meaningful unit of information structure in speech. The current paper presents a method of detecting these units in speech by processing both the recorded speech and its textual representation. Using syntactic information, we split text into small groups of words closely connected with each other. Assuming that intonational phrases are built from these small groups, we use acoustic information to reveal their actual boundaries. The procedure was initially developed for processing Russian speech, and we have achieved the best published results for this language with F1 equal to 0.91. We assume that it may be adapted for other languages that have some amount of read speech resources, including under-resourced languages. For comparison we have evaluated it on English material (Boston University Radio Speech Corpus). Our results, F1 of 0.76, are comparable with the top systems designed for English.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Saurabhchand Bhati|AUTHOR Saurabhchand Bhati]], [[Shekhar Nayak|AUTHOR Shekhar Nayak]], [[K. Sri Rama Murty|AUTHOR K. Sri Rama Murty]]
</p><p class="cpabstractcardaffiliationlist">IIT Hyderabad, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2133–2137
</span></p></div>
<div class="cpabstractcardabstract"><p>Zero resource speech processing refers to a scenario where no or minimal transcribed data is available. In this paper, we propose a three-step unsupervised approach to zero resource speech processing, which does not require any other information/dataset. In the first step, we segment the speech signal into phoneme-like units, resulting in a large number of varying length segments. The second step involves clustering the varying-length segments into a finite number of clusters so that each segment can be labeled with a cluster index. The unsupervised transcriptions, thus obtained, can be thought of as a sequence of virtual phone labels. In the third step, a deep neural network classifier is trained to map the feature vectors extracted from the signal to its corresponding virtual phone label. The virtual phone posteriors extracted from the DNN are used as features in the zero resource speech processing. The effectiveness of the proposed approach is evaluated on both ABX and spoken term discovery tasks (STD) using spontaneous American English and Tsonga language datasets, provided as part of zero resource 2015 challenge. It is observed that the proposed system outperforms baselines, supplied along the datasets, in both the tasks without any task specific modifications.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Elodie Gauthier|AUTHOR Elodie Gauthier]]^^1^^, [[Laurent Besacier|AUTHOR Laurent Besacier]]^^1^^, [[Sylvie Voisin|AUTHOR Sylvie Voisin]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LIG (UMR 5217), France; ^^2^^DDL (UMR 5596), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2138–2142
</span></p></div>
<div class="cpabstractcardabstract"><p>Growing digital archives and improving algorithms for automatic analysis of text and speech create new research opportunities for fundamental research in phonetics. Such empirical approaches allow statistical evaluation of a much larger set of hypothesis about phonetic variation and its conditioning factors (among them geographical / dialectal variants). This paper illustrates this vision and proposes to challenge automatic methods for the analysis of a not easily observable phenomenon: vowel length contrast. We focus on Wolof, an under-resourced language from Sub-Saharan Africa. In particular, we propose multiple features to make a fine evaluation of the degree of length contrast under different factors such as: read vs semi-spontaneous speech; standard vs dialectal Wolof. Our measures made fully automatically on more than 20k vowel tokens show that our proposed features can highlight different degrees of contrast for each vowel considered. We notably show that contrast is weaker in semi-spontaneous speech and in a non standard semi-spontaneous dialect.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Thomas Glarner|AUTHOR Thomas Glarner]]^^1^^, [[Benedikt Boenninghoff|AUTHOR Benedikt Boenninghoff]]^^2^^, [[Oliver Walter|AUTHOR Oliver Walter]]^^1^^, [[Reinhold Haeb-Umbach|AUTHOR Reinhold Haeb-Umbach]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität Paderborn, Germany; ^^2^^Ruhr-Universität Bochum, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2143–2147
</span></p></div>
<div class="cpabstractcardabstract"><p>In this contribution we show how to exploit text data to support word discovery from audio input in an underresourced target language. Given audio, of which a certain amount is transcribed at the word level, and additional unrelated text data, the approach is able to learn a probabilistic mapping from acoustic units to characters and utilize it to segment the audio data into words without the need of a pronunciation dictionary. This is achieved by three components: an unsupervised acoustic unit discovery system, a supervisedly trained acoustic unit-to-grapheme converter, and a word discovery system, which is initialized with a language model trained on the text data. Experiments for multiple setups show that the initialization of the language model with text data improves the word segmentation performance by a large margin.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xiaodan Zhuang|AUTHOR Xiaodan Zhuang]], [[Arnab Ghoshal|AUTHOR Arnab Ghoshal]], [[Antti-Veikko Rosti|AUTHOR Antti-Veikko Rosti]], [[Matthias Paulik|AUTHOR Matthias Paulik]], [[Daben Liu|AUTHOR Daben Liu]]
</p><p class="cpabstractcardaffiliationlist">Apple, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2148–2152
</span></p></div>
<div class="cpabstractcardabstract"><p>The success of deep neural network (DNN) acoustic models is partly owed to large amounts of training data available for different applications. This work investigates ways to improve DNN acoustic models for Bluetooth narrowband mobile applications when relatively small amounts of in-domain training data are available. To address the challenge of limited in-domain data, we use cross-bandwidth and cross-lingual transfer learning methods to leverage knowledge from other domains with more training data (different bandwidth and/or languages). Specifically, narrowband DNNs in a target language are initialized using the weights of DNNs trained on bandlimited wide-band data in the same language or those trained on a different (resource-rich) language. We investigate multiple recipes involving such methods with different data resources. For all languages in our experiments, these recipes achieve up to 45% relative WER reduction, compared to training solely on the Bluetooth narrowband data in the target language. Furthermore, these recipes are very beneficial even when over two hundred hours of manually transcribed in-domain data is available, and we can achieve better accuracy than the baselines with as little as 20 hours of in-domain data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Basil Abraham|AUTHOR Basil Abraham]], [[S. Umesh|AUTHOR S. Umesh]], [[Neethu Mariam Joy|AUTHOR Neethu Mariam Joy]]
</p><p class="cpabstractcardaffiliationlist">IIT Madras, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2153–2157
</span></p></div>
<div class="cpabstractcardabstract"><p>Using articulatory features for speech recognition improves the performance of low-resource languages. One way to obtain articulatory features is by using an articulatory classifier (pseudo-articulatory features). The performance of the articulatory features depends on the efficacy of this classifier. But, training such a robust classifier for a low-resource language is constrained due to the limited amount of training data. We can overcome this by training the articulatory classifier using a high resource language. This classifier can then be used to generate articulatory features for the low-resource language. However, this technique fails when high and low-resource languages have mismatches in their environmental conditions. In this paper, we address both the aforementioned problems by jointly estimating the articulatory features and low-resource acoustic model. The experiments were performed on two low-resource Indian languages namely, Hindi and Tamil. English was used as the high-resource language. Relative improvements of 23% and 10% were obtained for Hindi and Tamil, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rong Tong|AUTHOR Rong Tong]], [[Nancy F. Chen|AUTHOR Nancy F. Chen]], [[Bin Ma|AUTHOR Bin Ma]]
</p><p class="cpabstractcardaffiliationlist">A*STAR, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2193–2197
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech technology for children is more challenging than for adults, because there is a lack of children’s speech corpora. Moreover, there is higher heterogeneity in children’s speech due to variability in anatomy across age and gender, larger variance in speaking rate and vocal effort, and immature command of word usage, grammar, and linguistic structure. Speech productions from Singapore children possess even more variability due to the multilingual environment in the city-state, causing inter-influences from Chinese languages (e.g., Hokkien and Mandarin), English dialects (e.g., American and British), and Indian languages (e.g., Hindi and Tamil). In this paper, we show that acoustic modeling of children’s speech can leverage on a larger set of adult data. We compare two data augmentation approaches for children’s acoustic modeling. The first approach disregards the child and adult categories and consolidates the two datasets together as one entire set. The second approach is multi-task learning: during training the acoustic characteristics of adults and children are jointly learned through shared hidden layers of the deep neural network, yet they still retain their respective targets using two distinct softmax layers. We empirically show that the multi-task learning approach outperforms the baseline in both speech recognition and computer-assisted pronunciation training.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Elin Larsen|AUTHOR Elin Larsen]], [[Alejandrina Cristia|AUTHOR Alejandrina Cristia]], [[Emmanuel Dupoux|AUTHOR Emmanuel Dupoux]]
</p><p class="cpabstractcardaffiliationlist">LSCP (UMR 8554), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2198–2202
</span></p></div>
<div class="cpabstractcardabstract"><p>A range of computational approaches have been used to model the discovery of word forms from continuous speech by infants. Typically, these algorithms are evaluated with respect to the ideal ‘gold standard’ word segmentation and lexicon. These metrics assess how well an algorithm matches the adult state, but may not reflect the intermediate states of the child’s lexical development. We set up a new evaluation method based on the correlation between word frequency counts derived from the application of an algorithm onto a corpus of child-directed speech, and the proportion of infants knowing those words, according to parental reports. We evaluate a representative set of 4 algorithms, applied to transcriptions of the Brent corpus, which have been phonologized using either phonemes or syllables as basic units. Results show remarkable variation in the extent to which these 8 algorithm-unit combinations predicted infant vocabulary, with some of these predictions surpassing those derived from the adult gold standard segmentation. We argue that infant vocabulary prediction provides a useful complement to traditional evaluation; for example, the best predictor model was also one of the worst in terms of segmentation score, and there was no clear relationship between token or boundary F-score and vocabulary prediction.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mats Wirén|AUTHOR Mats Wirén]], [[Kristina N. Björkenstam|AUTHOR Kristina N. Björkenstam]], [[Robert Östling|AUTHOR Robert Östling]]
</p><p class="cpabstractcardaffiliationlist">Stockholm University, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2203–2207
</span></p></div>
<div class="cpabstractcardabstract"><p>Non-verbal cues from speakers, such as eye gaze and hand positions, play an important role in word learning [1]. This is consistent with the notion that for meaning to be reconstructed, acoustic patterns need to be linked to time-synchronous patterns from at least one other modality [2]. In previous studies of a multimodally annotated corpus of parent-child interaction, we have shown that parents interacting with infants at the early word-learning stage (7–9 months) display a large amount of time-synchronous patterns, but that this behaviour tails off with increasing age of the children [3]. Furthermore, we have attempted to quantify the informativeness of the different non-verbal cues, that is, to what extent they actually help to discriminate between different possible referents, and how critical the timing of the cues is [4]. The purpose of this paper is to generalise our earlier model by quantifying informativeness resulting from non-verbal cues occurring both before and after their associated verbal references.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ellen Marklund|AUTHOR Ellen Marklund]], [[David Pagmar|AUTHOR David Pagmar]], [[Tove Gerholm|AUTHOR Tove Gerholm]], [[Lisa Gustavsson|AUTHOR Lisa Gustavsson]]
</p><p class="cpabstractcardaffiliationlist">Stockholm University, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2208–2212
</span></p></div>
<div class="cpabstractcardabstract"><p>The purpose of the present study was to introduce a computational simulation of timing in child-adult interaction. The simulation uses temporal information from real adult-child interactions as default temporal behavior of two simulated agents. Dependencies between the agents’ behavior are added, and how the simulated interactions compare to real interaction data as a result is investigated. In the present study, the real data consisted of transcriptions of a mother interacting with her 12-month-old child, and the data simulated was vocalizations. The first experiment shows that although the two agents generate vocalizations according to the temporal characteristics of the interlocutors in the real data, simulated interaction with no contingencies between the two agents’ behavior differs from real interaction data. In the second experiment, a contingency was introduced to the simulation: the likelihood that the adult agent initiated a vocalization if the child agent was already vocalizing. Overall, the simulated data is more similar to the real interaction data when the adult agent is less likely to start speaking while the child agent vocalizes. The results are in line with previous studies on turn-taking in parent-child interaction at comparable ages. This illustrates that computational simulations are useful tools when investigating parent-child interactions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sofia Strömbergsson|AUTHOR Sofia Strömbergsson]]^^1^^, [[Jens Edlund|AUTHOR Jens Edlund]]^^2^^, [[Jana Götze|AUTHOR Jana Götze]]^^1^^, [[Kristina Nilsson Björkenstam|AUTHOR Kristina Nilsson Björkenstam]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Karolinska Institute, Sweden; ^^2^^KTH, Sweden; ^^3^^Stockholm University, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2213–2217
</span></p></div>
<div class="cpabstractcardabstract"><p>Child-directed spoken data is the ideal source of support for claims about children’s linguistic environments. However, phonological transcriptions of child-directed speech are scarce, compared to sources like adult-directed speech or text data. Acquiring reliable descriptions of children’s phonological environments from more readily accessible sources would mean considerable savings of time and money. The first step towards this goal is to quantify the reliability of descriptions derived from such secondary sources.
We investigate how phonological distributions vary across different modalities (spoken vs. written), and across the age of the intended audience (children vs. adults). Using a previously unseen collection of Swedish adult- and child-directed spoken and written data, we combine lexicon look-up and grapheme-to-phoneme conversion to approximate phonological characteristics. The analysis shows distributional differences across datasets both for single phonemes and for longer phoneme sequences. Some of these are predictably attributed to lexical and contextual characteristics of text vs. speech.
The generated phonological transcriptions are remarkably reliable. The differences in phonological distributions between child-directed speech and secondary sources highlight a need for compensatory measures when relying on written data or on adult-directed spoken data, and/or for continued collection of actual child-directed speech in research on children’s language environments.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rahma Chaabouni|AUTHOR Rahma Chaabouni]]^^1^^, [[Ewan Dunbar|AUTHOR Ewan Dunbar]]^^1^^, [[Neil Zeghidour|AUTHOR Neil Zeghidour]]^^1^^, [[Emmanuel Dupoux|AUTHOR Emmanuel Dupoux]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^ENS, France; ^^2^^ENS, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2218–2222
</span></p></div>
<div class="cpabstractcardabstract"><p>Recent works have explored deep architectures for learning multimodal speech representation (e.g. audio and images, articulation and audio) in a supervised way. Here we investigate the role of combining different speech modalities, i.e. audio and visual information representing the lips’ movements, in a weakly supervised way using Siamese networks and lexical same-different side information. In particular, we ask whether one modality can benefit from the other to provide a richer representation for phone recognition in a weakly supervised setting. We introduce mono-task and multi-task methods for merging speech and visual modalities for phone recognition. The mono-task learning consists in applying a Siamese network on the concatenation of the two modalities, while the multi-task learning receives several different combinations of modalities at train time. We show that multi-task learning enhances discriminability for visual and multimodal inputs while minimally impacting auditory inputs. Furthermore, we present a qualitative analysis of the obtained phone embeddings, and show that cross-modal visual input can improve the discriminability of phonological features which are visually discernable (rounding, open/close, labial place of articulation), resulting in representations that are closer to abstract linguistic features than those based on audio only.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yasunari Obuchi|AUTHOR Yasunari Obuchi]]
</p><p class="cpabstractcardaffiliationlist">Tokyo University of Technology, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2223–2227
</span></p></div>
<div class="cpabstractcardabstract"><p>Voice attractiveness is an indicator which is somewhat objective and somewhat subjective. It would be helpful to assume that each voice has its own attractiveness. However, the paired comparison results of human listeners sometimes include inconsistency. In this paper, we propose a multidimensional mapping scheme of voice attractiveness, which explains the existence of objective merit values of voices and subjective preference of listeners. Paired comparison is modeled in a probabilistic framework, and the optimal mapping is obtained from the paired comparison results on the maximum likelihood criterion.
The merit values can be estimated from the acoustic feature using the machine learning framework. We show how the estimation process works using a real database consisting of common Japanese greeting utterances. Experiments using 1- and 2-dimensional merit spaces confirm that the comparison result prediction from the acoustic feature becomes more accurate in the 2-dimensional case.</p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} </span></p></div>
<div class="cpabstractcardabstract"><p>(No abstract available at the time of publication)</p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} </span></p></div>
<div class="cpabstractcardabstract"><p>(No abstract available at the time of publication)</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hans Rutger Bosker|AUTHOR Hans Rutger Bosker]]
</p><p class="cpabstractcardaffiliationlist">MPI for Psycholinguistics, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2228–2232
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech is an acoustic signal with inherent amplitude modulations in the 1–9 Hz range. Recent models of speech perception propose that this rhythmic nature of speech is central to speech recognition. Moreover, rhythmic amplitude modulations have been shown to have beneficial effects on language processing and the subjective impression listeners have of the speaker. This study investigated the role of amplitude modulations in the political arena by comparing the speech produced by Hillary Clinton and Donald Trump in the three presidential debates of 2016.
Inspection of the modulation spectra, revealing the spectral content of the two speakers’ amplitude envelopes after matching for overall intensity, showed considerably greater power in Clinton’s modulation spectra (compared to Trump’s) across the three debates, particularly in the 1–9 Hz range. The findings suggest that Clinton’s speech had a more pronounced temporal envelope with rhythmic amplitude modulations below 9 Hz, with a preference for modulations around 3 Hz. This may be taken as evidence for a more structured temporal organization of syllables in Clinton’s speech, potentially due to more frequent use of preplanned utterances. Outcomes are interpreted in light of the potential beneficial effects of a rhythmic temporal envelope on intelligibility and speaker perception.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Laura Fernández Gallardo|AUTHOR Laura Fernández Gallardo]], [[Rafael Zequeira Jiménez|AUTHOR Rafael Zequeira Jiménez]], [[Sebastian Möller|AUTHOR Sebastian Möller]]
</p><p class="cpabstractcardaffiliationlist">T-Labs, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2233–2237
</span></p></div>
<div class="cpabstractcardabstract"><p>Human perceptions of speaker characteristics, needed to perform automatic predictions from speech features, have generally been collected by conducting demanding in-lab listening tests under controlled conditions. Concurrently, crowdsourcing has emerged as a valuable approach for running user studies through surveys or quantitative ratings. Micro-task crowdsourcing markets enable the completion of small tasks (commonly of minutes or seconds), rewarding users with micro-payments. This paradigm permits effortless collection of user input from a large and diverse pool of participants at low cost. This paper presents different auditory tests for collecting perceptual voice likability ratings employing a common set of 30 male and female voices. These tests are based on direct scaling and on paired-comparisons, and were conducted in the laboratory and via crowdsourcing using micro-tasks. Design considerations are proposed for adapting the laboratory listening tests to a mobile-based crowdsourcing platform to obtain trustworthy listeners’ answers. Our likability scores obtained by the different test approaches are highly correlated. This outcome motivates the use of crowdsourcing for future listening tests investigating e.g. speaker characterization, reducing the efforts involved in engaging participants and administering the tests on-site.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jürgen Trouvain|AUTHOR Jürgen Trouvain]], [[Frank Zimmerer|AUTHOR Frank Zimmerer]]
</p><p class="cpabstractcardaffiliationlist">Universität des Saarlandes, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2238–2242
</span></p></div>
<div class="cpabstractcardabstract"><p>This study investigated how the perceived attractiveness of voices was influenced by a foreign language, a foreign accent, and the level of fluency in the foreign language. Stimuli were taken from a French-German corpus of read speech with German native speakers as raters. Additional factors were stimulus length (syllable or entire sentence) and sex (of the raters and speakers). Results with German native raters reveal that stimuli spanning just a syllable were judged significantly less attractive than those containing a sentence, and that stimuli from French speakers were assessed as more attractive than those of German speakers. This backs the cliché that French has an attractive image for German listeners. An analysis of the best vs. the worst rated sentences suggests that an individual mix of voice quality, disfluency management, prosodic behaviour and pronunciation precision is responsible for the results.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Antje Schweitzer|AUTHOR Antje Schweitzer]], [[Natalie Lewandowski|AUTHOR Natalie Lewandowski]], [[Daniel Duran|AUTHOR Daniel Duran]]
</p><p class="cpabstractcardaffiliationlist">Universität Stuttgart, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2243–2247
</span></p></div>
<div class="cpabstractcardabstract"><p>This study investigates how acoustic and lexical properties of spontaneous speech in dialogs affect perceived social attractiveness in terms of speaker likeability, friendliness, competence, and self-confidence. We analyze a database of longer spontaneous dialogs between German female speakers and the mutual ratings that dialog partners assigned to one another after every conversation. Thus the ratings reflect long-term impressions based on dialog behavior. Using linear mixed models, we investigate both classical acoustic-prosodic and lexical parameters as well as parameters that capture the degree of speakers’ adaptation, or “convergence”, of these parameters to each other. Specifically we find that likeability is correlated with the speaker’s lexical convergence as well as with her convergence in f,,0,, peak height. Friendliness is significantly related to variation in intensity. For competence, the proportion of positive words in the dialog, variation in shimmer, and overall phonetic convergence are significant correlates. Self-confidence finally is related to several prosodic, phonetic, and lexical adaptation parameters. In some cases, the effect depends on whether interlocutors also had eye contact during their conversation. Taken together, these findings provide evidence that in addition to classical parameters, convergence parameters play an important role in the mutual perception of social attractiveness.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Eszter Novák-Tót|AUTHOR Eszter Novák-Tót]]^^1^^, [[Oliver Niebuhr|AUTHOR Oliver Niebuhr]]^^1^^, [[Aoju Chen|AUTHOR Aoju Chen]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Southern Denmark, Denmark; ^^2^^Universiteit Utrecht, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2248–2252
</span></p></div>
<div class="cpabstractcardabstract"><p>Previous studies proved the immense importance of nonverbal skills when it comes to being persuasive and coming across as charismatic. It was also found that men sound more convincing and persuasive (i.e. altogether more charismatic) than women under otherwise comparable conditions. This gender bias is investigated in the present study by analyzing and comparing acoustic-melodic charisma features of male and female business executives. In line with the gender bias in perception, our results show that female CEOs who are judged to be similarly charismatic as their male counterpart(s) produce more and stronger acoustic charisma cues. This suggests that there is a gender bias which is compensated for by making a greater effort on the part of the female speakers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jan Michalsky|AUTHOR Jan Michalsky]], [[Heike Schoormann|AUTHOR Heike Schoormann]]
</p><p class="cpabstractcardaffiliationlist">Carl von Ossietzky Universität Oldenburg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2253–2256
</span></p></div>
<div class="cpabstractcardabstract"><p>While there is a growing body of research on which and how pitch features are perceived as attractive or likable, there are few studies investigating how the impression of a speaker as attractive or likable affects the speech behavior of his/her interlocutor. Recent studies have shown that perceived attractiveness and likability may not only have an effect on a speaker’s pitch features in isolation but also on the prosodic entrainment. It has been shown that how speakers synchronize their pitch features relatively to their interlocutor is affected by such impressions. This study investigates pitch convergence, examining whether speakers become more similar over the course of a conversation depending on perceived attractiveness and/or likability. The expected pitch convergence is thereby investigated on two levels, over the entire conversation (globally) as well as turn-wise (locally). The results from a speed dating experiment with 98 mixed-sex dialogues of heterosexual singles show that speakers become more similar globally and locally over time both in register and range. Furthermore, the degree of pitch convergence is greatly affected by perceived attractiveness and likability with effects differing between attractiveness and likability as well as between the global and the local level.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Li Jiao|AUTHOR Li Jiao]]^^1^^, [[Chengxia Wang|AUTHOR Chengxia Wang]]^^2^^, [[Cristiane Hsu|AUTHOR Cristiane Hsu]]^^2^^, [[Peter Birkholz|AUTHOR Peter Birkholz]]^^3^^, [[Yi Xu|AUTHOR Yi Xu]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tongji University, China; ^^2^^University College London, UK; ^^3^^Technische Universität Dresden, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2257–2261
</span></p></div>
<div class="cpabstractcardabstract"><p>Poshness refers to how much a British English speaker sounds upper class when they talk. Popular descriptions of posh English mostly focus on vocabulary, accent and phonology. This study tests the hypothesis that, as a social index, poshness is also manifested via phonetic properties known to encode vocal attractiveness. Specifically, posh English, because of its impression of being detached, authoritative and condescending, would more closely resemble an attractive male voice than an attractive female voice. In four experiments, we tested this hypothesis by acoustically manipulating Cambridge-accented English utterances by a male and a female speaker through PSOLA resynthesis, and having native speakers of British English judge how posh or attractive each utterance sounds. The manipulated acoustic dimensions are formant dispersion, pitch shift and speech rate. Initial results from the first two experiments showed a trend in the hypothesized direction for the male speakers’ utterances. But for the female utterances there was a ceiling effect due to the frequent alternation of speaker gender within the same test session. When the two speakers’ utterances were separated by blocks in the third and fourth experiments, a clearer support for the main hypothesis was found.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Timo Baumann|AUTHOR Timo Baumann]]
</p><p class="cpabstractcardaffiliationlist">Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2262–2266
<a href="./IS2017/MEDIA/1697" class="externallinkbutton" target="_blank">{{$:/causal/Multimedia Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech quality and likability is a multi-faceted phenomenon consisting of a combination of perceptory features that cannot easily be computed nor weighed automatically. Yet, it is often easy to decide which of two voices one likes better, even though it would be hard to describe why, or to name the underlying basic perceptory features. Although likability is inherently subjective and individual preferences differ frequently, generalizations are useful and there is often a broad intersubjective consensus about whether one speaker is more likable than another. However, breaking down likability rankings into pairwise comparisons leads to a quadratic explosion of rating pairs. We present a methodology and software to efficiently create a likability ranking for many speakers from crowdsourced pairwise likability ratings. We collected pairwise likability ratings for many (>220) speakers from many raters (>160) and turn these ratings into one likability ranking. We investigate the resulting speaker ranking stability under different conditions: limiting the number of ratings and the dependence on rater and speaker characteristics. We also analyze the ranking wrt. acoustic correlates to find out what factors influence likability. We publish our ranking and the underlying ratings in order to facilitate further research.</p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cptablecelltopbottomspace2|k
|cpsessionlisttable|k
|^<div class="cpsessionlistsessioncode">[[Mon-K1-1|SESSION Mon-K1-1 — ISCA Medal 2017 Ceremony]]</div> |^<div class="cpsessionlistsessionname">ISCA Medal 2017 Ceremony</div> |
|^<div class="cpsessionlistsessioncode">[[Mon-SS-1-8|SESSION Mon-SS-1-8 — Special Session: Interspeech 2017 Automatic Speaker Verification Spoofing and Countermeasures Challenge 1]]</div> |^<div class="cpsessionlistsessionname">Special Session: Interspeech 2017 Automatic Speaker Verification Spoofing and Countermeasures Challenge 1</div> |
|^<div class="cpsessionlistsessioncode">[[Mon-SS-1-11|SESSION Mon-SS-1-11 — Special Session: Speech Technology for Code-Switching in Multilingual Communities]]</div> |^<div class="cpsessionlistsessionname">Special Session: Speech Technology for Code-Switching in Multilingual Communities</div> |
|^<div class="cpsessionlistsessioncode">[[Mon-SS-2-8|SESSION Mon-SS-2-8 — Special Session: Interspeech 2017 Automatic Speaker Verification Spoofing and Countermeasures Challenge 2]]</div> |^<div class="cpsessionlistsessionname">Special Session: Interspeech 2017 Automatic Speaker Verification Spoofing and Countermeasures Challenge 2</div> |
|^<div class="cpsessionlistsessioncode">[[Mon-O-1-1|SESSION Mon-O-1-1 — Conversational Telephone Speech Recognition]]</div> |^<div class="cpsessionlistsessionname">Conversational Telephone Speech Recognition</div> |
|^<div class="cpsessionlistsessioncode">[[Mon-O-1-2|SESSION Mon-O-1-2 — Multimodal Paralinguistics]]</div> |^<div class="cpsessionlistsessionname">Multimodal Paralinguistics</div> |
|^<div class="cpsessionlistsessioncode">[[Mon-O-1-4|SESSION Mon-O-1-4 — Dereverberation, Echo Cancellation and Speech]]</div> |^<div class="cpsessionlistsessionname">Dereverberation, Echo Cancellation and Speech </div> |
|^<div class="cpsessionlistsessioncode">[[Mon-O-1-6|SESSION Mon-O-1-6 — Acoustic and Articulatory Phonetics]]</div> |^<div class="cpsessionlistsessionname">Acoustic and Articulatory Phonetics</div> |
|^<div class="cpsessionlistsessioncode">[[Mon-O-1-10|SESSION Mon-O-1-10 — Multimodal and Articulatory Synthesis]]</div> |^<div class="cpsessionlistsessionname">Multimodal and Articulatory Synthesis</div> |
|^<div class="cpsessionlistsessioncode">[[Mon-O-2-1|SESSION Mon-O-2-1 — Neural Networks for Language Modeling]]</div> |^<div class="cpsessionlistsessionname">Neural Networks for Language Modeling</div> |
|^<div class="cpsessionlistsessioncode">[[Mon-O-2-2|SESSION Mon-O-2-2 — Pathological Speech and Language]]</div> |^<div class="cpsessionlistsessionname">Pathological Speech and Language</div> |
|^<div class="cpsessionlistsessioncode">[[Mon-O-2-4|SESSION Mon-O-2-4 — Speech Analysis and Representation 1]]</div> |^<div class="cpsessionlistsessionname">Speech Analysis and Representation 1</div> |
|^<div class="cpsessionlistsessioncode">[[Mon-O-2-6|SESSION Mon-O-2-6 — Perception of Dialects and L2]]</div> |^<div class="cpsessionlistsessionname">Perception of Dialects and L2</div> |
|^<div class="cpsessionlistsessioncode">[[Mon-O-2-10|SESSION Mon-O-2-10 — Far-field Speech Recognition]]</div> |^<div class="cpsessionlistsessionname">Far-field Speech Recognition</div> |
|^<div class="cpsessionlistsessioncode">[[Mon-P-1-1|SESSION Mon-P-1-1 — Speech Analysis and Representation 2]]</div> |^<div class="cpsessionlistsessionname">Speech Analysis and Representation 2</div> |
|^<div class="cpsessionlistsessioncode">[[Mon-P-1-2|SESSION Mon-P-1-2 — Speech and Audio Segmentation and Classification 2]]</div> |^<div class="cpsessionlistsessionname">Speech and Audio Segmentation and Classification 2</div> |
|^<div class="cpsessionlistsessioncode">[[Mon-P-1-4|SESSION Mon-P-1-4 — Search, Computational Strategies and Language Modeling]]</div> |^<div class="cpsessionlistsessionname">Search, Computational Strategies and Language Modeling</div> |
|^<div class="cpsessionlistsessioncode">[[Mon-P-2-1|SESSION Mon-P-2-1 — Speech Perception]]</div> |^<div class="cpsessionlistsessionname">Speech Perception</div> |
|^<div class="cpsessionlistsessioncode">[[Mon-P-2-2|SESSION Mon-P-2-2 — Speech Production and Perception]]</div> |^<div class="cpsessionlistsessionname">Speech Production and Perception</div> |
|^<div class="cpsessionlistsessioncode">[[Mon-P-2-3|SESSION Mon-P-2-3 — Multi-lingual Models and Adaptation for ASR]]</div> |^<div class="cpsessionlistsessionname">Multi-lingual Models and Adaptation for ASR</div> |
|^<div class="cpsessionlistsessioncode">[[Mon-P-2-4|SESSION Mon-P-2-4 — Prosody and Text Processing]]</div> |^<div class="cpsessionlistsessionname">Prosody and Text Processing</div> |
|^<div class="cpsessionlistsessioncode">[[Mon-S&T-1/2-A|SESSION Mon-S&T-1/2-A — Show & Tell 1]]</div> |^<div class="cpsessionlistsessionname">Show & Tell 1</div> |
|^<div class="cpsessionlistsessioncode">[[Mon-S&T-1/2-B|SESSION Mon-S&T-1/2-B — Show & Tell 2]]</div> |^<div class="cpsessionlistsessionname">Show & Tell 2</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-K2-1|SESSION Tue-K2-1 — Keynote 1: James Allen]]</div> |^<div class="cpsessionlistsessionname">Keynote 1: James Allen</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-SS-3-11|SESSION Tue-SS-3-11 — Special Session: Speech and Human-Robot Interaction]]</div> |^<div class="cpsessionlistsessionname">Special Session: Speech and Human-Robot Interaction</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-SS-4-11|SESSION Tue-SS-4-11 — Special Session: Incremental Processing and Responsive Behaviour]]</div> |^<div class="cpsessionlistsessionname">Special Session: Incremental Processing and Responsive Behaviour</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-SS-5-11|SESSION Tue-SS-5-11 — Special Session: Acoustic Manifestations of Social Characteristics]]</div> |^<div class="cpsessionlistsessionname">Special Session: Acoustic Manifestations of Social Characteristics</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-O-3-1|SESSION Tue-O-3-1 — Neural Network Acoustic Models for ASR 1]]</div> |^<div class="cpsessionlistsessionname">Neural Network Acoustic Models for ASR 1</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-O-3-2|SESSION Tue-O-3-2 — Models of Speech Production]]</div> |^<div class="cpsessionlistsessionname">Models of Speech Production</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-O-3-4|SESSION Tue-O-3-4 — Speaker Recognition]]</div> |^<div class="cpsessionlistsessionname">Speaker Recognition</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-O-3-6|SESSION Tue-O-3-6 — Phonation and Voice Quality]]</div> |^<div class="cpsessionlistsessionname">Phonation and Voice Quality</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-O-3-8|SESSION Tue-O-3-8 — Speech Synthesis Prosody]]</div> |^<div class="cpsessionlistsessionname">Speech Synthesis Prosody</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-O-3-10|SESSION Tue-O-3-10 — Emotion Recognition]]</div> |^<div class="cpsessionlistsessionname">Emotion Recognition</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-O-4-1|SESSION Tue-O-4-1 — WaveNet and Novel Paradigms]]</div> |^<div class="cpsessionlistsessionname">WaveNet and Novel Paradigms</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-O-4-2|SESSION Tue-O-4-2 — Models of Speech Perception]]</div> |^<div class="cpsessionlistsessionname">Models of Speech Perception</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-O-4-4|SESSION Tue-O-4-4 — Source Separation and Auditory Scene Analysis]]</div> |^<div class="cpsessionlistsessionname">Source Separation and Auditory Scene Analysis</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-O-4-6|SESSION Tue-O-4-6 — Prosody: Tone and Intonation]]</div> |^<div class="cpsessionlistsessionname">Prosody: Tone and Intonation</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-O-4-8|SESSION Tue-O-4-8 — Emotion Modeling]]</div> |^<div class="cpsessionlistsessionname">Emotion Modeling</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-O-4-10|SESSION Tue-O-4-10 — Voice Conversion 1]]</div> |^<div class="cpsessionlistsessionname">Voice Conversion 1</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-O-5-1|SESSION Tue-O-5-1 — Neural Network Acoustic Models for ASR 2]]</div> |^<div class="cpsessionlistsessionname">Neural Network Acoustic Models for ASR 2</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-O-5-2|SESSION Tue-O-5-2 — Speaker Recognition Evaluation]]</div> |^<div class="cpsessionlistsessionname">Speaker Recognition Evaluation</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-O-5-4|SESSION Tue-O-5-4 — Glottal Source Modeling]]</div> |^<div class="cpsessionlistsessionname">Glottal Source Modeling</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-O-5-6|SESSION Tue-O-5-6 — Prosody: Rhythm, Stress, Quantity and Phrasing]]</div> |^<div class="cpsessionlistsessionname">Prosody: Rhythm, Stress, Quantity and Phrasing</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-O-5-8|SESSION Tue-O-5-8 — Speech Recognition for Langauge Learning]]</div> |^<div class="cpsessionlistsessionname">Speech Recognition for Language Learning</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-O-5-10|SESSION Tue-O-5-10 — Stance, Credibility, and Deception]]</div> |^<div class="cpsessionlistsessionname">Stance, Credibility, and Deception</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-P-3-1|SESSION Tue-P-3-1 — Short Utterances Speaker Recognition]]</div> |^<div class="cpsessionlistsessionname">Short Utterances Speaker Recognition</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-P-3-2|SESSION Tue-P-3-2 — Speaker Characterization and Recognition]]</div> |^<div class="cpsessionlistsessionname">Speaker Characterization and Recognition</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-P-4-1|SESSION Tue-P-4-1 — Acoustic Models for ASR 1]]</div> |^<div class="cpsessionlistsessionname">Acoustic Models for ASR 1</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-P-4-2|SESSION Tue-P-4-2 — Acoustic Models for ASR 2]]</div> |^<div class="cpsessionlistsessionname">Acoustic Models for ASR 2</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-P-4-3|SESSION Tue-P-4-3 — Dialog Modeling]]</div> |^<div class="cpsessionlistsessionname">Dialog Modeling</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-P-5-1|SESSION Tue-P-5-1 — L1 and L2 Acquisition]]</div> |^<div class="cpsessionlistsessionname">L1 and L2 Acquisition</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-P-5-2|SESSION Tue-P-5-2 — Voice, Speech and Hearing Disorders]]</div> |^<div class="cpsessionlistsessionname">Voice, Speech and Hearing Disorders</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-P-5-3|SESSION Tue-P-5-3 — Source Separation and Voice Activity Detection]]</div> |^<div class="cpsessionlistsessionname">Source Separation and Voice Activity Detection</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-P-5-4|SESSION Tue-P-5-4 — Speech-enhancement]]</div> |^<div class="cpsessionlistsessionname">Speech-enhancement</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-S&T-3/4-A|SESSION Tue-S&T-3/4-A — Show & Tell 3]]</div> |^<div class="cpsessionlistsessionname">Show & Tell 3</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-S&T-3/4-B|SESSION Tue-S&T-3/4-B — Show & Tell 4]]</div> |^<div class="cpsessionlistsessionname">Show & Tell 4</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-K3-1|SESSION Wed-K3-1 — Keynote 2: Catherine Pelachaud]]</div> |^<div class="cpsessionlistsessionname">Keynote 2: Catherine Pelachaud</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-SS-6-2|SESSION Wed-SS-6-2 — Special Session: Digital Revolution for Under-resourced Languages 1]]</div> |^<div class="cpsessionlistsessionname">Special Session: Digital Revolution for Under-resourced Languages 1</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-SS-6-11|SESSION Wed-SS-6-11 — Special Session: Data Collection, Transcription and Annotation Issues in Child Language Acquisition]]</div> |^<div class="cpsessionlistsessionname">Special Session: Data Collection, Transcription and Annotation Issues in Child Language Acquisition</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-SS-7-1|SESSION Wed-SS-7-1 — Special Session: Digital Revolution for Under-resourced Languages 2]]</div> |^<div class="cpsessionlistsessionname">Special Session: Digital Revolution for Under-resourced Languages 2</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-SS-7-11|SESSION Wed-SS-7-11 — Special Session: Computational Models in Child Language Acquisition]]</div> |^<div class="cpsessionlistsessionname">Special Session: Computational Models in Child Language Acquisition</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-SS-8-11|SESSION Wed-SS-8-11 — Special Session: Voice Attractiveness]]</div> |^<div class="cpsessionlistsessionname">Special Session: Voice Attractiveness</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-O-6-1|SESSION Wed-O-6-1 — Speech Production and Physiology]]</div> |^<div class="cpsessionlistsessionname">Speech Production and Physiology</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-O-6-4|SESSION Wed-O-6-4 — Speech and Harmonic Analysis]]</div> |^<div class="cpsessionlistsessionname">Speech and Harmonic Analysis</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-O-6-6|SESSION Wed-O-6-6 — Dialog and Prosody]]</div> |^<div class="cpsessionlistsessionname">Dialog and Prosody</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-O-6-8|SESSION Wed-O-6-8 — Social Signals, Styles, and Interaction]]</div> |^<div class="cpsessionlistsessionname">Social Signals, Styles, and Interaction</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-O-6-10|SESSION Wed-O-6-10 — Acoustic Model Adaptation]]</div> |^<div class="cpsessionlistsessionname">Acoustic Model Adaptation</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-O-7-1|SESSION Wed-O-7-1 — Cognition and Brain Studies]]</div> |^<div class="cpsessionlistsessionname">Cognition and Brain Studies</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-O-7-2|SESSION Wed-O-7-2 — Noise Robust Speech Recognition]]</div> |^<div class="cpsessionlistsessionname">Noise Robust Speech Recognition</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-O-7-4|SESSION Wed-O-7-4 — Topic Spotting, Entity Extraction and Semantic Analysis]]</div> |^<div class="cpsessionlistsessionname">Topic Spotting, Entity Extraction and Semantic Analysis</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-O-7-6|SESSION Wed-O-7-6 — Dialog Systems]]</div> |^<div class="cpsessionlistsessionname">Dialog Systems</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-O-7-8|SESSION Wed-O-7-8 — Lexical and Pronunciation Modeling]]</div> |^<div class="cpsessionlistsessionname">Lexical and Pronunciation Modeling</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-O-7-10|SESSION Wed-O-7-10 — Language Recognition]]</div> |^<div class="cpsessionlistsessionname">Language Recognition</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-O-8-1|SESSION Wed-O-8-1 — Speaker Database and Anti-spoofing]]</div> |^<div class="cpsessionlistsessionname">Speaker Database and Anti-spoofing</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-O-8-4|SESSION Wed-O-8-4 — Speech Translation]]</div> |^<div class="cpsessionlistsessionname">Speech Translation</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-O-8-6|SESSION Wed-O-8-6 — Multi-channel Speech Enhancement]]</div> |^<div class="cpsessionlistsessionname">Multi-channel Speech Enhancement</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-O-8-8|SESSION Wed-O-8-8 — Speech Recognition: Applications in Medical Practice]]</div> |^<div class="cpsessionlistsessionname">Speech Recognition: Applications in Medical Practice</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-O-8-10|SESSION Wed-O-8-10 — Language models for ASR]]</div> |^<div class="cpsessionlistsessionname">Language models for ASR</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-P-6-1|SESSION Wed-P-6-1 — Speech Recognition: Technologies for New Applicaitons and Paradigms]]</div> |^<div class="cpsessionlistsessionname">Speech Recognition: Technologies for New Applications and Paradigms</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-P-6-2|SESSION Wed-P-6-2 — Speaker and Language Recognition Applications]]</div> |^<div class="cpsessionlistsessionname">Speaker and Language Recognition Applications</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-P-6-3|SESSION Wed-P-6-3 — Spoken Document Processing]]</div> |^<div class="cpsessionlistsessionname">Spoken Document Processing</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-P-6-4|SESSION Wed-P-6-4 — Speech Intelligibility]]</div> |^<div class="cpsessionlistsessionname">Speech Intelligibility</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-P-7-2|SESSION Wed-P-7-2 — Articulatory and Acoustic Phonetics]]</div> |^<div class="cpsessionlistsessionname">Articulatory and Acoustic Phonetics</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-P-7-3|SESSION Wed-P-7-3 — Music and Audio Processing]]</div> |^<div class="cpsessionlistsessionname">Music and Audio Processing</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-P-7-4|SESSION Wed-P-7-4 — Disorders Related to Speech and Language]]</div> |^<div class="cpsessionlistsessionname">Disorders Related to Speech and Language</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-P-8-1|SESSION Wed-P-8-1 — Prosody]]</div> |^<div class="cpsessionlistsessionname">Prosody</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-P-8-2|SESSION Wed-P-8-2 — Speaker States and Traits]]</div> |^<div class="cpsessionlistsessionname">Speaker States and Traits</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-P-8-3|SESSION Wed-P-8-3 — Language Understanding and Generation]]</div> |^<div class="cpsessionlistsessionname">Language Understanding and Generation</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-P-8-4|SESSION Wed-P-8-4 — Voice Conversion 2]]</div> |^<div class="cpsessionlistsessionname">Voice Conversion 2</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-S&T-6/7-A|SESSION Wed-S&T-6/7-A — Show & Tell 5]]</div> |^<div class="cpsessionlistsessionname">Show & Tell 5</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-S&T-6/7-B|SESSION Wed-S&T-6/7-B — Show & Tell 6]]</div> |^<div class="cpsessionlistsessionname">Show & Tell 6</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-K4-1|SESSION Thu-K4-1 — Keynote 3: Björn Lindblom]]</div> |^<div class="cpsessionlistsessionname">Keynote 3: Björn Lindblom</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-SS-9-10|SESSION Thu-SS-9-10 — Special Session: Interspeech 2017 Computational Paralinguistics ChallengE (ComParE) 1]]</div> |^<div class="cpsessionlistsessionname">Special Session: Interspeech 2017 Computational Paralinguistics ChallengE (ComParE) 1</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-SS-9-11|SESSION Thu-SS-9-11 — Special Session: State of the Art in Physics-based Voice Simulation]]</div> |^<div class="cpsessionlistsessionname">Special Session: State of the Art in Physics-based Voice Simulation</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-SS-10-10|SESSION Thu-SS-10-10 — Special Session: Interspeech 2017 Computational Paralinguistics ChallengE (ComParE) 2]]</div> |^<div class="cpsessionlistsessionname">Special Session: Interspeech 2017 Computational Paralinguistics ChallengE (ComParE) 2</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-O-9-1|SESSION Thu-O-9-1 — Discriminative Training for ASR]]</div> |^<div class="cpsessionlistsessionname">Discriminative Training for ASR</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-O-9-2|SESSION Thu-O-9-2 — Speaker Diarization]]</div> |^<div class="cpsessionlistsessionname">Speaker Diarization</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-O-9-4|SESSION Thu-O-9-4 — Spoken Term Detection]]</div> |^<div class="cpsessionlistsessionname">Spoken Term Detection</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-O-9-6|SESSION Thu-O-9-6 — Noise Reduction]]</div> |^<div class="cpsessionlistsessionname">Noise Reduction</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-O-9-8|SESSION Thu-O-9-8 — Speech Recognition: Multimodal Systems]]</div> |^<div class="cpsessionlistsessionname">Speech Recognition: Multimodal Systems</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-O-10-1|SESSION Thu-O-10-1 — Neural Network Acoustic Models for ASR 3]]</div> |^<div class="cpsessionlistsessionname">Neural Network Acoustic Models for ASR 3</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-O-10-2|SESSION Thu-O-10-2 — Robust Speaker Recognition]]</div> |^<div class="cpsessionlistsessionname">Robust Speaker Recognition</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-O-10-4|SESSION Thu-O-10-4 — Multimodal Resources and Annotation]]</div> |^<div class="cpsessionlistsessionname">Multimodal Resources and Annotation</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-O-10-8|SESSION Thu-O-10-8 — Forensic Phonetics and Sociophonetic Varieties]]</div> |^<div class="cpsessionlistsessionname">Forensic Phonetics and Sociophonetic Varieties</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-O-10-11|SESSION Thu-O-10-11 — Speech and Audio Segmentation and Classification 1]]</div> |^<div class="cpsessionlistsessionname">Speech and Audio Segmentation and Classification 1</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-P-9-1|SESSION Thu-P-9-1 — Noise Robust and Far-field ASR]]</div> |^<div class="cpsessionlistsessionname">Noise Robust and Far-field ASR</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-P-9-3|SESSION Thu-P-9-3 — Styles, Varieties, Forensics and Tools]]</div> |^<div class="cpsessionlistsessionname">Styles, Varieties, Forensics and Tools</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-P-9-4|SESSION Thu-P-9-4 — Speech Synthesis: Data, Evaluation, and Novel Paradigms]]</div> |^<div class="cpsessionlistsessionname">Speech Synthesis: Data, Evaluation, and Novel Paradigms</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-S&T-9/10-A|SESSION Thu-S&T-9/10-A — Show & Tell 7]]</div> |^<div class="cpsessionlistsessionname">Show & Tell 7</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|09:45–10:15, Monday, 21 Aug. 2017, Aula Magna|<|
|Chair: |Haizhou Li|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS173001.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-K1-1|PAPER Mon-K1-1 — ISCA Medal for Scientific Achievement]]</div>|<div class="cpsessionviewpapertitle">ISCA Medal for Scientific Achievement</div><div class="cpsessionviewpaperauthor">[[Haizhou Li|AUTHOR Haizhou Li]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Monday, 21 Aug. 2017, Aula Magna|<|
|Chair: |Penny Karanasou, Ralf Schlüter|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171513.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-1-1|PAPER Mon-O-1-1-1 — Improved Single System Conversational Telephone Speech Recognition with VGG Bottleneck Features]]</div>|<div class="cpsessionviewpapertitle">Improved Single System Conversational Telephone Speech Recognition with VGG Bottleneck Features</div><div class="cpsessionviewpaperauthor">[[William Hartmann|AUTHOR William Hartmann]], [[Roger Hsiao|AUTHOR Roger Hsiao]], [[Tim Ng|AUTHOR Tim Ng]], [[Jeff Ma|AUTHOR Jeff Ma]], [[Francis Keith|AUTHOR Francis Keith]], [[Man-Hung Siu|AUTHOR Man-Hung Siu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170145.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-1-2|PAPER Mon-O-1-1-2 — Student-Teacher Training with Diverse Decision Tree Ensembles]]</div>|<div class="cpsessionviewpapertitle">Student-Teacher Training with Diverse Decision Tree Ensembles</div><div class="cpsessionviewpaperauthor">[[Jeremy H.M. Wong|AUTHOR Jeremy H.M. Wong]], [[Mark J.F. Gales|AUTHOR Mark J.F. Gales]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170460.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-1-3|PAPER Mon-O-1-1-3 — Embedding-Based Speaker Adaptive Training of Deep Neural Networks]]</div>|<div class="cpsessionviewpapertitle">Embedding-Based Speaker Adaptive Training of Deep Neural Networks</div><div class="cpsessionviewpaperauthor">[[Xiaodong Cui|AUTHOR Xiaodong Cui]], [[Vaibhava Goel|AUTHOR Vaibhava Goel]], [[George Saon|AUTHOR George Saon]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171058.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-1-4|PAPER Mon-O-1-1-4 — Improving Deliverable Speech-to-Text Systems with Multilingual Knowledge Transfer]]</div>|<div class="cpsessionviewpapertitle">Improving Deliverable Speech-to-Text Systems with Multilingual Knowledge Transfer</div><div class="cpsessionviewpaperauthor">[[Jeff Ma|AUTHOR Jeff Ma]], [[Francis Keith|AUTHOR Francis Keith]], [[Tim Ng|AUTHOR Tim Ng]], [[Man-Hung Siu|AUTHOR Man-Hung Siu]], [[Owen Kimball|AUTHOR Owen Kimball]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-1-5|PAPER Mon-O-1-1-5 — English Conversational Telephone Speech Recognition by Humans and Machines]]</div>|<div class="cpsessionviewpapertitle">English Conversational Telephone Speech Recognition by Humans and Machines</div><div class="cpsessionviewpaperauthor">[[George Saon|AUTHOR George Saon]], [[Gakuto Kurata|AUTHOR Gakuto Kurata]], [[Tom Sercu|AUTHOR Tom Sercu]], [[Kartik Audhkhasi|AUTHOR Kartik Audhkhasi]], [[Samuel Thomas|AUTHOR Samuel Thomas]], [[Dimitrios Dimitriadis|AUTHOR Dimitrios Dimitriadis]], [[Xiaodong Cui|AUTHOR Xiaodong Cui]], [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]], [[Michael Picheny|AUTHOR Michael Picheny]], [[Lynn-Li Lim|AUTHOR Lynn-Li Lim]], [[Bergul Roomi|AUTHOR Bergul Roomi]], [[Phil Hall|AUTHOR Phil Hall]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171544.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-1-6|PAPER Mon-O-1-1-6 — Comparing Human and Machine Errors in Conversational Speech Transcription]]</div>|<div class="cpsessionviewpapertitle">Comparing Human and Machine Errors in Conversational Speech Transcription</div><div class="cpsessionviewpaperauthor">[[Andreas Stolcke|AUTHOR Andreas Stolcke]], [[Jasha Droppo|AUTHOR Jasha Droppo]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Monday, 21 Aug. 2017, E10|<|
|Chair: |Ingmar Steiner, Korin Richmond|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170325.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-10-1|PAPER Mon-O-1-10-1 — The Influence of Synthetic Voice on the Evaluation of a Virtual Character]]</div>|<div class="cpsessionviewpapertitle">The Influence of Synthetic Voice on the Evaluation of a Virtual Character</div><div class="cpsessionviewpaperauthor">[[João Paulo Cabral|AUTHOR João Paulo Cabral]], [[Benjamin R. Cowan|AUTHOR Benjamin R. Cowan]], [[Katja Zibrek|AUTHOR Katja Zibrek]], [[Rachel McDonnell|AUTHOR Rachel McDonnell]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170900.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-10-2|PAPER Mon-O-1-10-2 — Articulatory Text-to-Speech Synthesis Using the Digital Waveguide Mesh Driven by a Deep Neural Network]]</div>|<div class="cpsessionviewpapertitle">Articulatory Text-to-Speech Synthesis Using the Digital Waveguide Mesh Driven by a Deep Neural Network</div><div class="cpsessionviewpaperauthor">[[Amelia J. Gully|AUTHOR Amelia J. Gully]], [[Takenori Yoshimura|AUTHOR Takenori Yoshimura]], [[Damian T. Murphy|AUTHOR Damian T. Murphy]], [[Kei Hashimoto|AUTHOR Kei Hashimoto]], [[Yoshihiko Nankaku|AUTHOR Yoshihiko Nankaku]], [[Keiichi Tokuda|AUTHOR Keiichi Tokuda]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170936.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-10-3|PAPER Mon-O-1-10-3 — An HMM/DNN Comparison for Synchronized Text-to-Speech and Tongue Motion Synthesis]]</div>|<div class="cpsessionviewpapertitle">An HMM/DNN Comparison for Synchronized Text-to-Speech and Tongue Motion Synthesis</div><div class="cpsessionviewpaperauthor">[[Sébastien Le Maguer|AUTHOR Sébastien Le Maguer]], [[Ingmar Steiner|AUTHOR Ingmar Steiner]], [[Alexander Hewer|AUTHOR Alexander Hewer]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171410.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-10-4|PAPER Mon-O-1-10-4 — VCV Synthesis Using Task Dynamics to Animate a Factor-Based Articulatory Model]]</div>|<div class="cpsessionviewpapertitle">VCV Synthesis Using Task Dynamics to Animate a Factor-Based Articulatory Model</div><div class="cpsessionviewpaperauthor">[[Rachel Alexander|AUTHOR Rachel Alexander]], [[Tanner Sorensen|AUTHOR Tanner Sorensen]], [[Asterios Toutios|AUTHOR Asterios Toutios]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171438.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-10-5|PAPER Mon-O-1-10-5 — Beyond the Listening Test: An Interactive Approach to TTS Evaluation]]</div>|<div class="cpsessionviewpapertitle">Beyond the Listening Test: An Interactive Approach to TTS Evaluation</div><div class="cpsessionviewpaperauthor">[[Joseph Mendelson|AUTHOR Joseph Mendelson]], [[Matthew P. Aylett|AUTHOR Matthew P. Aylett]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171762.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-10-6|PAPER Mon-O-1-10-6 — Integrating Articulatory Information in Deep Learning-Based Text-to-Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Integrating Articulatory Information in Deep Learning-Based Text-to-Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Beiming Cao|AUTHOR Beiming Cao]], [[Myungjong Kim|AUTHOR Myungjong Kim]], [[Jan van Santen|AUTHOR Jan van Santen]], [[Ted Mau|AUTHOR Ted Mau]], [[Jun Wang|AUTHOR Jun Wang]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Monday, 21 Aug. 2017, A2|<|
|Chair: |Paula Lopez-Otero, Elizabeth Shriberg|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170098.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-2-1|PAPER Mon-O-1-2-1 — Multimodal Markers of Persuasive Speech: Designing a Virtual Debate Coach]]</div>|<div class="cpsessionviewpapertitle">Multimodal Markers of Persuasive Speech: Designing a Virtual Debate Coach</div><div class="cpsessionviewpaperauthor">[[Volha Petukhova|AUTHOR Volha Petukhova]], [[Manoj Raju|AUTHOR Manoj Raju]], [[Harry Bunt|AUTHOR Harry Bunt]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170179.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-2-2|PAPER Mon-O-1-2-2 — Acoustic-Prosodic and Physiological Response to Stressful Interactions in Children with Autism Spectrum Disorder]]</div>|<div class="cpsessionviewpapertitle">Acoustic-Prosodic and Physiological Response to Stressful Interactions in Children with Autism Spectrum Disorder</div><div class="cpsessionviewpaperauthor">[[Daniel Bone|AUTHOR Daniel Bone]], [[Julia Mertens|AUTHOR Julia Mertens]], [[Emily Zane|AUTHOR Emily Zane]], [[Sungbok Lee|AUTHOR Sungbok Lee]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]], [[Ruth Grossman|AUTHOR Ruth Grossman]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171278.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-2-3|PAPER Mon-O-1-2-3 — A Stepwise Analysis of Aggregated Crowdsourced Labels Describing Multimodal Emotional Behaviors]]</div>|<div class="cpsessionviewpapertitle">A Stepwise Analysis of Aggregated Crowdsourced Labels Describing Multimodal Emotional Behaviors</div><div class="cpsessionviewpaperauthor">[[Alec Burmania|AUTHOR Alec Burmania]], [[Carlos Busso|AUTHOR Carlos Busso]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170999.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-2-4|PAPER Mon-O-1-2-4 — An Information Theoretic Analysis of the Temporal Synchrony Between Head Gestures and Prosodic Patterns in Spontaneous Speech]]</div>|<div class="cpsessionviewpapertitle">An Information Theoretic Analysis of the Temporal Synchrony Between Head Gestures and Prosodic Patterns in Spontaneous Speech</div><div class="cpsessionviewpaperauthor">[[Gaurav Fotedar|AUTHOR Gaurav Fotedar]], [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171088.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-2-5|PAPER Mon-O-1-2-5 — Multimodal Prediction of Affective Dimensions via Fusing Multiple Regression Techniques]]</div>|<div class="cpsessionviewpapertitle">Multimodal Prediction of Affective Dimensions via Fusing Multiple Regression Techniques</div><div class="cpsessionviewpaperauthor">[[D.-Y. Huang|AUTHOR D.-Y. Huang]], [[Wan Ding|AUTHOR Wan Ding]], [[Mingyu Xu|AUTHOR Mingyu Xu]], [[Huaiping Ming|AUTHOR Huaiping Ming]], [[Minghui Dong|AUTHOR Minghui Dong]], [[Xinguo Yu|AUTHOR Xinguo Yu]], [[Haizhou Li|AUTHOR Haizhou Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171329.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-2-6|PAPER Mon-O-1-2-6 — Co-Production of Speech and Pointing Gestures in Clear and Perturbed Interactive Tasks: Multimodal Designation Strategies]]</div>|<div class="cpsessionviewpapertitle">Co-Production of Speech and Pointing Gestures in Clear and Perturbed Interactive Tasks: Multimodal Designation Strategies</div><div class="cpsessionviewpaperauthor">[[Marion Dohen|AUTHOR Marion Dohen]], [[Benjamin Roustan|AUTHOR Benjamin Roustan]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Monday, 21 Aug. 2017, B4|<|
|Chair: |Stephen Zahorian, Bernd T. Meyer|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170461.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-4-1|PAPER Mon-O-1-4-1 — Improving Speaker Verification for Reverberant Conditions with Deep Neural Network Dereverberation Processing]]</div>|<div class="cpsessionviewpapertitle">Improving Speaker Verification for Reverberant Conditions with Deep Neural Network Dereverberation Processing</div><div class="cpsessionviewpaperauthor">[[Peter Guzewich|AUTHOR Peter Guzewich]], [[Stephen A. Zahorian|AUTHOR Stephen A. Zahorian]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-4-2|PAPER Mon-O-1-4-2 — Stepsize Control for Acoustic Feedback Cancellation Based on the Detection of Reverberant Signal Periods and the Estimated System Distance]]</div>|<div class="cpsessionviewpapertitle">Stepsize Control for Acoustic Feedback Cancellation Based on the Detection of Reverberant Signal Periods and the Estimated System Distance</div><div class="cpsessionviewpaperauthor">[[Philipp Bulling|AUTHOR Philipp Bulling]], [[Klaus Linhard|AUTHOR Klaus Linhard]], [[Arthur Wolf|AUTHOR Arthur Wolf]], [[Gerhard Schmidt|AUTHOR Gerhard Schmidt]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171084.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-4-3|PAPER Mon-O-1-4-3 — A Delay-Flexible Stereo Acoustic Echo Cancellation for DFT-Based In-Car Communication (ICC) Systems]]</div>|<div class="cpsessionviewpapertitle">A Delay-Flexible Stereo Acoustic Echo Cancellation for DFT-Based In-Car Communication (ICC) Systems</div><div class="cpsessionviewpaperauthor">[[Jan Franzen|AUTHOR Jan Franzen]], [[Tim Fingscheidt|AUTHOR Tim Fingscheidt]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170078.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-4-4|PAPER Mon-O-1-4-4 — Speech Enhancement Based on Harmonic Estimation Combined with MMSE to Improve Speech Intelligibility for Cochlear Implant Recipients]]</div>|<div class="cpsessionviewpapertitle">Speech Enhancement Based on Harmonic Estimation Combined with MMSE to Improve Speech Intelligibility for Cochlear Implant Recipients</div><div class="cpsessionviewpaperauthor">[[Dongmei Wang|AUTHOR Dongmei Wang]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-4-5|PAPER Mon-O-1-4-5 — Improving Speech Intelligibility in Binaural Hearing Aids by Estimating a Time-Frequency Mask with a Weighted Least Squares Classifier]]</div>|<div class="cpsessionviewpapertitle">Improving Speech Intelligibility in Binaural Hearing Aids by Estimating a Time-Frequency Mask with a Weighted Least Squares Classifier</div><div class="cpsessionviewpaperauthor">[[David Ayllón|AUTHOR David Ayllón]], [[Roberto Gil-Pita|AUTHOR Roberto Gil-Pita]], [[Manuel Rosa-Zurera|AUTHOR Manuel Rosa-Zurera]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170858.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-4-6|PAPER Mon-O-1-4-6 — Simulations of High-Frequency Vocoder on Mandarin Speech Recognition for Acoustic Hearing Preserved Cochlear Implant]]</div>|<div class="cpsessionviewpapertitle">Simulations of High-Frequency Vocoder on Mandarin Speech Recognition for Acoustic Hearing Preserved Cochlear Implant</div><div class="cpsessionviewpaperauthor">[[Tsung-Chen Wu|AUTHOR Tsung-Chen Wu]], [[Tai-Shih Chi|AUTHOR Tai-Shih Chi]], [[Chia-Fone Lee|AUTHOR Chia-Fone Lee]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Monday, 21 Aug. 2017, C6|<|
|Chair: |Marzena Zygis, Štefan Beňuš|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171601.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-6-1|PAPER Mon-O-1-6-1 — Phonetic Correlates of Pharyngeal and Pharyngealized Consonants in Saudi, Lebanese, and Jordanian Arabic: An rt-MRI Study]]</div>|<div class="cpsessionviewpapertitle">Phonetic Correlates of Pharyngeal and Pharyngealized Consonants in Saudi, Lebanese, and Jordanian Arabic: An rt-MRI Study</div><div class="cpsessionviewpaperauthor">[[Zainab Hermes|AUTHOR Zainab Hermes]], [[Marissa Barlaz|AUTHOR Marissa Barlaz]], [[Ryan Shosted|AUTHOR Ryan Shosted]], [[Zhi-Pei Liang|AUTHOR Zhi-Pei Liang]], [[Brad Sutton|AUTHOR Brad Sutton]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171039.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-6-2|PAPER Mon-O-1-6-2 — Glottal Opening and Strategies of Production of Fricatives]]</div>|<div class="cpsessionviewpapertitle">Glottal Opening and Strategies of Production of Fricatives</div><div class="cpsessionviewpaperauthor">[[Benjamin Elie|AUTHOR Benjamin Elie]], [[Yves Laprie|AUTHOR Yves Laprie]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171292.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-6-3|PAPER Mon-O-1-6-3 — Acoustics and Articulation of Medial versus Final Coronal Stop Gemination Contrasts in Moroccan Arabic]]</div>|<div class="cpsessionviewpapertitle">Acoustics and Articulation of Medial versus Final Coronal Stop Gemination Contrasts in Moroccan Arabic</div><div class="cpsessionviewpaperauthor">[[Mohamed Yassine Frej|AUTHOR Mohamed Yassine Frej]], [[Christopher Carignan|AUTHOR Christopher Carignan]], [[Catherine T. Best|AUTHOR Catherine T. Best]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171553.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-6-4|PAPER Mon-O-1-6-4 — How are Four-Level Length Distinctions Produced? Evidence from Moroccan Arabic]]</div>|<div class="cpsessionviewpapertitle">How are Four-Level Length Distinctions Produced? Evidence from Moroccan Arabic</div><div class="cpsessionviewpaperauthor">[[Giuseppina Turco|AUTHOR Giuseppina Turco]], [[Karim Shoul|AUTHOR Karim Shoul]], [[Rachid Ridouane|AUTHOR Rachid Ridouane]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171552.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-6-5|PAPER Mon-O-1-6-5 — Vowels in the Barunga Variety of North Australian Kriol]]</div>|<div class="cpsessionviewpapertitle">Vowels in the Barunga Variety of North Australian Kriol</div><div class="cpsessionviewpaperauthor">[[Caroline Jones|AUTHOR Caroline Jones]], [[Katherine Demuth|AUTHOR Katherine Demuth]], [[Weicong Li|AUTHOR Weicong Li]], [[Andre Almeida|AUTHOR Andre Almeida]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171304.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-1-6-6|PAPER Mon-O-1-6-6 — Nature of Contrast and Coarticulation: Evidence from Mizo Tones and Assamese Vowel Harmony]]</div>|<div class="cpsessionviewpapertitle">Nature of Contrast and Coarticulation: Evidence from Mizo Tones and Assamese Vowel Harmony</div><div class="cpsessionviewpaperauthor">[[Indranil Dutta|AUTHOR Indranil Dutta]], [[Irfan S.|AUTHOR Irfan S.]], [[Pamir Gogoi|AUTHOR Pamir Gogoi]], [[Priyankoo Sarmah|AUTHOR Priyankoo Sarmah]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|14:30–16:30, Monday, 21 Aug. 2017, Aula Magna|<|
|Chair: |Tanel Alumäe, Xunying Liu|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171310.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-1-1|PAPER Mon-O-2-1-1 — Approaches for Neural-Network Language Model Adaptation]]</div>|<div class="cpsessionviewpapertitle">Approaches for Neural-Network Language Model Adaptation</div><div class="cpsessionviewpaperauthor">[[Min Ma|AUTHOR Min Ma]], [[Michael Nirschl|AUTHOR Michael Nirschl]], [[Fadi Biadsy|AUTHOR Fadi Biadsy]], [[Shankar Kumar|AUTHOR Shankar Kumar]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170818.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-1-2|PAPER Mon-O-2-1-2 — A Batch Noise Contrastive Estimation Approach for Training Large Vocabulary Language Models]]</div>|<div class="cpsessionviewpapertitle">A Batch Noise Contrastive Estimation Approach for Training Large Vocabulary Language Models</div><div class="cpsessionviewpaperauthor">[[Youssef Oualil|AUTHOR Youssef Oualil]], [[Dietrich Klakow|AUTHOR Dietrich Klakow]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170513.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-1-3|PAPER Mon-O-2-1-3 — Investigating Bidirectional Recurrent Neural Network Language Models for Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Investigating Bidirectional Recurrent Neural Network Language Models for Speech Recognition</div><div class="cpsessionviewpaperauthor">[[X. Chen|AUTHOR X. Chen]], [[A. Ragni|AUTHOR A. Ragni]], [[X. Liu|AUTHOR X. Liu]], [[Mark J.F. Gales|AUTHOR Mark J.F. Gales]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170564.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-1-4|PAPER Mon-O-2-1-4 — Fast Neural Network Language Model Lookups at N-Gram Speeds]]</div>|<div class="cpsessionviewpapertitle">Fast Neural Network Language Model Lookups at N-Gram Speeds</div><div class="cpsessionviewpaperauthor">[[Yinghui Huang|AUTHOR Yinghui Huang]], [[Abhinav Sethy|AUTHOR Abhinav Sethy]], [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-1-5|PAPER Mon-O-2-1-5 — Empirical Exploration of Novel Architectures and Objectives for Language Models]]</div>|<div class="cpsessionviewpapertitle">Empirical Exploration of Novel Architectures and Objectives for Language Models</div><div class="cpsessionviewpaperauthor">[[Gakuto Kurata|AUTHOR Gakuto Kurata]], [[Abhinav Sethy|AUTHOR Abhinav Sethy]], [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]], [[George Saon|AUTHOR George Saon]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171442.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-1-6|PAPER Mon-O-2-1-6 — Residual Memory Networks in Language Modeling: Improving the Reputation of Feed-Forward Networks]]</div>|<div class="cpsessionviewpapertitle">Residual Memory Networks in Language Modeling: Improving the Reputation of Feed-Forward Networks</div><div class="cpsessionviewpaperauthor">[[Karel Beneš|AUTHOR Karel Beneš]], [[Murali Karthick Baskar|AUTHOR Murali Karthick Baskar]], [[Lukáš Burget|AUTHOR Lukáš Burget]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|14:30–16:30, Monday, 21 Aug. 2017, E10|<|
|Chair: |Thomas Hain, Zheng-Hua Tan|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171510.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-10-1|PAPER Mon-O-2-10-1 — Generation of Large-Scale Simulated Utterances in Virtual Rooms to Train Deep-Neural Networks for Far-Field Speech Recognition in Google Home]]</div>|<div class="cpsessionviewpapertitle">Generation of Large-Scale Simulated Utterances in Virtual Rooms to Train Deep-Neural Networks for Far-Field Speech Recognition in Google Home</div><div class="cpsessionviewpaperauthor">[[Chanwoo Kim|AUTHOR Chanwoo Kim]], [[Ananya Misra|AUTHOR Ananya Misra]], [[Kean Chin|AUTHOR Kean Chin]], [[Thad Hughes|AUTHOR Thad Hughes]], [[Arun Narayanan|AUTHOR Arun Narayanan]], [[Tara N. Sainath|AUTHOR Tara N. Sainath]], [[Michiel Bacchiani|AUTHOR Michiel Bacchiani]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170733.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-10-2|PAPER Mon-O-2-10-2 — Neural Network-Based Spectrum Estimation for Online WPE Dereverberation]]</div>|<div class="cpsessionviewpapertitle">Neural Network-Based Spectrum Estimation for Online WPE Dereverberation</div><div class="cpsessionviewpaperauthor">[[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Haeyong Kwon|AUTHOR Haeyong Kwon]], [[Takuma Mori|AUTHOR Takuma Mori]], [[Tomohiro Nakatani|AUTHOR Tomohiro Nakatani]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170852.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-10-3|PAPER Mon-O-2-10-3 — Factorial Modeling for Effective Suppression of Directional Noise]]</div>|<div class="cpsessionviewpapertitle">Factorial Modeling for Effective Suppression of Directional Noise</div><div class="cpsessionviewpaperauthor">[[Osamu Ichikawa|AUTHOR Osamu Ichikawa]], [[Takashi Fukuda|AUTHOR Takashi Fukuda]], [[Gakuto Kurata|AUTHOR Gakuto Kurata]], [[Steven J. Rennie|AUTHOR Steven J. Rennie]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170853.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-10-4|PAPER Mon-O-2-10-4 — On Design of Robust Deep Models for CHiME-4 Multi-Channel Speech Recognition with Multiple Configurations of Array Microphones]]</div>|<div class="cpsessionviewpapertitle">On Design of Robust Deep Models for CHiME-4 Multi-Channel Speech Recognition with Multiple Configurations of Array Microphones</div><div class="cpsessionviewpaperauthor">[[Yan-Hui Tu|AUTHOR Yan-Hui Tu]], [[Jun Du|AUTHOR Jun Du]], [[Lei Sun|AUTHOR Lei Sun]], [[Feng Ma|AUTHOR Feng Ma]], [[Chin-Hui Lee|AUTHOR Chin-Hui Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-10-5|PAPER Mon-O-2-10-5 — Acoustic Modeling for Google Home]]</div>|<div class="cpsessionviewpapertitle">Acoustic Modeling for Google Home</div><div class="cpsessionviewpaperauthor">[[Bo Li|AUTHOR Bo Li]], [[Tara N. Sainath|AUTHOR Tara N. Sainath]], [[Arun Narayanan|AUTHOR Arun Narayanan]], [[Joe Caroselli|AUTHOR Joe Caroselli]], [[Michiel Bacchiani|AUTHOR Michiel Bacchiani]], [[Ananya Misra|AUTHOR Ananya Misra]], [[Izhak Shafran|AUTHOR Izhak Shafran]], [[Haşim Sak|AUTHOR Haşim Sak]], [[Golan Pundak|AUTHOR Golan Pundak]], [[Kean Chin|AUTHOR Kean Chin]], [[Khe Chai Sim|AUTHOR Khe Chai Sim]], [[Ron J. Weiss|AUTHOR Ron J. Weiss]], [[Kevin W. Wilson|AUTHOR Kevin W. Wilson]], [[Ehsan Variani|AUTHOR Ehsan Variani]], [[Chanwoo Kim|AUTHOR Chanwoo Kim]], [[Olivier Siohan|AUTHOR Olivier Siohan]], [[Mitchel Weintraub|AUTHOR Mitchel Weintraub]], [[Erik McDermott|AUTHOR Erik McDermott]], [[Richard Rose|AUTHOR Richard Rose]], [[Matt Shannon|AUTHOR Matt Shannon]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170398.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-10-6|PAPER Mon-O-2-10-6 — On Multi-Domain Training and Adaptation of End-to-End RNN Acoustic Models for Distant Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">On Multi-Domain Training and Adaptation of End-to-End RNN Acoustic Models for Distant Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Seyedmahdad Mirsamadi|AUTHOR Seyedmahdad Mirsamadi]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|14:30–16:30, Monday, 21 Aug. 2017, A2|<|
|Chair: |Heidi Christensen, Rafa Orozco|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-2-1|PAPER Mon-O-2-2-1 — Dominant Distortion Classification for Pre-Processing of Vowels in Remote Biomedical Voice Analysis]]</div>|<div class="cpsessionviewpapertitle">Dominant Distortion Classification for Pre-Processing of Vowels in Remote Biomedical Voice Analysis</div><div class="cpsessionviewpaperauthor">[[Amir Hossein Poorjam|AUTHOR Amir Hossein Poorjam]], [[Jesper Rindom Jensen|AUTHOR Jesper Rindom Jensen]], [[Max A. Little|AUTHOR Max A. Little]], [[Mads Græsbøll Christensen|AUTHOR Mads Græsbøll Christensen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170626.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-2-2|PAPER Mon-O-2-2-2 — Automatic Paraphasia Detection from Aphasic Speech: A Preliminary Study]]</div>|<div class="cpsessionviewpapertitle">Automatic Paraphasia Detection from Aphasic Speech: A Preliminary Study</div><div class="cpsessionviewpaperauthor">[[Duc Le|AUTHOR Duc Le]], [[Keli Licata|AUTHOR Keli Licata]], [[Emily Mower Provost|AUTHOR Emily Mower Provost]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170819.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-2-3|PAPER Mon-O-2-2-3 — Evaluation of the Neurological State of People with Parkinson’s Disease Using i-Vectors]]</div>|<div class="cpsessionviewpapertitle">Evaluation of the Neurological State of People with Parkinson’s Disease Using i-Vectors</div><div class="cpsessionviewpaperauthor">[[N. Garcia|AUTHOR N. Garcia]], [[Juan Rafael Orozco-Arroyave|AUTHOR Juan Rafael Orozco-Arroyave]], [[L.F. D’Haro|AUTHOR L.F. D’Haro]], [[Najim Dehak|AUTHOR Najim Dehak]], [[Elmar Nöth|AUTHOR Elmar Nöth]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170138.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-2-4|PAPER Mon-O-2-2-4 — Objective Severity Assessment from Disordered Voice Using Estimated Glottal Airflow]]</div>|<div class="cpsessionviewpapertitle">Objective Severity Assessment from Disordered Voice Using Estimated Glottal Airflow</div><div class="cpsessionviewpaperauthor">[[Yu-Ren Chien|AUTHOR Yu-Ren Chien]], [[Michal Borský|AUTHOR Michal Borský]], [[Jón Guðnason|AUTHOR Jón Guðnason]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-2-5|PAPER Mon-O-2-2-5 — Earlier Identification of Children with Autism Spectrum Disorder: An Automatic Vocalisation-Based Approach]]</div>|<div class="cpsessionviewpapertitle">Earlier Identification of Children with Autism Spectrum Disorder: An Automatic Vocalisation-Based Approach</div><div class="cpsessionviewpaperauthor">[[Florian B. Pokorny|AUTHOR Florian B. Pokorny]], [[Björn Schuller|AUTHOR Björn Schuller]], [[Peter B. Marschik|AUTHOR Peter B. Marschik]], [[Raymond Brueckner|AUTHOR Raymond Brueckner]], [[Pär Nyström|AUTHOR Pär Nyström]], [[Nicholas Cummins|AUTHOR Nicholas Cummins]], [[Sven Bölte|AUTHOR Sven Bölte]], [[Christa Einspieler|AUTHOR Christa Einspieler]], [[Terje Falck-Ytter|AUTHOR Terje Falck-Ytter]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171078.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-2-6|PAPER Mon-O-2-2-6 — Convolutional Neural Network to Model Articulation Impairments in Patients with Parkinson’s Disease]]</div>|<div class="cpsessionviewpapertitle">Convolutional Neural Network to Model Articulation Impairments in Patients with Parkinson’s Disease</div><div class="cpsessionviewpaperauthor">[[J.C. Vásquez-Correa|AUTHOR J.C. Vásquez-Correa]], [[Juan Rafael Orozco-Arroyave|AUTHOR Juan Rafael Orozco-Arroyave]], [[Elmar Nöth|AUTHOR Elmar Nöth]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|14:30–16:30, Monday, 21 Aug. 2017, B4|<|
|Chair: |Hema Murthy, Jon Barker|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171179.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-4-1|PAPER Mon-O-2-4-1 — Phone Classification Using a Non-Linear Manifold with Broad Phone Class Dependent DNNs]]</div>|<div class="cpsessionviewpapertitle">Phone Classification Using a Non-Linear Manifold with Broad Phone Class Dependent DNNs</div><div class="cpsessionviewpaperauthor">[[Linxue Bai|AUTHOR Linxue Bai]], [[Peter Jančovič|AUTHOR Peter Jančovič]], [[Martin Russell|AUTHOR Martin Russell]], [[Philip Weber|AUTHOR Philip Weber]], [[Steve Houghton|AUTHOR Steve Houghton]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170070.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-4-2|PAPER Mon-O-2-4-2 — An Investigation of Crowd Speech for Room Occupancy Estimation]]</div>|<div class="cpsessionviewpapertitle">An Investigation of Crowd Speech for Room Occupancy Estimation</div><div class="cpsessionviewpaperauthor">[[Siyuan Chen|AUTHOR Siyuan Chen]], [[Julien Epps|AUTHOR Julien Epps]], [[Eliathamby Ambikairajah|AUTHOR Eliathamby Ambikairajah]], [[Phu Ngoc Le|AUTHOR Phu Ngoc Le]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170726.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-4-3|PAPER Mon-O-2-4-3 — Time-Frequency Coherence for Periodic-Aperiodic Decomposition of Speech Signals]]</div>|<div class="cpsessionviewpapertitle">Time-Frequency Coherence for Periodic-Aperiodic Decomposition of Speech Signals</div><div class="cpsessionviewpaperauthor">[[Karthika Vijayan|AUTHOR Karthika Vijayan]], [[Jitendra Kumar Dhiman|AUTHOR Jitendra Kumar Dhiman]], [[Chandra Sekhar Seelamantula|AUTHOR Chandra Sekhar Seelamantula]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170316.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-4-4|PAPER Mon-O-2-4-4 — Musical Speech: A New Methodology for Transcribing Speech Prosody]]</div>|<div class="cpsessionviewpapertitle">Musical Speech: A New Methodology for Transcribing Speech Prosody</div><div class="cpsessionviewpaperauthor">[[Alexsandro R. Meireles|AUTHOR Alexsandro R. Meireles]], [[Antônio R.M. Simões|AUTHOR Ant^onio R.M. Simões]], [[Antonio Celso Ribeiro|AUTHOR Antonio Celso Ribeiro]], [[Beatriz Raposo de Medeiros|AUTHOR Beatriz Raposo de Medeiros]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171074.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-4-5|PAPER Mon-O-2-4-5 — Estimation of Place of Articulation of Fricatives from Spectral Characteristics for Speech Training]]</div>|<div class="cpsessionviewpapertitle">Estimation of Place of Articulation of Fricatives from Spectral Characteristics for Speech Training</div><div class="cpsessionviewpaperauthor">[[K.S. Nataraj|AUTHOR K.S. Nataraj]], [[Prem C. Pandey|AUTHOR Prem C. Pandey]], [[Hirak Dasgupta|AUTHOR Hirak Dasgupta]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170389.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-4-6|PAPER Mon-O-2-4-6 — Estimation of the Probability Distribution of Spectral Fine Structure in the Speech Source]]</div>|<div class="cpsessionviewpapertitle">Estimation of the Probability Distribution of Spectral Fine Structure in the Speech Source</div><div class="cpsessionviewpaperauthor">[[Tom Bäckström|AUTHOR Tom Bäckström]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|14:30–16:30, Monday, 21 Aug. 2017, C6|<|
|Chair: |Marija Tabain, Felicitas Kleber|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171031.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-6-1|PAPER Mon-O-2-6-1 — End-to-End Acoustic Feedback in Language Learning for Correcting Devoiced French Final-Fricatives]]</div>|<div class="cpsessionviewpapertitle">End-to-End Acoustic Feedback in Language Learning for Correcting Devoiced French Final-Fricatives</div><div class="cpsessionviewpaperauthor">[[Sucheta Ghosh|AUTHOR Sucheta Ghosh]], [[Camille Fauth|AUTHOR Camille Fauth]], [[Yves Laprie|AUTHOR Yves Laprie]], [[Aghilas Sini|AUTHOR Aghilas Sini]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170018.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-6-2|PAPER Mon-O-2-6-2 — Dialect Perception by Older Children]]</div>|<div class="cpsessionviewpapertitle">Dialect Perception by Older Children</div><div class="cpsessionviewpaperauthor">[[Ewa Jacewicz|AUTHOR Ewa Jacewicz]], [[Robert A. Fox|AUTHOR Robert A. Fox]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170207.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-6-3|PAPER Mon-O-2-6-3 — Perception of Non-Contrastive Variations in American English by Japanese Learners: Flaps are Less Favored Than Stops]]</div>|<div class="cpsessionviewpapertitle">Perception of Non-Contrastive Variations in American English by Japanese Learners: Flaps are Less Favored Than Stops</div><div class="cpsessionviewpaperauthor">[[Kiyoko Yoneyama|AUTHOR Kiyoko Yoneyama]], [[Mafuyu Kitahara|AUTHOR Mafuyu Kitahara]], [[Keiichi Tajima|AUTHOR Keiichi Tajima]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171150.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-6-4|PAPER Mon-O-2-6-4 — L1 Perceptions of L2 Prosody: The Interplay Between Intonation, Rhythm, and Speech Rate and Their Contribution to Accentedness and Comprehensibility]]</div>|<div class="cpsessionviewpapertitle">L1 Perceptions of L2 Prosody: The Interplay Between Intonation, Rhythm, and Speech Rate and Their Contribution to Accentedness and Comprehensibility</div><div class="cpsessionviewpaperauthor">[[Lieke van Maastricht|AUTHOR Lieke van Maastricht]], [[Tim Zee|AUTHOR Tim Zee]], [[Emiel Krahmer|AUTHOR Emiel Krahmer]], [[Marc Swerts|AUTHOR Marc Swerts]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170763.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-6-5|PAPER Mon-O-2-6-5 — Effects of Pitch Fall and L1 on Vowel Length Identification in L2 Japanese]]</div>|<div class="cpsessionviewpapertitle">Effects of Pitch Fall and L1 on Vowel Length Identification in L2 Japanese</div><div class="cpsessionviewpaperauthor">[[Izumi Takiguchi|AUTHOR Izumi Takiguchi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171210.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-O-2-6-6|PAPER Mon-O-2-6-6 — A Preliminary Study of Prosodic Disambiguation by Chinese EFL Learners]]</div>|<div class="cpsessionviewpapertitle">A Preliminary Study of Prosodic Disambiguation by Chinese EFL Learners</div><div class="cpsessionviewpaperauthor">[[Yuanyuan Zhang|AUTHOR Yuanyuan Zhang]], [[Hongwei Ding|AUTHOR Hongwei Ding]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Monday, 21 Aug. 2017, Poster 1|<|
|Chair: |Sekhar Seelamantula|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-1-1|PAPER Mon-P-1-1-1 — Low-Dimensional Representation of Spectral Envelope Without Deterioration for Full-Band Speech Analysis/Synthesis System]]</div>|<div class="cpsessionviewpapertitle">Low-Dimensional Representation of Spectral Envelope Without Deterioration for Full-Band Speech Analysis/Synthesis System</div><div class="cpsessionviewpaperauthor">[[Masanori Morise|AUTHOR Masanori Morise]], [[Genta Miyashita|AUTHOR Genta Miyashita]], [[Kenji Ozawa|AUTHOR Kenji Ozawa]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170210.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-1-2|PAPER Mon-P-1-1-2 — Robust Source-Filter Separation of Speech Signal in the Phase Domain]]</div>|<div class="cpsessionviewpapertitle">Robust Source-Filter Separation of Speech Signal in the Phase Domain</div><div class="cpsessionviewpaperauthor">[[Erfan Loweimi|AUTHOR Erfan Loweimi]], [[Jon Barker|AUTHOR Jon Barker]], [[Oscar Saz Torralba|AUTHOR Oscar Saz Torralba]], [[Thomas Hain|AUTHOR Thomas Hain]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170382.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-1-3|PAPER Mon-P-1-1-3 — A Time-Warping Pitch Tracking Algorithm Considering Fast f₀ Changes]]</div>|<div class="cpsessionviewpapertitle">A Time-Warping Pitch Tracking Algorithm Considering Fast f₀ Changes</div><div class="cpsessionviewpaperauthor">[[Simon Stone|AUTHOR Simon Stone]], [[Peter Steiner|AUTHOR Peter Steiner]], [[Peter Birkholz|AUTHOR Peter Birkholz]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170436.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-1-4|PAPER Mon-P-1-1-4 — A Modulation Property of Time-Frequency Derivatives of Filtered Phase and its Application to Aperiodicity and f,,o,, Estimation]]</div>|<div class="cpsessionviewpapertitle">A Modulation Property of Time-Frequency Derivatives of Filtered Phase and its Application to Aperiodicity and f,,o,, Estimation</div><div class="cpsessionviewpaperauthor">[[Hideki Kawahara|AUTHOR Hideki Kawahara]], [[Ken-Ichi Sakakibara|AUTHOR Ken-Ichi Sakakibara]], [[Masanori Morise|AUTHOR Masanori Morise]], [[Hideki Banno|AUTHOR Hideki Banno]], [[Tomoki Toda|AUTHOR Tomoki Toda]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170624.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-1-5|PAPER Mon-P-1-1-5 — Non-Local Estimation of Speech Signal for Vowel Onset Point Detection in Varied Environments]]</div>|<div class="cpsessionviewpapertitle">Non-Local Estimation of Speech Signal for Vowel Onset Point Detection in Varied Environments</div><div class="cpsessionviewpaperauthor">[[Avinash Kumar|AUTHOR Avinash Kumar]], [[S. Shahnawazuddin|AUTHOR S. Shahnawazuddin]], [[Gayadhar Pradhan|AUTHOR Gayadhar Pradhan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170678.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-1-6|PAPER Mon-P-1-1-6 — Time-Domain Envelope Modulating the Noise Component of Excitation in a Continuous Residual-Based Vocoder for Statistical Parametric Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Time-Domain Envelope Modulating the Noise Component of Excitation in a Continuous Residual-Based Vocoder for Statistical Parametric Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Mohammed Salah Al-Radhi|AUTHOR Mohammed Salah Al-Radhi]], [[Tamás Gábor Csapó|AUTHOR Tamás Gábor Csapó]], [[Géza Németh|AUTHOR Géza Németh]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170781.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-1-7|PAPER Mon-P-1-1-7 — Wavelet Speech Enhancement Based on Robust Principal Component Analysis]]</div>|<div class="cpsessionviewpapertitle">Wavelet Speech Enhancement Based on Robust Principal Component Analysis</div><div class="cpsessionviewpaperauthor">[[Chia-Lung Wu|AUTHOR Chia-Lung Wu]], [[Hsiang-Ping Hsu|AUTHOR Hsiang-Ping Hsu]], [[Syu-Siang Wang|AUTHOR Syu-Siang Wang]], [[Jeih-Weih Hung|AUTHOR Jeih-Weih Hung]], [[Ying-Hui Lai|AUTHOR Ying-Hui Lai]], [[Hsin-Min Wang|AUTHOR Hsin-Min Wang]], [[Yu Tsao|AUTHOR Yu Tsao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170790.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-1-8|PAPER Mon-P-1-1-8 — Vowel Onset Point Detection Using Sonority Information]]</div>|<div class="cpsessionviewpapertitle">Vowel Onset Point Detection Using Sonority Information</div><div class="cpsessionviewpaperauthor">[[Bidisha Sharma|AUTHOR Bidisha Sharma]], [[S.R. Mahadeva Prasanna|AUTHOR S.R. Mahadeva Prasanna]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171232.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-1-9|PAPER Mon-P-1-1-9 — Analytic Filter Bank for Speech Analysis, Feature Extraction and Perceptual Studies]]</div>|<div class="cpsessionviewpapertitle">Analytic Filter Bank for Speech Analysis, Feature Extraction and Perceptual Studies</div><div class="cpsessionviewpaperauthor">[[Unto K. Laine|AUTHOR Unto K. Laine]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171681.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-1-10|PAPER Mon-P-1-1-10 — Learning the Mapping Function from Voltage Amplitudes to Sensor Positions in 3D-EMA Using Deep Neural Networks]]</div>|<div class="cpsessionviewpapertitle">Learning the Mapping Function from Voltage Amplitudes to Sensor Positions in 3D-EMA Using Deep Neural Networks</div><div class="cpsessionviewpaperauthor">[[Christian Kroos|AUTHOR Christian Kroos]], [[Mark D. Plumbley|AUTHOR Mark D. Plumbley]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Monday, 21 Aug. 2017, Poster 2|<|
|Chair: |Hugo Van hamme|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170074.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-2-1|PAPER Mon-P-1-2-1 — Multilingual i-Vector Based Statistical Modeling for Music Genre Classification]]</div>|<div class="cpsessionviewpapertitle">Multilingual i-Vector Based Statistical Modeling for Music Genre Classification</div><div class="cpsessionviewpaperauthor">[[Jia Dai|AUTHOR Jia Dai]], [[Wei Xue|AUTHOR Wei Xue]], [[Wenju Liu|AUTHOR Wenju Liu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170309.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-2-2|PAPER Mon-P-1-2-2 — Indoor/Outdoor Audio Classification Using Foreground Speech Segmentation]]</div>|<div class="cpsessionviewpapertitle">Indoor/Outdoor Audio Classification Using Foreground Speech Segmentation</div><div class="cpsessionviewpaperauthor">[[Banriskhem K. Khonglah|AUTHOR Banriskhem K. Khonglah]], [[K.T. Deepak|AUTHOR K.T. Deepak]], [[S.R. Mahadeva Prasanna|AUTHOR S.R. Mahadeva Prasanna]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170440.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-2-3|PAPER Mon-P-1-2-3 — Attention Based CLDNNs for Short-Duration Acoustic Scene Classification]]</div>|<div class="cpsessionviewpapertitle">Attention Based CLDNNs for Short-Duration Acoustic Scene Classification</div><div class="cpsessionviewpaperauthor">[[Jinxi Guo|AUTHOR Jinxi Guo]], [[Ning Xu|AUTHOR Ning Xu]], [[Li-Jia Li|AUTHOR Li-Jia Li]], [[Abeer Alwan|AUTHOR Abeer Alwan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170746.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-2-4|PAPER Mon-P-1-2-4 — Frame-Wise Dynamic Threshold Based Polyphonic Acoustic Event Detection]]</div>|<div class="cpsessionviewpapertitle">Frame-Wise Dynamic Threshold Based Polyphonic Acoustic Event Detection</div><div class="cpsessionviewpaperauthor">[[Xianjun Xia|AUTHOR Xianjun Xia]], [[Roberto Togneri|AUTHOR Roberto Togneri]], [[Ferdous Sohel|AUTHOR Ferdous Sohel]], [[David Huang|AUTHOR David Huang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170792.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-2-5|PAPER Mon-P-1-2-5 — Enhanced Feature Extraction for Speech Detection in Media Audio]]</div>|<div class="cpsessionviewpapertitle">Enhanced Feature Extraction for Speech Detection in Media Audio</div><div class="cpsessionviewpaperauthor">[[Inseon Jang|AUTHOR Inseon Jang]], [[ChungHyun Ahn|AUTHOR ChungHyun Ahn]], [[Jeongil Seo|AUTHOR Jeongil Seo]], [[Younseon Jang|AUTHOR Younseon Jang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170982.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-2-6|PAPER Mon-P-1-2-6 — Audio Classification Using Class-Specific Learned Descriptors]]</div>|<div class="cpsessionviewpapertitle">Audio Classification Using Class-Specific Learned Descriptors</div><div class="cpsessionviewpaperauthor">[[Sukanya Sonowal|AUTHOR Sukanya Sonowal]], [[Tushar Sandhan|AUTHOR Tushar Sandhan]], [[Inkyu Choi|AUTHOR Inkyu Choi]], [[Nam Soo Kim|AUTHOR Nam Soo Kim]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171160.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-2-7|PAPER Mon-P-1-2-7 — Hidden Markov Model Variational Autoencoder for Acoustic Unit Discovery]]</div>|<div class="cpsessionviewpapertitle">Hidden Markov Model Variational Autoencoder for Acoustic Unit Discovery</div><div class="cpsessionviewpaperauthor">[[Janek Ebbers|AUTHOR Janek Ebbers]], [[Jahn Heymann|AUTHOR Jahn Heymann]], [[Lukas Drude|AUTHOR Lukas Drude]], [[Thomas Glarner|AUTHOR Thomas Glarner]], [[Reinhold Haeb-Umbach|AUTHOR Reinhold Haeb-Umbach]], [[Bhiksha Raj|AUTHOR Bhiksha Raj]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171238.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-2-8|PAPER Mon-P-1-2-8 — Virtual Adversarial Training and Data Augmentation for Acoustic Event Detection with Gated Recurrent Neural Networks]]</div>|<div class="cpsessionviewpapertitle">Virtual Adversarial Training and Data Augmentation for Acoustic Event Detection with Gated Recurrent Neural Networks</div><div class="cpsessionviewpaperauthor">[[Matthias Zöhrer|AUTHOR Matthias Zöhrer]], [[Franz Pernkopf|AUTHOR Franz Pernkopf]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171386.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-2-9|PAPER Mon-P-1-2-9 — Montreal Forced Aligner: Trainable Text-Speech Alignment Using Kaldi]]</div>|<div class="cpsessionviewpapertitle">Montreal Forced Aligner: Trainable Text-Speech Alignment Using Kaldi</div><div class="cpsessionviewpaperauthor">[[Michael McAuliffe|AUTHOR Michael McAuliffe]], [[Michaela Socolof|AUTHOR Michaela Socolof]], [[Sarah Mihuc|AUTHOR Sarah Mihuc]], [[Michael Wagner|AUTHOR Michael Wagner]], [[Morgan Sonderegger|AUTHOR Morgan Sonderegger]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171388.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-2-10|PAPER Mon-P-1-2-10 — A Robust Voiced/Unvoiced Phoneme Classification from Whispered Speech Using the ‘Color’ of Whispered Phonemes and Deep Neural Network]]</div>|<div class="cpsessionviewpapertitle">A Robust Voiced/Unvoiced Phoneme Classification from Whispered Speech Using the ‘Color’ of Whispered Phonemes and Deep Neural Network</div><div class="cpsessionviewpaperauthor">[[G. Nisha Meenakshi|AUTHOR G. Nisha Meenakshi]], [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Monday, 21 Aug. 2017, Poster 4|<|
|Chair: |György Szaszák|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171671.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-4-1|PAPER Mon-P-1-4-1 — Rescoring-Aware Beam Search for Reduced Search Errors in Contextual Automatic Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Rescoring-Aware Beam Search for Reduced Search Errors in Contextual Automatic Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Ian Williams|AUTHOR Ian Williams]], [[Petar Aleksic|AUTHOR Petar Aleksic]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171683.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-4-2|PAPER Mon-P-1-4-2 — Comparison of Decoding Strategies for CTC Acoustic Models]]</div>|<div class="cpsessionviewpapertitle">Comparison of Decoding Strategies for CTC Acoustic Models</div><div class="cpsessionviewpaperauthor">[[Thomas Zenkel|AUTHOR Thomas Zenkel]], [[Ramon Sanabria|AUTHOR Ramon Sanabria]], [[Florian Metze|AUTHOR Florian Metze]], [[Jan Niehues|AUTHOR Jan Niehues]], [[Matthias Sperber|AUTHOR Matthias Sperber]], [[Sebastian Stüker|AUTHOR Sebastian Stüker]], [[Alex Waibel|AUTHOR Alex Waibel]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171680.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-4-3|PAPER Mon-P-1-4-3 — Phone Duration Modeling for LVCSR Using Neural Networks]]</div>|<div class="cpsessionviewpapertitle">Phone Duration Modeling for LVCSR Using Neural Networks</div><div class="cpsessionviewpaperauthor">[[Hossein Hadian|AUTHOR Hossein Hadian]], [[Daniel Povey|AUTHOR Daniel Povey]], [[Hossein Sameti|AUTHOR Hossein Sameti]], [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170343.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-4-4|PAPER Mon-P-1-4-4 — Towards Better Decoding and Language Model Integration in Sequence to Sequence Models]]</div>|<div class="cpsessionviewpapertitle">Towards Better Decoding and Language Model Integration in Sequence to Sequence Models</div><div class="cpsessionviewpaperauthor">[[Jan Chorowski|AUTHOR Jan Chorowski]], [[Navdeep Jaitly|AUTHOR Navdeep Jaitly]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170547.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-4-5|PAPER Mon-P-1-4-5 — Empirical Evaluation of Parallel Training Algorithms on Acoustic Modeling]]</div>|<div class="cpsessionviewpapertitle">Empirical Evaluation of Parallel Training Algorithms on Acoustic Modeling</div><div class="cpsessionviewpaperauthor">[[Wenpeng Li|AUTHOR Wenpeng Li]], [[Binbin Zhang|AUTHOR Binbin Zhang]], [[Lei Xie|AUTHOR Lei Xie]], [[Dong Yu|AUTHOR Dong Yu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171343.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-4-6|PAPER Mon-P-1-4-6 — Binary Deep Neural Networks for Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Binary Deep Neural Networks for Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Xu Xiang|AUTHOR Xu Xiang]], [[Yanmin Qian|AUTHOR Yanmin Qian]], [[Kai Yu|AUTHOR Kai Yu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171583.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-4-7|PAPER Mon-P-1-4-7 — Hierarchical Constrained Bayesian Optimization for Feature, Acoustic Model and Decoder Parameter Optimization]]</div>|<div class="cpsessionviewpapertitle">Hierarchical Constrained Bayesian Optimization for Feature, Acoustic Model and Decoder Parameter Optimization</div><div class="cpsessionviewpaperauthor">[[Akshay Chandrashekaran|AUTHOR Akshay Chandrashekaran]], [[Ian Lane|AUTHOR Ian Lane]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170717.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-4-8|PAPER Mon-P-1-4-8 — Use of Global and Acoustic Features Associated with Contextual Factors to Adapt Language Models for Spontaneous Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Use of Global and Acoustic Features Associated with Contextual Factors to Adapt Language Models for Spontaneous Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Shohei Toyama|AUTHOR Shohei Toyama]], [[Daisuke Saito|AUTHOR Daisuke Saito]], [[Nobuaki Minematsu|AUTHOR Nobuaki Minematsu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171247.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-4-9|PAPER Mon-P-1-4-9 — Joint Learning of Correlated Sequence Labeling Tasks Using Bidirectional Recurrent Neural Networks]]</div>|<div class="cpsessionviewpapertitle">Joint Learning of Correlated Sequence Labeling Tasks Using Bidirectional Recurrent Neural Networks</div><div class="cpsessionviewpaperauthor">[[Vardaan Pahuja|AUTHOR Vardaan Pahuja]], [[Anirban Laha|AUTHOR Anirban Laha]], [[Shachar Mirkin|AUTHOR Shachar Mirkin]], [[Vikas Raykar|AUTHOR Vikas Raykar]], [[Lili Kotlerman|AUTHOR Lili Kotlerman]], [[Guy Lev|AUTHOR Guy Lev]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170729.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-4-10|PAPER Mon-P-1-4-10 — Estimation of Gap Between Current Language Models and Human Performance]]</div>|<div class="cpsessionviewpapertitle">Estimation of Gap Between Current Language Models and Human Performance</div><div class="cpsessionviewpaperauthor">[[Xiaoyu Shen|AUTHOR Xiaoyu Shen]], [[Youssef Oualil|AUTHOR Youssef Oualil]], [[Clayton Greenberg|AUTHOR Clayton Greenberg]], [[Mittul Singh|AUTHOR Mittul Singh]], [[Dietrich Klakow|AUTHOR Dietrich Klakow]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170204.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-1-4-11|PAPER Mon-P-1-4-11 — A Phonological Phrase Sequence Modelling Approach for Resource Efficient and Robust Real-Time Punctuation Recovery]]</div>|<div class="cpsessionviewpapertitle">A Phonological Phrase Sequence Modelling Approach for Resource Efficient and Robust Real-Time Punctuation Recovery</div><div class="cpsessionviewpaperauthor">[[Anna Moró|AUTHOR Anna Moró]], [[György Szaszák|AUTHOR György Szaszák]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|14:30–16:30, Monday, 21 Aug. 2017, Poster 1|<|
|Chair: |Louis ten Bosch|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170002.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-1-1|PAPER Mon-P-2-1-1 — Factors Affecting the Intelligibility of Low-Pass Filtered Speech]]</div>|<div class="cpsessionviewpapertitle">Factors Affecting the Intelligibility of Low-Pass Filtered Speech</div><div class="cpsessionviewpaperauthor">[[Lei Wang|AUTHOR Lei Wang]], [[Fei Chen|AUTHOR Fei Chen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-1-2|PAPER Mon-P-2-1-2 — Phonetic Restoration of Temporally Reversed Speech]]</div>|<div class="cpsessionviewpapertitle">Phonetic Restoration of Temporally Reversed Speech</div><div class="cpsessionviewpaperauthor">[[Shi-yu Wang|AUTHOR Shi-yu Wang]], [[Fei Chen|AUTHOR Fei Chen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170083.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-1-3|PAPER Mon-P-2-1-3 — Simultaneous Articulatory and Acoustic Distortion in L1 and L2 Listening: Locally Time-Reversed “Fast” Speech]]</div>|<div class="cpsessionviewpapertitle">Simultaneous Articulatory and Acoustic Distortion in L1 and L2 Listening: Locally Time-Reversed “Fast” Speech</div><div class="cpsessionviewpaperauthor">[[Mako Ishida|AUTHOR Mako Ishida]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170618.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-1-4|PAPER Mon-P-2-1-4 — Lexically Guided Perceptual Learning in Mandarin Chinese]]</div>|<div class="cpsessionviewpapertitle">Lexically Guided Perceptual Learning in Mandarin Chinese</div><div class="cpsessionviewpaperauthor">[[L. Ann Burchfield|AUTHOR L. Ann Burchfield]], [[San-hei Kenny Luk|AUTHOR San-hei Kenny Luk]], [[Mark Antoniou|AUTHOR Mark Antoniou]], [[Anne Cutler|AUTHOR Anne Cutler]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170948.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-1-5|PAPER Mon-P-2-1-5 — The Effect of Spectral Profile on the Intelligibility of Emotional Speech in Noise]]</div>|<div class="cpsessionviewpapertitle">The Effect of Spectral Profile on the Intelligibility of Emotional Speech in Noise</div><div class="cpsessionviewpaperauthor">[[Chris Davis|AUTHOR Chris Davis]], [[Chee Seng Chong|AUTHOR Chee Seng Chong]], [[Jeesun Kim|AUTHOR Jeesun Kim]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171517.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-1-6|PAPER Mon-P-2-1-6 — Whether Long-Term Tracking of Speech Rate Affects Perception Depends on Who is Talking]]</div>|<div class="cpsessionviewpapertitle">Whether Long-Term Tracking of Speech Rate Affects Perception Depends on Who is Talking</div><div class="cpsessionviewpaperauthor">[[Merel Maslowski|AUTHOR Merel Maslowski]], [[Antje S. Meyer|AUTHOR Antje S. Meyer]], [[Hans Rutger Bosker|AUTHOR Hans Rutger Bosker]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171719.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-1-7|PAPER Mon-P-2-1-7 — Emotional Thin-Slicing: A Proposal for a Short- and Long-Term Division of Emotional Speech]]</div>|<div class="cpsessionviewpapertitle">Emotional Thin-Slicing: A Proposal for a Short- and Long-Term Division of Emotional Speech</div><div class="cpsessionviewpaperauthor">[[Daniel Oliveira Peres|AUTHOR Daniel Oliveira Peres]], [[Dominic Watt|AUTHOR Dominic Watt]], [[Waldemar Ferreira Netto|AUTHOR Waldemar Ferreira Netto]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171735.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-1-8|PAPER Mon-P-2-1-8 — Predicting Epenthetic Vowel Quality from Acoustics]]</div>|<div class="cpsessionviewpapertitle">Predicting Epenthetic Vowel Quality from Acoustics</div><div class="cpsessionviewpaperauthor">[[Adriana Guevara-Rukoz|AUTHOR Adriana Guevara-Rukoz]], [[Erika Parlato-Oliveira|AUTHOR Erika Parlato-Oliveira]], [[Shi Yu|AUTHOR Shi Yu]], [[Yuki Hirose|AUTHOR Yuki Hirose]], [[Sharon Peperkamp|AUTHOR Sharon Peperkamp]], [[Emmanuel Dupoux|AUTHOR Emmanuel Dupoux]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170282.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-1-9|PAPER Mon-P-2-1-9 — The Effect of Spectral Tilt on Size Discrimination of Voiced Speech Sounds]]</div>|<div class="cpsessionviewpapertitle">The Effect of Spectral Tilt on Size Discrimination of Voiced Speech Sounds</div><div class="cpsessionviewpaperauthor">[[Toshie Matsui|AUTHOR Toshie Matsui]], [[Toshio Irino|AUTHOR Toshio Irino]], [[Kodai Yamamoto|AUTHOR Kodai Yamamoto]], [[Hideki Kawahara|AUTHOR Hideki Kawahara]], [[Roy D. Patterson|AUTHOR Roy D. Patterson]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170532.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-1-10|PAPER Mon-P-2-1-10 — Misperceptions of the Emotional Content of Natural and Vocoded Speech in a Car]]</div>|<div class="cpsessionviewpapertitle">Misperceptions of the Emotional Content of Natural and Vocoded Speech in a Car</div><div class="cpsessionviewpaperauthor">[[Jaime Lorenzo-Trueba|AUTHOR Jaime Lorenzo-Trueba]], [[Cassia Valentini Botinhao|AUTHOR Cassia Valentini Botinhao]], [[Gustav Eje Henter|AUTHOR Gustav Eje Henter]], [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170375.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-1-11|PAPER Mon-P-2-1-11 — The Relative Cueing Power of F0 and Duration in German Prominence Perception]]</div>|<div class="cpsessionviewpapertitle">The Relative Cueing Power of F0 and Duration in German Prominence Perception</div><div class="cpsessionviewpaperauthor">[[Oliver Niebuhr|AUTHOR Oliver Niebuhr]], [[Jana Winkler|AUTHOR Jana Winkler]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-1-12|PAPER Mon-P-2-1-12 — Perception and Acoustics of Vowel Nasality in Brazilian Portuguese]]</div>|<div class="cpsessionviewpapertitle">Perception and Acoustics of Vowel Nasality in Brazilian Portuguese</div><div class="cpsessionviewpaperauthor">[[Luciana Marques|AUTHOR Luciana Marques]], [[Rebecca Scarborough|AUTHOR Rebecca Scarborough]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171742.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-1-13|PAPER Mon-P-2-1-13 — Sociophonetic Realizations Guide Subsequent Lexical Access]]</div>|<div class="cpsessionviewpapertitle">Sociophonetic Realizations Guide Subsequent Lexical Access</div><div class="cpsessionviewpaperauthor">[[Jonny Kim|AUTHOR Jonny Kim]], [[Katie Drager|AUTHOR Katie Drager]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|14:30–16:30, Monday, 21 Aug. 2017, Poster 2|<|
|Chair: |Wentao Gu|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170742.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-2-1|PAPER Mon-P-2-2-1 — Critical Articulators Identification from RT-MRI of the Vocal Tract]]</div>|<div class="cpsessionviewpapertitle">Critical Articulators Identification from RT-MRI of the Vocal Tract</div><div class="cpsessionviewpaperauthor">[[Samuel Silva|AUTHOR Samuel Silva]], [[António Teixeira|AUTHOR António Teixeira]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171580.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-2-2|PAPER Mon-P-2-2-2 — Semantic Edge Detection for Tracking Vocal Tract Air-Tissue Boundaries in Real-Time Magnetic Resonance Images]]</div>|<div class="cpsessionviewpapertitle">Semantic Edge Detection for Tracking Vocal Tract Air-Tissue Boundaries in Real-Time Magnetic Resonance Images</div><div class="cpsessionviewpaperauthor">[[Krishna Somandepalli|AUTHOR Krishna Somandepalli]], [[Asterios Toutios|AUTHOR Asterios Toutios]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-2-3|PAPER Mon-P-2-2-3 — Vocal Tract Airway Tissue Boundary Tracking for rtMRI Using Shape and Appearance Priors]]</div>|<div class="cpsessionviewpapertitle">Vocal Tract Airway Tissue Boundary Tracking for rtMRI Using Shape and Appearance Priors</div><div class="cpsessionviewpaperauthor">[[Sasan Asadiabadi|AUTHOR Sasan Asadiabadi]], [[Engin Erzin|AUTHOR Engin Erzin]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170201.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-2-4|PAPER Mon-P-2-2-4 — An Objective Critical Distance Measure Based on the Relative Level of Spectral Valley]]</div>|<div class="cpsessionviewpapertitle">An Objective Critical Distance Measure Based on the Relative Level of Spectral Valley</div><div class="cpsessionviewpaperauthor">[[T.V. Ananthapadmanabha|AUTHOR T.V. Ananthapadmanabha]], [[A.G. Ramakrishnan|AUTHOR A.G. Ramakrishnan]], [[Shubham Sharma|AUTHOR Shubham Sharma]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170608.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-2-5|PAPER Mon-P-2-2-5 — Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science]]</div>|<div class="cpsessionviewpapertitle">Database of Volumetric and Real-Time Vocal Tract MRI for Speech Science</div><div class="cpsessionviewpaperauthor">[[Tanner Sorensen|AUTHOR Tanner Sorensen]], [[Zisis Skordilis|AUTHOR Zisis Skordilis]], [[Asterios Toutios|AUTHOR Asterios Toutios]], [[Yoon-Chul Kim|AUTHOR Yoon-Chul Kim]], [[Yinghua Zhu|AUTHOR Yinghua Zhu]], [[Jangwon Kim|AUTHOR Jangwon Kim]], [[Adam Lammert|AUTHOR Adam Lammert]], [[Vikram Ramanarayanan|AUTHOR Vikram Ramanarayanan]], [[Louis Goldstein|AUTHOR Louis Goldstein]], [[Dani Byrd|AUTHOR Dani Byrd]], [[Krishna Nayak|AUTHOR Krishna Nayak]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171267.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-2-6|PAPER Mon-P-2-2-6 — The Influence on Realization and Perception of Lexical Tones from Affricate’s Aspiration]]</div>|<div class="cpsessionviewpapertitle">The Influence on Realization and Perception of Lexical Tones from Affricate’s Aspiration</div><div class="cpsessionviewpaperauthor">[[Chong Cao|AUTHOR Chong Cao]], [[Yanlu Xie|AUTHOR Yanlu Xie]], [[Qi Zhang|AUTHOR Qi Zhang]], [[Jinsong Zhang|AUTHOR Jinsong Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170122.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-2-7|PAPER Mon-P-2-2-7 — Audiovisual Recalibration of Vowel Categories]]</div>|<div class="cpsessionviewpapertitle">Audiovisual Recalibration of Vowel Categories</div><div class="cpsessionviewpaperauthor">[[Matthias K. Franken|AUTHOR Matthias K. Franken]], [[Frank Eisner|AUTHOR Frank Eisner]], [[Jan-Mathijs Schoffelen|AUTHOR Jan-Mathijs Schoffelen]], [[Daniel J. Acheson|AUTHOR Daniel J. Acheson]], [[Peter Hagoort|AUTHOR Peter Hagoort]], [[James M. McQueen|AUTHOR James M. McQueen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170194.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-2-8|PAPER Mon-P-2-2-8 — The Effect of Gesture on Persuasive Speech]]</div>|<div class="cpsessionviewpapertitle">The Effect of Gesture on Persuasive Speech</div><div class="cpsessionviewpaperauthor">[[Judith Peters|AUTHOR Judith Peters]], [[Marieke Hoetjes|AUTHOR Marieke Hoetjes]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171069.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-2-9|PAPER Mon-P-2-2-9 — Auditory-Visual Integration of Talker Gender in Cantonese Tone Perception]]</div>|<div class="cpsessionviewpapertitle">Auditory-Visual Integration of Talker Gender in Cantonese Tone Perception</div><div class="cpsessionviewpaperauthor">[[Wei Lai|AUTHOR Wei Lai]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170139.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-2-10|PAPER Mon-P-2-2-10 — Event-Related Potentials Associated with Somatosensory Effect in Audio-Visual Speech Perception]]</div>|<div class="cpsessionviewpapertitle">Event-Related Potentials Associated with Somatosensory Effect in Audio-Visual Speech Perception</div><div class="cpsessionviewpaperauthor">[[Takayuki Ito|AUTHOR Takayuki Ito]], [[Hiroki Ohashi|AUTHOR Hiroki Ohashi]], [[Eva Montas|AUTHOR Eva Montas]], [[Vincent L. Gracco|AUTHOR Vincent L. Gracco]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170353.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-2-11|PAPER Mon-P-2-2-11 — When a Dog is a Cat and How it Changes Your Pupil Size: Pupil Dilation in Response to Information Mismatch]]</div>|<div class="cpsessionviewpapertitle">When a Dog is a Cat and How it Changes Your Pupil Size: Pupil Dilation in Response to Information Mismatch</div><div class="cpsessionviewpaperauthor">[[Lena F. Renner|AUTHOR Lena F. Renner]], [[Marcin Włodarczak|AUTHOR Marcin Włodarczak]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171236.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-2-12|PAPER Mon-P-2-2-12 — Cross-Modal Analysis Between Phonation Differences and Texture Images Based on Sentiment Correlations]]</div>|<div class="cpsessionviewpapertitle">Cross-Modal Analysis Between Phonation Differences and Texture Images Based on Sentiment Correlations</div><div class="cpsessionviewpaperauthor">[[Win Thuzar Kyaw|AUTHOR Win Thuzar Kyaw]], [[Yoshinori Sagisaka|AUTHOR Yoshinori Sagisaka]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170048.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-2-13|PAPER Mon-P-2-2-13 — Wireless Neck-Surface Accelerometer and Microphone on Flex Circuit with Application to Noise-Robust Monitoring of Lombard Speech]]</div>|<div class="cpsessionviewpapertitle">Wireless Neck-Surface Accelerometer and Microphone on Flex Circuit with Application to Noise-Robust Monitoring of Lombard Speech</div><div class="cpsessionviewpaperauthor">[[Daryush D. Mehta|AUTHOR Daryush D. Mehta]], [[Patrick C. Chwalek|AUTHOR Patrick C. Chwalek]], [[Thomas F. Quatieri|AUTHOR Thomas F. Quatieri]], [[Laura J. Brattain|AUTHOR Laura J. Brattain]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171371.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-2-14|PAPER Mon-P-2-2-14 — Video-Based Tracking of Jaw Movements During Speech: Preliminary Results and Future Directions]]</div>|<div class="cpsessionviewpapertitle">Video-Based Tracking of Jaw Movements During Speech: Preliminary Results and Future Directions</div><div class="cpsessionviewpaperauthor">[[Andrea Bandini|AUTHOR Andrea Bandini]], [[Aravind Namasivayam|AUTHOR Aravind Namasivayam]], [[Yana Yunusova|AUTHOR Yana Yunusova]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171374.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-2-15|PAPER Mon-P-2-2-15 — Accurate Synchronization of Speech and EGG Signal Using Phase Information]]</div>|<div class="cpsessionviewpapertitle">Accurate Synchronization of Speech and EGG Signal Using Phase Information</div><div class="cpsessionviewpaperauthor">[[Sunil Kumar S.B.|AUTHOR Sunil Kumar S.B.]], [[K. Sreenivasa Rao|AUTHOR K. Sreenivasa Rao]], [[Tanumay Mandal|AUTHOR Tanumay Mandal]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171065.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-2-16|PAPER Mon-P-2-2-16 — The Acquisition of Focal Lengthening in Stockholm Swedish]]</div>|<div class="cpsessionviewpapertitle">The Acquisition of Focal Lengthening in Stockholm Swedish</div><div class="cpsessionviewpaperauthor">[[Anna Sara H. Romøren|AUTHOR Anna Sara H. Romøren]], [[Aoju Chen|AUTHOR Aoju Chen]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|14:30–16:30, Monday, 21 Aug. 2017, Poster 3|<|
|Chair: |Khe Chai Sim|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-3-1|PAPER Mon-P-2-3-1 — Multilingual Recurrent Neural Networks with Residual Learning for Low-Resource Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Multilingual Recurrent Neural Networks with Residual Learning for Low-Resource Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Shiyu Zhou|AUTHOR Shiyu Zhou]], [[Yuanyuan Zhao|AUTHOR Yuanyuan Zhao]], [[Shuang Xu|AUTHOR Shuang Xu]], [[Bo Xu|AUTHOR Bo Xu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170505.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-3-2|PAPER Mon-P-2-3-2 — CTC Training of Multi-Phone Acoustic Models for Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">CTC Training of Multi-Phone Acoustic Models for Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Olivier Siohan|AUTHOR Olivier Siohan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171242.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-3-3|PAPER Mon-P-2-3-3 — An Investigation of Deep Neural Networks for Multilingual Speech Recognition Training and Adaptation]]</div>|<div class="cpsessionviewpapertitle">An Investigation of Deep Neural Networks for Multilingual Speech Recognition Training and Adaptation</div><div class="cpsessionviewpaperauthor">[[Sibo Tong|AUTHOR Sibo Tong]], [[Philip N. Garner|AUTHOR Philip N. Garner]], [[Hervé Bourlard|AUTHOR Hervé Bourlard]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-3-4|PAPER Mon-P-2-3-4 — 2016 BUT Babel System: Multilingual BLSTM Acoustic Model with i-Vector Based Adaptation]]</div>|<div class="cpsessionviewpapertitle">2016 BUT Babel System: Multilingual BLSTM Acoustic Model with i-Vector Based Adaptation</div><div class="cpsessionviewpaperauthor">[[Martin Karafiát|AUTHOR Martin Karafiát]], [[Murali Karthick Baskar|AUTHOR Murali Karthick Baskar]], [[Pavel Matějka|AUTHOR Pavel Matějka]], [[Karel Veselý|AUTHOR Karel Veselý]], [[František Grézl|AUTHOR František Grézl]], [[Lukáš Burget|AUTHOR Lukáš Burget]], [[Jan Černocký|AUTHOR Jan Černocký]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170755.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-3-5|PAPER Mon-P-2-3-5 — Optimizing DNN Adaptation for Recognition of Enhanced Speech]]</div>|<div class="cpsessionviewpapertitle">Optimizing DNN Adaptation for Recognition of Enhanced Speech</div><div class="cpsessionviewpaperauthor">[[Marco Matassoni|AUTHOR Marco Matassoni]], [[Alessio Brutti|AUTHOR Alessio Brutti]], [[Daniele Falavigna|AUTHOR Daniele Falavigna]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170783.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-3-6|PAPER Mon-P-2-3-6 — Deep Least Squares Regression for Speaker Adaptation]]</div>|<div class="cpsessionviewpapertitle">Deep Least Squares Regression for Speaker Adaptation</div><div class="cpsessionviewpaperauthor">[[Younggwan Kim|AUTHOR Younggwan Kim]], [[Hyungjun Lim|AUTHOR Hyungjun Lim]], [[Jahyun Goo|AUTHOR Jahyun Goo]], [[Hoirin Kim|AUTHOR Hoirin Kim]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170788.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-3-7|PAPER Mon-P-2-3-7 — Multi-Task Learning Using Mismatched Transcription for Under-Resourced Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Multi-Task Learning Using Mismatched Transcription for Under-Resourced Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Van Hai Do|AUTHOR Van Hai Do]], [[Nancy F. Chen|AUTHOR Nancy F. Chen]], [[Boon Pang Lim|AUTHOR Boon Pang Lim]], [[Mark Hasegawa-Johnson|AUTHOR Mark Hasegawa-Johnson]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170874.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-3-8|PAPER Mon-P-2-3-8 — Generalized Distillation Framework for Speaker Normalization]]</div>|<div class="cpsessionviewpapertitle">Generalized Distillation Framework for Speaker Normalization</div><div class="cpsessionviewpaperauthor">[[Neethu Mariam Joy|AUTHOR Neethu Mariam Joy]], [[Sandeep Reddy Kothinti|AUTHOR Sandeep Reddy Kothinti]], [[S. Umesh|AUTHOR S. Umesh]], [[Basil Abraham|AUTHOR Basil Abraham]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171136.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-3-9|PAPER Mon-P-2-3-9 — Learning Factorized Transforms for Unsupervised Adaptation of LSTM-RNN Acoustic Models]]</div>|<div class="cpsessionviewpapertitle">Learning Factorized Transforms for Unsupervised Adaptation of LSTM-RNN Acoustic Models</div><div class="cpsessionviewpaperauthor">[[Lahiru Samarakoon|AUTHOR Lahiru Samarakoon]], [[Brian Mak|AUTHOR Brian Mak]], [[Khe Chai Sim|AUTHOR Khe Chai Sim]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171365.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-3-10|PAPER Mon-P-2-3-10 — Factorised Representations for Neural Network Adaptation to Diverse Acoustic Environments]]</div>|<div class="cpsessionviewpapertitle">Factorised Representations for Neural Network Adaptation to Diverse Acoustic Environments</div><div class="cpsessionviewpaperauthor">[[Joachim Fainberg|AUTHOR Joachim Fainberg]], [[Steve Renals|AUTHOR Steve Renals]], [[Peter Bell|AUTHOR Peter Bell]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|14:30–16:30, Monday, 21 Aug. 2017, Poster 4|<|
|Chair: |Zofia Malisz|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170035.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-4-1|PAPER Mon-P-2-4-1 — An RNN Model of Text Normalization]]</div>|<div class="cpsessionviewpapertitle">An RNN Model of Text Normalization</div><div class="cpsessionviewpaperauthor">[[Richard Sproat|AUTHOR Richard Sproat]], [[Navdeep Jaitly|AUTHOR Navdeep Jaitly]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170487.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-4-2|PAPER Mon-P-2-4-2 — Weakly-Supervised Phrase Assignment from Text in a Speech-Synthesis System Using Noisy Labels]]</div>|<div class="cpsessionviewpapertitle">Weakly-Supervised Phrase Assignment from Text in a Speech-Synthesis System Using Noisy Labels</div><div class="cpsessionviewpaperauthor">[[Asaf Rendel|AUTHOR Asaf Rendel]], [[Raul Fernandez|AUTHOR Raul Fernandez]], [[Zvi Kons|AUTHOR Zvi Kons]], [[Andrew Rosenberg|AUTHOR Andrew Rosenberg]], [[Ron Hoory|AUTHOR Ron Hoory]], [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170521.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-4-3|PAPER Mon-P-2-4-3 — Prosody Aware Word-Level Encoder Based on BLSTM-RNNs for DNN-Based Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Prosody Aware Word-Level Encoder Based on BLSTM-RNNs for DNN-Based Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Yusuke Ijima|AUTHOR Yusuke Ijima]], [[Nobukatsu Hojo|AUTHOR Nobukatsu Hojo]], [[Ryo Masumura|AUTHOR Ryo Masumura]], [[Taichi Asami|AUTHOR Taichi Asami]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170669.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-4-4|PAPER Mon-P-2-4-4 — Global Syllable Vectors for Building TTS Front-End with Deep Learning]]</div>|<div class="cpsessionviewpapertitle">Global Syllable Vectors for Building TTS Front-End with Deep Learning</div><div class="cpsessionviewpaperauthor">[[Jinfu Ni|AUTHOR Jinfu Ni]], [[Yoshinori Shiga|AUTHOR Yoshinori Shiga]], [[Hisashi Kawai|AUTHOR Hisashi Kawai]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170708.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-4-5|PAPER Mon-P-2-4-5 — Prosody Control of Utterance Sequence for Information Delivering]]</div>|<div class="cpsessionviewpapertitle">Prosody Control of Utterance Sequence for Information Delivering</div><div class="cpsessionviewpaperauthor">[[Ishin Fukuoka|AUTHOR Ishin Fukuoka]], [[Kazuhiko Iwata|AUTHOR Kazuhiko Iwata]], [[Tetsunori Kobayashi|AUTHOR Tetsunori Kobayashi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170949.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-4-6|PAPER Mon-P-2-4-6 — Multi-Task Learning for Prosodic Structure Generation Using BLSTM RNN with Structured Output Layer]]</div>|<div class="cpsessionviewpapertitle">Multi-Task Learning for Prosodic Structure Generation Using BLSTM RNN with Structured Output Layer</div><div class="cpsessionviewpaperauthor">[[Yuchen Huang|AUTHOR Yuchen Huang]], [[Zhiyong Wu|AUTHOR Zhiyong Wu]], [[Runnan Li|AUTHOR Runnan Li]], [[Helen Meng|AUTHOR Helen Meng]], [[Lianhong Cai|AUTHOR Lianhong Cai]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-4-7|PAPER Mon-P-2-4-7 — Investigating Efficient Feature Representation Methods and Training Objective for BLSTM-Based Phone Duration Prediction]]</div>|<div class="cpsessionviewpapertitle">Investigating Efficient Feature Representation Methods and Training Objective for BLSTM-Based Phone Duration Prediction</div><div class="cpsessionviewpaperauthor">[[Yibin Zheng|AUTHOR Yibin Zheng]], [[Jianhua Tao|AUTHOR Jianhua Tao]], [[Zhengqi Wen|AUTHOR Zhengqi Wen]], [[Ya Li|AUTHOR Ya Li]], [[Bin Liu|AUTHOR Bin Liu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171144.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-4-8|PAPER Mon-P-2-4-8 — Discrete Duration Model for Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Discrete Duration Model for Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Bo Chen|AUTHOR Bo Chen]], [[Tianling Bian|AUTHOR Tianling Bian]], [[Kai Yu|AUTHOR Kai Yu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171152.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-4-9|PAPER Mon-P-2-4-9 — Comparison of Modeling Target in LSTM-RNN Duration Model]]</div>|<div class="cpsessionviewpapertitle">Comparison of Modeling Target in LSTM-RNN Duration Model</div><div class="cpsessionviewpaperauthor">[[Bo Chen|AUTHOR Bo Chen]], [[Jiahao Lai|AUTHOR Jiahao Lai]], [[Kai Yu|AUTHOR Kai Yu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171340.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-4-10|PAPER Mon-P-2-4-10 — Learning Word Vector Representations Based on Acoustic Counts]]</div>|<div class="cpsessionviewpapertitle">Learning Word Vector Representations Based on Acoustic Counts</div><div class="cpsessionviewpaperauthor">[[M. Sam Ribeiro|AUTHOR M. Sam Ribeiro]], [[Oliver Watts|AUTHOR Oliver Watts]], [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171507.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-P-2-4-11|PAPER Mon-P-2-4-11 — Synthesising Uncertainty: The Interplay of Vocal Effort and Hesitation Disfluencies]]</div>|<div class="cpsessionviewpapertitle">Synthesising Uncertainty: The Interplay of Vocal Effort and Hesitation Disfluencies</div><div class="cpsessionviewpaperauthor">[[Éva Székely|AUTHOR Éva Székely]], [[Joseph Mendelson|AUTHOR Joseph Mendelson]], [[Joakim Gustafson|AUTHOR Joakim Gustafson]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, 14:30–16:30, Monday, 21 Aug. 2017, E306|<|
| ||
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172034.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-S&T-2-A-1|PAPER Mon-S&T-2-A-1 — Prosograph: A Tool for Prosody Visualisation of Large Speech Corpora]]</div>|<div class="cpsessionviewpapertitle">Prosograph: A Tool for Prosody Visualisation of Large Speech Corpora</div><div class="cpsessionviewpaperauthor">[[Alp Öktem|AUTHOR Alp Öktem]], [[Mireia Farrús|AUTHOR Mireia Farrús]], [[Leo Wanner|AUTHOR Leo Wanner]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172048.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-S&T-2-A-2|PAPER Mon-S&T-2-A-2 — ChunkitApp: Investigating the Relevant Units of Online Speech Processing]]</div>|<div class="cpsessionviewpapertitle">ChunkitApp: Investigating the Relevant Units of Online Speech Processing</div><div class="cpsessionviewpaperauthor">[[Svetlana Vetchinnikova|AUTHOR Svetlana Vetchinnikova]], [[Anna Mauranen|AUTHOR Anna Mauranen]], [[Nina Mikušová|AUTHOR Nina Mikušová]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172049.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-S&T-2-A-3|PAPER Mon-S&T-2-A-3 — Extending the EMU Speech Database Management System: Cloud Hosting, Team Collaboration, Automatic Revision Control]]</div>|<div class="cpsessionviewpapertitle">Extending the EMU Speech Database Management System: Cloud Hosting, Team Collaboration, Automatic Revision Control</div><div class="cpsessionviewpaperauthor">[[Markus Jochim|AUTHOR Markus Jochim]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172051.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-S&T-2-A-4|PAPER Mon-S&T-2-A-4 — HomeBank: A Repository for Long-Form Real-World Audio Recordings of Children]]</div>|<div class="cpsessionviewpapertitle">HomeBank: A Repository for Long-Form Real-World Audio Recordings of Children</div><div class="cpsessionviewpaperauthor">[[Anne S. Warlaumont|AUTHOR Anne S. Warlaumont]], [[Mark VanDam|AUTHOR Mark VanDam]], [[Elika Bergelson|AUTHOR Elika Bergelson]], [[Alejandrina Cristia|AUTHOR Alejandrina Cristia]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172052.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-S&T-2-A-5|PAPER Mon-S&T-2-A-5 — A System for Real Time Collaborative Transcription Correction]]</div>|<div class="cpsessionviewpapertitle">A System for Real Time Collaborative Transcription Correction</div><div class="cpsessionviewpaperauthor">[[Peter Bell|AUTHOR Peter Bell]], [[Joachim Fainberg|AUTHOR Joachim Fainberg]], [[Catherine Lai|AUTHOR Catherine Lai]], [[Mark Sinclair|AUTHOR Mark Sinclair]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172058.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-S&T-2-A-6|PAPER Mon-S&T-2-A-6 — MoPAReST — Mobile Phone Assisted Remote Speech Therapy Platform]]</div>|<div class="cpsessionviewpapertitle">MoPAReST — Mobile Phone Assisted Remote Speech Therapy Platform</div><div class="cpsessionviewpaperauthor">[[Chitralekha Bhat|AUTHOR Chitralekha Bhat]], [[Anjali Kant|AUTHOR Anjali Kant]], [[Bhavik Vachhani|AUTHOR Bhavik Vachhani]], [[Sarita Rautara|AUTHOR Sarita Rautara]], [[Ashok Kumar Sinha|AUTHOR Ashok Kumar Sinha]], [[Sunil Kumar Kopparapu|AUTHOR Sunil Kumar Kopparapu]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, 14:30–16:30, Monday, 21 Aug. 2017, E397|<|
| ||
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172035.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-S&T-2-B-1|PAPER Mon-S&T-2-B-1 — An Apparatus to Investigate Western Opera Singing Skill Learning Using Performance and Result Biofeedback, and Measuring its Neural Correlates]]</div>|<div class="cpsessionviewpapertitle">An Apparatus to Investigate Western Opera Singing Skill Learning Using Performance and Result Biofeedback, and Measuring its Neural Correlates</div><div class="cpsessionviewpaperauthor">[[Aurore Jaumard-Hakoun|AUTHOR Aurore Jaumard-Hakoun]], [[Samy Chikhi|AUTHOR Samy Chikhi]], [[Takfarinas Medani|AUTHOR Takfarinas Medani]], [[Angelika Nair|AUTHOR Angelika Nair]], [[Gérard Dreyfus|AUTHOR Gérard Dreyfus]], [[François-Benoît Vialatte|AUTHOR François-Beno^ıt Vialatte]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172044.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-S&T-2-B-2|PAPER Mon-S&T-2-B-2 — PercyConfigurator — Perception Experiments as a Service]]</div>|<div class="cpsessionviewpapertitle">PercyConfigurator — Perception Experiments as a Service</div><div class="cpsessionviewpaperauthor">[[Christoph Draxler|AUTHOR Christoph Draxler]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172045.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-S&T-2-B-3|PAPER Mon-S&T-2-B-3 — System for Speech Transcription and Post-Editing in Microsoft Word]]</div>|<div class="cpsessionviewpapertitle">System for Speech Transcription and Post-Editing in Microsoft Word</div><div class="cpsessionviewpaperauthor">[[Askars Salimbajevs|AUTHOR Askars Salimbajevs]], [[Indra Ikauniece|AUTHOR Indra Ikauniece]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172047.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-S&T-2-B-4|PAPER Mon-S&T-2-B-4 — Emojive! Collecting Emotion Data from Speech and Facial Expression Using Mobile Game App]]</div>|<div class="cpsessionviewpapertitle">Emojive! Collecting Emotion Data from Speech and Facial Expression Using Mobile Game App</div><div class="cpsessionviewpaperauthor">[[Ji Ho Park|AUTHOR Ji Ho Park]], [[Nayeon Lee|AUTHOR Nayeon Lee]], [[Dario Bertero|AUTHOR Dario Bertero]], [[Anik Dey|AUTHOR Anik Dey]], [[Pascale Fung|AUTHOR Pascale Fung]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172059.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-S&T-2-B-5|PAPER Mon-S&T-2-B-5 — Mylly — The Mill: A New Platform for Processing Speech and Text Corpora Easily and Efficiently]]</div>|<div class="cpsessionviewpapertitle">Mylly — The Mill: A New Platform for Processing Speech and Text Corpora Easily and Efficiently</div><div class="cpsessionviewpaperauthor">[[Mietta Lennes|AUTHOR Mietta Lennes]], [[Jussi Piitulainen|AUTHOR Jussi Piitulainen]], [[Martin Matthiesen|AUTHOR Martin Matthiesen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172040.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-S&T-2-B-6|PAPER Mon-S&T-2-B-6 — Visual Learning 2: Pronunciation App Using Ultrasound, Video, and MRI]]</div>|<div class="cpsessionviewpapertitle">Visual Learning 2: Pronunciation App Using Ultrasound, Video, and MRI</div><div class="cpsessionviewpaperauthor">[[Kyori Suzuki|AUTHOR Kyori Suzuki]], [[Ian Wilson|AUTHOR Ian Wilson]], [[Hayato Watanabe|AUTHOR Hayato Watanabe]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Monday, 21 Aug. 2017, F11|<|
|Chair: |Kalika Bali, Alan W. Black|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Mon-SS-1-11-10|PAPER Mon-SS-1-11-10 — Introduction]]</div>|<div class="cpsessionviewpapertitle">Introduction</div><div class="cpsessionviewpaperauthor"></div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170301.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-1-11-1|PAPER Mon-SS-1-11-1 — Longitudinal Speaker Clustering and Verification Corpus with Code-Switching Frisian-Dutch Speech]]</div>|<div class="cpsessionviewpapertitle">Longitudinal Speaker Clustering and Verification Corpus with Code-Switching Frisian-Dutch Speech</div><div class="cpsessionviewpaperauthor">[[Emre Yılmaz|AUTHOR Emre Yılmaz]], [[Jelske Dijkstra|AUTHOR Jelske Dijkstra]], [[Hans Van de Velde|AUTHOR Hans Van de Velde]], [[Frederik Kampstra|AUTHOR Frederik Kampstra]], [[Jouke Algra|AUTHOR Jouke Algra]], [[Henk van den Heuvel|AUTHOR Henk van den Heuvel]], [[David Van Leeuwen|AUTHOR David Van Leeuwen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170391.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-1-11-2|PAPER Mon-SS-1-11-2 — Exploiting Untranscribed Broadcast Data for Improved Code-Switching Detection]]</div>|<div class="cpsessionviewpapertitle">Exploiting Untranscribed Broadcast Data for Improved Code-Switching Detection</div><div class="cpsessionviewpaperauthor">[[Emre Yılmaz|AUTHOR Emre Yılmaz]], [[Henk van den Heuvel|AUTHOR Henk van den Heuvel]], [[David Van Leeuwen|AUTHOR David Van Leeuwen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171198.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-1-11-3|PAPER Mon-SS-1-11-3 — Jee haan, I’d like both, por favor: Elicitation of a Code-Switched Corpus of Hindi–English and Spanish–English Human–Machine Dialog]]</div>|<div class="cpsessionviewpapertitle">Jee haan, I’d like both, por favor: Elicitation of a Code-Switched Corpus of Hindi–English and Spanish–English Human–Machine Dialog</div><div class="cpsessionviewpaperauthor">[[Vikram Ramanarayanan|AUTHOR Vikram Ramanarayanan]], [[David Suendermann-Oeft|AUTHOR David Suendermann-Oeft]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171244.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-1-11-4|PAPER Mon-SS-1-11-4 — On Building Mixed Lingual Speech Synthesis Systems]]</div>|<div class="cpsessionviewpapertitle">On Building Mixed Lingual Speech Synthesis Systems</div><div class="cpsessionviewpaperauthor">[[SaiKrishna Rallabandi|AUTHOR SaiKrishna Rallabandi]], [[Alan W. Black|AUTHOR Alan W. Black]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-1-11-5|PAPER Mon-SS-1-11-5 — Speech Synthesis for Mixed-Language Navigation Instructions]]</div>|<div class="cpsessionviewpapertitle">Speech Synthesis for Mixed-Language Navigation Instructions</div><div class="cpsessionviewpaperauthor">[[Khyathi Raghavi Chandu|AUTHOR Khyathi Raghavi Chandu]], [[SaiKrishna Rallabandi|AUTHOR SaiKrishna Rallabandi]], [[Sunayana Sitaram|AUTHOR Sunayana Sitaram]], [[Alan W. Black|AUTHOR Alan W. Black]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171373.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-1-11-6|PAPER Mon-SS-1-11-6 — Addressing Code-Switching in French/Algerian Arabic Speech]]</div>|<div class="cpsessionviewpapertitle">Addressing Code-Switching in French/Algerian Arabic Speech</div><div class="cpsessionviewpaperauthor">[[Djegdjiga Amazouz|AUTHOR Djegdjiga Amazouz]], [[Martine Adda-Decker|AUTHOR Martine Adda-Decker]], [[Lori Lamel|AUTHOR Lori Lamel]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171429.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-1-11-7|PAPER Mon-SS-1-11-7 — Metrics for Modeling Code-Switching Across Corpora]]</div>|<div class="cpsessionviewpapertitle">Metrics for Modeling Code-Switching Across Corpora</div><div class="cpsessionviewpaperauthor">[[Gualberto Guzmán|AUTHOR Gualberto Guzmán]], [[Joseph Ricard|AUTHOR Joseph Ricard]], [[Jacqueline Serigos|AUTHOR Jacqueline Serigos]], [[Barbara E. Bullock|AUTHOR Barbara E. Bullock]], [[Almeida Jacqueline Toribio|AUTHOR Almeida Jacqueline Toribio]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171437.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-1-11-8|PAPER Mon-SS-1-11-8 — Synthesising isiZulu-English Code-Switch Bigrams Using Word Embeddings]]</div>|<div class="cpsessionviewpapertitle">Synthesising isiZulu-English Code-Switch Bigrams Using Word Embeddings</div><div class="cpsessionviewpaperauthor">[[Ewald van der Westhuizen|AUTHOR Ewald van der Westhuizen]], [[Thomas Niesler|AUTHOR Thomas Niesler]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171663.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-1-11-9|PAPER Mon-SS-1-11-9 — Crowdsourcing Universal Part-of-Speech Tags for Code-Switching]]</div>|<div class="cpsessionviewpapertitle">Crowdsourcing Universal Part-of-Speech Tags for Code-Switching</div><div class="cpsessionviewpaperauthor">[[Victor Soto|AUTHOR Victor Soto]], [[Julia Hirschberg|AUTHOR Julia Hirschberg]]</div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Mon-SS-1-11-11|PAPER Mon-SS-1-11-11 — Discussion]]</div>|<div class="cpsessionviewpapertitle">Discussion</div><div class="cpsessionviewpaperauthor"></div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Monday, 21 Aug. 2017, D8|<|
|Chair: |Tomi Kinnunen, Junichi Yamagishi|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-1-8-1|PAPER Mon-SS-1-8-1 — The ASVspoof 2017 Challenge: Assessing the Limits of Replay Spoofing Attack Detection]]</div>|<div class="cpsessionviewpapertitle">The ASVspoof 2017 Challenge: Assessing the Limits of Replay Spoofing Attack Detection</div><div class="cpsessionviewpaperauthor">[[Tomi Kinnunen|AUTHOR Tomi Kinnunen]], [[Md. Sahidullah|AUTHOR Md. Sahidullah]], [[Héctor Delgado|AUTHOR Héctor Delgado]], [[Massimiliano Todisco|AUTHOR Massimiliano Todisco]], [[Nicholas Evans|AUTHOR Nicholas Evans]], [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]], [[Kong Aik Lee|AUTHOR Kong Aik Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170450.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-1-8-2|PAPER Mon-SS-1-8-2 — Experimental Analysis of Features for Replay Attack Detection — Results on the ASVspoof 2017 Challenge]]</div>|<div class="cpsessionviewpapertitle">Experimental Analysis of Features for Replay Attack Detection — Results on the ASVspoof 2017 Challenge</div><div class="cpsessionviewpaperauthor">[[Roberto Font|AUTHOR Roberto Font]], [[Juan M. Espín|AUTHOR Juan M. Espín]], [[María José Cano|AUTHOR María José Cano]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171362.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-1-8-3|PAPER Mon-SS-1-8-3 — Novel Variable Length Teager Energy Separation Based Instantaneous Frequency Features for Replay Detection]]</div>|<div class="cpsessionviewpapertitle">Novel Variable Length Teager Energy Separation Based Instantaneous Frequency Features for Replay Detection</div><div class="cpsessionviewpaperauthor">[[Hemant A. Patil|AUTHOR Hemant A. Patil]], [[Madhu R. Kamble|AUTHOR Madhu R. Kamble]], [[Tanvina B. Patel|AUTHOR Tanvina B. Patel]], [[Meet H. Soni|AUTHOR Meet H. Soni]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170906.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-1-8-4|PAPER Mon-SS-1-8-4 — Countermeasures for Automatic Speaker Verification Replay Spoofing Attack : On Data Augmentation, Feature Representation, Classification and Fusion]]</div>|<div class="cpsessionviewpapertitle">Countermeasures for Automatic Speaker Verification Replay Spoofing Attack: On Data Augmentation, Feature Representation, Classification and Fusion</div><div class="cpsessionviewpaperauthor">[[Weicheng Cai|AUTHOR Weicheng Cai]], [[Danwei Cai|AUTHOR Danwei Cai]], [[Wenbo Liu|AUTHOR Wenbo Liu]], [[Gang Li|AUTHOR Gang Li]], [[Ming Li|AUTHOR Ming Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170930.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-1-8-5|PAPER Mon-SS-1-8-5 — Spoof Detection Using Source, Instantaneous Frequency and Cepstral Features]]</div>|<div class="cpsessionviewpapertitle">Spoof Detection Using Source, Instantaneous Frequency and Cepstral Features</div><div class="cpsessionviewpaperauthor">[[Sarfaraz Jelil|AUTHOR Sarfaraz Jelil]], [[Rohan Kumar Das|AUTHOR Rohan Kumar Das]], [[S.R. Mahadeva Prasanna|AUTHOR S.R. Mahadeva Prasanna]], [[Rohit Sinha|AUTHOR Rohit Sinha]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170776.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-1-8-6|PAPER Mon-SS-1-8-6 — Audio Replay Attack Detection Using High-Frequency Features]]</div>|<div class="cpsessionviewpapertitle">Audio Replay Attack Detection Using High-Frequency Features</div><div class="cpsessionviewpaperauthor">[[Marcin Witkowski|AUTHOR Marcin Witkowski]], [[Stanisław Kacprzak|AUTHOR Stanisław Kacprzak]], [[Piotr Żelasko|AUTHOR Piotr Żelasko]], [[Konrad Kowalczyk|AUTHOR Konrad Kowalczyk]], [[Jakub Gałka|AUTHOR Jakub Gałka]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170304.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-1-8-7|PAPER Mon-SS-1-8-7 — Feature Selection Based on CQCCs for Automatic Speaker Verification Spoofing]]</div>|<div class="cpsessionviewpapertitle">Feature Selection Based on CQCCs for Automatic Speaker Verification Spoofing</div><div class="cpsessionviewpaperauthor">[[Xianliang Wang|AUTHOR Xianliang Wang]], [[Yanhong Xiao|AUTHOR Yanhong Xiao]], [[Xuan Zhu|AUTHOR Xuan Zhu]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|14:30–16:30, Monday, 21 Aug. 2017, D8|<|
|Chair: |Nicholas Evans, Kong Aik Lee|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170360.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-2-8-1|PAPER Mon-SS-2-8-1 — Audio Replay Attack Detection with Deep Learning Frameworks]]</div>|<div class="cpsessionviewpapertitle">Audio Replay Attack Detection with Deep Learning Frameworks</div><div class="cpsessionviewpaperauthor">[[Galina Lavrentyeva|AUTHOR Galina Lavrentyeva]], [[Sergey Novoselov|AUTHOR Sergey Novoselov]], [[Egor Malykh|AUTHOR Egor Malykh]], [[Alexander Kozlov|AUTHOR Alexander Kozlov]], [[Oleg Kudashev|AUTHOR Oleg Kudashev]], [[Vadim Shchemelinin|AUTHOR Vadim Shchemelinin]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-2-8-2|PAPER Mon-SS-2-8-2 — Ensemble Learning for Countermeasure of Audio Replay Spoofing Attack in ASVspoof2017]]</div>|<div class="cpsessionviewpapertitle">Ensemble Learning for Countermeasure of Audio Replay Spoofing Attack in ASVspoof2017</div><div class="cpsessionviewpaperauthor">[[Zhe Ji|AUTHOR Zhe Ji]], [[Zhi-Yi Li|AUTHOR Zhi-Yi Li]], [[Peng Li|AUTHOR Peng Li]], [[Maobo An|AUTHOR Maobo An]], [[Shengxiang Gao|AUTHOR Shengxiang Gao]], [[Dan Wu|AUTHOR Dan Wu]], [[Faru Zhao|AUTHOR Faru Zhao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170456.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-2-8-3|PAPER Mon-SS-2-8-3 — A Study on Replay Attack and Anti-Spoofing for Automatic Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">A Study on Replay Attack and Anti-Spoofing for Automatic Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Lantian Li|AUTHOR Lantian Li]], [[Yixiang Chen|AUTHOR Yixiang Chen]], [[Dong Wang|AUTHOR Dong Wang]], [[Thomas Fang Zheng|AUTHOR Thomas Fang Zheng]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171377.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-2-8-4|PAPER Mon-SS-2-8-4 — Replay Attack Detection Using DNN for Channel Discrimination]]</div>|<div class="cpsessionviewpapertitle">Replay Attack Detection Using DNN for Channel Discrimination</div><div class="cpsessionviewpaperauthor">[[Parav Nagarsheth|AUTHOR Parav Nagarsheth]], [[Elie Khoury|AUTHOR Elie Khoury]], [[Kailash Patil|AUTHOR Kailash Patil]], [[Matt Garland|AUTHOR Matt Garland]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171085.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-2-8-5|PAPER Mon-SS-2-8-5 — ResNet and Model Fusion for Automatic Spoofing Detection]]</div>|<div class="cpsessionviewpapertitle">ResNet and Model Fusion for Automatic Spoofing Detection</div><div class="cpsessionviewpaperauthor">[[Zhuxin Chen|AUTHOR Zhuxin Chen]], [[Zhifeng Xie|AUTHOR Zhifeng Xie]], [[Weibin Zhang|AUTHOR Weibin Zhang]], [[Xiangmin Xu|AUTHOR Xiangmin Xu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Mon-SS-2-8-6|PAPER Mon-SS-2-8-6 — SFF Anti-Spoofer: IIIT-H Submission for Automatic Speaker Verification Spoofing and Countermeasures Challenge 2017]]</div>|<div class="cpsessionviewpapertitle">SFF Anti-Spoofer: IIIT-H Submission for Automatic Speaker Verification Spoofing and Countermeasures Challenge 2017</div><div class="cpsessionviewpaperauthor">[[K.N.R.K. Raju Alluri|AUTHOR K.N.R.K. Raju Alluri]], [[Sivanand Achanta|AUTHOR Sivanand Achanta]], [[Sudarsana Reddy Kadiri|AUTHOR Sudarsana Reddy Kadiri]], [[Suryakanth V. Gangashetty|AUTHOR Suryakanth V. Gangashetty]], [[Anil Kumar Vuppala|AUTHOR Anil Kumar Vuppala]]</div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Mon-SS-2-8-7|PAPER Mon-SS-2-8-7 — Discussion]]</div>|<div class="cpsessionviewpapertitle">Discussion</div><div class="cpsessionviewpaperauthor"></div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|08:30–09:30, Thursday, 24 Aug. 2017, Aula Magna|<|
|Chair: |Olov Engwall|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS173004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-K4-1|PAPER Thu-K4-1 — Re-Inventing Speech — The Biological Way]]</div>|<div class="cpsessionviewpapertitle">Re-Inventing Speech — The Biological Way</div><div class="cpsessionviewpaperauthor">[[Björn Lindblom|AUTHOR Björn Lindblom]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Thursday, 24 Aug. 2017, Aula Magna|<|
|Chair: |Bhuvana Ramabhadran, Rohit Prabhavalkar|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170892.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-1-1|PAPER Thu-O-10-1-1 — Deep Neural Factorization for Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Deep Neural Factorization for Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Jen-Tzung Chien|AUTHOR Jen-Tzung Chien]], [[Chen Shen|AUTHOR Chen Shen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171385.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-1-2|PAPER Thu-O-10-1-2 — Semi-Supervised DNN Training with Word Selection for ASR]]</div>|<div class="cpsessionviewpapertitle">Semi-Supervised DNN Training with Word Selection for ASR</div><div class="cpsessionviewpaperauthor">[[Karel Veselý|AUTHOR Karel Veselý]], [[Lukáš Burget|AUTHOR Lukáš Burget]], [[Jan Černocký|AUTHOR Jan Černocký]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170751.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-1-3|PAPER Thu-O-10-1-3 — Gaussian Prediction Based Attention for Online End-to-End Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Gaussian Prediction Based Attention for Online End-to-End Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Junfeng Hou|AUTHOR Junfeng Hou]], [[Shiliang Zhang|AUTHOR Shiliang Zhang]], [[Li-Rong Dai|AUTHOR Li-Rong Dai]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170614.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-1-4|PAPER Thu-O-10-1-4 — Efficient Knowledge Distillation from an Ensemble of Teachers]]</div>|<div class="cpsessionviewpapertitle">Efficient Knowledge Distillation from an Ensemble of Teachers</div><div class="cpsessionviewpaperauthor">[[Takashi Fukuda|AUTHOR Takashi Fukuda]], [[Masayuki Suzuki|AUTHOR Masayuki Suzuki]], [[Gakuto Kurata|AUTHOR Gakuto Kurata]], [[Samuel Thomas|AUTHOR Samuel Thomas]], [[Jia Cui|AUTHOR Jia Cui]], [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170232.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-1-5|PAPER Thu-O-10-1-5 — An Analysis of “Attention” in Sequence-to-Sequence Models]]</div>|<div class="cpsessionviewpapertitle">An Analysis of “Attention” in Sequence-to-Sequence Models</div><div class="cpsessionviewpaperauthor">[[Rohit Prabhavalkar|AUTHOR Rohit Prabhavalkar]], [[Tara N. Sainath|AUTHOR Tara N. Sainath]], [[Bo Li|AUTHOR Bo Li]], [[Kanishka Rao|AUTHOR Kanishka Rao]], [[Navdeep Jaitly|AUTHOR Navdeep Jaitly]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171566.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-1-6|PAPER Thu-O-10-1-6 — Neural Speech Recognizer: Acoustic-to-Word LSTM Model for Large Vocabulary Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Neural Speech Recognizer: Acoustic-to-Word LSTM Model for Large Vocabulary Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Hagen Soltau|AUTHOR Hagen Soltau]], [[Hank Liao|AUTHOR Hank Liao]], [[Haşim Sak|AUTHOR Haşim Sak]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Thursday, 24 Aug. 2017, F11|<|
|Chair: |Mahadeva Prasanna, Tomoki Toda|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170524.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-11-1|PAPER Thu-O-10-11-1 — Occupancy Detection in Commercial and Residential Environments Using Audio Signal]]</div>|<div class="cpsessionviewpapertitle">Occupancy Detection in Commercial and Residential Environments Using Audio Signal</div><div class="cpsessionviewpaperauthor">[[Shabnam Ghaffarzadegan|AUTHOR Shabnam Ghaffarzadegan]], [[Attila Reiss|AUTHOR Attila Reiss]], [[Mirko Ruhs|AUTHOR Mirko Ruhs]], [[Robert Duerichen|AUTHOR Robert Duerichen]], [[Zhe Feng|AUTHOR Zhe Feng]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170685.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-11-2|PAPER Thu-O-10-11-2 — Data Augmentation, Missing Feature Mask and Kernel Classification for Through-the-Wall Acoustic Surveillance]]</div>|<div class="cpsessionviewpapertitle">Data Augmentation, Missing Feature Mask and Kernel Classification for Through-the-Wall Acoustic Surveillance</div><div class="cpsessionviewpaperauthor">[[Huy Dat Tran|AUTHOR Huy Dat Tran]], [[Wen Zheng Terence Ng|AUTHOR Wen Zheng Terence Ng]], [[Yi Ren Leng|AUTHOR Yi Ren Leng]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170284.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-11-3|PAPER Thu-O-10-11-3 — Endpoint Detection Using Grid Long Short-Term Memory Networks for Streaming Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Endpoint Detection Using Grid Long Short-Term Memory Networks for Streaming Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Shuo-Yiin Chang|AUTHOR Shuo-Yiin Chang]], [[Bo Li|AUTHOR Bo Li]], [[Tara N. Sainath|AUTHOR Tara N. Sainath]], [[Gabor Simko|AUTHOR Gabor Simko]], [[Carolina Parada|AUTHOR Carolina Parada]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170666.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-11-4|PAPER Thu-O-10-11-4 — Deep Learning Techniques in Tandem with Signal Processing Cues for Phonetic Segmentation for Text to Speech Synthesis in Indian Languages]]</div>|<div class="cpsessionviewpapertitle">Deep Learning Techniques in Tandem with Signal Processing Cues for Phonetic Segmentation for Text to Speech Synthesis in Indian Languages</div><div class="cpsessionviewpaperauthor">[[Arun Baby|AUTHOR Arun Baby]], [[Jeena J. Prakash|AUTHOR Jeena J. Prakash]], [[Rupak Vignesh|AUTHOR Rupak Vignesh]], [[Hema A. Murthy|AUTHOR Hema A. Murthy]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170877.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-11-5|PAPER Thu-O-10-11-5 — Gate Activation Signal Analysis for Gated Recurrent Neural Networks and its Correlation with Phoneme Boundaries]]</div>|<div class="cpsessionviewpapertitle">Gate Activation Signal Analysis for Gated Recurrent Neural Networks and its Correlation with Phoneme Boundaries</div><div class="cpsessionviewpaperauthor">[[Yu-Hsuan Wang|AUTHOR Yu-Hsuan Wang]], [[Cheng-Tao Chung|AUTHOR Cheng-Tao Chung]], [[Hung-Yi Lee|AUTHOR Hung-Yi Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170065.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-11-6|PAPER Thu-O-10-11-6 — Speaker Change Detection in Broadcast TV Using Bidirectional Long Short-Term Memory Networks]]</div>|<div class="cpsessionviewpapertitle">Speaker Change Detection in Broadcast TV Using Bidirectional Long Short-Term Memory Networks</div><div class="cpsessionviewpaperauthor">[[Ruiqing Yin|AUTHOR Ruiqing Yin]], [[Hervé Bredin|AUTHOR Hervé Bredin]], [[Claude Barras|AUTHOR Claude Barras]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Thursday, 24 Aug. 2017, A2|<|
|Chair: |John Hansen, Tomi Kinnunen|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170430.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-2-1|PAPER Thu-O-10-2-1 — CNN-Based Joint Mapping of Short and Long Utterance i-Vectors for Speaker Verification Using Short Utterances]]</div>|<div class="cpsessionviewpapertitle">CNN-Based Joint Mapping of Short and Long Utterance i-Vectors for Speaker Verification Using Short Utterances</div><div class="cpsessionviewpaperauthor">[[Jinxi Guo|AUTHOR Jinxi Guo]], [[Usha Amrutha Nookala|AUTHOR Usha Amrutha Nookala]], [[Abeer Alwan|AUTHOR Abeer Alwan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171199.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-2-2|PAPER Thu-O-10-2-2 — Curriculum Learning Based Probabilistic Linear Discriminant Analysis for Noise Robust Speaker Recognition]]</div>|<div class="cpsessionviewpapertitle">Curriculum Learning Based Probabilistic Linear Discriminant Analysis for Noise Robust Speaker Recognition</div><div class="cpsessionviewpaperauthor">[[Shivesh Ranjan|AUTHOR Shivesh Ranjan]], [[Abhinav Misra|AUTHOR Abhinav Misra]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170731.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-2-3|PAPER Thu-O-10-2-3 — i-Vector Transformation Using a Novel Discriminative Denoising Autoencoder for Noise-Robust Speaker Recognition]]</div>|<div class="cpsessionviewpapertitle">i-Vector Transformation Using a Novel Discriminative Denoising Autoencoder for Noise-Robust Speaker Recognition</div><div class="cpsessionviewpaperauthor">[[Shivangi Mahto|AUTHOR Shivangi Mahto]], [[Hitoshi Yamamoto|AUTHOR Hitoshi Yamamoto]], [[Takafumi Koshinaka|AUTHOR Takafumi Koshinaka]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170727.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-2-4|PAPER Thu-O-10-2-4 — Unsupervised Discriminative Training of PLDA for Domain Adaptation in Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Unsupervised Discriminative Training of PLDA for Domain Adaptation in Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Qiongqiong Wang|AUTHOR Qiongqiong Wang]], [[Takafumi Koshinaka|AUTHOR Takafumi Koshinaka]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-2-5|PAPER Thu-O-10-2-5 — Speaker Verification Under Adverse Conditions Using i-Vector Adaptation and Neural Networks]]</div>|<div class="cpsessionviewpapertitle">Speaker Verification Under Adverse Conditions Using i-Vector Adaptation and Neural Networks</div><div class="cpsessionviewpaperauthor">[[Jahangir Alam|AUTHOR Jahangir Alam]], [[Patrick Kenny|AUTHOR Patrick Kenny]], [[Gautam Bhattacharya|AUTHOR Gautam Bhattacharya]], [[Marcel Kockmann|AUTHOR Marcel Kockmann]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170605.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-2-6|PAPER Thu-O-10-2-6 — Improving Robustness of Speaker Recognition to New Conditions Using Unlabeled Data]]</div>|<div class="cpsessionviewpapertitle">Improving Robustness of Speaker Recognition to New Conditions Using Unlabeled Data</div><div class="cpsessionviewpaperauthor">[[Diego Castan|AUTHOR Diego Castan]], [[Mitchell McLaren|AUTHOR Mitchell McLaren]], [[Luciana Ferrer|AUTHOR Luciana Ferrer]], [[Aaron Lawson|AUTHOR Aaron Lawson]], [[Alicia Lozano-Diez|AUTHOR Alicia Lozano-Diez]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Thursday, 24 Aug. 2017, B4|<|
|Chair: |Stephanie Strassel, Febe De Wet|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171305.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-4-1|PAPER Thu-O-10-4-1 — CALYOU: A Comparable Spoken Algerian Corpus Harvested from YouTube]]</div>|<div class="cpsessionviewpapertitle">CALYOU: A Comparable Spoken Algerian Corpus Harvested from YouTube</div><div class="cpsessionviewpaperauthor">[[K. Abidi|AUTHOR K. Abidi]], [[M.A. Menacer|AUTHOR M.A. Menacer]], [[Kamel Smaïli|AUTHOR Kamel Smaïli]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170242.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-4-2|PAPER Thu-O-10-4-2 — PRAV: A Phonetically Rich Audio Visual Corpus]]</div>|<div class="cpsessionviewpapertitle">PRAV: A Phonetically Rich Audio Visual Corpus</div><div class="cpsessionviewpaperauthor">[[Abhishek Narwekar|AUTHOR Abhishek Narwekar]], [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170860.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-4-3|PAPER Thu-O-10-4-3 — NTCD-TIMIT: A New Database and Baseline for Noise-Robust Audio-Visual Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">NTCD-TIMIT: A New Database and Baseline for Noise-Robust Audio-Visual Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Ahmed Hussen Abdelaziz|AUTHOR Ahmed Hussen Abdelaziz]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171555.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-4-4|PAPER Thu-O-10-4-4 — The Extended SPaRKy Restaurant Corpus: Designing a Corpus with Variable Information Density]]</div>|<div class="cpsessionviewpapertitle">The Extended SPaRKy Restaurant Corpus: Designing a Corpus with Variable Information Density</div><div class="cpsessionviewpaperauthor">[[David M. Howcroft|AUTHOR David M. Howcroft]], [[Dietrich Klakow|AUTHOR Dietrich Klakow]], [[Vera Demberg|AUTHOR Vera Demberg]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171115.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-4-5|PAPER Thu-O-10-4-5 — Automatic Construction of the Finnish Parliament Speech Corpus]]</div>|<div class="cpsessionviewpapertitle">Automatic Construction of the Finnish Parliament Speech Corpus</div><div class="cpsessionviewpaperauthor">[[André Mansikkaniemi|AUTHOR André Mansikkaniemi]], [[Peter Smit|AUTHOR Peter Smit]], [[Mikko Kurimo|AUTHOR Mikko Kurimo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171357.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-4-6|PAPER Thu-O-10-4-6 — Building Audio-Visual Phonetically Annotated Arabic Corpus for Expressive Text to Speech]]</div>|<div class="cpsessionviewpapertitle">Building Audio-Visual Phonetically Annotated Arabic Corpus for Expressive Text to Speech</div><div class="cpsessionviewpaperauthor">[[Omnia Abdo|AUTHOR Omnia Abdo]], [[Sherif Abdou|AUTHOR Sherif Abdou]], [[Mervat Fashal|AUTHOR Mervat Fashal]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Thursday, 24 Aug. 2017, D8|<|
|Chair: |Agustin Gravano, Melanie Weirich|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171368.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-8-1|PAPER Thu-O-10-8-1 — What is the Relevant Population? Considerations for the Computation of Likelihood Ratios in Forensic Voice Comparison]]</div>|<div class="cpsessionviewpapertitle">What is the Relevant Population? Considerations for the Computation of Likelihood Ratios in Forensic Voice Comparison</div><div class="cpsessionviewpaperauthor">[[Vincent Hughes|AUTHOR Vincent Hughes]], [[Paul Foulkes|AUTHOR Paul Foulkes]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171080.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-8-2|PAPER Thu-O-10-8-2 — Voice Disguise vs. Impersonation: Acoustic and Perceptual Measurements of Vocal Flexibility in Non Experts]]</div>|<div class="cpsessionviewpapertitle">Voice Disguise vs. Impersonation: Acoustic and Perceptual Measurements of Vocal Flexibility in Non Experts</div><div class="cpsessionviewpaperauthor">[[Véronique Delvaux|AUTHOR Véronique Delvaux]], [[Lise Caucheteux|AUTHOR Lise Caucheteux]], [[Kathy Huet|AUTHOR Kathy Huet]], [[Myriam Piccaluga|AUTHOR Myriam Piccaluga]], [[Bernard Harmegnies|AUTHOR Bernard Harmegnies]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170470.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-8-3|PAPER Thu-O-10-8-3 — Schwa Realization in French: Using Automatic Speech Processing to Study Phonological and Socio-Linguistic Factors in Large Corpora]]</div>|<div class="cpsessionviewpapertitle">Schwa Realization in French: Using Automatic Speech Processing to Study Phonological and Socio-Linguistic Factors in Large Corpora</div><div class="cpsessionviewpaperauthor">[[Yaru Wu|AUTHOR Yaru Wu]], [[Martine Adda-Decker|AUTHOR Martine Adda-Decker]], [[Cécile Fougeron|AUTHOR Cécile Fougeron]], [[Lori Lamel|AUTHOR Lori Lamel]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170922.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-8-4|PAPER Thu-O-10-8-4 — The Social Life of Setswana Ejectives]]</div>|<div class="cpsessionviewpapertitle">The Social Life of Setswana Ejectives</div><div class="cpsessionviewpaperauthor">[[Daniel Duran|AUTHOR Daniel Duran]], [[Jagoda Bruni|AUTHOR Jagoda Bruni]], [[Grzegorz Dogil|AUTHOR Grzegorz Dogil]], [[Justus Roux|AUTHOR Justus Roux]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-8-5|PAPER Thu-O-10-8-5 — How Long is Too Long? How Pause Features After Requests Affect the Perceived Willingness of Affirmative Answers]]</div>|<div class="cpsessionviewpapertitle">How Long is Too Long? How Pause Features After Requests Affect the Perceived Willingness of Affirmative Answers</div><div class="cpsessionviewpaperauthor">[[Lea S. Kohtz|AUTHOR Lea S. Kohtz]], [[Oliver Niebuhr|AUTHOR Oliver Niebuhr]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171433.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-10-8-6|PAPER Thu-O-10-8-6 — Shadowing Synthesized Speech — Segmental Analysis of Phonetic Convergence]]</div>|<div class="cpsessionviewpapertitle">Shadowing Synthesized Speech — Segmental Analysis of Phonetic Convergence</div><div class="cpsessionviewpaperauthor">[[Iona Gessinger|AUTHOR Iona Gessinger]], [[Eran Raveh|AUTHOR Eran Raveh]], [[Sébastien Le Maguer|AUTHOR Sébastien Le Maguer]], [[Bernd Möbius|AUTHOR Bernd Möbius]], [[Ingmar Steiner|AUTHOR Ingmar Steiner]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Thursday, 24 Aug. 2017, Aula Magna|<|
|Chair: |Hagen Soltau, William Hartmann|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171118.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-1-1|PAPER Thu-O-9-1-1 — Multitask Learning with Low-Level Auxiliary Tasks for Encoder-Decoder Based Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Multitask Learning with Low-Level Auxiliary Tasks for Encoder-Decoder Based Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Shubham Toshniwal|AUTHOR Shubham Toshniwal]], [[Hao Tang|AUTHOR Hao Tang]], [[Liang Lu|AUTHOR Liang Lu]], [[Karen Livescu|AUTHOR Karen Livescu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170639.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-1-2|PAPER Thu-O-9-1-2 — Optimizing Expected Word Error Rate via Sampling for Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Optimizing Expected Word Error Rate via Sampling for Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Matt Shannon|AUTHOR Matt Shannon]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170231.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-1-3|PAPER Thu-O-9-1-3 — Annealed f-Smoothing as a Mechanism to Speed up Neural Network Training]]</div>|<div class="cpsessionviewpapertitle">Annealed f-Smoothing as a Mechanism to Speed up Neural Network Training</div><div class="cpsessionviewpaperauthor">[[Tara N. Sainath|AUTHOR Tara N. Sainath]], [[Vijayaditya Peddinti|AUTHOR Vijayaditya Peddinti]], [[Olivier Siohan|AUTHOR Olivier Siohan]], [[Arun Narayanan|AUTHOR Arun Narayanan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170583.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-1-4|PAPER Thu-O-9-1-4 — Non-Uniform MCE Training of Deep Long Short-Term Memory Recurrent Neural Networks for Keyword Spotting]]</div>|<div class="cpsessionviewpapertitle">Non-Uniform MCE Training of Deep Long Short-Term Memory Recurrent Neural Networks for Keyword Spotting</div><div class="cpsessionviewpaperauthor">[[Zhong Meng|AUTHOR Zhong Meng]], [[Biing-Hwang Juang|AUTHOR Biing-Hwang Juang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171784.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-1-5|PAPER Thu-O-9-1-5 — Exploiting Eigenposteriors for Semi-Supervised Training of DNN Acoustic Models with Sequence Discrimination]]</div>|<div class="cpsessionviewpapertitle">Exploiting Eigenposteriors for Semi-Supervised Training of DNN Acoustic Models with Sequence Discrimination</div><div class="cpsessionviewpaperauthor">[[Pranay Dighe|AUTHOR Pranay Dighe]], [[Afsaneh Asaei|AUTHOR Afsaneh Asaei]], [[Hervé Bourlard|AUTHOR Hervé Bourlard]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170221.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-1-6|PAPER Thu-O-9-1-6 — Discriminative Autoencoders for Acoustic Modeling]]</div>|<div class="cpsessionviewpapertitle">Discriminative Autoencoders for Acoustic Modeling</div><div class="cpsessionviewpaperauthor">[[Ming-Han Yang|AUTHOR Ming-Han Yang]], [[Hung-Shin Lee|AUTHOR Hung-Shin Lee]], [[Yu-Ding Lu|AUTHOR Yu-Ding Lu]], [[Kuan-Yu Chen|AUTHOR Kuan-Yu Chen]], [[Yu Tsao|AUTHOR Yu Tsao]], [[Berlin Chen|AUTHOR Berlin Chen]], [[Hsin-Min Wang|AUTHOR Hsin-Min Wang]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Thursday, 24 Aug. 2017, A2|<|
|Chair: |Eduardo Lleida, Kai Yu|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170051.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-2-1|PAPER Thu-O-9-2-1 — Speaker Diarization Using Convolutional Neural Network for Statistics Accumulation Refinement]]</div>|<div class="cpsessionviewpapertitle">Speaker Diarization Using Convolutional Neural Network for Statistics Accumulation Refinement</div><div class="cpsessionviewpaperauthor">[[Zbyněk Zajíc|AUTHOR Zbyněk Zajíc]], [[Marek Hrúz|AUTHOR Marek Hrúz]], [[Luděk Müller|AUTHOR Luděk Müller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171650.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-2-2|PAPER Thu-O-9-2-2 — Speaker2Vec: Unsupervised Learning and Adaptation of a Speaker Manifold Using Deep Neural Networks with an Evaluation on Speaker Segmentation]]</div>|<div class="cpsessionviewpapertitle">Speaker2Vec: Unsupervised Learning and Adaptation of a Speaker Manifold Using Deep Neural Networks with an Evaluation on Speaker Segmentation</div><div class="cpsessionviewpaperauthor">[[Arindam Jati|AUTHOR Arindam Jati]], [[Panayiotis Georgiou|AUTHOR Panayiotis Georgiou]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170270.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-2-3|PAPER Thu-O-9-2-3 — A Triplet Ranking-Based Neural Network for Speaker Diarization and Linking]]</div>|<div class="cpsessionviewpapertitle">A Triplet Ranking-Based Neural Network for Speaker Diarization and Linking</div><div class="cpsessionviewpaperauthor">[[Gaël Le Lan|AUTHOR Gaël Le Lan]], [[Delphine Charlet|AUTHOR Delphine Charlet]], [[Anthony Larcher|AUTHOR Anthony Larcher]], [[Sylvain Meignier|AUTHOR Sylvain Meignier]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170492.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-2-4|PAPER Thu-O-9-2-4 — Estimating Speaker Clustering Quality Using Logistic Regression]]</div>|<div class="cpsessionviewpapertitle">Estimating Speaker Clustering Quality Using Logistic Regression</div><div class="cpsessionviewpaperauthor">[[Yishai Cohen|AUTHOR Yishai Cohen]], [[Itshak Lapidot|AUTHOR Itshak Lapidot]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-2-5|PAPER Thu-O-9-2-5 — Combining Speaker Turn Embedding and Incremental Structure Prediction for Low-Latency Speaker Diarization]]</div>|<div class="cpsessionviewpapertitle">Combining Speaker Turn Embedding and Incremental Structure Prediction for Low-Latency Speaker Diarization</div><div class="cpsessionviewpaperauthor">[[Guillaume Wisniewksi|AUTHOR Guillaume Wisniewksi]], [[Hervé Bredin|AUTHOR Hervé Bredin]], [[G. Gelly|AUTHOR G. Gelly]], [[Claude Barras|AUTHOR Claude Barras]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170411.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-2-6|PAPER Thu-O-9-2-6 — ‘pyannote.metrics’: A Toolkit for Reproducible Evaluation, Diagnostic, and Error Analysis of Speaker Diarization Systems]]</div>|<div class="cpsessionviewpapertitle">‘pyannote.metrics’: A Toolkit for Reproducible Evaluation, Diagnostic, and Error Analysis of Speaker Diarization Systems</div><div class="cpsessionviewpaperauthor">[[Hervé Bredin|AUTHOR Hervé Bredin]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Thursday, 24 Aug. 2017, B4|<|
|Chair: |Sanjeev Khudanpur, Murat Saraclar|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171328.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-4-1|PAPER Thu-O-9-4-1 — A Rescoring Approach for Keyword Search Using Lattice Context Information]]</div>|<div class="cpsessionviewpapertitle">A Rescoring Approach for Keyword Search Using Lattice Context Information</div><div class="cpsessionviewpaperauthor">[[Zhipeng Chen|AUTHOR Zhipeng Chen]], [[Ji Wu|AUTHOR Ji Wu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170601.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-4-2|PAPER Thu-O-9-4-2 — The Kaldi OpenKWS System: Improving Low Resource Keyword Search]]</div>|<div class="cpsessionviewpapertitle">The Kaldi OpenKWS System: Improving Low Resource Keyword Search</div><div class="cpsessionviewpaperauthor">[[Jan Trmal|AUTHOR Jan Trmal]], [[Matthew Wiesner|AUTHOR Matthew Wiesner]], [[Vijayaditya Peddinti|AUTHOR Vijayaditya Peddinti]], [[Xiaohui Zhang|AUTHOR Xiaohui Zhang]], [[Pegah Ghahremani|AUTHOR Pegah Ghahremani]], [[Yiming Wang|AUTHOR Yiming Wang]], [[Vimal Manohar|AUTHOR Vimal Manohar]], [[Hainan Xu|AUTHOR Hainan Xu]], [[Daniel Povey|AUTHOR Daniel Povey]], [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171212.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-4-3|PAPER Thu-O-9-4-3 — The STC Keyword Search System for OpenKWS 2016 Evaluation]]</div>|<div class="cpsessionviewpapertitle">The STC Keyword Search System for OpenKWS 2016 Evaluation</div><div class="cpsessionviewpaperauthor">[[Yuri Khokhlov|AUTHOR Yuri Khokhlov]], [[Ivan Medennikov|AUTHOR Ivan Medennikov]], [[Aleksei Romanenko|AUTHOR Aleksei Romanenko]], [[Valentin Mendelev|AUTHOR Valentin Mendelev]], [[Maxim Korenevsky|AUTHOR Maxim Korenevsky]], [[Alexey Prudnikov|AUTHOR Alexey Prudnikov]], [[Natalia Tomashenko|AUTHOR Natalia Tomashenko]], [[Alexander Zatvornitsky|AUTHOR Alexander Zatvornitsky]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170480.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-4-4|PAPER Thu-O-9-4-4 — Compressed Time Delay Neural Network for Small-Footprint Keyword Spotting]]</div>|<div class="cpsessionviewpapertitle">Compressed Time Delay Neural Network for Small-Footprint Keyword Spotting</div><div class="cpsessionviewpaperauthor">[[Ming Sun|AUTHOR Ming Sun]], [[David Snyder|AUTHOR David Snyder]], [[Yixin Gao|AUTHOR Yixin Gao]], [[Varun Nagaraja|AUTHOR Varun Nagaraja]], [[Mike Rodehorst|AUTHOR Mike Rodehorst]], [[Sankaran Panchapagesan|AUTHOR Sankaran Panchapagesan]], [[Nikko Strom|AUTHOR Nikko Strom]], [[Spyros Matsoukas|AUTHOR Spyros Matsoukas]], [[Shiv Vitaladevuni|AUTHOR Shiv Vitaladevuni]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170904.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-4-5|PAPER Thu-O-9-4-5 — Symbol Sequence Search from Telephone Conversation]]</div>|<div class="cpsessionviewpapertitle">Symbol Sequence Search from Telephone Conversation</div><div class="cpsessionviewpaperauthor">[[Masayuki Suzuki|AUTHOR Masayuki Suzuki]], [[Gakuto Kurata|AUTHOR Gakuto Kurata]], [[Abhinav Sethy|AUTHOR Abhinav Sethy]], [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]], [[Kenneth W. Church|AUTHOR Kenneth W. Church]], [[Mark Drake|AUTHOR Mark Drake]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171273.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-4-6|PAPER Thu-O-9-4-6 — Similarity Learning Based Query Modeling for Keyword Search]]</div>|<div class="cpsessionviewpapertitle">Similarity Learning Based Query Modeling for Keyword Search</div><div class="cpsessionviewpaperauthor">[[Batuhan Gundogdu|AUTHOR Batuhan Gundogdu]], [[Murat Saraclar|AUTHOR Murat Saraclar]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Thursday, 24 Aug. 2017, C6|<|
|Chair: |Yan Huang, Tim Fingscheidt|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170057.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-6-1|PAPER Thu-O-9-6-1 — Deep Recurrent Neural Network Based Monaural Speech Separation Using Recurrent Temporal Restricted Boltzmann Machines]]</div>|<div class="cpsessionviewpapertitle">Deep Recurrent Neural Network Based Monaural Speech Separation Using Recurrent Temporal Restricted Boltzmann Machines</div><div class="cpsessionviewpaperauthor">[[Suman Samui|AUTHOR Suman Samui]], [[Indrajit Chakrabarti|AUTHOR Indrajit Chakrabarti]], [[Soumya K. Ghosh|AUTHOR Soumya K. Ghosh]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170109.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-6-2|PAPER Thu-O-9-6-2 — Improved Codebook-Based Speech Enhancement Based on MBE Model]]</div>|<div class="cpsessionviewpapertitle">Improved Codebook-Based Speech Enhancement Based on MBE Model</div><div class="cpsessionviewpaperauthor">[[Qizheng Huang|AUTHOR Qizheng Huang]], [[Changchun Bao|AUTHOR Changchun Bao]], [[Xianyun Wang|AUTHOR Xianyun Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170515.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-6-3|PAPER Thu-O-9-6-3 — Improving Mask Learning Based Speech Enhancement System with Restoration Layers and Residual Connection]]</div>|<div class="cpsessionviewpapertitle">Improving Mask Learning Based Speech Enhancement System with Restoration Layers and Residual Connection</div><div class="cpsessionviewpaperauthor">[[Zhuo Chen|AUTHOR Zhuo Chen]], [[Yan Huang|AUTHOR Yan Huang]], [[Jinyu Li|AUTHOR Jinyu Li]], [[Yifan Gong|AUTHOR Yifan Gong]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170611.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-6-4|PAPER Thu-O-9-6-4 — Exploring Low-Dimensional Structures of Modulation Spectra for Robust Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Exploring Low-Dimensional Structures of Modulation Spectra for Robust Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Bi-Cheng Yan|AUTHOR Bi-Cheng Yan]], [[Chin-Hong Shih|AUTHOR Chin-Hong Shih]], [[Shih-Hung Liu|AUTHOR Shih-Hung Liu]], [[Berlin Chen|AUTHOR Berlin Chen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171428.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-6-5|PAPER Thu-O-9-6-5 — SEGAN: Speech Enhancement Generative Adversarial Network]]</div>|<div class="cpsessionviewpapertitle">SEGAN: Speech Enhancement Generative Adversarial Network</div><div class="cpsessionviewpaperauthor">[[Santiago Pascual|AUTHOR Santiago Pascual]], [[Antonio Bonafonte|AUTHOR Antonio Bonafonte]], [[Joan Serrà|AUTHOR Joan Serrà]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171653.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-6-6|PAPER Thu-O-9-6-6 — Concatenative Resynthesis Using Twin Networks]]</div>|<div class="cpsessionviewpapertitle">Concatenative Resynthesis Using Twin Networks</div><div class="cpsessionviewpaperauthor">[[Soumi Maiti|AUTHOR Soumi Maiti]], [[Michael I. Mandel|AUTHOR Michael I. Mandel]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Thursday, 24 Aug. 2017, D8|<|
|Chair: |Patrick Wambacq, Florian Metze|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170085.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-8-1|PAPER Thu-O-9-8-1 — Combining Residual Networks with LSTMs for Lipreading]]</div>|<div class="cpsessionviewpapertitle">Combining Residual Networks with LSTMs for Lipreading</div><div class="cpsessionviewpaperauthor">[[Themos Stafylakis|AUTHOR Themos Stafylakis]], [[Georgios Tzimiropoulos|AUTHOR Georgios Tzimiropoulos]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170106.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-8-2|PAPER Thu-O-9-8-2 — Improving Computer Lipreading via DNN Sequence Discriminative Training Techniques]]</div>|<div class="cpsessionviewpapertitle">Improving Computer Lipreading via DNN Sequence Discriminative Training Techniques</div><div class="cpsessionviewpaperauthor">[[Kwanchiva Thangthai|AUTHOR Kwanchiva Thangthai]], [[Richard Harvey|AUTHOR Richard Harvey]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170421.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-8-3|PAPER Thu-O-9-8-3 — Improving Speaker-Independent Lipreading with Domain-Adversarial Training]]</div>|<div class="cpsessionviewpapertitle">Improving Speaker-Independent Lipreading with Domain-Adversarial Training</div><div class="cpsessionviewpaperauthor">[[Michael Wand|AUTHOR Michael Wand]], [[Jürgen Schmidhuber|AUTHOR Jürgen Schmidhuber]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170799.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-8-4|PAPER Thu-O-9-8-4 — Turbo Decoders for Audio-Visual Continuous Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Turbo Decoders for Audio-Visual Continuous Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Ahmed Hussen Abdelaziz|AUTHOR Ahmed Hussen Abdelaziz]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170939.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-8-5|PAPER Thu-O-9-8-5 — DNN-Based Ultrasound-to-Speech Conversion for a Silent Speech Interface]]</div>|<div class="cpsessionviewpapertitle">DNN-Based Ultrasound-to-Speech Conversion for a Silent Speech Interface</div><div class="cpsessionviewpaperauthor">[[Tamás Gábor Csapó|AUTHOR Tamás Gábor Csapó]], [[Tamás Grósz|AUTHOR Tamás Grósz]], [[Gábor Gosztolya|AUTHOR Gábor Gosztolya]], [[László Tóth|AUTHOR László Tóth]], [[Alexandra Markó|AUTHOR Alexandra Markó]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170502.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-O-9-8-6|PAPER Thu-O-9-8-6 — Visually Grounded Learning of Keyword Prediction from Untranscribed Speech]]</div>|<div class="cpsessionviewpapertitle">Visually Grounded Learning of Keyword Prediction from Untranscribed Speech</div><div class="cpsessionviewpaperauthor">[[Herman Kamper|AUTHOR Herman Kamper]], [[Shane Settle|AUTHOR Shane Settle]], [[Gregory Shakhnarovich|AUTHOR Gregory Shakhnarovich]], [[Karen Livescu|AUTHOR Karen Livescu]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Thursday, 24 Aug. 2017, Poster 1|<|
|Chair: |Volker Leutnant|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171096.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-1-1|PAPER Thu-P-9-1-1 — Improved Automatic Speech Recognition Using Subband Temporal Envelope Features and Time-Delay Neural Network Denoising Autoencoder]]</div>|<div class="cpsessionviewpapertitle">Improved Automatic Speech Recognition Using Subband Temporal Envelope Features and Time-Delay Neural Network Denoising Autoencoder</div><div class="cpsessionviewpaperauthor">[[Cong-Thanh Do|AUTHOR Cong-Thanh Do]], [[Yannis Stylianou|AUTHOR Yannis Stylianou]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170225.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-1-2|PAPER Thu-P-9-1-2 — Factored Deep Convolutional Neural Networks for Noise Robust Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Factored Deep Convolutional Neural Networks for Noise Robust Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Masakiyo Fujimoto|AUTHOR Masakiyo Fujimoto]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170230.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-1-3|PAPER Thu-P-9-1-3 — Global SNR Estimation of Speech Signals for Unknown Noise Conditions Using Noise Adapted Non-Linear Regression]]</div>|<div class="cpsessionviewpapertitle">Global SNR Estimation of Speech Signals for Unknown Noise Conditions Using Noise Adapted Non-Linear Regression</div><div class="cpsessionviewpaperauthor">[[Pavlos Papadopoulos|AUTHOR Pavlos Papadopoulos]], [[Ruchir Travadi|AUTHOR Ruchir Travadi]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170579.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-1-4|PAPER Thu-P-9-1-4 — Joint Training of Multi-Channel-Condition Dereverberation and Acoustic Modeling of Microphone Array Speech for Robust Distant Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Joint Training of Multi-Channel-Condition Dereverberation and Acoustic Modeling of Microphone Array Speech for Robust Distant Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Fengpei Ge|AUTHOR Fengpei Ge]], [[Kehuang Li|AUTHOR Kehuang Li]], [[Bo Wu|AUTHOR Bo Wu]], [[Sabato Marco Siniscalchi|AUTHOR Sabato Marco Siniscalchi]], [[Yonghong Yan|AUTHOR Yonghong Yan]], [[Chin-Hui Lee|AUTHOR Chin-Hui Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170793.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-1-5|PAPER Thu-P-9-1-5 — Uncertainty Decoding with Adaptive Sampling for Noise Robust DNN-Based Acoustic Modeling]]</div>|<div class="cpsessionviewpapertitle">Uncertainty Decoding with Adaptive Sampling for Noise Robust DNN-Based Acoustic Modeling</div><div class="cpsessionviewpaperauthor">[[Dung T. Tran|AUTHOR Dung T. Tran]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Atsunori Ogawa|AUTHOR Atsunori Ogawa]], [[Tomohiro Nakatani|AUTHOR Tomohiro Nakatani]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170805.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-1-6|PAPER Thu-P-9-1-6 — Attention-Based LSTM with Multi-Task Learning for Distant Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Attention-Based LSTM with Multi-Task Learning for Distant Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Yu Zhang|AUTHOR Yu Zhang]], [[Pengyuan Zhang|AUTHOR Pengyuan Zhang]], [[Yonghong Yan|AUTHOR Yonghong Yan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171315.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-1-7|PAPER Thu-P-9-1-7 — To Improve the Robustness of LSTM-RNN Acoustic Models Using Higher-Order Feedback from Multiple Histories]]</div>|<div class="cpsessionviewpapertitle">To Improve the Robustness of LSTM-RNN Acoustic Models Using Higher-Order Feedback from Multiple Histories</div><div class="cpsessionviewpaperauthor">[[Hengguan Huang|AUTHOR Hengguan Huang]], [[Brian Mak|AUTHOR Brian Mak]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171536.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-1-8|PAPER Thu-P-9-1-8 — End-to-End Speech Recognition with Auditory Attention for Multi-Microphone Distance Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">End-to-End Speech Recognition with Auditory Attention for Multi-Microphone Distance Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Suyoun Kim|AUTHOR Suyoun Kim]], [[Ian Lane|AUTHOR Ian Lane]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171665.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-1-9|PAPER Thu-P-9-1-9 — Robust Speech Recognition Based on Binaural Auditory Processing]]</div>|<div class="cpsessionviewpapertitle">Robust Speech Recognition Based on Binaural Auditory Processing</div><div class="cpsessionviewpaperauthor">[[Anjali Menon|AUTHOR Anjali Menon]], [[Chanwoo Kim|AUTHOR Chanwoo Kim]], [[Richard M. Stern|AUTHOR Richard M. Stern]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171791.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-1-10|PAPER Thu-P-9-1-10 — Adaptive Multichannel Dereverberation for Automatic Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Adaptive Multichannel Dereverberation for Automatic Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Joe Caroselli|AUTHOR Joe Caroselli]], [[Izhak Shafran|AUTHOR Izhak Shafran]], [[Arun Narayanan|AUTHOR Arun Narayanan]], [[Richard Rose|AUTHOR Richard Rose]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Thursday, 24 Aug. 2017, Poster 3|<|
|Chair: |Kiyoko Yoneyama|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171579.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-3-1|PAPER Thu-P-9-3-1 — The Effects of Real and Placebo Alcohol on Deaffrication]]</div>|<div class="cpsessionviewpapertitle">The Effects of Real and Placebo Alcohol on Deaffrication</div><div class="cpsessionviewpaperauthor">[[Urban Zihlmann|AUTHOR Urban Zihlmann]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171390.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-3-2|PAPER Thu-P-9-3-2 — Polyglot and Speech Corpus Tools: A System for Representing, Integrating, and Querying Speech Corpora]]</div>|<div class="cpsessionviewpapertitle">Polyglot and Speech Corpus Tools: A System for Representing, Integrating, and Querying Speech Corpora</div><div class="cpsessionviewpaperauthor">[[Michael McAuliffe|AUTHOR Michael McAuliffe]], [[Elias Stengel-Eskin|AUTHOR Elias Stengel-Eskin]], [[Michaela Socolof|AUTHOR Michaela Socolof]], [[Morgan Sonderegger|AUTHOR Morgan Sonderegger]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171508.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-3-3|PAPER Thu-P-9-3-3 — Mapping Across Feature Spaces in Forensic Voice Comparison: The Contribution of Auditory-Based Voice Quality to (Semi-)Automatic System Testing]]</div>|<div class="cpsessionviewpapertitle">Mapping Across Feature Spaces in Forensic Voice Comparison: The Contribution of Auditory-Based Voice Quality to (Semi-)Automatic System Testing</div><div class="cpsessionviewpaperauthor">[[Vincent Hughes|AUTHOR Vincent Hughes]], [[Philip Harrison|AUTHOR Philip Harrison]], [[Paul Foulkes|AUTHOR Paul Foulkes]], [[Peter French|AUTHOR Peter French]], [[Colleen Kavanagh|AUTHOR Colleen Kavanagh]], [[Eugenia San Segundo|AUTHOR Eugenia San Segundo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170449.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-3-4|PAPER Thu-P-9-3-4 — Effect of Language, Speaking Style and Speaker on Long-Term F0 Estimation]]</div>|<div class="cpsessionviewpapertitle">Effect of Language, Speaking Style and Speaker on Long-Term F0 Estimation</div><div class="cpsessionviewpaperauthor">[[Pablo Arantes|AUTHOR Pablo Arantes]], [[Anders Eriksson|AUTHOR Anders Eriksson]], [[Suska Gutzeit|AUTHOR Suska Gutzeit]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171503.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-3-5|PAPER Thu-P-9-3-5 — Stability of Prosodic Characteristics Across Age and Gender Groups]]</div>|<div class="cpsessionviewpapertitle">Stability of Prosodic Characteristics Across Age and Gender Groups</div><div class="cpsessionviewpaperauthor">[[Jan Volín|AUTHOR Jan Volín]], [[Tereza Tykalová|AUTHOR Tereza Tykalová]], [[Tomáš Bořil|AUTHOR Tomáš Bořil]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171392.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-3-6|PAPER Thu-P-9-3-6 — Electrophysiological Correlates of Familiar Voice Recognition]]</div>|<div class="cpsessionviewpapertitle">Electrophysiological Correlates of Familiar Voice Recognition</div><div class="cpsessionviewpaperauthor">[[Julien Plante-Hébert|AUTHOR Julien Plante-Hébert]], [[Victor J. Boucher|AUTHOR Victor J. Boucher]], [[Boutheina Jemel|AUTHOR Boutheina Jemel]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-3-7|PAPER Thu-P-9-3-7 — Developing an Embosi (Bantu C25) Speech Variant Dictionary to Model Vowel Elision and Morpheme Deletion]]</div>|<div class="cpsessionviewpapertitle">Developing an Embosi (Bantu C25) Speech Variant Dictionary to Model Vowel Elision and Morpheme Deletion</div><div class="cpsessionviewpaperauthor">[[Jamison Cooper-Leavitt|AUTHOR Jamison Cooper-Leavitt]], [[Lori Lamel|AUTHOR Lori Lamel]], [[Annie Rialland|AUTHOR Annie Rialland]], [[Martine Adda-Decker|AUTHOR Martine Adda-Decker]], [[Gilles Adda|AUTHOR Gilles Adda]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171448.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-3-8|PAPER Thu-P-9-3-8 — R,,d,, as a Control Parameter to Explore Affective Correlates of the Tense-Lax Continuum]]</div>|<div class="cpsessionviewpapertitle">R,,d,, as a Control Parameter to Explore Affective Correlates of the Tense-Lax Continuum</div><div class="cpsessionviewpaperauthor">[[Andy Murphy|AUTHOR Andy Murphy]], [[Irena Yanushevskaya|AUTHOR Irena Yanushevskaya]], [[Ailbhe Ní Chasaide|AUTHOR Ailbhe Ní Chasaide]], [[Christer Gobl|AUTHOR Christer Gobl]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-3-9|PAPER Thu-P-9-3-9 — Cross-Linguistic Distinctions Between Professional and Non-Professional Speaking Styles]]</div>|<div class="cpsessionviewpapertitle">Cross-Linguistic Distinctions Between Professional and Non-Professional Speaking Styles</div><div class="cpsessionviewpaperauthor">[[Plínio A. Barbosa|AUTHOR Plínio A. Barbosa]], [[Sandra Madureira|AUTHOR Sandra Madureira]], [[Philippe Boula de Mareüil|AUTHOR Philippe Boula de Mareüil]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170990.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-3-10|PAPER Thu-P-9-3-10 — Perception and Production of Word-Final /ʁ/ in French]]</div>|<div class="cpsessionviewpapertitle">Perception and Production of Word-Final /ʁ/ in French</div><div class="cpsessionviewpaperauthor">[[Cedric Gendrot|AUTHOR Cedric Gendrot]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170882.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-3-11|PAPER Thu-P-9-3-11 — Glottal Source Estimation from Coded Telephone Speech Using a Deep Neural Network]]</div>|<div class="cpsessionviewpapertitle">Glottal Source Estimation from Coded Telephone Speech Using a Deep Neural Network</div><div class="cpsessionviewpaperauthor">[[N.P. Narendra|AUTHOR N.P. Narendra]], [[Manu Airaksinen|AUTHOR Manu Airaksinen]], [[Paavo Alku|AUTHOR Paavo Alku]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170971.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-3-12|PAPER Thu-P-9-3-12 — Automatic Labelling of Prosodic Prominence, Phrasing and Disfluencies in French Speech by Simulating the Perception of Naïve and Expert Listeners]]</div>|<div class="cpsessionviewpapertitle">Automatic Labelling of Prosodic Prominence, Phrasing and Disfluencies in French Speech by Simulating the Perception of Naïve and Expert Listeners</div><div class="cpsessionviewpaperauthor">[[George Christodoulides|AUTHOR George Christodoulides]], [[Mathieu Avanzi|AUTHOR Mathieu Avanzi]], [[Anne Catherine Simon|AUTHOR Anne Catherine Simon]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170164.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-3-13|PAPER Thu-P-9-3-13 — Don’t Count on ASR to Transcribe for You: Breaking Bias with Two Crowds]]</div>|<div class="cpsessionviewpapertitle">Don’t Count on ASR to Transcribe for You: Breaking Bias with Two Crowds</div><div class="cpsessionviewpaperauthor">[[Michael Levit|AUTHOR Michael Levit]], [[Yan Huang|AUTHOR Yan Huang]], [[Shuangyu Chang|AUTHOR Shuangyu Chang]], [[Yifan Gong|AUTHOR Yifan Gong]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170363.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-3-14|PAPER Thu-P-9-3-14 — Effects of Training Data Variety in Generating Glottal Pulses from Acoustic Features with DNNs]]</div>|<div class="cpsessionviewpapertitle">Effects of Training Data Variety in Generating Glottal Pulses from Acoustic Features with DNNs</div><div class="cpsessionviewpaperauthor">[[Manu Airaksinen|AUTHOR Manu Airaksinen]], [[Paavo Alku|AUTHOR Paavo Alku]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170406.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-3-15|PAPER Thu-P-9-3-15 — Towards Intelligent Crowdsourcing for Audio Data Annotation: Integrating Active Learning in the Real World]]</div>|<div class="cpsessionviewpapertitle">Towards Intelligent Crowdsourcing for Audio Data Annotation: Integrating Active Learning in the Real World</div><div class="cpsessionviewpaperauthor">[[Simone Hantke|AUTHOR Simone Hantke]], [[Zixing Zhang|AUTHOR Zixing Zhang]], [[Björn Schuller|AUTHOR Björn Schuller]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Thursday, 24 Aug. 2017, Poster 4|<|
|Chair: |Sébastien Le Maguer|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170171.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-4-1|PAPER Thu-P-9-4-1 — Principles for Learning Controllable TTS from Annotated and Latent Variation]]</div>|<div class="cpsessionviewpapertitle">Principles for Learning Controllable TTS from Annotated and Latent Variation</div><div class="cpsessionviewpaperauthor">[[Gustav Eje Henter|AUTHOR Gustav Eje Henter]], [[Jaime Lorenzo-Trueba|AUTHOR Jaime Lorenzo-Trueba]], [[Xin Wang|AUTHOR Xin Wang]], [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170362.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-4-2|PAPER Thu-P-9-4-2 — Sampling-Based Speech Parameter Generation Using Moment-Matching Networks]]</div>|<div class="cpsessionviewpapertitle">Sampling-Based Speech Parameter Generation Using Moment-Matching Networks</div><div class="cpsessionviewpaperauthor">[[Shinnosuke Takamichi|AUTHOR Shinnosuke Takamichi]], [[Tomoki Koriyama|AUTHOR Tomoki Koriyama]], [[Hiroshi Saruwatari|AUTHOR Hiroshi Saruwatari]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170428.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-4-3|PAPER Thu-P-9-4-3 — Unit Selection with Hierarchical Cascaded Long Short Term Memory Bidirectional Recurrent Neural Nets]]</div>|<div class="cpsessionviewpapertitle">Unit Selection with Hierarchical Cascaded Long Short Term Memory Bidirectional Recurrent Neural Nets</div><div class="cpsessionviewpaperauthor">[[Vincent Pollet|AUTHOR Vincent Pollet]], [[Enrico Zovato|AUTHOR Enrico Zovato]], [[Sufian Irhimeh|AUTHOR Sufian Irhimeh]], [[Pier Batzu|AUTHOR Pier Batzu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-4-4|PAPER Thu-P-9-4-4 — Utterance Selection for Optimizing Intelligibility of TTS Voices Trained on ASR Data]]</div>|<div class="cpsessionviewpapertitle">Utterance Selection for Optimizing Intelligibility of TTS Voices Trained on ASR Data</div><div class="cpsessionviewpaperauthor">[[Erica Cooper|AUTHOR Erica Cooper]], [[Xinyue Wang|AUTHOR Xinyue Wang]], [[Alison Chang|AUTHOR Alison Chang]], [[Yocheved Levitan|AUTHOR Yocheved Levitan]], [[Julia Hirschberg|AUTHOR Julia Hirschberg]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170479.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-4-5|PAPER Thu-P-9-4-5 — Bias and Statistical Significance in Evaluating Speech Synthesis with Mean Opinion Scores]]</div>|<div class="cpsessionviewpapertitle">Bias and Statistical Significance in Evaluating Speech Synthesis with Mean Opinion Scores</div><div class="cpsessionviewpaperauthor">[[Andrew Rosenberg|AUTHOR Andrew Rosenberg]], [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170587.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-4-6|PAPER Thu-P-9-4-6 — Phase Modeling Using Integrated Linear Prediction Residual for Statistical Parametric Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Phase Modeling Using Integrated Linear Prediction Residual for Statistical Parametric Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Nagaraj Adiga|AUTHOR Nagaraj Adiga]], [[S.R. Mahadeva Prasanna|AUTHOR S.R. Mahadeva Prasanna]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170802.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-4-7|PAPER Thu-P-9-4-7 — Evaluation of a Silent Speech Interface Based on Magnetic Sensing and Deep Learning for a Phonetically Rich Vocabulary]]</div>|<div class="cpsessionviewpapertitle">Evaluation of a Silent Speech Interface Based on Magnetic Sensing and Deep Learning for a Phonetically Rich Vocabulary</div><div class="cpsessionviewpaperauthor">[[Jose A. Gonzalez|AUTHOR Jose A. Gonzalez]], [[Lam A. Cheah|AUTHOR Lam A. Cheah]], [[Phil D. Green|AUTHOR Phil D. Green]], [[James M. Gilbert|AUTHOR James M. Gilbert]], [[Stephen R. Ell|AUTHOR Stephen R. Ell]], [[Roger K. Moore|AUTHOR Roger K. Moore]], [[Ed Holdsworth|AUTHOR Ed Holdsworth]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170894.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-4-8|PAPER Thu-P-9-4-8 — Predicting Head Pose from Speech with a Conditional Variational Autoencoder]]</div>|<div class="cpsessionviewpapertitle">Predicting Head Pose from Speech with a Conditional Variational Autoencoder</div><div class="cpsessionviewpaperauthor">[[David Greenwood|AUTHOR David Greenwood]], [[Stephen Laycock|AUTHOR Stephen Laycock]], [[Iain Matthews|AUTHOR Iain Matthews]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171250.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-4-9|PAPER Thu-P-9-4-9 — Real-Time Reactive Speech Synthesis: Incorporating Interruptions]]</div>|<div class="cpsessionviewpapertitle">Real-Time Reactive Speech Synthesis: Incorporating Interruptions</div><div class="cpsessionviewpaperauthor">[[Mirjam Wester|AUTHOR Mirjam Wester]], [[David A. Braude|AUTHOR David A. Braude]], [[Blaise Potard|AUTHOR Blaise Potard]], [[Matthew P. Aylett|AUTHOR Matthew P. Aylett]], [[Francesca Shaw|AUTHOR Francesca Shaw]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171420.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-4-10|PAPER Thu-P-9-4-10 — A Neural Parametric Singing Synthesizer]]</div>|<div class="cpsessionviewpapertitle">A Neural Parametric Singing Synthesizer</div><div class="cpsessionviewpaperauthor">[[Merlijn Blaauw|AUTHOR Merlijn Blaauw]], [[Jordi Bonada|AUTHOR Jordi Bonada]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-4-11|PAPER Thu-P-9-4-11 — Tacotron: Towards End-to-End Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Tacotron: Towards End-to-End Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Yuxuan Wang|AUTHOR Yuxuan Wang]], [[R.J. Skerry-Ryan|AUTHOR R.J. Skerry-Ryan]], [[Daisy Stanton|AUTHOR Daisy Stanton]], [[Yonghui Wu|AUTHOR Yonghui Wu]], [[Ron J. Weiss|AUTHOR Ron J. Weiss]], [[Navdeep Jaitly|AUTHOR Navdeep Jaitly]], [[Zongheng Yang|AUTHOR Zongheng Yang]], [[Ying Xiao|AUTHOR Ying Xiao]], [[Zhifeng Chen|AUTHOR Zhifeng Chen]], [[Samy Bengio|AUTHOR Samy Bengio]], [[Quoc Le|AUTHOR Quoc Le]], [[Yannis Agiomyrgiannakis|AUTHOR Yannis Agiomyrgiannakis]], [[Rob Clark|AUTHOR Rob Clark]], [[Rif A. Saurous|AUTHOR Rif A. Saurous]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-4-12|PAPER Thu-P-9-4-12 — Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System]]</div>|<div class="cpsessionviewpapertitle">Siri On-Device Deep Learning-Guided Unit Selection Text-to-Speech System</div><div class="cpsessionviewpaperauthor">[[Tim Capes|AUTHOR Tim Capes]], [[Paul Coles|AUTHOR Paul Coles]], [[Alistair Conkie|AUTHOR Alistair Conkie]], [[Ladan Golipour|AUTHOR Ladan Golipour]], [[Abie Hadjitarkhani|AUTHOR Abie Hadjitarkhani]], [[Qiong Hu|AUTHOR Qiong Hu]], [[Nancy Huddleston|AUTHOR Nancy Huddleston]], [[Melvyn Hunt|AUTHOR Melvyn Hunt]], [[Jiangchuan Li|AUTHOR Jiangchuan Li]], [[Matthias Neeracher|AUTHOR Matthias Neeracher]], [[Kishore Prahallad|AUTHOR Kishore Prahallad]], [[Tuomo Raitio|AUTHOR Tuomo Raitio]], [[Ramya Rasipuram|AUTHOR Ramya Rasipuram]], [[Greg Townsend|AUTHOR Greg Townsend]], [[Becci Williamson|AUTHOR Becci Williamson]], [[David Winarsky|AUTHOR David Winarsky]], [[Zhizheng Wu|AUTHOR Zhizheng Wu]], [[Hepeng Zhang|AUTHOR Hepeng Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170402.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-4-13|PAPER Thu-P-9-4-13 — An Expanded Taxonomy of Semiotic Classes for Text Normalization]]</div>|<div class="cpsessionviewpapertitle">An Expanded Taxonomy of Semiotic Classes for Text Normalization</div><div class="cpsessionviewpaperauthor">[[Daan van Esch|AUTHOR Daan van Esch]], [[Richard Sproat|AUTHOR Richard Sproat]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170584.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-P-9-4-14|PAPER Thu-P-9-4-14 — Complex-Valued Restricted Boltzmann Machine for Direct Learning of Frequency Spectra]]</div>|<div class="cpsessionviewpapertitle">Complex-Valued Restricted Boltzmann Machine for Direct Learning of Frequency Spectra</div><div class="cpsessionviewpaperauthor">[[Toru Nakashika|AUTHOR Toru Nakashika]], [[Shinji Takaki|AUTHOR Shinji Takaki]], [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, 13:30–15:30, Thursday, 24 Aug. 2017, E306|<|
| ||
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172002.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-S&T-9-A-1|PAPER Thu-S&T-9-A-1 — Soundtracing for Realtime Speech Adjustment to Environmental Conditions in 3D Simulations]]</div>|<div class="cpsessionviewpapertitle">Soundtracing for Realtime Speech Adjustment to Environmental Conditions in 3D Simulations</div><div class="cpsessionviewpaperauthor">[[Bartosz Ziółko|AUTHOR Bartosz Ziółko]], [[Tomasz Pędzimąż|AUTHOR Tomasz Pȩdzima̧ż]], [[Szymon Pałka|AUTHOR Szymon Pałka]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172027.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-S&T-9-A-2|PAPER Thu-S&T-9-A-2 — Vocal-Tract Model with Static Articulators: Lips, Teeth, Tongue, and More]]</div>|<div class="cpsessionviewpapertitle">Vocal-Tract Model with Static Articulators: Lips, Teeth, Tongue, and More</div><div class="cpsessionviewpaperauthor">[[Takayuki Arai|AUTHOR Takayuki Arai]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172038.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-S&T-9-A-3|PAPER Thu-S&T-9-A-3 — Remote Articulation Test System Based on WebRTC]]</div>|<div class="cpsessionviewpapertitle">Remote Articulation Test System Based on WebRTC</div><div class="cpsessionviewpaperauthor">[[Ikuyo Masuda-Katsuse|AUTHOR Ikuyo Masuda-Katsuse]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172054.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-S&T-9-A-4|PAPER Thu-S&T-9-A-4 — The ModelTalker Project: A Web-Based Voice Banking Pipeline for ALS/MND Patients]]</div>|<div class="cpsessionviewpapertitle">The ModelTalker Project: A Web-Based Voice Banking Pipeline for ALS/MND Patients</div><div class="cpsessionviewpaperauthor">[[H. Timothy Bunnell|AUTHOR H. Timothy Bunnell]], [[Jason Lilley|AUTHOR Jason Lilley]], [[Kathleen McGrath|AUTHOR Kathleen McGrath]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172055.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-S&T-9-A-5|PAPER Thu-S&T-9-A-5 — Visible Vowels: A Tool for the Visualization of Vowel Variation]]</div>|<div class="cpsessionviewpapertitle">Visible Vowels: A Tool for the Visualization of Vowel Variation</div><div class="cpsessionviewpaperauthor">[[Wilbert Heeringa|AUTHOR Wilbert Heeringa]], [[Hans Van de Velde|AUTHOR Hans Van de Velde]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Thursday, 24 Aug. 2017, E10|<|
|Chair: |Björn Schuller, Anton Batliner|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171211.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-SS-10-10-1|PAPER Thu-SS-10-10-1 — A Dual Source-Filter Model of Snore Audio for Snorer Group Classification]]</div>|<div class="cpsessionviewpapertitle">A Dual Source-Filter Model of Snore Audio for Snorer Group Classification</div><div class="cpsessionviewpaperauthor">[[Achuth Rao M.V.|AUTHOR Achuth Rao M.V.]], [[Shivani Yadav|AUTHOR Shivani Yadav]], [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-SS-10-10-2|PAPER Thu-SS-10-10-2 — An ‘End-to-Evolution’ Hybrid Approach for Snore Sound Classification]]</div>|<div class="cpsessionviewpapertitle">An ‘End-to-Evolution’ Hybrid Approach for Snore Sound Classification</div><div class="cpsessionviewpaperauthor">[[Michael Freitag|AUTHOR Michael Freitag]], [[Shahin Amiriparian|AUTHOR Shahin Amiriparian]], [[Nicholas Cummins|AUTHOR Nicholas Cummins]], [[Maurice Gerczuk|AUTHOR Maurice Gerczuk]], [[Björn Schuller|AUTHOR Björn Schuller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-SS-10-10-3|PAPER Thu-SS-10-10-3 — Snore Sound Classification Using Image-Based Deep Spectrum Features]]</div>|<div class="cpsessionviewpapertitle">Snore Sound Classification Using Image-Based Deep Spectrum Features</div><div class="cpsessionviewpaperauthor">[[Shahin Amiriparian|AUTHOR Shahin Amiriparian]], [[Maurice Gerczuk|AUTHOR Maurice Gerczuk]], [[Sandra Ottl|AUTHOR Sandra Ottl]], [[Nicholas Cummins|AUTHOR Nicholas Cummins]], [[Michael Freitag|AUTHOR Michael Freitag]], [[Sergey Pugachevskiy|AUTHOR Sergey Pugachevskiy]], [[Alice Baird|AUTHOR Alice Baird]], [[Björn Schuller|AUTHOR Björn Schuller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-SS-10-10-4|PAPER Thu-SS-10-10-4 — Exploring Fusion Methods and Feature Space for the Classification of Paralinguistic Information]]</div>|<div class="cpsessionviewpapertitle">Exploring Fusion Methods and Feature Space for the Classification of Paralinguistic Information</div><div class="cpsessionviewpaperauthor">[[David Tavarez|AUTHOR David Tavarez]], [[Xabier Sarasola|AUTHOR Xabier Sarasola]], [[Agustin Alonso|AUTHOR Agustin Alonso]], [[Jon Sanchez|AUTHOR Jon Sanchez]], [[Luis Serrano|AUTHOR Luis Serrano]], [[Eva Navas|AUTHOR Eva Navas]], [[Inma Hernáez|AUTHOR Inma Hernáez]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170905.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-SS-10-10-5|PAPER Thu-SS-10-10-5 — DNN-Based Feature Extraction and Classifier Combination for Child-Directed Speech, Cold and Snoring Identification]]</div>|<div class="cpsessionviewpapertitle">DNN-Based Feature Extraction and Classifier Combination for Child-Directed Speech, Cold and Snoring Identification</div><div class="cpsessionviewpaperauthor">[[Gábor Gosztolya|AUTHOR Gábor Gosztolya]], [[Róbert Busa-Fekete|AUTHOR Róbert Busa-Fekete]], [[Tamás Grósz|AUTHOR Tamás Grósz]], [[László Tóth|AUTHOR László Tóth]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170653.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-SS-10-10-6|PAPER Thu-SS-10-10-6 — Introducing Weighted Kernel Classifiers for Handling Imbalanced Paralinguistic Corpora: Snoring, Addressee and Cold]]</div>|<div class="cpsessionviewpapertitle">Introducing Weighted Kernel Classifiers for Handling Imbalanced Paralinguistic Corpora: Snoring, Addressee and Cold</div><div class="cpsessionviewpaperauthor">[[Heysem Kaya|AUTHOR Heysem Kaya]], [[Alexey A. Karpov|AUTHOR Alexey A. Karpov]]</div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Thu-SS-10-10-7|PAPER Thu-SS-10-10-7 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: A Summary of Results]]</div>|<div class="cpsessionviewpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: A Summary of Results</div><div class="cpsessionviewpaperauthor">[[Stefan Steidl|AUTHOR Stefan Steidl]]</div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Thu-SS-10-10-8|PAPER Thu-SS-10-10-8 — Discussion]]</div>|<div class="cpsessionviewpapertitle">Discussion</div><div class="cpsessionviewpaperauthor">[[Björn Schuller|AUTHOR Björn Schuller]], [[Anton Batliner|AUTHOR Anton Batliner]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Thursday, 24 Aug. 2017, E10|<|
|Chair: |Björn Schuller, Anton Batliner|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-SS-9-10-1|PAPER Thu-SS-9-10-1 — The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring]]</div>|<div class="cpsessionviewpapertitle">The INTERSPEECH 2017 Computational Paralinguistics Challenge: Addressee, Cold & Snoring</div><div class="cpsessionviewpaperauthor">[[Björn Schuller|AUTHOR Björn Schuller]], [[Stefan Steidl|AUTHOR Stefan Steidl]], [[Anton Batliner|AUTHOR Anton Batliner]], [[Elika Bergelson|AUTHOR Elika Bergelson]], [[Jarek Krajewski|AUTHOR Jarek Krajewski]], [[Christoph Janott|AUTHOR Christoph Janott]], [[Andrei Amatuni|AUTHOR Andrei Amatuni]], [[Marisa Casillas|AUTHOR Marisa Casillas]], [[Amanda Seidl|AUTHOR Amanda Seidl]], [[Melanie Soderstrom|AUTHOR Melanie Soderstrom]], [[Anne S. Warlaumont|AUTHOR Anne S. Warlaumont]], [[Guillermo Hidalgo|AUTHOR Guillermo Hidalgo]], [[Sebastian Schnieder|AUTHOR Sebastian Schnieder]], [[Clemens Heiser|AUTHOR Clemens Heiser]], [[Winfried Hohenhorst|AUTHOR Winfried Hohenhorst]], [[Michael Herzog|AUTHOR Michael Herzog]], [[Maximilian Schmitt|AUTHOR Maximilian Schmitt]], [[Kun Qian|AUTHOR Kun Qian]], [[Yue Zhang|AUTHOR Yue Zhang]], [[George Trigeorgis|AUTHOR George Trigeorgis]], [[Panagiotis Tzirakis|AUTHOR Panagiotis Tzirakis]], [[Stefanos Zafeiriou|AUTHOR Stefanos Zafeiriou]]</div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Thu-SS-9-10-2|PAPER Thu-SS-9-10-2 — Description of the Upper Respiratory Tract Infection Corpus (URTIC)]]</div>|<div class="cpsessionviewpapertitle">Description of the Upper Respiratory Tract Infection Corpus (URTIC)</div><div class="cpsessionviewpaperauthor">[[Jarek Krajewski|AUTHOR Jarek Krajewski]], [[Sebastian Schnieder|AUTHOR Sebastian Schnieder]], [[Anton Batliner|AUTHOR Anton Batliner]]</div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Thu-SS-9-10-3|PAPER Thu-SS-9-10-3 — Description of the Munich-Passau Snore Sound Corpus (MPSSC)]]</div>|<div class="cpsessionviewpapertitle">Description of the Munich-Passau Snore Sound Corpus (MPSSC)</div><div class="cpsessionviewpaperauthor">[[Christoph Janott|AUTHOR Christoph Janott]], [[Anton Batliner|AUTHOR Anton Batliner]]</div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Thu-SS-9-10-4|PAPER Thu-SS-9-10-4 — Description of the Homebank Child/Adult Addressee Corpus (HB-CHAAC)]]</div>|<div class="cpsessionviewpapertitle">Description of the Homebank Child/Adult Addressee Corpus (HB-CHAAC)</div><div class="cpsessionviewpaperauthor">[[Elika Bergelson|AUTHOR Elika Bergelson]], [[Andrei Amatuni|AUTHOR Andrei Amatuni]], [[Marisa Casillas|AUTHOR Marisa Casillas]], [[Amanda Seidl|AUTHOR Amanda Seidl]], [[Melanie Soderstrom|AUTHOR Melanie Soderstrom]], [[Anne S. Warlaumont|AUTHOR Anne S. Warlaumont]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171261.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-SS-9-10-5|PAPER Thu-SS-9-10-5 — It Sounds Like You Have a Cold! Testing Voice Features for the Interspeech 2017 Computational Paralinguistics Cold Challenge]]</div>|<div class="cpsessionviewpapertitle">It Sounds Like You Have a Cold! Testing Voice Features for the Interspeech 2017 Computational Paralinguistics Cold Challenge</div><div class="cpsessionviewpaperauthor">[[Mark Huckvale|AUTHOR Mark Huckvale]], [[András Beke|AUTHOR András Beke]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171445.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-SS-9-10-6|PAPER Thu-SS-9-10-6 — End-to-End Deep Learning Framework for Speech Paralinguistics Detection Based on Perception Aware Spectrum]]</div>|<div class="cpsessionviewpapertitle">End-to-End Deep Learning Framework for Speech Paralinguistics Detection Based on Perception Aware Spectrum</div><div class="cpsessionviewpaperauthor">[[Danwei Cai|AUTHOR Danwei Cai]], [[Zhidong Ni|AUTHOR Zhidong Ni]], [[Wenbo Liu|AUTHOR Wenbo Liu]], [[Weicheng Cai|AUTHOR Weicheng Cai]], [[Gang Li|AUTHOR Gang Li]], [[Ming Li|AUTHOR Ming Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171066.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-SS-9-10-7|PAPER Thu-SS-9-10-7 — Infected Phonemes: How a Cold Impairs Speech on a Phonetic Level]]</div>|<div class="cpsessionviewpapertitle">Infected Phonemes: How a Cold Impairs Speech on a Phonetic Level</div><div class="cpsessionviewpaperauthor">[[Johannes Wagner|AUTHOR Johannes Wagner]], [[Thiago Fraga-Silva|AUTHOR Thiago Fraga-Silva]], [[Yvan Josse|AUTHOR Yvan Josse]], [[Dominik Schiller|AUTHOR Dominik Schiller]], [[Andreas Seiderer|AUTHOR Andreas Seiderer]], [[Elisabeth André|AUTHOR Elisabeth André]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171550.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-SS-9-10-8|PAPER Thu-SS-9-10-8 — Phoneme State Posteriorgram Features for Speech Based Automatic Classification of Speakers in Cold and Healthy Condition]]</div>|<div class="cpsessionviewpapertitle">Phoneme State Posteriorgram Features for Speech Based Automatic Classification of Speakers in Cold and Healthy Condition</div><div class="cpsessionviewpaperauthor">[[Akshay Kalkunte Suresh|AUTHOR Akshay Kalkunte Suresh]], [[Srinivasa Raghavan K.M.|AUTHOR Srinivasa Raghavan K.M.]], [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171794.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-SS-9-10-9|PAPER Thu-SS-9-10-9 — An Integrated Solution for Snoring Sound Classification Using Bhattacharyya Distance Based GMM Supervectors with SVM, Feature Selection with Random Forest and Spectrogram with CNN]]</div>|<div class="cpsessionviewpapertitle">An Integrated Solution for Snoring Sound Classification Using Bhattacharyya Distance Based GMM Supervectors with SVM, Feature Selection with Random Forest and Spectrogram with CNN</div><div class="cpsessionviewpaperauthor">[[Tin Lay Nwe|AUTHOR Tin Lay Nwe]], [[Huy Dat Tran|AUTHOR Huy Dat Tran]], [[Wen Zheng Terence Ng|AUTHOR Wen Zheng Terence Ng]], [[Bin Ma|AUTHOR Bin Ma]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Thursday, 24 Aug. 2017, F11|<|
|Chair: |Sten Ternström, Oriol Guasch|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170107.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-SS-9-11-1|PAPER Thu-SS-9-11-1 — Acoustic Analysis of Detailed Three-Dimensional Shape of the Human Nasal Cavity and Paranasal Sinuses]]</div>|<div class="cpsessionviewpapertitle">Acoustic Analysis of Detailed Three-Dimensional Shape of the Human Nasal Cavity and Paranasal Sinuses</div><div class="cpsessionviewpaperauthor">[[Tatsuya Kitamura|AUTHOR Tatsuya Kitamura]], [[Hironori Takemoto|AUTHOR Hironori Takemoto]], [[Hisanori Makinae|AUTHOR Hisanori Makinae]], [[Tetsutaro Yamaguchi|AUTHOR Tetsutaro Yamaguchi]], [[Kotaro Maki|AUTHOR Kotaro Maki]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170448.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-SS-9-11-2|PAPER Thu-SS-9-11-2 — A Semi-Polar Grid Strategy for the Three-Dimensional Finite Element Simulation of Vowel-Vowel Sequences]]</div>|<div class="cpsessionviewpapertitle">A Semi-Polar Grid Strategy for the Three-Dimensional Finite Element Simulation of Vowel-Vowel Sequences</div><div class="cpsessionviewpaperauthor">[[Marc Arnela|AUTHOR Marc Arnela]], [[Saeed Dabbaghchian|AUTHOR Saeed Dabbaghchian]], [[Oriol Guasch|AUTHOR Oriol Guasch]], [[Olov Engwall|AUTHOR Olov Engwall]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170844.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-SS-9-11-3|PAPER Thu-SS-9-11-3 — A Fast Robust 1D Flow Model for a Self-Oscillating Coupled 2D FEM Vocal Fold Simulation]]</div>|<div class="cpsessionviewpapertitle">A Fast Robust 1D Flow Model for a Self-Oscillating Coupled 2D FEM Vocal Fold Simulation</div><div class="cpsessionviewpaperauthor">[[Arvind Vasudevan|AUTHOR Arvind Vasudevan]], [[Victor Zappi|AUTHOR Victor Zappi]], [[Peter Anderson|AUTHOR Peter Anderson]], [[Sidney Fels|AUTHOR Sidney Fels]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170875.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-SS-9-11-4|PAPER Thu-SS-9-11-4 — Waveform Patterns in Pitch Glides Near a Vocal Tract Resonance]]</div>|<div class="cpsessionviewpapertitle">Waveform Patterns in Pitch Glides Near a Vocal Tract Resonance</div><div class="cpsessionviewpaperauthor">[[Tiina Murtola|AUTHOR Tiina Murtola]], [[Jarmo Malinen|AUTHOR Jarmo Malinen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171239.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-SS-9-11-5|PAPER Thu-SS-9-11-5 — A Unified Numerical Simulation of Vowel Production That Comprises Phonation and the Emitted Sound]]</div>|<div class="cpsessionviewpapertitle">A Unified Numerical Simulation of Vowel Production That Comprises Phonation and the Emitted Sound</div><div class="cpsessionviewpaperauthor">[[Niyazi Cem Degirmenci|AUTHOR Niyazi Cem Degirmenci]], [[Johan Jansson|AUTHOR Johan Jansson]], [[Johan Hoffman|AUTHOR Johan Hoffman]], [[Marc Arnela|AUTHOR Marc Arnela]], [[Patricia Sánchez-Martín|AUTHOR Patricia Sánchez-Martín]], [[Oriol Guasch|AUTHOR Oriol Guasch]], [[Sten Ternström|AUTHOR Sten Ternström]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171614.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-SS-9-11-6|PAPER Thu-SS-9-11-6 — Synthesis of VV Utterances from Muscle Activation to Sound with a 3D Model]]</div>|<div class="cpsessionviewpapertitle">Synthesis of VV Utterances from Muscle Activation to Sound with a 3D Model</div><div class="cpsessionviewpaperauthor">[[Saeed Dabbaghchian|AUTHOR Saeed Dabbaghchian]], [[Marc Arnela|AUTHOR Marc Arnela]], [[Olov Engwall|AUTHOR Olov Engwall]], [[Oriol Guasch|AUTHOR Oriol Guasch]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|08:30–09:30, Tuesday, 22 Aug. 2017, Aula Magna|<|
|Chair: |Joakim Gustafson|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS173002.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-K2-1|PAPER Tue-K2-1 — Dialogue as Collaborative Problem Solving]]</div>|<div class="cpsessionviewpapertitle">Dialogue as Collaborative Problem Solving</div><div class="cpsessionviewpaperauthor">[[James Allen|AUTHOR James Allen]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Tuesday, 22 Aug. 2017, Aula Magna|<|
|Chair: |Herve Bourlard, Jan Černocký|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170233.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-1-1|PAPER Tue-O-3-1-1 — A Comparison of Sequence-to-Sequence Models for Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">A Comparison of Sequence-to-Sequence Models for Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Rohit Prabhavalkar|AUTHOR Rohit Prabhavalkar]], [[Kanishka Rao|AUTHOR Kanishka Rao]], [[Tara N. Sainath|AUTHOR Tara N. Sainath]], [[Bo Li|AUTHOR Bo Li]], [[Leif Johnson|AUTHOR Leif Johnson]], [[Navdeep Jaitly|AUTHOR Navdeep Jaitly]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171073.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-1-2|PAPER Tue-O-3-1-2 — CTC in the Context of Generalized Full-Sum HMM Training]]</div>|<div class="cpsessionviewpapertitle">CTC in the Context of Generalized Full-Sum HMM Training</div><div class="cpsessionviewpaperauthor">[[Albert Zeyer|AUTHOR Albert Zeyer]], [[Eugen Beck|AUTHOR Eugen Beck]], [[Ralf Schlüter|AUTHOR Ralf Schlüter]], [[Hermann Ney|AUTHOR Hermann Ney]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171296.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-1-3|PAPER Tue-O-3-1-3 — Advances in Joint CTC-Attention Based End-to-End Speech Recognition with a Deep CNN Encoder and RNN-LM]]</div>|<div class="cpsessionviewpapertitle">Advances in Joint CTC-Attention Based End-to-End Speech Recognition with a Deep CNN Encoder and RNN-LM</div><div class="cpsessionviewpaperauthor">[[Takaaki Hori|AUTHOR Takaaki Hori]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]], [[Yu Zhang|AUTHOR Yu Zhang]], [[William Chan|AUTHOR William Chan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170071.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-1-4|PAPER Tue-O-3-1-4 — Multitask Learning with CTC and Segmental CRF for Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Multitask Learning with CTC and Segmental CRF for Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Liang Lu|AUTHOR Liang Lu]], [[Lingpeng Kong|AUTHOR Lingpeng Kong]], [[Chris Dyer|AUTHOR Chris Dyer]], [[Noah A. Smith|AUTHOR Noah A. Smith]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170546.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-1-5|PAPER Tue-O-3-1-5 — Direct Acoustics-to-Word Models for English Conversational Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Direct Acoustics-to-Word Models for English Conversational Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Kartik Audhkhasi|AUTHOR Kartik Audhkhasi]], [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]], [[George Saon|AUTHOR George Saon]], [[Michael Picheny|AUTHOR Michael Picheny]], [[David Nahamoo|AUTHOR David Nahamoo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171164.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-1-6|PAPER Tue-O-3-1-6 — Reducing the Computational Complexity of Two-Dimensional LSTMs]]</div>|<div class="cpsessionviewpapertitle">Reducing the Computational Complexity of Two-Dimensional LSTMs</div><div class="cpsessionviewpaperauthor">[[Bo Li|AUTHOR Bo Li]], [[Tara N. Sainath|AUTHOR Tara N. Sainath]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Tuesday, 22 Aug. 2017, E10|<|
|Chair: |Elmar Nöth, Shrikanth Narayanan|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170200.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-10-1|PAPER Tue-O-3-10-1 — Efficient Emotion Recognition from Speech Using Deep Learning on Spectrograms]]</div>|<div class="cpsessionviewpapertitle">Efficient Emotion Recognition from Speech Using Deep Learning on Spectrograms</div><div class="cpsessionviewpaperauthor">[[Aharon Satt|AUTHOR Aharon Satt]], [[Shai Rozenberg|AUTHOR Shai Rozenberg]], [[Ron Hoory|AUTHOR Ron Hoory]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170713.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-10-2|PAPER Tue-O-3-10-2 — Interaction and Transition Model for Speech Emotion Recognition in Dialogue]]</div>|<div class="cpsessionviewpapertitle">Interaction and Transition Model for Speech Emotion Recognition in Dialogue</div><div class="cpsessionviewpaperauthor">[[Ruo Zhang|AUTHOR Ruo Zhang]], [[Ando Atsushi|AUTHOR Ando Atsushi]], [[Satoshi Kobashikawa|AUTHOR Satoshi Kobashikawa]], [[Yushi Aono|AUTHOR Yushi Aono]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171637.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-10-3|PAPER Tue-O-3-10-3 — Progressive Neural Networks for Transfer Learning in Emotion Recognition]]</div>|<div class="cpsessionviewpapertitle">Progressive Neural Networks for Transfer Learning in Emotion Recognition</div><div class="cpsessionviewpaperauthor">[[John Gideon|AUTHOR John Gideon]], [[Soheil Khorram|AUTHOR Soheil Khorram]], [[Zakaria Aldeneh|AUTHOR Zakaria Aldeneh]], [[Dimitrios Dimitriadis|AUTHOR Dimitrios Dimitriadis]], [[Emily Mower Provost|AUTHOR Emily Mower Provost]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171494.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-10-4|PAPER Tue-O-3-10-4 — Jointly Predicting Arousal, Valence and Dominance with Multi-Task Learning]]</div>|<div class="cpsessionviewpapertitle">Jointly Predicting Arousal, Valence and Dominance with Multi-Task Learning</div><div class="cpsessionviewpaperauthor">[[Srinivas Parthasarathy|AUTHOR Srinivas Parthasarathy]], [[Carlos Busso|AUTHOR Carlos Busso]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170094.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-10-5|PAPER Tue-O-3-10-5 — Discretized Continuous Speech Emotion Recognition with Multi-Task Deep Recurrent Neural Network]]</div>|<div class="cpsessionviewpapertitle">Discretized Continuous Speech Emotion Recognition with Multi-Task Deep Recurrent Neural Network</div><div class="cpsessionviewpaperauthor">[[Duc Le|AUTHOR Duc Le]], [[Zakaria Aldeneh|AUTHOR Zakaria Aldeneh]], [[Emily Mower Provost|AUTHOR Emily Mower Provost]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170736.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-10-6|PAPER Tue-O-3-10-6 — Towards Speech Emotion Recognition “in the Wild” Using Aggregated Corpora and Deep Multi-Task Learning]]</div>|<div class="cpsessionviewpapertitle">Towards Speech Emotion Recognition “in the Wild” Using Aggregated Corpora and Deep Multi-Task Learning</div><div class="cpsessionviewpaperauthor">[[Jaebok Kim|AUTHOR Jaebok Kim]], [[Gwenn Englebienne|AUTHOR Gwenn Englebienne]], [[Khiet P. Truong|AUTHOR Khiet P. Truong]], [[Vanessa Evers|AUTHOR Vanessa Evers]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Tuesday, 22 Aug. 2017, A2|<|
|Chair: |Marcin Wlodarczak, Daryush Mehta|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170181.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-2-1|PAPER Tue-O-3-2-1 — Functional Principal Component Analysis of Vocal Tract Area Functions]]</div>|<div class="cpsessionviewpapertitle">Functional Principal Component Analysis of Vocal Tract Area Functions</div><div class="cpsessionviewpaperauthor">[[Jorge C. Lucero|AUTHOR Jorge C. Lucero]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170260.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-2-2|PAPER Tue-O-3-2-2 — Analysis of Acoustic-to-Articulatory Speech Inversion Across Different Accents and Languages]]</div>|<div class="cpsessionviewpapertitle">Analysis of Acoustic-to-Articulatory Speech Inversion Across Different Accents and Languages</div><div class="cpsessionviewpaperauthor">[[Ganesh Sivaraman|AUTHOR Ganesh Sivaraman]], [[Carol Espy-Wilson|AUTHOR Carol Espy-Wilson]], [[Martijn Wieling|AUTHOR Martijn Wieling]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170617.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-2-3|PAPER Tue-O-3-2-3 — Integrated Mechanical Model of [r]-[l] and [b]-[m]-[w] Producing Consonant Cluster [br]]]</div>|<div class="cpsessionviewpapertitle">Integrated Mechanical Model of [r]-[l] and [b]-[m]-[w] Producing Consonant Cluster [br]</div><div class="cpsessionviewpaperauthor">[[Takayuki Arai|AUTHOR Takayuki Arai]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170804.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-2-4|PAPER Tue-O-3-2-4 — A Speaker Adaptive DNN Training Approach for Speaker-Independent Acoustic Inversion]]</div>|<div class="cpsessionviewpapertitle">A Speaker Adaptive DNN Training Approach for Speaker-Independent Acoustic Inversion</div><div class="cpsessionviewpaperauthor">[[Leonardo Badino|AUTHOR Leonardo Badino]], [[Luca Franceschi|AUTHOR Luca Franceschi]], [[Raman Arora|AUTHOR Raman Arora]], [[Michele Donini|AUTHOR Michele Donini]], [[Massimiliano Pontil|AUTHOR Massimiliano Pontil]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171010.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-2-5|PAPER Tue-O-3-2-5 — Acoustic-to-Articulatory Mapping Based on Mixture of Probabilistic Canonical Correlation Analysis]]</div>|<div class="cpsessionviewpapertitle">Acoustic-to-Articulatory Mapping Based on Mixture of Probabilistic Canonical Correlation Analysis</div><div class="cpsessionviewpaperauthor">[[Hidetsugu Uchida|AUTHOR Hidetsugu Uchida]], [[Daisuke Saito|AUTHOR Daisuke Saito]], [[Nobuaki Minematsu|AUTHOR Nobuaki Minematsu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171488.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-2-6|PAPER Tue-O-3-2-6 — Test-Retest Repeatability of Articulatory Strategies Using Real-Time Magnetic Resonance Imaging]]</div>|<div class="cpsessionviewpapertitle">Test-Retest Repeatability of Articulatory Strategies Using Real-Time Magnetic Resonance Imaging</div><div class="cpsessionviewpaperauthor">[[Tanner Sorensen|AUTHOR Tanner Sorensen]], [[Asterios Toutios|AUTHOR Asterios Toutios]], [[Johannes Töger|AUTHOR Johannes Töger]], [[Louis Goldstein|AUTHOR Louis Goldstein]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Tuesday, 22 Aug. 2017, B4|<|
|Chair: |Jean-François Bonastre, Kornel Laskowski|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170620.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-4-1|PAPER Tue-O-3-4-1 — Deep Neural Network Embeddings for Text-Independent Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Deep Neural Network Embeddings for Text-Independent Speaker Verification</div><div class="cpsessionviewpaperauthor">[[David Snyder|AUTHOR David Snyder]], [[Daniel Garcia-Romero|AUTHOR Daniel Garcia-Romero]], [[Daniel Povey|AUTHOR Daniel Povey]], [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171018.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-4-2|PAPER Tue-O-3-4-2 — Tied Variational Autoencoder Backends for i-Vector Speaker Recognition]]</div>|<div class="cpsessionviewpapertitle">Tied Variational Autoencoder Backends for i-Vector Speaker Recognition</div><div class="cpsessionviewpaperauthor">[[Jesús Villalba|AUTHOR Jesús Villalba]], [[Niko Brümmer|AUTHOR Niko Brümmer]], [[Najim Dehak|AUTHOR Najim Dehak]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171182.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-4-3|PAPER Tue-O-3-4-3 — Improved Gender Independent Speaker Recognition Using Convolutional Neural Network Based Bottleneck Features]]</div>|<div class="cpsessionviewpapertitle">Improved Gender Independent Speaker Recognition Using Convolutional Neural Network Based Bottleneck Features</div><div class="cpsessionviewpaperauthor">[[Shivesh Ranjan|AUTHOR Shivesh Ranjan]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170049.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-4-4|PAPER Tue-O-3-4-4 — Autoencoder Based Domain Adaptation for Speaker Recognition Under Insufficient Channel Information]]</div>|<div class="cpsessionviewpapertitle">Autoencoder Based Domain Adaptation for Speaker Recognition Under Insufficient Channel Information</div><div class="cpsessionviewpaperauthor">[[Suwon Shon|AUTHOR Suwon Shon]], [[Seongkyu Mun|AUTHOR Seongkyu Mun]], [[Wooil Kim|AUTHOR Wooil Kim]], [[Hanseok Ko|AUTHOR Hanseok Ko]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170829.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-4-5|PAPER Tue-O-3-4-5 — Nonparametrically Trained Probabilistic Linear Discriminant Analysis for i-Vector Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Nonparametrically Trained Probabilistic Linear Discriminant Analysis for i-Vector Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Abbas Khosravani|AUTHOR Abbas Khosravani]], [[Mohammad Mehdi Homayounpour|AUTHOR Mohammad Mehdi Homayounpour]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170144.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-4-6|PAPER Tue-O-3-4-6 — DNN Bottleneck Features for Speaker Clustering]]</div>|<div class="cpsessionviewpapertitle">DNN Bottleneck Features for Speaker Clustering</div><div class="cpsessionviewpaperauthor">[[Jesús Jorrín|AUTHOR Jesús Jorrín]], [[Paola García|AUTHOR Paola García]], [[Luis Buera|AUTHOR Luis Buera]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Tuesday, 22 Aug. 2017, C6|<|
|Chair: |Peter Birkholz, Kikuo Maekawa|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171155.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-6-1|PAPER Tue-O-3-6-1 — Creak as a Feature of Lexical Stress in Estonian]]</div>|<div class="cpsessionviewpapertitle">Creak as a Feature of Lexical Stress in Estonian</div><div class="cpsessionviewpaperauthor">[[Kätlin Aare|AUTHOR Kätlin Aare]], [[Pärtel Lippus|AUTHOR Pärtel Lippus]], [[Juraj Šimko|AUTHOR Juraj Šimko]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171535.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-6-2|PAPER Tue-O-3-6-2 — Cross-Speaker Variation in Voice Source Correlates of Focus and Deaccentuation]]</div>|<div class="cpsessionviewpapertitle">Cross-Speaker Variation in Voice Source Correlates of Focus and Deaccentuation</div><div class="cpsessionviewpaperauthor">[[Irena Yanushevskaya|AUTHOR Irena Yanushevskaya]], [[Ailbhe Ní Chasaide|AUTHOR Ailbhe Ní Chasaide]], [[Christer Gobl|AUTHOR Christer Gobl]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170604.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-6-3|PAPER Tue-O-3-6-3 — Acoustic Characterization of Word-Final Glottal Stops in Mizo and Assam Sora]]</div>|<div class="cpsessionviewpapertitle">Acoustic Characterization of Word-Final Glottal Stops in Mizo and Assam Sora</div><div class="cpsessionviewpaperauthor">[[Sishir Kalita|AUTHOR Sishir Kalita]], [[Wendy Lalhminghlui|AUTHOR Wendy Lalhminghlui]], [[Luke Horo|AUTHOR Luke Horo]], [[Priyankoo Sarmah|AUTHOR Priyankoo Sarmah]], [[S.R. Mahadeva Prasanna|AUTHOR S.R. Mahadeva Prasanna]], [[Samarendra Dandapat|AUTHOR Samarendra Dandapat]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170079.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-6-4|PAPER Tue-O-3-6-4 — Iterative Optimal Preemphasis for Improved Glottal-Flow Estimation by Iterative Adaptive Inverse Filtering]]</div>|<div class="cpsessionviewpapertitle">Iterative Optimal Preemphasis for Improved Glottal-Flow Estimation by Iterative Adaptive Inverse Filtering</div><div class="cpsessionviewpaperauthor">[[Parham Mokhtari|AUTHOR Parham Mokhtari]], [[Hiroshi Ando|AUTHOR Hiroshi Ando]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170870.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-6-5|PAPER Tue-O-3-6-5 — Automatic Measurement of Pre-Aspiration]]</div>|<div class="cpsessionviewpapertitle">Automatic Measurement of Pre-Aspiration</div><div class="cpsessionviewpaperauthor">[[Yaniv Sheena|AUTHOR Yaniv Sheena]], [[Míša Hejná|AUTHOR Míša Hejná]], [[Yossi Adi|AUTHOR Yossi Adi]], [[Joseph Keshet|AUTHOR Joseph Keshet]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171774.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-6-6|PAPER Tue-O-3-6-6 — Acoustic and Electroglottographic Study of Breathy and Modal Vowels as Produced by Heritage and Native Gujarati Speakers]]</div>|<div class="cpsessionviewpapertitle">Acoustic and Electroglottographic Study of Breathy and Modal Vowels as Produced by Heritage and Native Gujarati Speakers</div><div class="cpsessionviewpaperauthor">[[Kiranpreet Nara|AUTHOR Kiranpreet Nara]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Tuesday, 22 Aug. 2017, D8|<|
|Chair: |Mirjam Wester, Prasanta Ghosh|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-8-1|PAPER Tue-O-3-8-1 — An RNN-Based Quantized F0 Model with Multi-Tier Feedback Links for Text-to-Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">An RNN-Based Quantized F0 Model with Multi-Tier Feedback Links for Text-to-Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Xin Wang|AUTHOR Xin Wang]], [[Shinji Takaki|AUTHOR Shinji Takaki]], [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170419.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-8-2|PAPER Tue-O-3-8-2 — Phrase Break Prediction for Long-Form Reading TTS: Exploiting Text Structure Information]]</div>|<div class="cpsessionviewpapertitle">Phrase Break Prediction for Long-Form Reading TTS: Exploiting Text Structure Information</div><div class="cpsessionviewpaperauthor">[[Viacheslav Klimkov|AUTHOR Viacheslav Klimkov]], [[Adam Nadolski|AUTHOR Adam Nadolski]], [[Alexis Moinet|AUTHOR Alexis Moinet]], [[Bartosz Putrycz|AUTHOR Bartosz Putrycz]], [[Roberto Barra-Chicote|AUTHOR Roberto Barra-Chicote]], [[Thomas Merritt|AUTHOR Thomas Merritt]], [[Thomas Drugman|AUTHOR Thomas Drugman]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170688.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-8-3|PAPER Tue-O-3-8-3 — Physically Constrained Statistical F,,0,, Prediction for Electrolaryngeal Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">Physically Constrained Statistical F,,0,, Prediction for Electrolaryngeal Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Kou Tanaka|AUTHOR Kou Tanaka]], [[Hirokazu Kameoka|AUTHOR Hirokazu Kameoka]], [[Tomoki Toda|AUTHOR Tomoki Toda]], [[Satoshi Nakamura|AUTHOR Satoshi Nakamura]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170719.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-8-4|PAPER Tue-O-3-8-4 — DNN-SPACE: DNN-HMM-Based Generative Model of Voice F,,0,, Contours for Statistical Phrase/Accent Command Estimation]]</div>|<div class="cpsessionviewpapertitle">DNN-SPACE: DNN-HMM-Based Generative Model of Voice F,,0,, Contours for Statistical Phrase/Accent Command Estimation</div><div class="cpsessionviewpaperauthor">[[Nobukatsu Hojo|AUTHOR Nobukatsu Hojo]], [[Yasuhito Ohsugi|AUTHOR Yasuhito Ohsugi]], [[Yusuke Ijima|AUTHOR Yusuke Ijima]], [[Hirokazu Kameoka|AUTHOR Hirokazu Kameoka]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171355.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-8-5|PAPER Tue-O-3-8-5 — Controlling Prominence Realisation in Parametric DNN-Based Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Controlling Prominence Realisation in Parametric DNN-Based Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Zofia Malisz|AUTHOR Zofia Malisz]], [[Harald Berthelsen|AUTHOR Harald Berthelsen]], [[Jonas Beskow|AUTHOR Jonas Beskow]], [[Joakim Gustafson|AUTHOR Joakim Gustafson]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171528.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-3-8-6|PAPER Tue-O-3-8-6 — Increasing Recall of Lengthening Detection via Semi-Automatic Classification]]</div>|<div class="cpsessionviewpapertitle">Increasing Recall of Lengthening Detection via Semi-Automatic Classification</div><div class="cpsessionviewpaperauthor">[[Simon Betz|AUTHOR Simon Betz]], [[Jana Voße|AUTHOR Jana Voße]], [[Sina Zarrieß|AUTHOR Sina Zarrieß]], [[Petra Wagner|AUTHOR Petra Wagner]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Tuesday, 22 Aug. 2017, Aula Magna|<|
|Chair: |Peter Cahill, Rob Clark|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170314.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-1-1|PAPER Tue-O-4-1-1 — Speaker-Dependent WaveNet Vocoder]]</div>|<div class="cpsessionviewpapertitle">Speaker-Dependent WaveNet Vocoder</div><div class="cpsessionviewpaperauthor">[[Akira Tamamori|AUTHOR Akira Tamamori]], [[Tomoki Hayashi|AUTHOR Tomoki Hayashi]], [[Kazuhiro Kobayashi|AUTHOR Kazuhiro Kobayashi]], [[Kazuya Takeda|AUTHOR Kazuya Takeda]], [[Tomoki Toda|AUTHOR Tomoki Toda]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170336.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-1-2|PAPER Tue-O-4-1-2 — Waveform Modeling Using Stacked Dilated Convolutional Neural Networks for Speech Bandwidth Extension]]</div>|<div class="cpsessionviewpapertitle">Waveform Modeling Using Stacked Dilated Convolutional Neural Networks for Speech Bandwidth Extension</div><div class="cpsessionviewpaperauthor">[[Yu Gu|AUTHOR Yu Gu]], [[Zhen-Hua Ling|AUTHOR Zhen-Hua Ling]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170488.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-1-3|PAPER Tue-O-4-1-3 — Direct Modeling of Frequency Spectra and Waveform Generation Based on Phase Recovery for DNN-Based Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Direct Modeling of Frequency Spectra and Waveform Generation Based on Phase Recovery for DNN-Based Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Shinji Takaki|AUTHOR Shinji Takaki]], [[Hirokazu Kameoka|AUTHOR Hirokazu Kameoka]], [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170628.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-1-4|PAPER Tue-O-4-1-4 — A Hierarchical Encoder-Decoder Model for Statistical Parametric Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">A Hierarchical Encoder-Decoder Model for Statistical Parametric Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Srikanth Ronanki|AUTHOR Srikanth Ronanki]], [[Oliver Watts|AUTHOR Oliver Watts]], [[Simon King|AUTHOR Simon King]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170986.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-1-5|PAPER Tue-O-4-1-5 — Statistical Voice Conversion with WaveNet-Based Waveform Generation]]</div>|<div class="cpsessionviewpapertitle">Statistical Voice Conversion with WaveNet-Based Waveform Generation</div><div class="cpsessionviewpaperauthor">[[Kazuhiro Kobayashi|AUTHOR Kazuhiro Kobayashi]], [[Tomoki Hayashi|AUTHOR Tomoki Hayashi]], [[Akira Tamamori|AUTHOR Akira Tamamori]], [[Tomoki Toda|AUTHOR Tomoki Toda]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171107.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-1-6|PAPER Tue-O-4-1-6 — Google’s Next-Generation Real-Time Unit-Selection Synthesizer Using Sequence-to-Sequence LSTM-Based Autoencoders]]</div>|<div class="cpsessionviewpapertitle">Google’s Next-Generation Real-Time Unit-Selection Synthesizer Using Sequence-to-Sequence LSTM-Based Autoencoders</div><div class="cpsessionviewpaperauthor">[[Vincent Wan|AUTHOR Vincent Wan]], [[Yannis Agiomyrgiannakis|AUTHOR Yannis Agiomyrgiannakis]], [[Hanna Silen|AUTHOR Hanna Silen]], [[Jakub Vít|AUTHOR Jakub Vít]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Tuesday, 22 Aug. 2017, E10|<|
|Chair: |Hema Murthy, S.R.M. Prasanna|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170247.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-10-1|PAPER Tue-O-4-10-1 — Voice Conversion Using Sequence-to-Sequence Learning of Context Posterior Probabilities]]</div>|<div class="cpsessionviewpapertitle">Voice Conversion Using Sequence-to-Sequence Learning of Context Posterior Probabilities</div><div class="cpsessionviewpaperauthor">[[Hiroyuki Miyoshi|AUTHOR Hiroyuki Miyoshi]], [[Yuki Saito|AUTHOR Yuki Saito]], [[Shinnosuke Takamichi|AUTHOR Shinnosuke Takamichi]], [[Hiroshi Saruwatari|AUTHOR Hiroshi Saruwatari]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170349.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-10-2|PAPER Tue-O-4-10-2 — Learning Latent Representations for Speech Generation and Transformation]]</div>|<div class="cpsessionviewpapertitle">Learning Latent Representations for Speech Generation and Transformation</div><div class="cpsessionviewpaperauthor">[[Wei-Ning Hsu|AUTHOR Wei-Ning Hsu]], [[Yu Zhang|AUTHOR Yu Zhang]], [[James Glass|AUTHOR James Glass]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170961.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-10-3|PAPER Tue-O-4-10-3 — Parallel-Data-Free Many-to-Many Voice Conversion Based on DNN Integrated with Eigenspace Using a Non-Parallel Speech Corpus]]</div>|<div class="cpsessionviewpapertitle">Parallel-Data-Free Many-to-Many Voice Conversion Based on DNN Integrated with Eigenspace Using a Non-Parallel Speech Corpus</div><div class="cpsessionviewpaperauthor">[[Tetsuya Hashimoto|AUTHOR Tetsuya Hashimoto]], [[Hidetsugu Uchida|AUTHOR Hidetsugu Uchida]], [[Daisuke Saito|AUTHOR Daisuke Saito]], [[Nobuaki Minematsu|AUTHOR Nobuaki Minematsu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170970.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-10-4|PAPER Tue-O-4-10-4 — Sequence-to-Sequence Voice Conversion with Similarity Metric Learned Using Generative Adversarial Networks]]</div>|<div class="cpsessionviewpapertitle">Sequence-to-Sequence Voice Conversion with Similarity Metric Learned Using Generative Adversarial Networks</div><div class="cpsessionviewpaperauthor">[[Takuhiro Kaneko|AUTHOR Takuhiro Kaneko]], [[Hirokazu Kameoka|AUTHOR Hirokazu Kameoka]], [[Kaoru Hiramatsu|AUTHOR Kaoru Hiramatsu]], [[Kunio Kashino|AUTHOR Kunio Kashino]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171453.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-10-5|PAPER Tue-O-4-10-5 — A Mouth Opening Effect Based on Pole Modification for Expressive Singing Voice Transformation]]</div>|<div class="cpsessionviewpapertitle">A Mouth Opening Effect Based on Pole Modification for Expressive Singing Voice Transformation</div><div class="cpsessionviewpaperauthor">[[Luc Ardaillon|AUTHOR Luc Ardaillon]], [[Axel Roebel|AUTHOR Axel Roebel]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-10-6|PAPER Tue-O-4-10-6 — Siamese Autoencoders for Speech Style Extraction and Switching Applied to Voice Identification and Conversion]]</div>|<div class="cpsessionviewpapertitle">Siamese Autoencoders for Speech Style Extraction and Switching Applied to Voice Identification and Conversion</div><div class="cpsessionviewpaperauthor">[[Seyed Hamidreza Mohammadi|AUTHOR Seyed Hamidreza Mohammadi]], [[Alexander Kain|AUTHOR Alexander Kain]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Tuesday, 22 Aug. 2017, A2|<|
|Chair: |Chris Davis, Frank Zimmerer|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170567.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-2-1|PAPER Tue-O-4-2-1 — A Comparison of Sentence-Level Speech Intelligibility Metrics]]</div>|<div class="cpsessionviewpapertitle">A Comparison of Sentence-Level Speech Intelligibility Metrics</div><div class="cpsessionviewpaperauthor">[[Alexander Kain|AUTHOR Alexander Kain]], [[Max Del Giudice|AUTHOR Max Del Giudice]], [[Kris Tjaden|AUTHOR Kris Tjaden]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170196.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-2-2|PAPER Tue-O-4-2-2 — An Auditory Model of Speaker Size Perception for Voiced Speech Sounds]]</div>|<div class="cpsessionviewpapertitle">An Auditory Model of Speaker Size Perception for Voiced Speech Sounds</div><div class="cpsessionviewpaperauthor">[[Toshio Irino|AUTHOR Toshio Irino]], [[Eri Takimoto|AUTHOR Eri Takimoto]], [[Toshie Matsui|AUTHOR Toshie Matsui]], [[Roy D. Patterson|AUTHOR Roy D. Patterson]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171048.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-2-3|PAPER Tue-O-4-2-3 — The Recognition of Compounds: A Computational Account]]</div>|<div class="cpsessionviewpapertitle">The Recognition of Compounds: A Computational Account</div><div class="cpsessionviewpaperauthor">[[L. ten Bosch|AUTHOR L. ten Bosch]], [[L. Boves|AUTHOR L. Boves]], [[M. Ernestus|AUTHOR M. Ernestus]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171158.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-2-4|PAPER Tue-O-4-2-4 — Humans do not Maximize the Probability of Correct Decision When Recognizing DANTALE Words in Noise]]</div>|<div class="cpsessionviewpapertitle">Humans do not Maximize the Probability of Correct Decision When Recognizing DANTALE Words in Noise</div><div class="cpsessionviewpaperauthor">[[Mohsen Zareian Jahromi|AUTHOR Mohsen Zareian Jahromi]], [[Jan Østergaard|AUTHOR Jan Østergaard]], [[Jesper Jensen|AUTHOR Jesper Jensen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171360.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-2-5|PAPER Tue-O-4-2-5 — Single-Ended Prediction of Listening Effort Based on Automatic Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Single-Ended Prediction of Listening Effort Based on Automatic Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Rainer Huber|AUTHOR Rainer Huber]], [[Constantin Spille|AUTHOR Constantin Spille]], [[Bernd T. Meyer|AUTHOR Bernd T. Meyer]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171611.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-2-6|PAPER Tue-O-4-2-6 — Modeling Categorical Perception with the Receptive Fields of Auditory Neurons]]</div>|<div class="cpsessionviewpapertitle">Modeling Categorical Perception with the Receptive Fields of Auditory Neurons</div><div class="cpsessionviewpaperauthor">[[Chris Neufeld|AUTHOR Chris Neufeld]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Tuesday, 22 Aug. 2017, B4|<|
|Chair: |Mahadeva Prasanna, Géza Németh|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170830.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-4-1|PAPER Tue-O-4-4-1 — A Maximum Likelihood Approach to Deep Neural Network Based Nonlinear Spectral Mapping for Single-Channel Speech Separation]]</div>|<div class="cpsessionviewpapertitle">A Maximum Likelihood Approach to Deep Neural Network Based Nonlinear Spectral Mapping for Single-Channel Speech Separation</div><div class="cpsessionviewpaperauthor">[[Yannan Wang|AUTHOR Yannan Wang]], [[Jun Du|AUTHOR Jun Du]], [[Li-Rong Dai|AUTHOR Li-Rong Dai]], [[Chin-Hui Lee|AUTHOR Chin-Hui Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170721.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-4-2|PAPER Tue-O-4-4-2 — Deep Clustering-Based Beamforming for Separation with Unknown Number of Sources]]</div>|<div class="cpsessionviewpapertitle">Deep Clustering-Based Beamforming for Separation with Unknown Number of Sources</div><div class="cpsessionviewpaperauthor">[[Takuya Higuchi|AUTHOR Takuya Higuchi]], [[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Kateřina Žmolíková|AUTHOR Kateřina Žmolíková]], [[Tomohiro Nakatani|AUTHOR Tomohiro Nakatani]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170066.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-4-3|PAPER Tue-O-4-4-3 — Time-Frequency Masking for Blind Source Separation with Preserved Spatial Cues]]</div>|<div class="cpsessionviewpapertitle">Time-Frequency Masking for Blind Source Separation with Preserved Spatial Cues</div><div class="cpsessionviewpaperauthor">[[Shadi Pirhosseinloo|AUTHOR Shadi Pirhosseinloo]], [[Kostas Kokkinakis|AUTHOR Kostas Kokkinakis]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170832.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-4-4|PAPER Tue-O-4-4-4 — Variational Recurrent Neural Networks for Speech Separation]]</div>|<div class="cpsessionviewpapertitle">Variational Recurrent Neural Networks for Speech Separation</div><div class="cpsessionviewpaperauthor">[[Jen-Tzung Chien|AUTHOR Jen-Tzung Chien]], [[Kuan-Ting Kuo|AUTHOR Kuan-Ting Kuo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170188.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-4-5|PAPER Tue-O-4-4-5 — Detecting Overlapped Speech on Short Timeframes Using Deep Learning]]</div>|<div class="cpsessionviewpapertitle">Detecting Overlapped Speech on Short Timeframes Using Deep Learning</div><div class="cpsessionviewpaperauthor">[[Valentin Andrei|AUTHOR Valentin Andrei]], [[Horia Cucu|AUTHOR Horia Cucu]], [[Corneliu Burileanu|AUTHOR Corneliu Burileanu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170549.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-4-6|PAPER Tue-O-4-4-6 — Ideal Ratio Mask Estimation Using Deep Neural Networks for Monaural Speech Segregation in Noisy Reverberant Conditions]]</div>|<div class="cpsessionviewpapertitle">Ideal Ratio Mask Estimation Using Deep Neural Networks for Monaural Speech Segregation in Noisy Reverberant Conditions</div><div class="cpsessionviewpaperauthor">[[Xu Li|AUTHOR Xu Li]], [[Junfeng Li|AUTHOR Junfeng Li]], [[Yonghong Yan|AUTHOR Yonghong Yan]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Tuesday, 22 Aug. 2017, C6|<|
|Chair: |Mariapaola D’Imperio, Oliver Niebuhr|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171635.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-6-1|PAPER Tue-O-4-6-1 — The Vocative Chant and Beyond: German Calling Melodies Under Routine and Urgent Contexts]]</div>|<div class="cpsessionviewpapertitle">The Vocative Chant and Beyond: German Calling Melodies Under Routine and Urgent Contexts</div><div class="cpsessionviewpaperauthor">[[Sergio I. Quiroz|AUTHOR Sergio I. Quiroz]], [[Marzena Żygis|AUTHOR Marzena Żygis]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171044.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-6-2|PAPER Tue-O-4-6-2 — Comparing Languages Using Hierarchical Prosodic Analysis]]</div>|<div class="cpsessionviewpapertitle">Comparing Languages Using Hierarchical Prosodic Analysis</div><div class="cpsessionviewpaperauthor">[[Juraj Šimko|AUTHOR Juraj Šimko]], [[Antti Suni|AUTHOR Antti Suni]], [[Katri Hiovain|AUTHOR Katri Hiovain]], [[Martti Vainio|AUTHOR Martti Vainio]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170264.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-6-3|PAPER Tue-O-4-6-3 — Intonation Facilitates Prediction of Focus Even in the Presence of Lexical Tones]]</div>|<div class="cpsessionviewpapertitle">Intonation Facilitates Prediction of Focus Even in the Presence of Lexical Tones</div><div class="cpsessionviewpaperauthor">[[Martin Ho Kwan Ip|AUTHOR Martin Ho Kwan Ip]], [[Anne Cutler|AUTHOR Anne Cutler]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170839.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-6-4|PAPER Tue-O-4-6-4 — Mind the Peak: When Museum is Temporarily Understood as Musical in Australian English]]</div>|<div class="cpsessionviewpapertitle">Mind the Peak: When Museum is Temporarily Understood as Musical in Australian English</div><div class="cpsessionviewpaperauthor">[[Katharina Zahner|AUTHOR Katharina Zahner]], [[Heather Kember|AUTHOR Heather Kember]], [[Bettina Braun|AUTHOR Bettina Braun]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171353.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-6-5|PAPER Tue-O-4-6-5 — Pashto Intonation Patterns]]</div>|<div class="cpsessionviewpapertitle">Pashto Intonation Patterns</div><div class="cpsessionviewpaperauthor">[[Luca Rognoni|AUTHOR Luca Rognoni]], [[Judith Bishop|AUTHOR Judith Bishop]], [[Miriam Corris|AUTHOR Miriam Corris]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170175.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-6-6|PAPER Tue-O-4-6-6 — A New Model of Final Lowering in Spontaneous Monologue]]</div>|<div class="cpsessionviewpapertitle">A New Model of Final Lowering in Spontaneous Monologue</div><div class="cpsessionviewpaperauthor">[[Kikuo Maekawa|AUTHOR Kikuo Maekawa]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Tuesday, 22 Aug. 2017, D8|<|
|Chair: |Koichi Shinoda, Anton Batliner|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170619.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-8-1|PAPER Tue-O-4-8-1 — Speech Emotion Recognition with Emotion-Pair Based Framework Considering Emotion Distribution Information in Dimensional Emotion Space]]</div>|<div class="cpsessionviewpapertitle">Speech Emotion Recognition with Emotion-Pair Based Framework Considering Emotion Distribution Information in Dimensional Emotion Space</div><div class="cpsessionviewpaperauthor">[[Xi Ma|AUTHOR Xi Ma]], [[Zhiyong Wu|AUTHOR Zhiyong Wu]], [[Jia Jia|AUTHOR Jia Jia]], [[Mingxing Xu|AUTHOR Mingxing Xu]], [[Helen Meng|AUTHOR Helen Meng]], [[Lianhong Cai|AUTHOR Lianhong Cai]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171421.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-8-2|PAPER Tue-O-4-8-2 — Adversarial Auto-Encoders for Speech Based Emotion Recognition]]</div>|<div class="cpsessionviewpapertitle">Adversarial Auto-Encoders for Speech Based Emotion Recognition</div><div class="cpsessionviewpaperauthor">[[Saurabh Sahu|AUTHOR Saurabh Sahu]], [[Rahul Gupta|AUTHOR Rahul Gupta]], [[Ganesh Sivaraman|AUTHOR Ganesh Sivaraman]], [[Wael AbdAlmageed|AUTHOR Wael AbdAlmageed]], [[Carol Espy-Wilson|AUTHOR Carol Espy-Wilson]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170512.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-8-3|PAPER Tue-O-4-8-3 — An Investigation of Emotion Prediction Uncertainty Using Gaussian Mixture Regression]]</div>|<div class="cpsessionviewpapertitle">An Investigation of Emotion Prediction Uncertainty Using Gaussian Mixture Regression</div><div class="cpsessionviewpaperauthor">[[Ting Dang|AUTHOR Ting Dang]], [[Vidhyasaharan Sethu|AUTHOR Vidhyasaharan Sethu]], [[Julien Epps|AUTHOR Julien Epps]], [[Eliathamby Ambikairajah|AUTHOR Eliathamby Ambikairajah]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170548.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-8-4|PAPER Tue-O-4-8-4 — Capturing Long-Term Temporal Dependencies with Convolutional Networks for Continuous Emotion Recognition]]</div>|<div class="cpsessionviewpapertitle">Capturing Long-Term Temporal Dependencies with Convolutional Networks for Continuous Emotion Recognition</div><div class="cpsessionviewpaperauthor">[[Soheil Khorram|AUTHOR Soheil Khorram]], [[Zakaria Aldeneh|AUTHOR Zakaria Aldeneh]], [[Dimitrios Dimitriadis|AUTHOR Dimitrios Dimitriadis]], [[Melvin McInnis|AUTHOR Melvin McInnis]], [[Emily Mower Provost|AUTHOR Emily Mower Provost]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171181.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-8-5|PAPER Tue-O-4-8-5 — Voice-to-Affect Mapping: Inferences on Language Voice Baseline Settings]]</div>|<div class="cpsessionviewpapertitle">Voice-to-Affect Mapping: Inferences on Language Voice Baseline Settings</div><div class="cpsessionviewpaperauthor">[[Ailbhe Ní Chasaide|AUTHOR Ailbhe Ní Chasaide]], [[Irena Yanushevskaya|AUTHOR Irena Yanushevskaya]], [[Christer Gobl|AUTHOR Christer Gobl]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170917.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-4-8-6|PAPER Tue-O-4-8-6 — Attentive Convolutional Neural Network Based Speech Emotion Recognition: A Study on the Impact of Input Features, Signal Length, and Acted Speech]]</div>|<div class="cpsessionviewpapertitle">Attentive Convolutional Neural Network Based Speech Emotion Recognition: A Study on the Impact of Input Features, Signal Length, and Acted Speech</div><div class="cpsessionviewpaperauthor">[[Michael Neumann|AUTHOR Michael Neumann]], [[Ngoc Thang Vu|AUTHOR Ngoc Thang Vu]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Tuesday, 22 Aug. 2017, Aula Magna|<|
|Chair: |Mark Gales, Tara Sainath|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171705.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-1-1|PAPER Tue-O-5-1-1 — Recurrent Neural Aligner: An Encoder-Decoder Neural Network Model for Sequence to Sequence Mapping]]</div>|<div class="cpsessionviewpapertitle">Recurrent Neural Aligner: An Encoder-Decoder Neural Network Model for Sequence to Sequence Mapping</div><div class="cpsessionviewpaperauthor">[[Haşim Sak|AUTHOR Haşim Sak]], [[Matt Shannon|AUTHOR Matt Shannon]], [[Kanishka Rao|AUTHOR Kanishka Rao]], [[Françoise Beaufays|AUTHOR Françoise Beaufays]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170429.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-1-2|PAPER Tue-O-5-1-2 — Highway-LSTM and Recurrent Highway Networks for Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Highway-LSTM and Recurrent Highway Networks for Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Golan Pundak|AUTHOR Golan Pundak]], [[Tara N. Sainath|AUTHOR Tara N. Sainath]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-1-3|PAPER Tue-O-5-1-3 — Improving Speech Recognition by Revising Gated Recurrent Units]]</div>|<div class="cpsessionviewpapertitle">Improving Speech Recognition by Revising Gated Recurrent Units</div><div class="cpsessionviewpaperauthor">[[Mirco Ravanelli|AUTHOR Mirco Ravanelli]], [[Philemon Brakel|AUTHOR Philemon Brakel]], [[Maurizio Omologo|AUTHOR Maurizio Omologo]], [[Yoshua Bengio|AUTHOR Yoshua Bengio]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170856.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-1-4|PAPER Tue-O-5-1-4 — Stochastic Recurrent Neural Network for Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Stochastic Recurrent Neural Network for Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Jen-Tzung Chien|AUTHOR Jen-Tzung Chien]], [[Chen Shen|AUTHOR Chen Shen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171064.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-1-5|PAPER Tue-O-5-1-5 — Frame and Segment Level Recurrent Neural Networks for Phone Classification]]</div>|<div class="cpsessionviewpapertitle">Frame and Segment Level Recurrent Neural Networks for Phone Classification</div><div class="cpsessionviewpaperauthor">[[Martin Ratajczak|AUTHOR Martin Ratajczak]], [[Sebastian Tschiatschek|AUTHOR Sebastian Tschiatschek]], [[Franz Pernkopf|AUTHOR Franz Pernkopf]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171695.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-1-6|PAPER Tue-O-5-1-6 — Deep Learning-Based Telephony Speech Recognition in the Wild]]</div>|<div class="cpsessionviewpapertitle">Deep Learning-Based Telephony Speech Recognition in the Wild</div><div class="cpsessionviewpaperauthor">[[Kyu J. Han|AUTHOR Kyu J. Han]], [[Seongjun Hahm|AUTHOR Seongjun Hahm]], [[Byung-Hak Kim|AUTHOR Byung-Hak Kim]], [[Jungsuk Kim|AUTHOR Jungsuk Kim]], [[Ian Lane|AUTHOR Ian Lane]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Tuesday, 22 Aug. 2017, E10|<|
|Chair: |Julien Epps, Carlos Busso|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170159.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-10-1|PAPER Tue-O-5-10-1 — Inferring Stance from Prosody]]</div>|<div class="cpsessionviewpapertitle">Inferring Stance from Prosody</div><div class="cpsessionviewpaperauthor">[[Nigel G. Ward|AUTHOR Nigel G. Ward]], [[Jason C. Carlson|AUTHOR Jason C. Carlson]], [[Olac Fuentes|AUTHOR Olac Fuentes]], [[Diego Castan|AUTHOR Diego Castan]], [[Elizabeth E. Shriberg|AUTHOR Elizabeth E. Shriberg]], [[Andreas Tsiartas|AUTHOR Andreas Tsiartas]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171706.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-10-2|PAPER Tue-O-5-10-2 — Exploring Dynamic Measures of Stance in Spoken Interaction]]</div>|<div class="cpsessionviewpapertitle">Exploring Dynamic Measures of Stance in Spoken Interaction</div><div class="cpsessionviewpaperauthor">[[Gina-Anne Levow|AUTHOR Gina-Anne Levow]], [[Richard A. Wright|AUTHOR Richard A. Wright]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171035.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-10-3|PAPER Tue-O-5-10-3 — Opinion Dynamics Modeling for Movie Review Transcripts Classification with Hidden Conditional Random Fields]]</div>|<div class="cpsessionviewpapertitle">Opinion Dynamics Modeling for Movie Review Transcripts Classification with Hidden Conditional Random Fields</div><div class="cpsessionviewpaperauthor">[[Valentin Barriere|AUTHOR Valentin Barriere]], [[Chloé Clavel|AUTHOR Chloé Clavel]], [[Slim Essid|AUTHOR Slim Essid]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170121.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-10-4|PAPER Tue-O-5-10-4 — Transfer Learning Between Concepts for Human Behavior Modeling: An Application to Sincerity and Deception Prediction]]</div>|<div class="cpsessionviewpapertitle">Transfer Learning Between Concepts for Human Behavior Modeling: An Application to Sincerity and Deception Prediction</div><div class="cpsessionviewpaperauthor">[[Qinyi Luo|AUTHOR Qinyi Luo]], [[Rahul Gupta|AUTHOR Rahul Gupta]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-10-5|PAPER Tue-O-5-10-5 — The Sound of Deception — What Makes a Speaker Credible?]]</div>|<div class="cpsessionviewpapertitle">The Sound of Deception — What Makes a Speaker Credible?</div><div class="cpsessionviewpaperauthor">[[Anne Schröder|AUTHOR Anne Schröder]], [[Simon Stone|AUTHOR Simon Stone]], [[Peter Birkholz|AUTHOR Peter Birkholz]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-10-6|PAPER Tue-O-5-10-6 — Hybrid Acoustic-Lexical Deep Learning Approach for Deception Detection]]</div>|<div class="cpsessionviewpapertitle">Hybrid Acoustic-Lexical Deep Learning Approach for Deception Detection</div><div class="cpsessionviewpaperauthor">[[Gideon Mendels|AUTHOR Gideon Mendels]], [[Sarah Ita Levitan|AUTHOR Sarah Ita Levitan]], [[Kai-Zhan Lee|AUTHOR Kai-Zhan Lee]], [[Julia Hirschberg|AUTHOR Julia Hirschberg]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Tuesday, 22 Aug. 2017, A2|<|
|Chair: |Kong Aik Lee, Rahim Saeidi|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170203.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-2-1|PAPER Tue-O-5-2-1 — The I4U Mega Fusion and Collaboration for NIST Speaker Recognition Evaluation 2016]]</div>|<div class="cpsessionviewpapertitle">The I4U Mega Fusion and Collaboration for NIST Speaker Recognition Evaluation 2016</div><div class="cpsessionviewpaperauthor">[[Kong Aik Lee|AUTHOR Kong Aik Lee]], [[SRE’16 I4U Group|AUTHOR SRE’16 I4U Group]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170537.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-2-2|PAPER Tue-O-5-2-2 — The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System]]</div>|<div class="cpsessionviewpapertitle">The MIT-LL, JHU and LRDE NIST 2016 Speaker Recognition Evaluation System</div><div class="cpsessionviewpaperauthor">[[Pedro A. Torres-Carrasquillo|AUTHOR Pedro A. Torres-Carrasquillo]], [[Fred Richardson|AUTHOR Fred Richardson]], [[Shahan Nercessian|AUTHOR Shahan Nercessian]], [[Douglas Sturim|AUTHOR Douglas Sturim]], [[William Campbell|AUTHOR William Campbell]], [[Youngjune Gwon|AUTHOR Youngjune Gwon]], [[Swaroop Vattam|AUTHOR Swaroop Vattam]], [[Najim Dehak|AUTHOR Najim Dehak]], [[Harish Mallidi|AUTHOR Harish Mallidi]], [[Phani Sankar Nidadavolu|AUTHOR Phani Sankar Nidadavolu]], [[Ruizhi Li|AUTHOR Ruizhi Li]], [[Reda Dehak|AUTHOR Reda Dehak]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170797.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-2-3|PAPER Tue-O-5-2-3 — Nuance - Politecnico di Torino’s 2016 NIST Speaker Recognition Evaluation System]]</div>|<div class="cpsessionviewpapertitle">Nuance - Politecnico di Torino’s 2016 NIST Speaker Recognition Evaluation System</div><div class="cpsessionviewpaperauthor">[[Daniele Colibro|AUTHOR Daniele Colibro]], [[Claudio Vair|AUTHOR Claudio Vair]], [[Emanuele Dalmasso|AUTHOR Emanuele Dalmasso]], [[Kevin Farrell|AUTHOR Kevin Farrell]], [[Gennady Karvitsky|AUTHOR Gennady Karvitsky]], [[Sandro Cumani|AUTHOR Sandro Cumani]], [[Pietro Laface|AUTHOR Pietro Laface]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170555.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-2-4|PAPER Tue-O-5-2-4 — UTD-CRSS Systems for 2016 NIST Speaker Recognition Evaluation]]</div>|<div class="cpsessionviewpapertitle">UTD-CRSS Systems for 2016 NIST Speaker Recognition Evaluation</div><div class="cpsessionviewpaperauthor">[[Chunlei Zhang|AUTHOR Chunlei Zhang]], [[Fahimeh Bahmaninezhad|AUTHOR Fahimeh Bahmaninezhad]], [[Shivesh Ranjan|AUTHOR Shivesh Ranjan]], [[Chengzhu Yu|AUTHOR Chengzhu Yu]], [[Navid Shokouhi|AUTHOR Navid Shokouhi]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171498.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-2-5|PAPER Tue-O-5-2-5 — Analysis and Description of ABC Submission to NIST SRE 2016]]</div>|<div class="cpsessionviewpapertitle">Analysis and Description of ABC Submission to NIST SRE 2016</div><div class="cpsessionviewpaperauthor">[[Oldřich Plchot|AUTHOR Oldřich Plchot]], [[Pavel Matějka|AUTHOR Pavel Matějka]], [[Anna Silnova|AUTHOR Anna Silnova]], [[Ondřej Novotný|AUTHOR Ondřej Novotný]], [[Mireia Diez Sánchez|AUTHOR Mireia Diez Sánchez]], [[Johan Rohdin|AUTHOR Johan Rohdin]], [[Ondřej Glembek|AUTHOR Ondřej Glembek]], [[Niko Brümmer|AUTHOR Niko Brümmer]], [[Albert Swart|AUTHOR Albert Swart]], [[Jesús Jorrín-Prieto|AUTHOR Jesús Jorrín-Prieto]], [[Paola García|AUTHOR Paola García]], [[Luis Buera|AUTHOR Luis Buera]], [[Patrick Kenny|AUTHOR Patrick Kenny]], [[Jahangir Alam|AUTHOR Jahangir Alam]], [[Gautam Bhattacharya|AUTHOR Gautam Bhattacharya]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170458.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-2-6|PAPER Tue-O-5-2-6 — The 2016 NIST Speaker Recognition Evaluation]]</div>|<div class="cpsessionviewpapertitle">The 2016 NIST Speaker Recognition Evaluation</div><div class="cpsessionviewpaperauthor">[[Seyed Omid Sadjadi|AUTHOR Seyed Omid Sadjadi]], [[Timothée Kheyrkhah|AUTHOR Timothée Kheyrkhah]], [[Audrey Tong|AUTHOR Audrey Tong]], [[Craig Greenberg|AUTHOR Craig Greenberg]], [[Douglas Reynolds|AUTHOR Douglas Reynolds]], [[Elliot Singer|AUTHOR Elliot Singer]], [[Lisa Mason|AUTHOR Lisa Mason]], [[Jaime Hernandez-Cordero|AUTHOR Jaime Hernandez-Cordero]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Tuesday, 22 Aug. 2017, B4|<|
|Chair: |João Cabral, Thomas Drugman|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170015.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-4-1|PAPER Tue-O-5-4-1 — A New Cosine Series Antialiasing Function and its Application to Aliasing-Free Glottal Source Models for Speech and Singing Synthesis]]</div>|<div class="cpsessionviewpapertitle">A New Cosine Series Antialiasing Function and its Application to Aliasing-Free Glottal Source Models for Speech and Singing Synthesis</div><div class="cpsessionviewpaperauthor">[[Hideki Kawahara|AUTHOR Hideki Kawahara]], [[Ken-Ichi Sakakibara|AUTHOR Ken-Ichi Sakakibara]], [[Masanori Morise|AUTHOR Masanori Morise]], [[Hideki Banno|AUTHOR Hideki Banno]], [[Tomoki Toda|AUTHOR Tomoki Toda]], [[Toshio Irino|AUTHOR Toshio Irino]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170400.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-4-2|PAPER Tue-O-5-4-2 — Speaking Style Conversion from Normal to Lombard Speech Using a Glottal Vocoder and Bayesian GMMs]]</div>|<div class="cpsessionviewpapertitle">Speaking Style Conversion from Normal to Lombard Speech Using a Glottal Vocoder and Bayesian GMMs</div><div class="cpsessionviewpaperauthor">[[Ana Ramírez López|AUTHOR Ana Ramírez López]], [[Shreyas Seshadri|AUTHOR Shreyas Seshadri]], [[Lauri Juvela|AUTHOR Lauri Juvela]], [[Okko Räsänen|AUTHOR Okko Räsänen]], [[Paavo Alku|AUTHOR Paavo Alku]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170848.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-4-3|PAPER Tue-O-5-4-3 — Reducing Mismatch in Training of DNN-Based Glottal Excitation Models in a Statistical Parametric Text-to-Speech System]]</div>|<div class="cpsessionviewpapertitle">Reducing Mismatch in Training of DNN-Based Glottal Excitation Models in a Statistical Parametric Text-to-Speech System</div><div class="cpsessionviewpaperauthor">[[Lauri Juvela|AUTHOR Lauri Juvela]], [[Bajibabu Bollepalli|AUTHOR Bajibabu Bollepalli]], [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]], [[Paavo Alku|AUTHOR Paavo Alku]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171202.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-4-4|PAPER Tue-O-5-4-4 — Semi Parametric Concatenative TTS with Instant Voice Modification Capabilities]]</div>|<div class="cpsessionviewpapertitle">Semi Parametric Concatenative TTS with Instant Voice Modification Capabilities</div><div class="cpsessionviewpaperauthor">[[Alexander Sorin|AUTHOR Alexander Sorin]], [[Slava Shechtman|AUTHOR Slava Shechtman]], [[Asaf Rendel|AUTHOR Asaf Rendel]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171722.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-4-5|PAPER Tue-O-5-4-5 — Modeling Laryngeal Muscle Activation Noise for Low-Order Physiological Based Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Modeling Laryngeal Muscle Activation Noise for Low-Order Physiological Based Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Rodrigo Manríquez|AUTHOR Rodrigo Manríquez]], [[Sean D. Peterson|AUTHOR Sean D. Peterson]], [[Pavel Prado|AUTHOR Pavel Prado]], [[Patricio Orio|AUTHOR Patricio Orio]], [[Matías Zañartu|AUTHOR Matías Zañartu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171647.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-4-6|PAPER Tue-O-5-4-6 — Direct Modelling of Magnitude and Phase Spectra for Statistical Parametric Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Direct Modelling of Magnitude and Phase Spectra for Statistical Parametric Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Felipe Espic|AUTHOR Felipe Espic]], [[Cassia Valentini Botinhao|AUTHOR Cassia Valentini Botinhao]], [[Simon King|AUTHOR Simon King]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Tuesday, 22 Aug. 2017, C6|<|
|Chair: |Plinio Barbosa, Pärtel Lippus|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170544.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-6-1|PAPER Tue-O-5-6-1 — Similar Prosodic Structure Perceived Differently in German and English]]</div>|<div class="cpsessionviewpapertitle">Similar Prosodic Structure Perceived Differently in German and English</div><div class="cpsessionviewpaperauthor">[[Heather Kember|AUTHOR Heather Kember]], [[Ann-Kathrin Grohe|AUTHOR Ann-Kathrin Grohe]], [[Katharina Zahner|AUTHOR Katharina Zahner]], [[Bettina Braun|AUTHOR Bettina Braun]], [[Andrea Weber|AUTHOR Andrea Weber]], [[Anne Cutler|AUTHOR Anne Cutler]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171214.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-6-2|PAPER Tue-O-5-6-2 — Disambiguate or not? — The Role of Prosody in Unambiguous and Potentially Ambiguous Anaphora Production in Strictly Mandarin Parallel Structures]]</div>|<div class="cpsessionviewpapertitle">Disambiguate or not? — The Role of Prosody in Unambiguous and Potentially Ambiguous Anaphora Production in Strictly Mandarin Parallel Structures</div><div class="cpsessionviewpaperauthor">[[Luying Hou|AUTHOR Luying Hou]], [[Bert Le Bruyn|AUTHOR Bert Le Bruyn]], [[René Kager|AUTHOR René Kager]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171514.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-6-3|PAPER Tue-O-5-6-3 — Acoustic Properties of Canonical and Non-Canonical Stress in French, Turkish, Armenian and Brazilian Portuguese]]</div>|<div class="cpsessionviewpapertitle">Acoustic Properties of Canonical and Non-Canonical Stress in French, Turkish, Armenian and Brazilian Portuguese</div><div class="cpsessionviewpaperauthor">[[Angeliki Athanasopoulou|AUTHOR Angeliki Athanasopoulou]], [[Irene Vogel|AUTHOR Irene Vogel]], [[Hossep Dolatian|AUTHOR Hossep Dolatian]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170987.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-6-4|PAPER Tue-O-5-6-4 — Phonological Complexity, Segment Rate and Speech Tempo Perception]]</div>|<div class="cpsessionviewpapertitle">Phonological Complexity, Segment Rate and Speech Tempo Perception</div><div class="cpsessionviewpaperauthor">[[Leendert Plug|AUTHOR Leendert Plug]], [[Rachel Smith|AUTHOR Rachel Smith]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170029.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-6-5|PAPER Tue-O-5-6-5 — On the Duration of Mandarin Tones]]</div>|<div class="cpsessionviewpapertitle">On the Duration of Mandarin Tones</div><div class="cpsessionviewpaperauthor">[[Jing Yang|AUTHOR Jing Yang]], [[Yu Zhang|AUTHOR Yu Zhang]], [[Aijun Li|AUTHOR Aijun Li]], [[Li Xu|AUTHOR Li Xu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171134.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-6-6|PAPER Tue-O-5-6-6 — The Formant Dynamics of Long Close Vowels in Three Varieties of Swedish]]</div>|<div class="cpsessionviewpapertitle">The Formant Dynamics of Long Close Vowels in Three Varieties of Swedish</div><div class="cpsessionviewpaperauthor">[[Otto Ewald|AUTHOR Otto Ewald]], [[Eva Liina Asu|AUTHOR Eva Liina Asu]], [[Susanne Schötz|AUTHOR Susanne Schötz]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Tuesday, 22 Aug. 2017, D8|<|
|Chair: |Tatsuya Kawahara, Martin Russell|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170250.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-8-1|PAPER Tue-O-5-8-1 — Bidirectional LSTM-RNN for Improving Automated Assessment of Non-Native Children’s Speech]]</div>|<div class="cpsessionviewpapertitle">Bidirectional LSTM-RNN for Improving Automated Assessment of Non-Native Children’s Speech</div><div class="cpsessionviewpaperauthor">[[Yao Qian|AUTHOR Yao Qian]], [[Keelan Evanini|AUTHOR Keelan Evanini]], [[Xinhao Wang|AUTHOR Xinhao Wang]], [[Chong Min Lee|AUTHOR Chong Min Lee]], [[Matthew Mulholland|AUTHOR Matthew Mulholland]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170728.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-8-2|PAPER Tue-O-5-8-2 — Automatic Scoring of Shadowing Speech Based on DNN Posteriors and Their DTW]]</div>|<div class="cpsessionviewpapertitle">Automatic Scoring of Shadowing Speech Based on DNN Posteriors and Their DTW</div><div class="cpsessionviewpaperauthor">[[Junwei Yue|AUTHOR Junwei Yue]], [[Fumiya Shiozawa|AUTHOR Fumiya Shiozawa]], [[Shohei Toyama|AUTHOR Shohei Toyama]], [[Yutaka Yamauchi|AUTHOR Yutaka Yamauchi]], [[Kayoko Ito|AUTHOR Kayoko Ito]], [[Daisuke Saito|AUTHOR Daisuke Saito]], [[Nobuaki Minematsu|AUTHOR Nobuaki Minematsu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171174.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-8-3|PAPER Tue-O-5-8-3 — Off-Topic Spoken Response Detection Using Siamese Convolutional Neural Networks]]</div>|<div class="cpsessionviewpapertitle">Off-Topic Spoken Response Detection Using Siamese Convolutional Neural Networks</div><div class="cpsessionviewpaperauthor">[[Chong Min Lee|AUTHOR Chong Min Lee]], [[Su-Youn Yoon|AUTHOR Su-Youn Yoon]], [[Xinhao Wang|AUTHOR Xinhao Wang]], [[Matthew Mulholland|AUTHOR Matthew Mulholland]], [[Ikkyu Choi|AUTHOR Ikkyu Choi]], [[Keelan Evanini|AUTHOR Keelan Evanini]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171350.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-8-4|PAPER Tue-O-5-8-4 — Phonological Feature Based Mispronunciation Detection and Diagnosis Using Multi-Task DNNs and Active Learning]]</div>|<div class="cpsessionviewpapertitle">Phonological Feature Based Mispronunciation Detection and Diagnosis Using Multi-Task DNNs and Active Learning</div><div class="cpsessionviewpaperauthor">[[Vipul Arora|AUTHOR Vipul Arora]], [[Aditi Lahiri|AUTHOR Aditi Lahiri]], [[Henning Reetz|AUTHOR Henning Reetz]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171522.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-8-5|PAPER Tue-O-5-8-5 — Detection of Mispronunciations and Disfluencies in Children Reading Aloud]]</div>|<div class="cpsessionviewpapertitle">Detection of Mispronunciations and Disfluencies in Children Reading Aloud</div><div class="cpsessionviewpaperauthor">[[Jorge Proença|AUTHOR Jorge Proença]], [[Carla Lopes|AUTHOR Carla Lopes]], [[Michael Tjalve|AUTHOR Michael Tjalve]], [[Andreas Stolcke|AUTHOR Andreas Stolcke]], [[Sara Candeias|AUTHOR Sara Candeias]], [[Fernando Perdigão|AUTHOR Fernando Perdigão]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170366.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-O-5-8-6|PAPER Tue-O-5-8-6 — Automatic Assessment of Non-Native Prosody by Measuring Distances on Prosodic Label Sequences]]</div>|<div class="cpsessionviewpapertitle">Automatic Assessment of Non-Native Prosody by Measuring Distances on Prosodic Label Sequences</div><div class="cpsessionviewpaperauthor">[[David Escudero-Mancebo|AUTHOR David Escudero-Mancebo]], [[César González-Ferreras|AUTHOR César González-Ferreras]], [[Lourdes Aguilar|AUTHOR Lourdes Aguilar]], [[Eva Estebas-Vilaplana|AUTHOR Eva Estebas-Vilaplana]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Tuesday, 22 Aug. 2017, Poster 1|<|
|Chair: |Anthony Larcher|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170137.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-1-1|PAPER Tue-P-3-1-1 — A Generative Model for Score Normalization in Speaker Recognition]]</div>|<div class="cpsessionviewpapertitle">A Generative Model for Score Normalization in Speaker Recognition</div><div class="cpsessionviewpaperauthor">[[Albert Swart|AUTHOR Albert Swart]], [[Niko Brümmer|AUTHOR Niko Brümmer]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171419.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-1-2|PAPER Tue-P-3-1-2 — Content Normalization for Text-Dependent Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Content Normalization for Text-Dependent Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Subhadeep Dey|AUTHOR Subhadeep Dey]], [[Srikanth Madikeri|AUTHOR Srikanth Madikeri]], [[Petr Motlicek|AUTHOR Petr Motlicek]], [[Marc Ferras|AUTHOR Marc Ferras]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171608.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-1-3|PAPER Tue-P-3-1-3 — End-to-End Text-Independent Speaker Verification with Triplet Loss on Short Utterances]]</div>|<div class="cpsessionviewpapertitle">End-to-End Text-Independent Speaker Verification with Triplet Loss on Short Utterances</div><div class="cpsessionviewpaperauthor">[[Chunlei Zhang|AUTHOR Chunlei Zhang]], [[Kazuhito Koishida|AUTHOR Kazuhito Koishida]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170883.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-1-4|PAPER Tue-P-3-1-4 — Adversarial Network Bottleneck Features for Noise Robust Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Adversarial Network Bottleneck Features for Noise Robust Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Hong Yu|AUTHOR Hong Yu]], [[Zheng-Hua Tan|AUTHOR Zheng-Hua Tan]], [[Zhanyu Ma|AUTHOR Zhanyu Ma]], [[Jun Guo|AUTHOR Jun Guo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171125.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-1-5|PAPER Tue-P-3-1-5 — What Does the Speaker Embedding Encode?]]</div>|<div class="cpsessionviewpapertitle">What Does the Speaker Embedding Encode?</div><div class="cpsessionviewpaperauthor">[[Shuai Wang|AUTHOR Shuai Wang]], [[Yanmin Qian|AUTHOR Yanmin Qian]], [[Kai Yu|AUTHOR Kai Yu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170266.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-1-6|PAPER Tue-P-3-1-6 — Incorporating Local Acoustic Variability Information into Short Duration Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Incorporating Local Acoustic Variability Information into Short Duration Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Jianbo Ma|AUTHOR Jianbo Ma]], [[Vidhyasaharan Sethu|AUTHOR Vidhyasaharan Sethu]], [[Eliathamby Ambikairajah|AUTHOR Eliathamby Ambikairajah]], [[Kong Aik Lee|AUTHOR Kong Aik Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171036.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-1-7|PAPER Tue-P-3-1-7 — DNN i-Vector Speaker Verification with Short, Text-Constrained Test Utterances]]</div>|<div class="cpsessionviewpapertitle">DNN i-Vector Speaker Verification with Short, Text-Constrained Test Utterances</div><div class="cpsessionviewpaperauthor">[[Jinghua Zhong|AUTHOR Jinghua Zhong]], [[Wenping Hu|AUTHOR Wenping Hu]], [[Frank K. Soong|AUTHOR Frank K. Soong]], [[Helen Meng|AUTHOR Helen Meng]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170734.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-1-8|PAPER Tue-P-3-1-8 — Time-Varying Autoregressions for Speaker Verification in Reverberant Conditions]]</div>|<div class="cpsessionviewpapertitle">Time-Varying Autoregressions for Speaker Verification in Reverberant Conditions</div><div class="cpsessionviewpaperauthor">[[Ville Vestman|AUTHOR Ville Vestman]], [[Dhananjaya Gowda|AUTHOR Dhananjaya Gowda]], [[Md. Sahidullah|AUTHOR Md. Sahidullah]], [[Paavo Alku|AUTHOR Paavo Alku]], [[Tomi Kinnunen|AUTHOR Tomi Kinnunen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171575.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-1-9|PAPER Tue-P-3-1-9 — Deep Speaker Embeddings for Short-Duration Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Deep Speaker Embeddings for Short-Duration Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Gautam Bhattacharya|AUTHOR Gautam Bhattacharya]], [[Jahangir Alam|AUTHOR Jahangir Alam]], [[Patrick Kenny|AUTHOR Patrick Kenny]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170157.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-1-10|PAPER Tue-P-3-1-10 — Using Voice Quality Features to Improve Short-Utterance, Text-Independent Speaker Verification Systems]]</div>|<div class="cpsessionviewpapertitle">Using Voice Quality Features to Improve Short-Utterance, Text-Independent Speaker Verification Systems</div><div class="cpsessionviewpaperauthor">[[Soo Jin Park|AUTHOR Soo Jin Park]], [[Gary Yeung|AUTHOR Gary Yeung]], [[Jody Kreiman|AUTHOR Jody Kreiman]], [[Patricia A. Keating|AUTHOR Patricia A. Keating]], [[Abeer Alwan|AUTHOR Abeer Alwan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170108.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-1-11|PAPER Tue-P-3-1-11 — Gain Compensation for Fast i-Vector Extraction Over Short Duration]]</div>|<div class="cpsessionviewpapertitle">Gain Compensation for Fast i-Vector Extraction Over Short Duration</div><div class="cpsessionviewpaperauthor">[[Kong Aik Lee|AUTHOR Kong Aik Lee]], [[Haizhou Li|AUTHOR Haizhou Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-1-12|PAPER Tue-P-3-1-12 — Joint Training of Expanded End-to-End DNN for Text-Dependent Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Joint Training of Expanded End-to-End DNN for Text-Dependent Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Hee-soo Heo|AUTHOR Hee-soo Heo]], [[Jee-weon Jung|AUTHOR Jee-weon Jung]], [[IL-ho Yang|AUTHOR IL-ho Yang]], [[Sung-hyun Yoon|AUTHOR Sung-hyun Yoon]], [[Ha-jin Yu|AUTHOR Ha-jin Yu]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Tuesday, 22 Aug. 2017, Poster 2|<|
|Chair: |Michael Wagner|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170633.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-2-1|PAPER Tue-P-3-2-1 — Speaker Verification via Estimating Total Variability Space Using Probabilistic Partial Least Squares]]</div>|<div class="cpsessionviewpapertitle">Speaker Verification via Estimating Total Variability Space Using Probabilistic Partial Least Squares</div><div class="cpsessionviewpaperauthor">[[Chen Chen|AUTHOR Chen Chen]], [[Jiqing Han|AUTHOR Jiqing Han]], [[Yilin Pan|AUTHOR Yilin Pan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-2-2|PAPER Tue-P-3-2-2 — Deep Speaker Feature Learning for Text-Independent Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Deep Speaker Feature Learning for Text-Independent Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Lantian Li|AUTHOR Lantian Li]], [[Yixiang Chen|AUTHOR Yixiang Chen]], [[Ying Shi|AUTHOR Ying Shi]], [[Zhiyuan Tang|AUTHOR Zhiyuan Tang]], [[Dong Wang|AUTHOR Dong Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170093.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-2-3|PAPER Tue-P-3-2-3 — Duration Mismatch Compensation Using Four-Covariance Model and Deep Neural Network for Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Duration Mismatch Compensation Using Four-Covariance Model and Deep Neural Network for Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Pierre-Michel Bousquet|AUTHOR Pierre-Michel Bousquet]], [[Mickael Rouvier|AUTHOR Mickael Rouvier]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171586.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-2-4|PAPER Tue-P-3-2-4 — Extended Variability Modeling and Unsupervised Adaptation for PLDA Speaker Recognition]]</div>|<div class="cpsessionviewpapertitle">Extended Variability Modeling and Unsupervised Adaptation for PLDA Speaker Recognition</div><div class="cpsessionviewpaperauthor">[[Alan McCree|AUTHOR Alan McCree]], [[Gregory Sell|AUTHOR Gregory Sell]], [[Daniel Garcia-Romero|AUTHOR Daniel Garcia-Romero]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170438.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-2-5|PAPER Tue-P-3-2-5 — Improving the Effectiveness of Speaker Verification Domain Adaptation with Inadequate In-Domain Data]]</div>|<div class="cpsessionviewpapertitle">Improving the Effectiveness of Speaker Verification Domain Adaptation with Inadequate In-Domain Data</div><div class="cpsessionviewpaperauthor">[[Bengt J. Borgström|AUTHOR Bengt J. Borgström]], [[Elliot Singer|AUTHOR Elliot Singer]], [[Douglas Reynolds|AUTHOR Douglas Reynolds]], [[Seyed Omid Sadjadi|AUTHOR Seyed Omid Sadjadi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170656.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-2-6|PAPER Tue-P-3-2-6 — i-Vector DNN Scoring and Calibration for Noise Robust Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">i-Vector DNN Scoring and Calibration for Noise Robust Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Zhili Tan|AUTHOR Zhili Tan]], [[Man-Wai Mak|AUTHOR Man-Wai Mak]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170803.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-2-7|PAPER Tue-P-3-2-7 — Analysis of Score Normalization in Multilingual Speaker Recognition]]</div>|<div class="cpsessionviewpapertitle">Analysis of Score Normalization in Multilingual Speaker Recognition</div><div class="cpsessionviewpaperauthor">[[Pavel Matějka|AUTHOR Pavel Matějka]], [[Ondřej Novotný|AUTHOR Ondřej Novotný]], [[Oldřich Plchot|AUTHOR Oldřich Plchot]], [[Lukáš Burget|AUTHOR Lukáš Burget]], [[Mireia Diez Sánchez|AUTHOR Mireia Diez Sánchez]], [[Jan Černocký|AUTHOR Jan Černocký]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-2-8|PAPER Tue-P-3-2-8 — Alternative Approaches to Neural Network Based Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Alternative Approaches to Neural Network Based Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Anna Silnova|AUTHOR Anna Silnova]], [[Lukáš Burget|AUTHOR Lukáš Burget]], [[Jan Černocký|AUTHOR Jan Černocký]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170219.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-2-9|PAPER Tue-P-3-2-9 — A Distribution Free Formulation of the Total Variability Model]]</div>|<div class="cpsessionviewpapertitle">A Distribution Free Formulation of the Total Variability Model</div><div class="cpsessionviewpaperauthor">[[Ruchir Travadi|AUTHOR Ruchir Travadi]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170668.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-3-2-10|PAPER Tue-P-3-2-10 — Domain Mismatch Modeling of Out-Domain i-Vectors for PLDA Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Domain Mismatch Modeling of Out-Domain i-Vectors for PLDA Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Md. Hafizur Rahman|AUTHOR Md. Hafizur Rahman]], [[Ivan Himawan|AUTHOR Ivan Himawan]], [[David Dean|AUTHOR David Dean]], [[Sridha Sridharan|AUTHOR Sridha Sridharan]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Tuesday, 22 Aug. 2017, Poster 1|<|
|Chair: |Michiel Bacchiani|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170129.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-1-1|PAPER Tue-P-4-1-1 — An Exploration of Dropout with LSTMs]]</div>|<div class="cpsessionviewpapertitle">An Exploration of Dropout with LSTMs</div><div class="cpsessionviewpaperauthor">[[Gaofeng Cheng|AUTHOR Gaofeng Cheng]], [[Vijayaditya Peddinti|AUTHOR Vijayaditya Peddinti]], [[Daniel Povey|AUTHOR Daniel Povey]], [[Vimal Manohar|AUTHOR Vimal Manohar]], [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]], [[Yonghong Yan|AUTHOR Yonghong Yan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170477.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-1-2|PAPER Tue-P-4-1-2 — Residual LSTM: Design of a Deep Recurrent Architecture for Distant Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Residual LSTM: Design of a Deep Recurrent Architecture for Distant Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Jaeyoung Kim|AUTHOR Jaeyoung Kim]], [[Mostafa El-Khamy|AUTHOR Mostafa El-Khamy]], [[Jungwon Lee|AUTHOR Jungwon Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170873.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-1-3|PAPER Tue-P-4-1-3 — Unfolded Deep Recurrent Convolutional Neural Network with Jump Ahead Connections for Acoustic Modeling]]</div>|<div class="cpsessionviewpapertitle">Unfolded Deep Recurrent Convolutional Neural Network with Jump Ahead Connections for Acoustic Modeling</div><div class="cpsessionviewpaperauthor">[[Dung T. Tran|AUTHOR Dung T. Tran]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Shigeki Karita|AUTHOR Shigeki Karita]], [[Michael Hentschel|AUTHOR Michael Hentschel]], [[Atsunori Ogawa|AUTHOR Atsunori Ogawa]], [[Tomohiro Nakatani|AUTHOR Tomohiro Nakatani]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170554.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-1-4|PAPER Tue-P-4-1-4 — Forward-Backward Convolutional LSTM for Acoustic Modeling]]</div>|<div class="cpsessionviewpapertitle">Forward-Backward Convolutional LSTM for Acoustic Modeling</div><div class="cpsessionviewpaperauthor">[[Shigeki Karita|AUTHOR Shigeki Karita]], [[Atsunori Ogawa|AUTHOR Atsunori Ogawa]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Tomohiro Nakatani|AUTHOR Tomohiro Nakatani]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171737.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-1-5|PAPER Tue-P-4-1-5 — Convolutional Recurrent Neural Networks for Small-Footprint Keyword Spotting]]</div>|<div class="cpsessionviewpapertitle">Convolutional Recurrent Neural Networks for Small-Footprint Keyword Spotting</div><div class="cpsessionviewpaperauthor">[[Sercan Ö. Arık|AUTHOR Sercan Ö. Arık]], [[Markus Kliegl|AUTHOR Markus Kliegl]], [[Rewon Child|AUTHOR Rewon Child]], [[Joel Hestness|AUTHOR Joel Hestness]], [[Andrew Gibiansky|AUTHOR Andrew Gibiansky]], [[Chris Fougner|AUTHOR Chris Fougner]], [[Ryan Prenger|AUTHOR Ryan Prenger]], [[Adam Coates|AUTHOR Adam Coates]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171233.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-1-6|PAPER Tue-P-4-1-6 — Deep Activation Mixture Model for Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Deep Activation Mixture Model for Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Chunyang Wu|AUTHOR Chunyang Wu]], [[Mark J.F. Gales|AUTHOR Mark J.F. Gales]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170920.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-1-7|PAPER Tue-P-4-1-7 — Ensembles of Multi-Scale VGG Acoustic Models]]</div>|<div class="cpsessionviewpapertitle">Ensembles of Multi-Scale VGG Acoustic Models</div><div class="cpsessionviewpaperauthor">[[Michael Heck|AUTHOR Michael Heck]], [[Masayuki Suzuki|AUTHOR Masayuki Suzuki]], [[Takashi Fukuda|AUTHOR Takashi Fukuda]], [[Gakuto Kurata|AUTHOR Gakuto Kurata]], [[Satoshi Nakamura|AUTHOR Satoshi Nakamura]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170338.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-1-8|PAPER Tue-P-4-1-8 — Training Context-Dependent DNN Acoustic Models Using Probabilistic Sampling]]</div>|<div class="cpsessionviewpapertitle">Training Context-Dependent DNN Acoustic Models Using Probabilistic Sampling</div><div class="cpsessionviewpaperauthor">[[Tamás Grósz|AUTHOR Tamás Grósz]], [[Gábor Gosztolya|AUTHOR Gábor Gosztolya]], [[László Tóth|AUTHOR László Tóth]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170899.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-1-9|PAPER Tue-P-4-1-9 — A Comparative Evaluation of GMM-Free State Tying Methods for ASR]]</div>|<div class="cpsessionviewpapertitle">A Comparative Evaluation of GMM-Free State Tying Methods for ASR</div><div class="cpsessionviewpaperauthor">[[Tamás Grósz|AUTHOR Tamás Grósz]], [[Gábor Gosztolya|AUTHOR Gábor Gosztolya]], [[László Tóth|AUTHOR László Tóth]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Tuesday, 22 Aug. 2017, Poster 2|<|
|Chair: |Karen Livescu|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171323.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-2-1|PAPER Tue-P-4-2-1 — Backstitch: Counteracting Finite-Sample Bias via Negative Steps]]</div>|<div class="cpsessionviewpapertitle">Backstitch: Counteracting Finite-Sample Bias via Negative Steps</div><div class="cpsessionviewpaperauthor">[[Yiming Wang|AUTHOR Yiming Wang]], [[Vijayaditya Peddinti|AUTHOR Vijayaditya Peddinti]], [[Hainan Xu|AUTHOR Hainan Xu]], [[Xiaohui Zhang|AUTHOR Xiaohui Zhang]], [[Daniel Povey|AUTHOR Daniel Povey]], [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170779.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-2-2|PAPER Tue-P-4-2-2 — Node Pruning Based on Entropy of Weights and Node Activity for Small-Footprint Acoustic Model Based on Deep Neural Networks]]</div>|<div class="cpsessionviewpapertitle">Node Pruning Based on Entropy of Weights and Node Activity for Small-Footprint Acoustic Model Based on Deep Neural Networks</div><div class="cpsessionviewpaperauthor">[[Ryu Takeda|AUTHOR Ryu Takeda]], [[Kazuhiro Nakadai|AUTHOR Kazuhiro Nakadai]], [[Kazunori Komatani|AUTHOR Kazunori Komatani]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171284.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-2-3|PAPER Tue-P-4-2-3 — End-to-End Training of Acoustic Models for Large Vocabulary Continuous Speech Recognition with TensorFlow]]</div>|<div class="cpsessionviewpapertitle">End-to-End Training of Acoustic Models for Large Vocabulary Continuous Speech Recognition with TensorFlow</div><div class="cpsessionviewpaperauthor">[[Ehsan Variani|AUTHOR Ehsan Variani]], [[Tom Bagby|AUTHOR Tom Bagby]], [[Erik McDermott|AUTHOR Erik McDermott]], [[Michiel Bacchiani|AUTHOR Michiel Bacchiani]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171557.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-2-4|PAPER Tue-P-4-2-4 — An Efficient Phone N-Gram Forward-Backward Computation Using Dense Matrix Multiplication]]</div>|<div class="cpsessionviewpapertitle">An Efficient Phone N-Gram Forward-Backward Computation Using Dense Matrix Multiplication</div><div class="cpsessionviewpaperauthor">[[Khe Chai Sim|AUTHOR Khe Chai Sim]], [[Arun Narayanan|AUTHOR Arun Narayanan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171747.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-2-5|PAPER Tue-P-4-2-5 — Parallel Neural Network Features for Improved Tandem Acoustic Modeling]]</div>|<div class="cpsessionviewpapertitle">Parallel Neural Network Features for Improved Tandem Acoustic Modeling</div><div class="cpsessionviewpaperauthor">[[Zoltán Tüske|AUTHOR Zoltán Tüske]], [[Wilfried Michel|AUTHOR Wilfried Michel]], [[Ralf Schlüter|AUTHOR Ralf Schlüter]], [[Hermann Ney|AUTHOR Hermann Ney]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-2-6|PAPER Tue-P-4-2-6 — Acoustic Feature Learning via Deep Variational Canonical Correlation Analysis]]</div>|<div class="cpsessionviewpapertitle">Acoustic Feature Learning via Deep Variational Canonical Correlation Analysis</div><div class="cpsessionviewpaperauthor">[[Qingming Tang|AUTHOR Qingming Tang]], [[Weiran Wang|AUTHOR Weiran Wang]], [[Karen Livescu|AUTHOR Karen Livescu]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Tuesday, 22 Aug. 2017, Poster 3|<|
|Chair: |Kristiina Jokinen|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170651.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-3-1|PAPER Tue-P-4-3-1 — Online End-of-Turn Detection from Speech Based on Stacked Time-Asynchronous Sequential Networks]]</div>|<div class="cpsessionviewpapertitle">Online End-of-Turn Detection from Speech Based on Stacked Time-Asynchronous Sequential Networks</div><div class="cpsessionviewpaperauthor">[[Ryo Masumura|AUTHOR Ryo Masumura]], [[Taichi Asami|AUTHOR Taichi Asami]], [[Hirokazu Masataki|AUTHOR Hirokazu Masataki]], [[Ryo Ishii|AUTHOR Ryo Ishii]], [[Ryuichiro Higashinaka|AUTHOR Ryuichiro Higashinaka]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171176.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-3-2|PAPER Tue-P-4-3-2 — Improving Prediction of Speech Activity Using Multi-Participant Respiratory State]]</div>|<div class="cpsessionviewpapertitle">Improving Prediction of Speech Activity Using Multi-Participant Respiratory State</div><div class="cpsessionviewpaperauthor">[[Marcin Włodarczak|AUTHOR Marcin Włodarczak]], [[Kornel Laskowski|AUTHOR Kornel Laskowski]], [[Mattias Heldner|AUTHOR Mattias Heldner]], [[Kätlin Aare|AUTHOR Kätlin Aare]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171495.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-3-3|PAPER Tue-P-4-3-3 — Turn-Taking Offsets and Dialogue Context]]</div>|<div class="cpsessionviewpapertitle">Turn-Taking Offsets and Dialogue Context</div><div class="cpsessionviewpaperauthor">[[Peter A. Heeman|AUTHOR Peter A. Heeman]], [[Rebecca Lunsford|AUTHOR Rebecca Lunsford]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171593.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-3-4|PAPER Tue-P-4-3-4 — Towards Deep End-of-Turn Prediction for Situated Spoken Dialogue Systems]]</div>|<div class="cpsessionviewpapertitle">Towards Deep End-of-Turn Prediction for Situated Spoken Dialogue Systems</div><div class="cpsessionviewpaperauthor">[[Angelika Maier|AUTHOR Angelika Maier]], [[Julian Hough|AUTHOR Julian Hough]], [[David Schlangen|AUTHOR David Schlangen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170837.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-3-5|PAPER Tue-P-4-3-5 — End-of-Utterance Prediction by Prosodic Features and Phrase-Dependency Structure in Spontaneous Japanese Speech]]</div>|<div class="cpsessionviewpapertitle">End-of-Utterance Prediction by Prosodic Features and Phrase-Dependency Structure in Spontaneous Japanese Speech</div><div class="cpsessionviewpaperauthor">[[Yuichi Ishimoto|AUTHOR Yuichi Ishimoto]], [[Takehiro Teraoka|AUTHOR Takehiro Teraoka]], [[Mika Enomoto|AUTHOR Mika Enomoto]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-3-6|PAPER Tue-P-4-3-6 — Turn-Taking Estimation Model Based on Joint Embedding of Lexical and Prosodic Contents]]</div>|<div class="cpsessionviewpapertitle">Turn-Taking Estimation Model Based on Joint Embedding of Lexical and Prosodic Contents</div><div class="cpsessionviewpaperauthor">[[Chaoran Liu|AUTHOR Chaoran Liu]], [[Carlos Ishi|AUTHOR Carlos Ishi]], [[Hiroshi Ishiguro|AUTHOR Hiroshi Ishiguro]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170457.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-3-7|PAPER Tue-P-4-3-7 — Social Signal Detection in Spontaneous Dialogue Using Bidirectional LSTM-CTC]]</div>|<div class="cpsessionviewpapertitle">Social Signal Detection in Spontaneous Dialogue Using Bidirectional LSTM-CTC</div><div class="cpsessionviewpaperauthor">[[Hirofumi Inaguma|AUTHOR Hirofumi Inaguma]], [[Koji Inoue|AUTHOR Koji Inoue]], [[Masato Mimura|AUTHOR Masato Mimura]], [[Tatsuya Kawahara|AUTHOR Tatsuya Kawahara]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171568.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-3-8|PAPER Tue-P-4-3-8 — Entrainment in Multi-Party Spoken Dialogues at Multiple Linguistic Levels]]</div>|<div class="cpsessionviewpapertitle">Entrainment in Multi-Party Spoken Dialogues at Multiple Linguistic Levels</div><div class="cpsessionviewpaperauthor">[[Zahra Rahimi|AUTHOR Zahra Rahimi]], [[Anish Kumar|AUTHOR Anish Kumar]], [[Diane Litman|AUTHOR Diane Litman]], [[Susannah Paletz|AUTHOR Susannah Paletz]], [[Mingzhi Yu|AUTHOR Mingzhi Yu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171604.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-3-9|PAPER Tue-P-4-3-9 — Measuring Synchrony in Task-Based Dialogues]]</div>|<div class="cpsessionviewpapertitle">Measuring Synchrony in Task-Based Dialogues</div><div class="cpsessionviewpaperauthor">[[Justine Reverdy|AUTHOR Justine Reverdy]], [[Carl Vogel|AUTHOR Carl Vogel]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-3-10|PAPER Tue-P-4-3-10 — Sequence to Sequence Modeling for User Simulation in Dialog Systems]]</div>|<div class="cpsessionviewpapertitle">Sequence to Sequence Modeling for User Simulation in Dialog Systems</div><div class="cpsessionviewpaperauthor">[[Paul Crook|AUTHOR Paul Crook]], [[Alex Marin|AUTHOR Alex Marin]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171213.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-3-11|PAPER Tue-P-4-3-11 — Human and Automated Scoring of Fluency, Pronunciation and Intonation During Human–Machine Spoken Dialog Interactions]]</div>|<div class="cpsessionviewpapertitle">Human and Automated Scoring of Fluency, Pronunciation and Intonation During Human–Machine Spoken Dialog Interactions</div><div class="cpsessionviewpaperauthor">[[Vikram Ramanarayanan|AUTHOR Vikram Ramanarayanan]], [[Patrick L. Lange|AUTHOR Patrick L. Lange]], [[Keelan Evanini|AUTHOR Keelan Evanini]], [[Hillary R. Molloy|AUTHOR Hillary R. Molloy]], [[David Suendermann-Oeft|AUTHOR David Suendermann-Oeft]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170725.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-3-12|PAPER Tue-P-4-3-12 — Hierarchical LSTMs with Joint Learning for Estimating Customer Satisfaction from Contact Center Calls]]</div>|<div class="cpsessionviewpapertitle">Hierarchical LSTMs with Joint Learning for Estimating Customer Satisfaction from Contact Center Calls</div><div class="cpsessionviewpaperauthor">[[Atsushi Ando|AUTHOR Atsushi Ando]], [[Ryo Masumura|AUTHOR Ryo Masumura]], [[Hosana Kamiyama|AUTHOR Hosana Kamiyama]], [[Satoshi Kobashikawa|AUTHOR Satoshi Kobashikawa]], [[Yushi Aono|AUTHOR Yushi Aono]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171032.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-3-13|PAPER Tue-P-4-3-13 — Domain-Independent User Satisfaction Reward Estimation for Dialogue Policy Learning]]</div>|<div class="cpsessionviewpapertitle">Domain-Independent User Satisfaction Reward Estimation for Dialogue Policy Learning</div><div class="cpsessionviewpaperauthor">[[Stefan Ultes|AUTHOR Stefan Ultes]], [[Paweł Budzianowski|AUTHOR Paweł Budzianowski]], [[Iñigo Casanueva|AUTHOR Iñigo Casanueva]], [[Nikola Mrkšić|AUTHOR Nikola Mrkšić]], [[Lina Rojas-Barahona|AUTHOR Lina Rojas-Barahona]], [[Pei-Hao Su|AUTHOR Pei-Hao Su]], [[Tsung-Hsien Wen|AUTHOR Tsung-Hsien Wen]], [[Milica Gašić|AUTHOR Milica Gašić]], [[Steve Young|AUTHOR Steve Young]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-3-14|PAPER Tue-P-4-3-14 — Analysis of the Relationship Between Prosodic Features of Fillers and its Forms or Occurrence Positions]]</div>|<div class="cpsessionviewpapertitle">Analysis of the Relationship Between Prosodic Features of Fillers and its Forms or Occurrence Positions</div><div class="cpsessionviewpaperauthor">[[Shizuka Nakamura|AUTHOR Shizuka Nakamura]], [[Ryosuke Nakanishi|AUTHOR Ryosuke Nakanishi]], [[Katsuya Takanashi|AUTHOR Katsuya Takanashi]], [[Tatsuya Kawahara|AUTHOR Tatsuya Kawahara]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171413.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-4-3-15|PAPER Tue-P-4-3-15 — Cross-Subject Continuous Emotion Recognition Using Speech and Body Motion in Dyadic Interactions]]</div>|<div class="cpsessionviewpapertitle">Cross-Subject Continuous Emotion Recognition Using Speech and Body Motion in Dyadic Interactions</div><div class="cpsessionviewpaperauthor">[[Syeda Narjis Fatima|AUTHOR Syeda Narjis Fatima]], [[Engin Erzin|AUTHOR Engin Erzin]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Tuesday, 22 Aug. 2017, Poster 1|<|
|Chair: |Aoju Chen|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170379.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-1-1|PAPER Tue-P-5-1-1 — An Automatically Aligned Corpus of Child-Directed Speech]]</div>|<div class="cpsessionviewpapertitle">An Automatically Aligned Corpus of Child-Directed Speech</div><div class="cpsessionviewpaperauthor">[[Micha Elsner|AUTHOR Micha Elsner]], [[Kiwako Ito|AUTHOR Kiwako Ito]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170009.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-1-2|PAPER Tue-P-5-1-2 — A Comparison of Danish Listeners’ Processing Cost in Judging the Truth Value of Norwegian, Swedish, and English Sentences]]</div>|<div class="cpsessionviewpapertitle">A Comparison of Danish Listeners’ Processing Cost in Judging the Truth Value of Norwegian, Swedish, and English Sentences</div><div class="cpsessionviewpaperauthor">[[Ocke-Schwen Bohn|AUTHOR Ocke-Schwen Bohn]], [[Trine Askjær-Jørgensen|AUTHOR Trine Askjær-Jørgensen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171282.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-1-3|PAPER Tue-P-5-1-3 — On the Role of Temporal Variability in the Acquisition of the German Vowel Length Contrast]]</div>|<div class="cpsessionviewpapertitle">On the Role of Temporal Variability in the Acquisition of the German Vowel Length Contrast</div><div class="cpsessionviewpaperauthor">[[Felicitas Kleber|AUTHOR Felicitas Kleber]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171607.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-1-4|PAPER Tue-P-5-1-4 — A Data-Driven Approach for Perceptually Validated Acoustic Features for Children’s Sibilant Fricative Productions]]</div>|<div class="cpsessionviewpapertitle">A Data-Driven Approach for Perceptually Validated Acoustic Features for Children’s Sibilant Fricative Productions</div><div class="cpsessionviewpaperauthor">[[Patrick F. Reidy|AUTHOR Patrick F. Reidy]], [[Mary E. Beckman|AUTHOR Mary E. Beckman]], [[Jan Edwards|AUTHOR Jan Edwards]], [[Benjamin Munson|AUTHOR Benjamin Munson]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170064.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-1-5|PAPER Tue-P-5-1-5 — Proficiency Assessment of ESL Learner’s Sentence Prosody with TTS Synthesized Voice as Reference]]</div>|<div class="cpsessionviewpapertitle">Proficiency Assessment of ESL Learner’s Sentence Prosody with TTS Synthesized Voice as Reference</div><div class="cpsessionviewpaperauthor">[[Yujia Xiao|AUTHOR Yujia Xiao]], [[Frank K. Soong|AUTHOR Frank K. Soong]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170143.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-1-6|PAPER Tue-P-5-1-6 — Mechanisms of Tone Sandhi Rule Application by Non-Native Speakers]]</div>|<div class="cpsessionviewpapertitle">Mechanisms of Tone Sandhi Rule Application by Non-Native Speakers</div><div class="cpsessionviewpaperauthor">[[Si Chen|AUTHOR Si Chen]], [[Yunjuan He|AUTHOR Yunjuan He]], [[Chun Wah Yuen|AUTHOR Chun Wah Yuen]], [[Bei Li|AUTHOR Bei Li]], [[Yike Yang|AUTHOR Yike Yang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170289.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-1-7|PAPER Tue-P-5-1-7 — Changes in Early L2 Cue-Weighting of Non-Native Speech: Evidence from Learners of Mandarin Chinese]]</div>|<div class="cpsessionviewpapertitle">Changes in Early L2 Cue-Weighting of Non-Native Speech: Evidence from Learners of Mandarin Chinese</div><div class="cpsessionviewpaperauthor">[[Seth Wiener|AUTHOR Seth Wiener]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171600.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-1-8|PAPER Tue-P-5-1-8 — Directing Attention During Perceptual Training: A Preliminary Study of Phonetic Learning in Southern Min by Mandarin Speakers]]</div>|<div class="cpsessionviewpapertitle">Directing Attention During Perceptual Training: A Preliminary Study of Phonetic Learning in Southern Min by Mandarin Speakers</div><div class="cpsessionviewpaperauthor">[[Ying Chen|AUTHOR Ying Chen]], [[Eric Pederson|AUTHOR Eric Pederson]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170332.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-1-9|PAPER Tue-P-5-1-9 — Prosody Analysis of L2 English for Naturalness Evaluation Through Speech Modification]]</div>|<div class="cpsessionviewpapertitle">Prosody Analysis of L2 English for Naturalness Evaluation Through Speech Modification</div><div class="cpsessionviewpaperauthor">[[Dean Luo|AUTHOR Dean Luo]], [[Ruxin Luo|AUTHOR Ruxin Luo]], [[Lixin Wang|AUTHOR Lixin Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170337.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-1-10|PAPER Tue-P-5-1-10 — Measuring Encoding Efficiency in Swedish and English Language Learner Speech Production]]</div>|<div class="cpsessionviewpapertitle">Measuring Encoding Efficiency in Swedish and English Language Learner Speech Production</div><div class="cpsessionviewpaperauthor">[[Gintarė Grigonytė|AUTHOR Gintarė Grigonytė]], [[Gerold Schneider|AUTHOR Gerold Schneider]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170369.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-1-11|PAPER Tue-P-5-1-11 — Lexical Adaptation to a Novel Accent in German: A Comparison Between German, Swedish, and Finnish Listeners]]</div>|<div class="cpsessionviewpapertitle">Lexical Adaptation to a Novel Accent in German: A Comparison Between German, Swedish, and Finnish Listeners</div><div class="cpsessionviewpaperauthor">[[Adriana Hanulíková|AUTHOR Adriana Hanulíková]], [[Jenny Ekström|AUTHOR Jenny Ekström]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170743.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-1-12|PAPER Tue-P-5-1-12 — Qualitative Differences in L3 Learners’ Neurophysiological Response to L1 versus L2 Transfer]]</div>|<div class="cpsessionviewpapertitle">Qualitative Differences in L3 Learners’ Neurophysiological Response to L1 versus L2 Transfer</div><div class="cpsessionviewpaperauthor">[[Alejandra Keidel Fernández|AUTHOR Alejandra Keidel Fernández]], [[Thomas Hörberg|AUTHOR Thomas Hörberg]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171052.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-1-13|PAPER Tue-P-5-1-13 — Articulation Rate in Swedish Child-Directed Speech Increases as a Function of the Age of the Child Even When Surprisal is Controlled for]]</div>|<div class="cpsessionviewpapertitle">Articulation Rate in Swedish Child-Directed Speech Increases as a Function of the Age of the Child Even When Surprisal is Controlled for</div><div class="cpsessionviewpaperauthor">[[Johan Sjons|AUTHOR Johan Sjons]], [[Thomas Hörberg|AUTHOR Thomas Hörberg]], [[Robert Östling|AUTHOR Robert Östling]], [[Johannes Bjerva|AUTHOR Johannes Bjerva]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170714.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-1-14|PAPER Tue-P-5-1-14 — The Relationship Between the Perception and Production of Non-Native Tones]]</div>|<div class="cpsessionviewpapertitle">The Relationship Between the Perception and Production of Non-Native Tones</div><div class="cpsessionviewpaperauthor">[[Kaile Zhang|AUTHOR Kaile Zhang]], [[Gang Peng|AUTHOR Gang Peng]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171110.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-1-15|PAPER Tue-P-5-1-15 — MMN Responses in Adults After Exposure to Bimodal and Unimodal Frequency Distributions of Rotated Speech]]</div>|<div class="cpsessionviewpapertitle">MMN Responses in Adults After Exposure to Bimodal and Unimodal Frequency Distributions of Rotated Speech</div><div class="cpsessionviewpaperauthor">[[Ellen Marklund|AUTHOR Ellen Marklund]], [[Elísabet Eir Cortes|AUTHOR Elísabet Eir Cortes]], [[Johan Sjons|AUTHOR Johan Sjons]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Tuesday, 22 Aug. 2017, Poster 2|<|
|Chair: |Timothy Bunnell|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-2-1|PAPER Tue-P-5-2-1 — Float Like a Butterfly Sting Like a Bee: Changes in Speech Preceded Parkinsonism Diagnosis for Muhammad Ali]]</div>|<div class="cpsessionviewpapertitle">Float Like a Butterfly Sting Like a Bee: Changes in Speech Preceded Parkinsonism Diagnosis for Muhammad Ali</div><div class="cpsessionviewpaperauthor">[[Visar Berisha|AUTHOR Visar Berisha]], [[Julie Liss|AUTHOR Julie Liss]], [[Timothy Huston|AUTHOR Timothy Huston]], [[Alan Wisler|AUTHOR Alan Wisler]], [[Yishan Jiao|AUTHOR Yishan Jiao]], [[Jonathan Eig|AUTHOR Jonathan Eig]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170335.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-2-2|PAPER Tue-P-5-2-2 — Cepstral and Entropy Analyses in Vowels Excerpted from Continuous Speech of Dysphonic and Control Speakers]]</div>|<div class="cpsessionviewpapertitle">Cepstral and Entropy Analyses in Vowels Excerpted from Continuous Speech of Dysphonic and Control Speakers</div><div class="cpsessionviewpaperauthor">[[Antonella Castellana|AUTHOR Antonella Castellana]], [[Andreas Selamtzis|AUTHOR Andreas Selamtzis]], [[Giampiero Salvi|AUTHOR Giampiero Salvi]], [[Alessio Carullo|AUTHOR Alessio Carullo]], [[Arianna Astolfi|AUTHOR Arianna Astolfi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170478.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-2-3|PAPER Tue-P-5-2-3 — Classification of Bulbar ALS from Kinematic Features of the Jaw and Lips: Towards Computer-Mediated Assessment]]</div>|<div class="cpsessionviewpapertitle">Classification of Bulbar ALS from Kinematic Features of the Jaw and Lips: Towards Computer-Mediated Assessment</div><div class="cpsessionviewpaperauthor">[[Andrea Bandini|AUTHOR Andrea Bandini]], [[Jordan R. Green|AUTHOR Jordan R. Green]], [[Lorne Zinman|AUTHOR Lorne Zinman]], [[Yana Yunusova|AUTHOR Yana Yunusova]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170589.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-2-4|PAPER Tue-P-5-2-4 — Zero Frequency Filter Based Analysis of Voice Disorders]]</div>|<div class="cpsessionviewpapertitle">Zero Frequency Filter Based Analysis of Voice Disorders</div><div class="cpsessionviewpaperauthor">[[Nagaraj Adiga|AUTHOR Nagaraj Adiga]], [[Vikram C.M.|AUTHOR Vikram C.M.]], [[Keerthi Pullela|AUTHOR Keerthi Pullela]], [[S.R. Mahadeva Prasanna|AUTHOR S.R. Mahadeva Prasanna]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-2-5|PAPER Tue-P-5-2-5 — Hypernasality Severity Analysis in Cleft Lip and Palate Speech Using Vowel Space Area]]</div>|<div class="cpsessionviewpapertitle">Hypernasality Severity Analysis in Cleft Lip and Palate Speech Using Vowel Space Area</div><div class="cpsessionviewpaperauthor">[[Nikitha K.|AUTHOR Nikitha K.]], [[Sishir Kalita|AUTHOR Sishir Kalita]], [[C.M. Vikram|AUTHOR C.M. Vikram]], [[M. Pushpavathi|AUTHOR M. Pushpavathi]], [[S.R. Mahadeva Prasanna|AUTHOR S.R. Mahadeva Prasanna]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171363.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-2-6|PAPER Tue-P-5-2-6 — Automatic Prediction of Speech Evaluation Metrics for Dysarthric Speech]]</div>|<div class="cpsessionviewpapertitle">Automatic Prediction of Speech Evaluation Metrics for Dysarthric Speech</div><div class="cpsessionviewpaperauthor">[[Imed Laaridh|AUTHOR Imed Laaridh]], [[Waad Ben Kheder|AUTHOR Waad Ben Kheder]], [[Corinne Fredouille|AUTHOR Corinne Fredouille]], [[Christine Meunier|AUTHOR Christine Meunier]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170416.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-2-7|PAPER Tue-P-5-2-7 — Apkinson — A Mobile Monitoring Solution for Parkinson’s Disease]]</div>|<div class="cpsessionviewpapertitle">Apkinson — A Mobile Monitoring Solution for Parkinson’s Disease</div><div class="cpsessionviewpaperauthor">[[Philipp Klumpp|AUTHOR Philipp Klumpp]], [[Thomas Janu|AUTHOR Thomas Janu]], [[Tomás Arias-Vergara|AUTHOR Tomás Arias-Vergara]], [[J.C. Vásquez-Correa|AUTHOR J.C. Vásquez-Correa]], [[Juan Rafael Orozco-Arroyave|AUTHOR Juan Rafael Orozco-Arroyave]], [[Elmar Nöth|AUTHOR Elmar Nöth]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170762.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-2-8|PAPER Tue-P-5-2-8 — Dysprosody Differentiate Between Parkinson’s Disease, Progressive Supranuclear Palsy, and Multiple System Atrophy]]</div>|<div class="cpsessionviewpapertitle">Dysprosody Differentiate Between Parkinson’s Disease, Progressive Supranuclear Palsy, and Multiple System Atrophy</div><div class="cpsessionviewpaperauthor">[[Jan Hlavnička|AUTHOR Jan Hlavnička]], [[Tereza Tykalová|AUTHOR Tereza Tykalová]], [[Roman Čmejla|AUTHOR Roman Čmejla]], [[Jiří Klempíř|AUTHOR Jiří Klempíř]], [[Evžen Růžička|AUTHOR Evžen Růžička]], [[Jan Rusz|AUTHOR Jan Rusz]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171222.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-2-9|PAPER Tue-P-5-2-9 — Interpretable Objective Assessment of Dysarthric Speech Based on Deep Neural Networks]]</div>|<div class="cpsessionviewpapertitle">Interpretable Objective Assessment of Dysarthric Speech Based on Deep Neural Networks</div><div class="cpsessionviewpaperauthor">[[Ming Tu|AUTHOR Ming Tu]], [[Visar Berisha|AUTHOR Visar Berisha]], [[Julie Liss|AUTHOR Julie Liss]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171318.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-2-10|PAPER Tue-P-5-2-10 — Deep Autoencoder Based Speech Features for Improved Dysarthric Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Deep Autoencoder Based Speech Features for Improved Dysarthric Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Bhavik Vachhani|AUTHOR Bhavik Vachhani]], [[Chitralekha Bhat|AUTHOR Chitralekha Bhat]], [[Biswajit Das|AUTHOR Biswajit Das]], [[Sunil Kumar Kopparapu|AUTHOR Sunil Kumar Kopparapu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171740.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-2-11|PAPER Tue-P-5-2-11 — Prediction of Speech Delay from Acoustic Measurements]]</div>|<div class="cpsessionviewpapertitle">Prediction of Speech Delay from Acoustic Measurements</div><div class="cpsessionviewpaperauthor">[[Jason Lilley|AUTHOR Jason Lilley]], [[Madhavi Ratnagiri|AUTHOR Madhavi Ratnagiri]], [[H. Timothy Bunnell|AUTHOR H. Timothy Bunnell]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170329.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-2-12|PAPER Tue-P-5-2-12 — The Frequency Range of “The Ling Six Sounds” in Standard Chinese]]</div>|<div class="cpsessionviewpapertitle">The Frequency Range of “The Ling Six Sounds” in Standard Chinese</div><div class="cpsessionviewpaperauthor">[[Aijun Li|AUTHOR Aijun Li]], [[Hua Zhang|AUTHOR Hua Zhang]], [[Wen Sun|AUTHOR Wen Sun]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171698.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-2-13|PAPER Tue-P-5-2-13 — Production of Sustained Vowels and Categorical Perception of Tones in Mandarin Among Cochlear-Implanted Children]]</div>|<div class="cpsessionviewpapertitle">Production of Sustained Vowels and Categorical Perception of Tones in Mandarin Among Cochlear-Implanted Children</div><div class="cpsessionviewpaperauthor">[[Wentao Gu|AUTHOR Wentao Gu]], [[Jiao Yin|AUTHOR Jiao Yin]], [[James Mahshie|AUTHOR James Mahshie]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Tuesday, 22 Aug. 2017, Poster 3|<|
|Chair: |Tom Bäckström|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170040.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-3-1|PAPER Tue-P-5-3-1 — Audio Content Based Geotagging in Multimedia]]</div>|<div class="cpsessionviewpapertitle">Audio Content Based Geotagging in Multimedia</div><div class="cpsessionviewpaperauthor">[[Anurag Kumar|AUTHOR Anurag Kumar]], [[Benjamin Elizalde|AUTHOR Benjamin Elizalde]], [[Bhiksha Raj|AUTHOR Bhiksha Raj]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170055.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-3-2|PAPER Tue-P-5-3-2 — Time Delay Histogram Based Speech Source Separation Using a Planar Array]]</div>|<div class="cpsessionviewpapertitle">Time Delay Histogram Based Speech Source Separation Using a Planar Array</div><div class="cpsessionviewpaperauthor">[[Zhaoqiong Huang|AUTHOR Zhaoqiong Huang]], [[Zhanzhong Cao|AUTHOR Zhanzhong Cao]], [[Dongwen Ying|AUTHOR Dongwen Ying]], [[Jielin Pan|AUTHOR Jielin Pan]], [[Yonghong Yan|AUTHOR Yonghong Yan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170135.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-3-3|PAPER Tue-P-5-3-3 — Excitation Source Features for Improving the Detection of Vowel Onset and Offset Points in a Speech Sequence]]</div>|<div class="cpsessionviewpapertitle">Excitation Source Features for Improving the Detection of Vowel Onset and Offset Points in a Speech Sequence</div><div class="cpsessionviewpaperauthor">[[Gayadhar Pradhan|AUTHOR Gayadhar Pradhan]], [[Avinash Kumar|AUTHOR Avinash Kumar]], [[S. Shahnawazuddin|AUTHOR S. Shahnawazuddin]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170189.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-3-4|PAPER Tue-P-5-3-4 — A Contrast Function and Algorithm for Blind Separation of Audio Signals]]</div>|<div class="cpsessionviewpapertitle">A Contrast Function and Algorithm for Blind Separation of Audio Signals</div><div class="cpsessionviewpaperauthor">[[Wei Gao|AUTHOR Wei Gao]], [[Roberto Togneri|AUTHOR Roberto Togneri]], [[Victor Sreeram|AUTHOR Victor Sreeram]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170199.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-3-5|PAPER Tue-P-5-3-5 — Weighted Spatial Covariance Matrix Estimation for MUSIC Based TDOA Estimation of Speech Source]]</div>|<div class="cpsessionviewpapertitle">Weighted Spatial Covariance Matrix Estimation for MUSIC Based TDOA Estimation of Speech Source</div><div class="cpsessionviewpaperauthor">[[Chenglin Xu|AUTHOR Chenglin Xu]], [[Xiong Xiao|AUTHOR Xiong Xiao]], [[Sining Sun|AUTHOR Sining Sun]], [[Wei Rao|AUTHOR Wei Rao]], [[Eng Siong Chng|AUTHOR Eng Siong Chng]], [[Haizhou Li|AUTHOR Haizhou Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170229.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-3-6|PAPER Tue-P-5-3-6 — Speaker Direction-of-Arrival Estimation Based on Frequency-Independent Beampattern]]</div>|<div class="cpsessionviewpapertitle">Speaker Direction-of-Arrival Estimation Based on Frequency-Independent Beampattern</div><div class="cpsessionviewpaperauthor">[[Feng Guo|AUTHOR Feng Guo]], [[Yuhang Cao|AUTHOR Yuhang Cao]], [[Zheng Liu|AUTHOR Zheng Liu]], [[Jiaen Liang|AUTHOR Jiaen Liang]], [[Baoqing Li|AUTHOR Baoqing Li]], [[Xiaobing Yuan|AUTHOR Xiaobing Yuan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170271.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-3-7|PAPER Tue-P-5-3-7 — A Mask Estimation Method Integrating Data Field Model for Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">A Mask Estimation Method Integrating Data Field Model for Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Xianyun Wang|AUTHOR Xianyun Wang]], [[Changchun Bao|AUTHOR Changchun Bao]], [[Feng Bao|AUTHOR Feng Bao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170496.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-3-8|PAPER Tue-P-5-3-8 — Improved End-of-Query Detection for Streaming Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Improved End-of-Query Detection for Streaming Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Matt Shannon|AUTHOR Matt Shannon]], [[Gabor Simko|AUTHOR Gabor Simko]], [[Shuo-Yiin Chang|AUTHOR Shuo-Yiin Chang]], [[Carolina Parada|AUTHOR Carolina Parada]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170593.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-3-9|PAPER Tue-P-5-3-9 — Using Approximated Auditory Roughness as a Pre-Filtering Feature for Human Screaming and Affective Speech AED]]</div>|<div class="cpsessionviewpapertitle">Using Approximated Auditory Roughness as a Pre-Filtering Feature for Human Screaming and Affective Speech AED</div><div class="cpsessionviewpaperauthor">[[Di He|AUTHOR Di He]], [[Zuofu Cheng|AUTHOR Zuofu Cheng]], [[Mark Hasegawa-Johnson|AUTHOR Mark Hasegawa-Johnson]], [[Deming Chen|AUTHOR Deming Chen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170754.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-3-10|PAPER Tue-P-5-3-10 — Improving Source Separation via Multi-Speaker Representations]]</div>|<div class="cpsessionviewpapertitle">Improving Source Separation via Multi-Speaker Representations</div><div class="cpsessionviewpaperauthor">[[Jeroen Zegers|AUTHOR Jeroen Zegers]], [[Hugo Van hamme|AUTHOR Hugo Van hamme]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170940.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-3-11|PAPER Tue-P-5-3-11 — Multiple Sound Source Counting and Localization Based on Spatial Principal Eigenvector]]</div>|<div class="cpsessionviewpapertitle">Multiple Sound Source Counting and Localization Based on Spatial Principal Eigenvector</div><div class="cpsessionviewpaperauthor">[[Bing Yang|AUTHOR Bing Yang]], [[Hong Liu|AUTHOR Hong Liu]], [[Cheng Pang|AUTHOR Cheng Pang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170954.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-3-12|PAPER Tue-P-5-3-12 — Subband Selection for Binaural Speech Source Localization]]</div>|<div class="cpsessionviewpapertitle">Subband Selection for Binaural Speech Source Localization</div><div class="cpsessionviewpaperauthor">[[Girija Ramesan Karthik|AUTHOR Girija Ramesan Karthik]], [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171227.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-3-13|PAPER Tue-P-5-3-13 — Unmixing Convolutive Mixtures by Exploiting Amplitude Co-Modulation: Methods and Evaluation on Mandarin Speech Recordings]]</div>|<div class="cpsessionviewpapertitle">Unmixing Convolutive Mixtures by Exploiting Amplitude Co-Modulation: Methods and Evaluation on Mandarin Speech Recordings</div><div class="cpsessionviewpaperauthor">[[Bo-Rui Chen|AUTHOR Bo-Rui Chen]], [[Huang-Yi Lee|AUTHOR Huang-Yi Lee]], [[Yi-Wen Liu|AUTHOR Yi-Wen Liu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171573.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-3-14|PAPER Tue-P-5-3-14 — Bimodal Recurrent Neural Network for Audiovisual Voice Activity Detection]]</div>|<div class="cpsessionviewpapertitle">Bimodal Recurrent Neural Network for Audiovisual Voice Activity Detection</div><div class="cpsessionviewpaperauthor">[[Fei Tao|AUTHOR Fei Tao]], [[Carlos Busso|AUTHOR Carlos Busso]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171673.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-3-15|PAPER Tue-P-5-3-15 — Domain-Specific Utterance End-Point Detection for Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Domain-Specific Utterance End-Point Detection for Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Roland Maas|AUTHOR Roland Maas]], [[Ariya Rastrow|AUTHOR Ariya Rastrow]], [[Kyle Goehner|AUTHOR Kyle Goehner]], [[Gautam Tiwari|AUTHOR Gautam Tiwari]], [[Shaun Joseph|AUTHOR Shaun Joseph]], [[Björn Hoffmeister|AUTHOR Björn Hoffmeister]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171760.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-3-16|PAPER Tue-P-5-3-16 — Speech Detection and Enhancement Using Single Microphone for Distant Speech Applications in Reverberant Environments]]</div>|<div class="cpsessionviewpapertitle">Speech Detection and Enhancement Using Single Microphone for Distant Speech Applications in Reverberant Environments</div><div class="cpsessionviewpaperauthor">[[Vinay Kothapally|AUTHOR Vinay Kothapally]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Tuesday, 22 Aug. 2017, Poster 4|<|
|Chair: |Timo Gerkmann|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-4-1|PAPER Tue-P-5-4-1 — A Post-Filtering Approach Based on Locally Linear Embedding Difference Compensation for Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">A Post-Filtering Approach Based on Locally Linear Embedding Difference Compensation for Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Yi-Chiao Wu|AUTHOR Yi-Chiao Wu]], [[Hsin-Te Hwang|AUTHOR Hsin-Te Hwang]], [[Syu-Siang Wang|AUTHOR Syu-Siang Wang]], [[Chin-Cheng Hsu|AUTHOR Chin-Cheng Hsu]], [[Yu Tsao|AUTHOR Yu Tsao]], [[Hsin-Min Wang|AUTHOR Hsin-Min Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-4-2|PAPER Tue-P-5-4-2 — Multi-Target Ensemble Learning for Monaural Speech Separation]]</div>|<div class="cpsessionviewpapertitle">Multi-Target Ensemble Learning for Monaural Speech Separation</div><div class="cpsessionviewpaperauthor">[[Hui Zhang|AUTHOR Hui Zhang]], [[Xueliang Zhang|AUTHOR Xueliang Zhang]], [[Guanglai Gao|AUTHOR Guanglai Gao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170543.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-4-3|PAPER Tue-P-5-4-3 — Improved Example-Based Speech Enhancement by Using Deep Neural Network Acoustic Model for Noise Robust Example Search]]</div>|<div class="cpsessionviewpapertitle">Improved Example-Based Speech Enhancement by Using Deep Neural Network Acoustic Model for Noise Robust Example Search</div><div class="cpsessionviewpaperauthor">[[Atsunori Ogawa|AUTHOR Atsunori Ogawa]], [[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Tomohiro Nakatani|AUTHOR Tomohiro Nakatani]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-4-4|PAPER Tue-P-5-4-4 — Subjective Intelligibility of Deep Neural Network-Based Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">Subjective Intelligibility of Deep Neural Network-Based Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Femke B. Gelderblom|AUTHOR Femke B. Gelderblom]], [[Tron V. Tronstad|AUTHOR Tron V. Tronstad]], [[Erlend Magnus Viggen|AUTHOR Erlend Magnus Viggen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171157.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-4-5|PAPER Tue-P-5-4-5 — Real-Time Modulation Enhancement of Temporal Envelopes for Increasing Speech Intelligibility]]</div>|<div class="cpsessionviewpapertitle">Real-Time Modulation Enhancement of Temporal Envelopes for Increasing Speech Intelligibility</div><div class="cpsessionviewpaperauthor">[[Maria Koutsogiannaki|AUTHOR Maria Koutsogiannaki]], [[Holly Francois|AUTHOR Holly Francois]], [[Kihyun Choo|AUTHOR Kihyun Choo]], [[Eunmi Oh|AUTHOR Eunmi Oh]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-4-6|PAPER Tue-P-5-4-6 — On the Influence of Modifying Magnitude and Phase Spectrum to Enhance Noisy Speech Signals]]</div>|<div class="cpsessionviewpapertitle">On the Influence of Modifying Magnitude and Phase Spectrum to Enhance Noisy Speech Signals</div><div class="cpsessionviewpaperauthor">[[Hans-Günter Hirsch|AUTHOR Hans-Günter Hirsch]], [[Michael Gref|AUTHOR Michael Gref]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171243.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-4-7|PAPER Tue-P-5-4-7 — MixMax Approximation as a Super-Gaussian Log-Spectral Amplitude Estimator for Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">MixMax Approximation as a Super-Gaussian Log-Spectral Amplitude Estimator for Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Robert Rehr|AUTHOR Robert Rehr]], [[Timo Gerkmann|AUTHOR Timo Gerkmann]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171257.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-4-8|PAPER Tue-P-5-4-8 — Binary Mask Estimation Strategies for Constrained Imputation-Based Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">Binary Mask Estimation Strategies for Constrained Imputation-Based Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Ricard Marxer|AUTHOR Ricard Marxer]], [[Jon Barker|AUTHOR Jon Barker]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-4-9|PAPER Tue-P-5-4-9 — A Fully Convolutional Neural Network for Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">A Fully Convolutional Neural Network for Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Se Rim Park|AUTHOR Se Rim Park]], [[Jin Won Lee|AUTHOR Jin Won Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171492.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-4-10|PAPER Tue-P-5-4-10 — Speech Enhancement Using Non-Negative Spectrogram Models with Mel-Generalized Cepstral Regularization]]</div>|<div class="cpsessionviewpapertitle">Speech Enhancement Using Non-Negative Spectrogram Models with Mel-Generalized Cepstral Regularization</div><div class="cpsessionviewpaperauthor">[[Li Li|AUTHOR Li Li]], [[Hirokazu Kameoka|AUTHOR Hirokazu Kameoka]], [[Tomoki Toda|AUTHOR Tomoki Toda]], [[Shoji Makino|AUTHOR Shoji Makino]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171504.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-4-11|PAPER Tue-P-5-4-11 — A Comparison of Perceptually Motivated Loss Functions for Binary Mask Estimation in Speech Separation]]</div>|<div class="cpsessionviewpapertitle">A Comparison of Perceptually Motivated Loss Functions for Binary Mask Estimation in Speech Separation</div><div class="cpsessionviewpaperauthor">[[Danny Websdale|AUTHOR Danny Websdale]], [[Ben Milner|AUTHOR Ben Milner]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171620.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-4-12|PAPER Tue-P-5-4-12 — Conditional Generative Adversarial Networks for Speech Enhancement and Noise-Robust Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Conditional Generative Adversarial Networks for Speech Enhancement and Noise-Robust Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Daniel Michelsanti|AUTHOR Daniel Michelsanti]], [[Zheng-Hua Tan|AUTHOR Zheng-Hua Tan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171672.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-4-13|PAPER Tue-P-5-4-13 — Speech Enhancement Using Bayesian Wavenet]]</div>|<div class="cpsessionviewpapertitle">Speech Enhancement Using Bayesian Wavenet</div><div class="cpsessionviewpaperauthor">[[Kaizhi Qian|AUTHOR Kaizhi Qian]], [[Yang Zhang|AUTHOR Yang Zhang]], [[Shiyu Chang|AUTHOR Shiyu Chang]], [[Xuesong Yang|AUTHOR Xuesong Yang]], [[Dinei Florêncio|AUTHOR Dinei Florêncio]], [[Mark Hasegawa-Johnson|AUTHOR Mark Hasegawa-Johnson]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170297.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-4-14|PAPER Tue-P-5-4-14 — Binaural Reverberant Speech Separation Based on Deep Neural Networks]]</div>|<div class="cpsessionviewpapertitle">Binaural Reverberant Speech Separation Based on Deep Neural Networks</div><div class="cpsessionviewpaperauthor">[[Xueliang Zhang|AUTHOR Xueliang Zhang]], [[DeLiang Wang|AUTHOR DeLiang Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171225.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-P-5-4-15|PAPER Tue-P-5-4-15 — On the Quality and Intelligibility of Noisy Speech Processed for Near-End Listening Enhancement]]</div>|<div class="cpsessionviewpapertitle">On the Quality and Intelligibility of Noisy Speech Processed for Near-End Listening Enhancement</div><div class="cpsessionviewpaperauthor">[[Tudor-Cătălin Zorilă|AUTHOR Tudor-Cătălin Zorilă]], [[Yannis Stylianou|AUTHOR Yannis Stylianou]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, 13:30–15:30, Tuesday, 22 Aug. 2017, E306|<|
| ||
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-S&T-3-A-1|PAPER Tue-S&T-3-A-1 — Applications of the BBN Sage Speech Processing Platform]]</div>|<div class="cpsessionviewpapertitle">Applications of the BBN Sage Speech Processing Platform</div><div class="cpsessionviewpaperauthor">[[Ralf Meermeier|AUTHOR Ralf Meermeier]], [[Sean Colbath|AUTHOR Sean Colbath]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-S&T-3-A-2|PAPER Tue-S&T-3-A-2 — Bob Speaks Kaldi]]</div>|<div class="cpsessionviewpapertitle">Bob Speaks Kaldi</div><div class="cpsessionviewpaperauthor">[[Milos Cernak|AUTHOR Milos Cernak]], [[Alain Komaty|AUTHOR Alain Komaty]], [[Amir Mohammadi|AUTHOR Amir Mohammadi]], [[André Anjos|AUTHOR André Anjos]], [[Sébastien Marcel|AUTHOR Sébastien Marcel]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-S&T-3-A-3|PAPER Tue-S&T-3-A-3 — Real Time Pitch Shifting with Formant Structure Preservation Using the Phase Vocoder]]</div>|<div class="cpsessionviewpapertitle">Real Time Pitch Shifting with Formant Structure Preservation Using the Phase Vocoder</div><div class="cpsessionviewpaperauthor">[[Michał Lenarczyk|AUTHOR Michał Lenarczyk]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-S&T-3-A-4|PAPER Tue-S&T-3-A-4 — A Signal Processing Approach for Speaker Separation Using SFF Analysis]]</div>|<div class="cpsessionviewpapertitle">A Signal Processing Approach for Speaker Separation Using SFF Analysis</div><div class="cpsessionviewpaperauthor">[[Nivedita Chennupati|AUTHOR Nivedita Chennupati]], [[B.H.V.S. Narayana Murthy|AUTHOR B.H.V.S. Narayana Murthy]], [[B. Yegnanarayana|AUTHOR B. Yegnanarayana]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-S&T-3-A-5|PAPER Tue-S&T-3-A-5 — Speech Recognition and Understanding on Hardware-Accelerated DSP]]</div>|<div class="cpsessionviewpapertitle">Speech Recognition and Understanding on Hardware-Accelerated DSP</div><div class="cpsessionviewpaperauthor">[[Georg Stemmer|AUTHOR Georg Stemmer]], [[Munir Georges|AUTHOR Munir Georges]], [[Joachim Hofer|AUTHOR Joachim Hofer]], [[Piotr Rozen|AUTHOR Piotr Rozen]], [[Josef Bauer|AUTHOR Josef Bauer]], [[Jakub Nowicki|AUTHOR Jakub Nowicki]], [[Tobias Bocklet|AUTHOR Tobias Bocklet]], [[Hannah R. Colett|AUTHOR Hannah R. Colett]], [[Ohad Falik|AUTHOR Ohad Falik]], [[Michael Deisher|AUTHOR Michael Deisher]], [[Sylvia J. Downing|AUTHOR Sylvia J. Downing]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172053.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-S&T-3-A-6|PAPER Tue-S&T-3-A-6 — MetaLab: A Repository for Meta-Analyses on Language Development, and More]]</div>|<div class="cpsessionviewpapertitle">MetaLab: A Repository for Meta-Analyses on Language Development, and More</div><div class="cpsessionviewpaperauthor">[[Sho Tsuji|AUTHOR Sho Tsuji]], [[Christina Bergmann|AUTHOR Christina Bergmann]], [[Molly Lewis|AUTHOR Molly Lewis]], [[Mika Braginsky|AUTHOR Mika Braginsky]], [[Page Piccinini|AUTHOR Page Piccinini]], [[Michael C. Frank|AUTHOR Michael C. Frank]], [[Alejandrina Cristia|AUTHOR Alejandrina Cristia]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, 13:30–15:30, Tuesday, 22 Aug. 2017, E397|<|
| ||
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172030.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-S&T-3-B-1|PAPER Tue-S&T-3-B-1 — Evolving Recurrent Neural Networks That Process and Classify Raw Audio in a Streaming Fashion]]</div>|<div class="cpsessionviewpapertitle">Evolving Recurrent Neural Networks That Process and Classify Raw Audio in a Streaming Fashion</div><div class="cpsessionviewpaperauthor">[[Adrien Daniel|AUTHOR Adrien Daniel]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172032.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-S&T-3-B-2|PAPER Tue-S&T-3-B-2 — Combining Gaussian Mixture Models and Segmental Feature Models for Speaker Recognition]]</div>|<div class="cpsessionviewpapertitle">Combining Gaussian Mixture Models and Segmental Feature Models for Speaker Recognition</div><div class="cpsessionviewpaperauthor">[[Milana Milošević|AUTHOR Milana Milošević]], [[Ulrike Glavitsch|AUTHOR Ulrike Glavitsch]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172036.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-S&T-3-B-3|PAPER Tue-S&T-3-B-3 — “Did you laugh enough today?” — Deep Neural Networks for Mobile and Wearable Laughter Trackers]]</div>|<div class="cpsessionviewpapertitle">“Did you laugh enough today?” — Deep Neural Networks for Mobile and Wearable Laughter Trackers</div><div class="cpsessionviewpaperauthor">[[Gerhard Hagerer|AUTHOR Gerhard Hagerer]], [[Nicholas Cummins|AUTHOR Nicholas Cummins]], [[Florian Eyben|AUTHOR Florian Eyben]], [[Björn Schuller|AUTHOR Björn Schuller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172037.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-S&T-3-B-4|PAPER Tue-S&T-3-B-4 — Low-Frequency Ultrasonic Communication for Speech Broadcasting in Public Transportation]]</div>|<div class="cpsessionviewpapertitle">Low-Frequency Ultrasonic Communication for Speech Broadcasting in Public Transportation</div><div class="cpsessionviewpaperauthor">[[Kwang Myung Jeon|AUTHOR Kwang Myung Jeon]], [[Nam Kyun Kim|AUTHOR Nam Kyun Kim]], [[Chan Woong Kwak|AUTHOR Chan Woong Kwak]], [[Jung Min Moon|AUTHOR Jung Min Moon]], [[Hong Kook Kim|AUTHOR Hong Kook Kim]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172039.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-S&T-3-B-5|PAPER Tue-S&T-3-B-5 — Real-Time Speech Enhancement with GCC-NMF: Demonstration on the Raspberry Pi and NVIDIA Jetson]]</div>|<div class="cpsessionviewpapertitle">Real-Time Speech Enhancement with GCC-NMF: Demonstration on the Raspberry Pi and NVIDIA Jetson</div><div class="cpsessionviewpaperauthor">[[Sean U.N. Wood|AUTHOR Sean U.N. Wood]], [[Jean Rouat|AUTHOR Jean Rouat]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172033.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-S&T-3-B-6|PAPER Tue-S&T-3-B-6 — Reading Validation for Pronunciation Evaluation in the Digitala Project]]</div>|<div class="cpsessionviewpapertitle">Reading Validation for Pronunciation Evaluation in the Digitala Project</div><div class="cpsessionviewpaperauthor">[[Aku Rouhe|AUTHOR Aku Rouhe]], [[Reima Karhila|AUTHOR Reima Karhila]], [[Peter Smit|AUTHOR Peter Smit]], [[Mikko Kurimo|AUTHOR Mikko Kurimo]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Tuesday, 22 Aug. 2017, F11|<|
|Chair: |Gérard Bailly, Gabriel Skantze|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Tue-SS-3-11-8|PAPER Tue-SS-3-11-8 — Introduction]]</div>|<div class="cpsessionviewpapertitle">Introduction</div><div class="cpsessionviewpaperauthor"></div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171223.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-SS-3-11-1|PAPER Tue-SS-3-11-1 — Elicitation Design for Acoustic Depression Classification: An Investigation of Articulation Effort, Linguistic Complexity, and Word Affect]]</div>|<div class="cpsessionviewpapertitle">Elicitation Design for Acoustic Depression Classification: An Investigation of Articulation Effort, Linguistic Complexity, and Word Affect</div><div class="cpsessionviewpaperauthor">[[Brian Stasak|AUTHOR Brian Stasak]], [[Julien Epps|AUTHOR Julien Epps]], [[Roland Goecke|AUTHOR Roland Goecke]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171308.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-SS-3-11-2|PAPER Tue-SS-3-11-2 — Robustness Over Time-Varying Channels in DNN-HMM ASR Based Human-Robot Interaction]]</div>|<div class="cpsessionviewpapertitle">Robustness Over Time-Varying Channels in DNN-HMM ASR Based Human-Robot Interaction</div><div class="cpsessionviewpaperauthor">[[José Novoa|AUTHOR José Novoa]], [[Jorge Wuth|AUTHOR Jorge Wuth]], [[Juan Pablo Escudero|AUTHOR Juan Pablo Escudero]], [[Josué Fredes|AUTHOR Josué Fredes]], [[Rodrigo Mahu|AUTHOR Rodrigo Mahu]], [[Richard M. Stern|AUTHOR Richard M. Stern]], [[Nestor Becerra Yoma|AUTHOR Nestor Becerra Yoma]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171395.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-SS-3-11-3|PAPER Tue-SS-3-11-3 — Analysis of Engagement and User Experience with a Laughter Responsive Social Robot]]</div>|<div class="cpsessionviewpapertitle">Analysis of Engagement and User Experience with a Laughter Responsive Social Robot</div><div class="cpsessionviewpaperauthor">[[Bekir Berker Türker|AUTHOR Bekir Berker Türker]], [[Zana Buçinca|AUTHOR Zana Buçinca]], [[Engin Erzin|AUTHOR Engin Erzin]], [[Yücel Yemez|AUTHOR Yücel Yemez]], [[Metin Sezgin|AUTHOR Metin Sezgin]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170730.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-SS-3-11-4|PAPER Tue-SS-3-11-4 — Automatic Classification of Autistic Child Vocalisations: A Novel Database and Results]]</div>|<div class="cpsessionviewpapertitle">Automatic Classification of Autistic Child Vocalisations: A Novel Database and Results</div><div class="cpsessionviewpaperauthor">[[Alice Baird|AUTHOR Alice Baird]], [[Shahin Amiriparian|AUTHOR Shahin Amiriparian]], [[Nicholas Cummins|AUTHOR Nicholas Cummins]], [[Alyssa M. Alcorn|AUTHOR Alyssa M. Alcorn]], [[Anton Batliner|AUTHOR Anton Batliner]], [[Sergey Pugachevskiy|AUTHOR Sergey Pugachevskiy]], [[Michael Freitag|AUTHOR Michael Freitag]], [[Maurice Gerczuk|AUTHOR Maurice Gerczuk]], [[Björn Schuller|AUTHOR Björn Schuller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170926.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-SS-3-11-5|PAPER Tue-SS-3-11-5 — Crowd-Sourced Design of Artificial Attentive Listeners]]</div>|<div class="cpsessionviewpapertitle">Crowd-Sourced Design of Artificial Attentive Listeners</div><div class="cpsessionviewpaperauthor">[[Catharine Oertel|AUTHOR Catharine Oertel]], [[Patrik Jonell|AUTHOR Patrik Jonell]], [[Dimosthenis Kontogiorgos|AUTHOR Dimosthenis Kontogiorgos]], [[Joseph Mendelson|AUTHOR Joseph Mendelson]], [[Jonas Beskow|AUTHOR Jonas Beskow]], [[Joakim Gustafson|AUTHOR Joakim Gustafson]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171431.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-SS-3-11-6|PAPER Tue-SS-3-11-6 — Studying the Link Between Inter-Speaker Coordination and Speech Imitation Through Human-Machine Interactions]]</div>|<div class="cpsessionviewpapertitle">Studying the Link Between Inter-Speaker Coordination and Speech Imitation Through Human-Machine Interactions</div><div class="cpsessionviewpaperauthor">[[Leonardo Lancia|AUTHOR Leonardo Lancia]], [[Thierry Chaminade|AUTHOR Thierry Chaminade]], [[Noël Nguyen|AUTHOR Noël Nguyen]], [[Laurent Prévot|AUTHOR Laurent Prévot]]</div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Tue-SS-3-11-7|PAPER Tue-SS-3-11-7 — Discussion]]</div>|<div class="cpsessionviewpapertitle">Discussion</div><div class="cpsessionviewpaperauthor"></div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Tuesday, 22 Aug. 2017, F11|<|
|Chair: |Timo Baumann, Ingmar Steiner|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Tue-SS-4-11-7|PAPER Tue-SS-4-11-7 — Introduction]]</div>|<div class="cpsessionviewpapertitle">Introduction</div><div class="cpsessionviewpaperauthor"></div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170396.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-SS-4-11-1|PAPER Tue-SS-4-11-1 — Adjusting the Frame: Biphasic Performative Control of Speech Rhythm]]</div>|<div class="cpsessionviewpapertitle">Adjusting the Frame: Biphasic Performative Control of Speech Rhythm</div><div class="cpsessionviewpaperauthor">[[Samuel Delalez|AUTHOR Samuel Delalez]], [[Christophe d’Alessandro|AUTHOR Christophe d’Alessandro]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-SS-4-11-2|PAPER Tue-SS-4-11-2 — Attentional Factors in Listeners’ Uptake of Gesture Cues During Speech Processing]]</div>|<div class="cpsessionviewpapertitle">Attentional Factors in Listeners’ Uptake of Gesture Cues During Speech Processing</div><div class="cpsessionviewpaperauthor">[[Raheleh Saryazdi|AUTHOR Raheleh Saryazdi]], [[Craig G. Chambers|AUTHOR Craig G. Chambers]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170631.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-SS-4-11-3|PAPER Tue-SS-4-11-3 — Motion Analysis in Vocalized Surprise Expressions]]</div>|<div class="cpsessionviewpapertitle">Motion Analysis in Vocalized Surprise Expressions</div><div class="cpsessionviewpaperauthor">[[Carlos Ishi|AUTHOR Carlos Ishi]], [[Takashi Minato|AUTHOR Takashi Minato]], [[Hiroshi Ishiguro|AUTHOR Hiroshi Ishiguro]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171606.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-SS-4-11-4|PAPER Tue-SS-4-11-4 — Enhancing Backchannel Prediction Using Word Embeddings]]</div>|<div class="cpsessionviewpapertitle">Enhancing Backchannel Prediction Using Word Embeddings</div><div class="cpsessionviewpaperauthor">[[Robin Ruede|AUTHOR Robin Ruede]], [[Markus Müller|AUTHOR Markus Müller]], [[Sebastian Stüker|AUTHOR Sebastian Stüker]], [[Alex Waibel|AUTHOR Alex Waibel]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171042.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-SS-4-11-5|PAPER Tue-SS-4-11-5 — A Computational Model for Phonetically Responsive Spoken Dialogue Systems]]</div>|<div class="cpsessionviewpapertitle">A Computational Model for Phonetically Responsive Spoken Dialogue Systems</div><div class="cpsessionviewpaperauthor">[[Eran Raveh|AUTHOR Eran Raveh]], [[Ingmar Steiner|AUTHOR Ingmar Steiner]], [[Bernd Möbius|AUTHOR Bernd Möbius]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170738.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-SS-4-11-6|PAPER Tue-SS-4-11-6 — Incremental Dialogue Act Recognition: Token- vs Chunk-Based Classification]]</div>|<div class="cpsessionviewpapertitle">Incremental Dialogue Act Recognition: Token- vs Chunk-Based Classification</div><div class="cpsessionviewpaperauthor">[[Eustace Ebhotemhen|AUTHOR Eustace Ebhotemhen]], [[Volha Petukhova|AUTHOR Volha Petukhova]], [[Dietrich Klakow|AUTHOR Dietrich Klakow]]</div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Tue-SS-4-11-8|PAPER Tue-SS-4-11-8 — Discussion]]</div>|<div class="cpsessionviewpapertitle">Discussion</div><div class="cpsessionviewpaperauthor"></div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Tuesday, 22 Aug. 2017, F11|<|
|Chair: |Stefanie Jannedy, Melanie Weirich|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Tue-SS-5-11-10|PAPER Tue-SS-5-11-10 — Introduction]]</div>|<div class="cpsessionviewpapertitle">Introduction</div><div class="cpsessionviewpaperauthor"></div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-SS-5-11-1|PAPER Tue-SS-5-11-1 — Clear Speech — Mere Speech? How Segmental and Prosodic Speech Reduction Shape the Impression That Speakers Create on Listeners]]</div>|<div class="cpsessionviewpapertitle">Clear Speech — Mere Speech? How Segmental and Prosodic Speech Reduction Shape the Impression That Speakers Create on Listeners</div><div class="cpsessionviewpaperauthor">[[Oliver Niebuhr|AUTHOR Oliver Niebuhr]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170293.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-SS-5-11-2|PAPER Tue-SS-5-11-2 — Relationships Between Speech Timing and Perceived Hostility in a French Corpus of Political Debates]]</div>|<div class="cpsessionviewpapertitle">Relationships Between Speech Timing and Perceived Hostility in a French Corpus of Political Debates</div><div class="cpsessionviewpaperauthor">[[Charlotte Kouklia|AUTHOR Charlotte Kouklia]], [[Nicolas Audibert|AUTHOR Nicolas Audibert]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170328.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-SS-5-11-3|PAPER Tue-SS-5-11-3 — Towards Speaker Characterization: Identifying and Predicting Dimensions of Person Attribution]]</div>|<div class="cpsessionviewpapertitle">Towards Speaker Characterization: Identifying and Predicting Dimensions of Person Attribution</div><div class="cpsessionviewpaperauthor">[[Laura Fernández Gallardo|AUTHOR Laura Fernández Gallardo]], [[Benjamin Weiss|AUTHOR Benjamin Weiss]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170623.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-SS-5-11-4|PAPER Tue-SS-5-11-4 — Prosodic Analysis of Attention-Drawing Speech]]</div>|<div class="cpsessionviewpapertitle">Prosodic Analysis of Attention-Drawing Speech</div><div class="cpsessionviewpaperauthor">[[Carlos Ishi|AUTHOR Carlos Ishi]], [[Jun Arai|AUTHOR Jun Arai]], [[Norihiro Hagita|AUTHOR Norihiro Hagita]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171055.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-SS-5-11-5|PAPER Tue-SS-5-11-5 — Perceptual and Acoustic Correlates of Gender in the Prepubertal Voice]]</div>|<div class="cpsessionviewpapertitle">Perceptual and Acoustic Correlates of Gender in the Prepubertal Voice</div><div class="cpsessionviewpaperauthor">[[Adrian P. Simpson|AUTHOR Adrian P. Simpson]], [[Riccarda Funk|AUTHOR Riccarda Funk]], [[Frederik Palmer|AUTHOR Frederik Palmer]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171248.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-SS-5-11-6|PAPER Tue-SS-5-11-6 — To See or not to See: Interlocutor Visibility and Likeability Influence Convergence in Intonation]]</div>|<div class="cpsessionviewpapertitle">To See or not to See: Interlocutor Visibility and Likeability Influence Convergence in Intonation</div><div class="cpsessionviewpaperauthor">[[Katrin Schweitzer|AUTHOR Katrin Schweitzer]], [[Michael Walsh|AUTHOR Michael Walsh]], [[Antje Schweitzer|AUTHOR Antje Schweitzer]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171394.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-SS-5-11-7|PAPER Tue-SS-5-11-7 — Acoustic Correlates of Parental Role and Gender Identity in the Speech of Expecting Parents]]</div>|<div class="cpsessionviewpapertitle">Acoustic Correlates of Parental Role and Gender Identity in the Speech of Expecting Parents</div><div class="cpsessionviewpaperauthor">[[Melanie Weirich|AUTHOR Melanie Weirich]], [[Adrian P. Simpson|AUTHOR Adrian P. Simpson]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171732.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-SS-5-11-8|PAPER Tue-SS-5-11-8 — A Semi-Supervised Learning Approach for Acoustic-Prosodic Personality Perception in Under-Resourced Domains]]</div>|<div class="cpsessionviewpapertitle">A Semi-Supervised Learning Approach for Acoustic-Prosodic Personality Perception in Under-Resourced Domains</div><div class="cpsessionviewpaperauthor">[[Rubén Solera-Ureña|AUTHOR Rubén Solera-Ureña]], [[Helena Moniz|AUTHOR Helena Moniz]], [[Fernando Batista|AUTHOR Fernando Batista]], [[Vera Cabarrão|AUTHOR Vera Cabarrão]], [[Anna Pompili|AUTHOR Anna Pompili]], [[Ramon Fernandez Astudillo|AUTHOR Ramon Fernandez Astudillo]], [[Joana Campos|AUTHOR Joana Campos]], [[Ana Paiva|AUTHOR Ana Paiva]], [[Isabel Trancoso|AUTHOR Isabel Trancoso]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171746.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-SS-5-11-9|PAPER Tue-SS-5-11-9 — Effects of Talker Dialect, Gender & Race on Accuracy of Bing Speech and YouTube Automatic Captions]]</div>|<div class="cpsessionviewpapertitle">Effects of Talker Dialect, Gender & Race on Accuracy of Bing Speech and YouTube Automatic Captions</div><div class="cpsessionviewpaperauthor">[[Rachael Tatman|AUTHOR Rachael Tatman]], [[Conner Kasten|AUTHOR Conner Kasten]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|08:30–09:30, Wednesday, 23 Aug. 2017, Aula Magna|<|
|Chair: |Björn Granström|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS173003.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-K3-1|PAPER Wed-K3-1 — Conversing with Social Agents That Smile and Laugh]]</div>|<div class="cpsessionviewpapertitle">Conversing with Social Agents That Smile and Laugh</div><div class="cpsessionviewpaperauthor">[[Catherine Pelachaud|AUTHOR Catherine Pelachaud]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Wednesday, 23 Aug. 2017, Aula Magna|<|
|Chair: |Felicitas Kleber, Elizabeth Godoy|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170285.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-1-1|PAPER Wed-O-6-1-1 — Aerodynamic Features of French Fricatives]]</div>|<div class="cpsessionviewpapertitle">Aerodynamic Features of French Fricatives</div><div class="cpsessionviewpaperauthor">[[Rosario Signorello|AUTHOR Rosario Signorello]], [[Sergio Hassid|AUTHOR Sergio Hassid]], [[Didier Demolin|AUTHOR Didier Demolin]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171126.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-1-2|PAPER Wed-O-6-1-2 — Inter-Speaker Variability: Speaker Normalisation and Quantitative Estimation of Articulatory Invariants in Speech Production for French]]</div>|<div class="cpsessionviewpapertitle">Inter-Speaker Variability: Speaker Normalisation and Quantitative Estimation of Articulatory Invariants in Speech Production for French</div><div class="cpsessionviewpaperauthor">[[Antoine Serrurier|AUTHOR Antoine Serrurier]], [[Pierre Badin|AUTHOR Pierre Badin]], [[Louis-Jean Boë|AUTHOR Louis-Jean Boë]], [[Laurent Lamalle|AUTHOR Laurent Lamalle]], [[Christiane Neuschaefer-Rube|AUTHOR Christiane Neuschaefer-Rube]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-1-3|PAPER Wed-O-6-1-3 — Comparison of Basic Beatboxing Articulations Between Expert and Novice Artists Using Real-Time Magnetic Resonance Imaging]]</div>|<div class="cpsessionviewpapertitle">Comparison of Basic Beatboxing Articulations Between Expert and Novice Artists Using Real-Time Magnetic Resonance Imaging</div><div class="cpsessionviewpaperauthor">[[Nimisha Patil|AUTHOR Nimisha Patil]], [[Timothy Greer|AUTHOR Timothy Greer]], [[Reed Blaylock|AUTHOR Reed Blaylock]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171576.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-1-4|PAPER Wed-O-6-1-4 — Speaker-Specific Biomechanical Model-Based Investigation of a Simple Speech Task Based on Tagged-MRI]]</div>|<div class="cpsessionviewpapertitle">Speaker-Specific Biomechanical Model-Based Investigation of a Simple Speech Task Based on Tagged-MRI</div><div class="cpsessionviewpaperauthor">[[Keyi Tang|AUTHOR Keyi Tang]], [[Negar M. Harandi|AUTHOR Negar M. Harandi]], [[Jonghye Woo|AUTHOR Jonghye Woo]], [[Georges El Fakhri|AUTHOR Georges El Fakhri]], [[Maureen Stone|AUTHOR Maureen Stone]], [[Sidney Fels|AUTHOR Sidney Fels]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171631.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-1-5|PAPER Wed-O-6-1-5 — Sounds of the Human Vocal Tract]]</div>|<div class="cpsessionviewpapertitle">Sounds of the Human Vocal Tract</div><div class="cpsessionviewpaperauthor">[[Reed Blaylock|AUTHOR Reed Blaylock]], [[Nimisha Patil|AUTHOR Nimisha Patil]], [[Timothy Greer|AUTHOR Timothy Greer]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171675.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-1-6|PAPER Wed-O-6-1-6 — A Simulation Study on the Effect of Glottal Boundary Conditions on Vocal Tract Formants]]</div>|<div class="cpsessionviewpapertitle">A Simulation Study on the Effect of Glottal Boundary Conditions on Vocal Tract Formants</div><div class="cpsessionviewpaperauthor">[[Yasufumi Uezu|AUTHOR Yasufumi Uezu]], [[Tokihiko Kaburagi|AUTHOR Tokihiko Kaburagi]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Wednesday, 23 Aug. 2017, E10|<|
|Chair: |Catherine Breslin, George Saon|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170519.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-10-1|PAPER Wed-O-6-10-1 — Large-Scale Domain Adaptation via Teacher-Student Learning]]</div>|<div class="cpsessionviewpapertitle">Large-Scale Domain Adaptation via Teacher-Student Learning</div><div class="cpsessionviewpaperauthor">[[Jinyu Li|AUTHOR Jinyu Li]], [[Michael L. Seltzer|AUTHOR Michael L. Seltzer]], [[Xi Wang|AUTHOR Xi Wang]], [[Rui Zhao|AUTHOR Rui Zhao]], [[Yifan Gong|AUTHOR Yifan Gong]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170302.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-10-2|PAPER Wed-O-6-10-2 — Improving Children’s Speech Recognition Through Explicit Pitch Scaling Based on Iterative Spectrogram Inversion]]</div>|<div class="cpsessionviewpapertitle">Improving Children’s Speech Recognition Through Explicit Pitch Scaling Based on Iterative Spectrogram Inversion</div><div class="cpsessionviewpaperauthor">[[W. Ahmad|AUTHOR W. Ahmad]], [[S. Shahnawazuddin|AUTHOR S. Shahnawazuddin]], [[H.K. Kathania|AUTHOR H.K. Kathania]], [[Gayadhar Pradhan|AUTHOR Gayadhar Pradhan]], [[A.B. Samaddar|AUTHOR A.B. Samaddar]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170368.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-10-3|PAPER Wed-O-6-10-3 — RNN-LDA Clustering for Feature Based DNN Adaptation]]</div>|<div class="cpsessionviewpapertitle">RNN-LDA Clustering for Feature Based DNN Adaptation</div><div class="cpsessionviewpaperauthor">[[Xurong Xie|AUTHOR Xurong Xie]], [[Xunying Liu|AUTHOR Xunying Liu]], [[Tan Lee|AUTHOR Tan Lee]], [[Lan Wang|AUTHOR Lan Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171342.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-10-4|PAPER Wed-O-6-10-4 — Robust Online i-Vectors for Unsupervised Adaptation of DNN Acoustic Models: A Study in the Context of Digital Voice Assistants]]</div>|<div class="cpsessionviewpapertitle">Robust Online i-Vectors for Unsupervised Adaptation of DNN Acoustic Models: A Study in the Context of Digital Voice Assistants</div><div class="cpsessionviewpaperauthor">[[Harish Arsikere|AUTHOR Harish Arsikere]], [[Sri Garimella|AUTHOR Sri Garimella]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171446.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-10-5|PAPER Wed-O-6-10-5 — Semi-Supervised Learning with Semantic Knowledge Extraction for Improved Speech Recognition in Air Traffic Control]]</div>|<div class="cpsessionviewpapertitle">Semi-Supervised Learning with Semantic Knowledge Extraction for Improved Speech Recognition in Air Traffic Control</div><div class="cpsessionviewpaperauthor">[[Ajay Srinivasamurthy|AUTHOR Ajay Srinivasamurthy]], [[Petr Motlicek|AUTHOR Petr Motlicek]], [[Ivan Himawan|AUTHOR Ivan Himawan]], [[György Szaszák|AUTHOR György Szaszák]], [[Youssef Oualil|AUTHOR Youssef Oualil]], [[Hartmut Helmke|AUTHOR Hartmut Helmke]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-10-6|PAPER Wed-O-6-10-6 — Dynamic Layer Normalization for Adaptive Neural Acoustic Modeling in Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Dynamic Layer Normalization for Adaptive Neural Acoustic Modeling in Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Taesup Kim|AUTHOR Taesup Kim]], [[Inchul Song|AUTHOR Inchul Song]], [[Yoshua Bengio|AUTHOR Yoshua Bengio]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Wednesday, 23 Aug. 2017, B4|<|
|Chair: |Abeer Alwan, Franz Pernkopf|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171172.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-4-1|PAPER Wed-O-6-4-1 — A Robust and Alternative Approach to Zero Frequency Filtering Method for Epoch Extraction]]</div>|<div class="cpsessionviewpapertitle">A Robust and Alternative Approach to Zero Frequency Filtering Method for Epoch Extraction</div><div class="cpsessionviewpaperauthor">[[P. Gangamohan|AUTHOR P. Gangamohan]], [[B. Yegnanarayana|AUTHOR B. Yegnanarayana]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-4-2|PAPER Wed-O-6-4-2 — Improving YANGsaf F0 Estimator with Adaptive Kalman Filter]]</div>|<div class="cpsessionviewpapertitle">Improving YANGsaf F0 Estimator with Adaptive Kalman Filter</div><div class="cpsessionviewpaperauthor">[[Kanru Hua|AUTHOR Kanru Hua]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171138.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-4-3|PAPER Wed-O-6-4-3 — A Spectro-Temporal Demodulation Technique for Pitch Estimation]]</div>|<div class="cpsessionviewpapertitle">A Spectro-Temporal Demodulation Technique for Pitch Estimation</div><div class="cpsessionviewpaperauthor">[[Jitendra Kumar Dhiman|AUTHOR Jitendra Kumar Dhiman]], [[Nagaraj Adiga|AUTHOR Nagaraj Adiga]], [[Chandra Sekhar Seelamantula|AUTHOR Chandra Sekhar Seelamantula]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171061.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-4-4|PAPER Wed-O-6-4-4 — Robust Method for Estimating F,,0,, of Complex Tone Based on Pitch Perception of Amplitude Modulated Signal]]</div>|<div class="cpsessionviewpapertitle">Robust Method for Estimating F,,0,, of Complex Tone Based on Pitch Perception of Amplitude Modulated Signal</div><div class="cpsessionviewpaperauthor">[[Kenichiro Miwa|AUTHOR Kenichiro Miwa]], [[Masashi Unoki|AUTHOR Masashi Unoki]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171254.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-4-5|PAPER Wed-O-6-4-5 — Low-Complexity Pitch Estimation Based on Phase Differences Between Low-Resolution Spectra]]</div>|<div class="cpsessionviewpapertitle">Low-Complexity Pitch Estimation Based on Phase Differences Between Low-Resolution Spectra</div><div class="cpsessionviewpaperauthor">[[Simon Graf|AUTHOR Simon Graf]], [[Tobias Herbig|AUTHOR Tobias Herbig]], [[Markus Buck|AUTHOR Markus Buck]], [[Gerhard Schmidt|AUTHOR Gerhard Schmidt]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170068.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-4-6|PAPER Wed-O-6-4-6 — Harvest: A High-Performance Fundamental Frequency Estimator from Speech Signals]]</div>|<div class="cpsessionviewpapertitle">Harvest: A High-Performance Fundamental Frequency Estimator from Speech Signals</div><div class="cpsessionviewpaperauthor">[[Masanori Morise|AUTHOR Masanori Morise]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Wednesday, 23 Aug. 2017, C6|<|
|Chair: |Julia Hirschberg, Rolf Carlson|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171159.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-6-1|PAPER Wed-O-6-6-1 — Prosodic Event Recognition Using Convolutional Neural Networks with Context Information]]</div>|<div class="cpsessionviewpapertitle">Prosodic Event Recognition Using Convolutional Neural Networks with Context Information</div><div class="cpsessionviewpaperauthor">[[Sabrina Stehwien|AUTHOR Sabrina Stehwien]], [[Ngoc Thang Vu|AUTHOR Ngoc Thang Vu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170453.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-6-2|PAPER Wed-O-6-6-2 — Prosodic Facilitation and Interference While Judging on the Veracity of Synthesized Statements]]</div>|<div class="cpsessionviewpapertitle">Prosodic Facilitation and Interference While Judging on the Veracity of Synthesized Statements</div><div class="cpsessionviewpaperauthor">[[Ramiro H. Gálvez|AUTHOR Ramiro H. Gálvez]], [[Štefan Beňuš|AUTHOR Štefan Beňuš]], [[Agustín Gravano|AUTHOR Agustín Gravano]], [[Marian Trnka|AUTHOR Marian Trnka]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170811.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-6-3|PAPER Wed-O-6-6-3 — An Investigation of Pitch Matching Across Adjacent Turns in a Corpus of Spontaneous German]]</div>|<div class="cpsessionviewpapertitle">An Investigation of Pitch Matching Across Adjacent Turns in a Corpus of Spontaneous German</div><div class="cpsessionviewpaperauthor">[[Margaret Zellers|AUTHOR Margaret Zellers]], [[Antje Schweitzer|AUTHOR Antje Schweitzer]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170795.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-6-4|PAPER Wed-O-6-6-4 — The Relationship Between F0 Synchrony and Speech Convergence in Dyadic Interaction]]</div>|<div class="cpsessionviewpapertitle">The Relationship Between F0 Synchrony and Speech Convergence in Dyadic Interaction</div><div class="cpsessionviewpaperauthor">[[Sankar Mukherjee|AUTHOR Sankar Mukherjee]], [[Alessandro D’Ausilio|AUTHOR Alessandro D’Ausilio]], [[Noël Nguyen|AUTHOR Noël Nguyen]], [[Luciano Fadiga|AUTHOR Luciano Fadiga]], [[Leonardo Badino|AUTHOR Leonardo Badino]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170424.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-6-5|PAPER Wed-O-6-6-5 — The Role of Linguistic and Prosodic Cues on the Prediction of Self-Reported Satisfaction in Contact Centre Phone Calls]]</div>|<div class="cpsessionviewpapertitle">The Role of Linguistic and Prosodic Cues on the Prediction of Self-Reported Satisfaction in Contact Centre Phone Calls</div><div class="cpsessionviewpaperauthor">[[Jordi Luque|AUTHOR Jordi Luque]], [[Carlos Segura|AUTHOR Carlos Segura]], [[Ariadna Sánchez|AUTHOR Ariadna Sánchez]], [[Martí Umbert|AUTHOR Martí Umbert]], [[Luis Angel Galindo|AUTHOR Luis Angel Galindo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170124.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-6-6|PAPER Wed-O-6-6-6 — Cross-Linguistic Study of the Production of Turn-Taking Cues in American English and Argentine Spanish]]</div>|<div class="cpsessionviewpapertitle">Cross-Linguistic Study of the Production of Turn-Taking Cues in American English and Argentine Spanish</div><div class="cpsessionviewpaperauthor">[[Pablo Brusco|AUTHOR Pablo Brusco]], [[Juan Manuel Pérez|AUTHOR Juan Manuel Pérez]], [[Agustín Gravano|AUTHOR Agustín Gravano]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Wednesday, 23 Aug. 2017, D8|<|
|Chair: |Khiet Truong, Nigel Ward|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170087.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-8-1|PAPER Wed-O-6-8-1 — Emotional Features for Speech Overlaps Classification]]</div>|<div class="cpsessionviewpapertitle">Emotional Features for Speech Overlaps Classification</div><div class="cpsessionviewpaperauthor">[[Olga Egorow|AUTHOR Olga Egorow]], [[Andreas Wendemuth|AUTHOR Andreas Wendemuth]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170563.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-8-2|PAPER Wed-O-6-8-2 — Computing Multimodal Dyadic Behaviors During Spontaneous Diagnosis Interviews Toward Automatic Categorization of Autism Spectrum Disorder]]</div>|<div class="cpsessionviewpapertitle">Computing Multimodal Dyadic Behaviors During Spontaneous Diagnosis Interviews Toward Automatic Categorization of Autism Spectrum Disorder</div><div class="cpsessionviewpaperauthor">[[Chin-Po Chen|AUTHOR Chin-Po Chen]], [[Xian-Hong Tseng|AUTHOR Xian-Hong Tseng]], [[Susan Shur-Fen Gau|AUTHOR Susan Shur-Fen Gau]], [[Chi-Chun Lee|AUTHOR Chi-Chun Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170569.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-8-3|PAPER Wed-O-6-8-3 — Deriving Dyad-Level Interaction Representation Using Interlocutors Structural and Expressive Multimodal Behavior Features]]</div>|<div class="cpsessionviewpapertitle">Deriving Dyad-Level Interaction Representation Using Interlocutors Structural and Expressive Multimodal Behavior Features</div><div class="cpsessionviewpaperauthor">[[Yun-Shao Lin|AUTHOR Yun-Shao Lin]], [[Chi-Chun Lee|AUTHOR Chi-Chun Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170635.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-8-4|PAPER Wed-O-6-8-4 — Spotting Social Signals in Conversational Speech over IP: A Deep Learning Perspective]]</div>|<div class="cpsessionviewpapertitle">Spotting Social Signals in Conversational Speech over IP: A Deep Learning Perspective</div><div class="cpsessionviewpaperauthor">[[Raymond Brueckner|AUTHOR Raymond Brueckner]], [[Maximilian Schmitt|AUTHOR Maximilian Schmitt]], [[Maja Pantic|AUTHOR Maja Pantic]], [[Björn Schuller|AUTHOR Björn Schuller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170932.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-8-5|PAPER Wed-O-6-8-5 — Optimized Time Series Filters for Detecting Laughter and Filler Events]]</div>|<div class="cpsessionviewpapertitle">Optimized Time Series Filters for Detecting Laughter and Filler Events</div><div class="cpsessionviewpaperauthor">[[Gábor Gosztolya|AUTHOR Gábor Gosztolya]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171633.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-6-8-6|PAPER Wed-O-6-8-6 — Visual, Laughter, Applause and Spoken Expression Features for Predicting Engagement Within TED Talks]]</div>|<div class="cpsessionviewpapertitle">Visual, Laughter, Applause and Spoken Expression Features for Predicting Engagement Within TED Talks</div><div class="cpsessionviewpaperauthor">[[Fasih Haider|AUTHOR Fasih Haider]], [[Fahim A. Salim|AUTHOR Fahim A. Salim]], [[Saturnino Luz|AUTHOR Saturnino Luz]], [[Carl Vogel|AUTHOR Carl Vogel]], [[Owen Conlan|AUTHOR Owen Conlan]], [[Nick Campbell|AUTHOR Nick Campbell]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Wednesday, 23 Aug. 2017, Aula Magna|<|
|Chair: |Odette Scharenborg, Tanja Schultz|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170073.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-1-1|PAPER Wed-O-7-1-1 — An Entrained Rhythm’s Frequency, Not Phase, Influences Temporal Sampling of Speech]]</div>|<div class="cpsessionviewpapertitle">An Entrained Rhythm’s Frequency, Not Phase, Influences Temporal Sampling of Speech</div><div class="cpsessionviewpaperauthor">[[Hans Rutger Bosker|AUTHOR Hans Rutger Bosker]], [[Anne Kösem|AUTHOR Anne Kösem]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170658.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-1-2|PAPER Wed-O-7-1-2 — Context Regularity Indexed by Auditory N1 and P2 Event-Related Potentials]]</div>|<div class="cpsessionviewpapertitle">Context Regularity Indexed by Auditory N1 and P2 Event-Related Potentials</div><div class="cpsessionviewpaperauthor">[[Xiao Wang|AUTHOR Xiao Wang]], [[Yanhui Zhang|AUTHOR Yanhui Zhang]], [[Gang Peng|AUTHOR Gang Peng]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170842.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-1-3|PAPER Wed-O-7-1-3 — Discovering Language in Marmoset Vocalization]]</div>|<div class="cpsessionviewpapertitle">Discovering Language in Marmoset Vocalization</div><div class="cpsessionviewpaperauthor">[[Sakshi Verma|AUTHOR Sakshi Verma]], [[K.L. Prateek|AUTHOR K.L. Prateek]], [[Karthik Pandia|AUTHOR Karthik Pandia]], [[Nauman Dawalatabad|AUTHOR Nauman Dawalatabad]], [[Rogier Landman|AUTHOR Rogier Landman]], [[Jitendra Sharma|AUTHOR Jitendra Sharma]], [[Mriganka Sur|AUTHOR Mriganka Sur]], [[Hema A. Murthy|AUTHOR Hema A. Murthy]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170854.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-1-4|PAPER Wed-O-7-1-4 — Subject-Independent Classification of Japanese Spoken Sentences by Multiple Frequency Bands Phase Pattern of EEG Response During Speech Perception]]</div>|<div class="cpsessionviewpapertitle">Subject-Independent Classification of Japanese Spoken Sentences by Multiple Frequency Bands Phase Pattern of EEG Response During Speech Perception</div><div class="cpsessionviewpaperauthor">[[Hiroki Watanabe|AUTHOR Hiroki Watanabe]], [[Hiroki Tanaka|AUTHOR Hiroki Tanaka]], [[Sakriani Sakti|AUTHOR Sakriani Sakti]], [[Satoshi Nakamura|AUTHOR Satoshi Nakamura]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170934.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-1-5|PAPER Wed-O-7-1-5 — The Phonological Status of the French Initial Accent and its Role in Semantic Processing: An Event-Related Potentials Study]]</div>|<div class="cpsessionviewpapertitle">The Phonological Status of the French Initial Accent and its Role in Semantic Processing: An Event-Related Potentials Study</div><div class="cpsessionviewpaperauthor">[[Noémie te Rietmolen|AUTHOR Noémie te Rietmolen]], [[Radouane El Yagoubi|AUTHOR Radouane El Yagoubi]], [[Alain Ghio|AUTHOR Alain Ghio]], [[Corine Astésano|AUTHOR Corine Astésano]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171741.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-1-6|PAPER Wed-O-7-1-6 — A Neuro-Experimental Evidence for the Motor Theory of Speech Perception]]</div>|<div class="cpsessionviewpapertitle">A Neuro-Experimental Evidence for the Motor Theory of Speech Perception</div><div class="cpsessionviewpaperauthor">[[Bin Zhao|AUTHOR Bin Zhao]], [[Jianwu Dang|AUTHOR Jianwu Dang]], [[Gaoyan Zhang|AUTHOR Gaoyan Zhang]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Wednesday, 23 Aug. 2017, E10|<|
|Chair: |Yao Qian, Vidhyasaharan Sethu|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171334.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-10-1|PAPER Wed-O-7-10-1 — Spoken Language Identification Using LSTM-Based Angular Proximity]]</div>|<div class="cpsessionviewpapertitle">Spoken Language Identification Using LSTM-Based Angular Proximity</div><div class="cpsessionviewpaperauthor">[[G. Gelly|AUTHOR G. Gelly]], [[J.L. Gauvain|AUTHOR J.L. Gauvain]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170044.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-10-2|PAPER Wed-O-7-10-2 — End-to-End Language Identification Using High-Order Utterance Representation with Bilinear Pooling]]</div>|<div class="cpsessionviewpapertitle">End-to-End Language Identification Using High-Order Utterance Representation with Bilinear Pooling</div><div class="cpsessionviewpaperauthor">[[Ma Jin|AUTHOR Ma Jin]], [[Yan Song|AUTHOR Yan Song]], [[Ian McLoughlin|AUTHOR Ian McLoughlin]], [[Wu Guo|AUTHOR Wu Guo]], [[Li-Rong Dai|AUTHOR Li-Rong Dai]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170576.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-10-3|PAPER Wed-O-7-10-3 — Dialect Recognition Based on Unsupervised Bottleneck Features]]</div>|<div class="cpsessionviewpapertitle">Dialect Recognition Based on Unsupervised Bottleneck Features</div><div class="cpsessionviewpaperauthor">[[Qian Zhang|AUTHOR Qian Zhang]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170596.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-10-4|PAPER Wed-O-7-10-4 — Investigating Scalability in Hierarchical Language Identification System]]</div>|<div class="cpsessionviewpapertitle">Investigating Scalability in Hierarchical Language Identification System</div><div class="cpsessionviewpaperauthor">[[Saad Irtza|AUTHOR Saad Irtza]], [[Vidhyasaharan Sethu|AUTHOR Vidhyasaharan Sethu]], [[Eliathamby Ambikairajah|AUTHOR Eliathamby Ambikairajah]], [[Haizhou Li|AUTHOR Haizhou Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-10-5|PAPER Wed-O-7-10-5 — Improving Sub-Phone Modeling for Better Native Language Identification with Non-Native English Speech]]</div>|<div class="cpsessionviewpapertitle">Improving Sub-Phone Modeling for Better Native Language Identification with Non-Native English Speech</div><div class="cpsessionviewpaperauthor">[[Yao Qian|AUTHOR Yao Qian]], [[Keelan Evanini|AUTHOR Keelan Evanini]], [[Xinhao Wang|AUTHOR Xinhao Wang]], [[David Suendermann-Oeft|AUTHOR David Suendermann-Oeft]], [[Robert A. Pugh|AUTHOR Robert A. Pugh]], [[Patrick L. Lange|AUTHOR Patrick L. Lange]], [[Hillary R. Molloy|AUTHOR Hillary R. Molloy]], [[Frank K. Soong|AUTHOR Frank K. Soong]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171391.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-10-6|PAPER Wed-O-7-10-6 — QMDIS: QCRI-MIT Advanced Dialect Identification System]]</div>|<div class="cpsessionviewpapertitle">QMDIS: QCRI-MIT Advanced Dialect Identification System</div><div class="cpsessionviewpaperauthor">[[Sameer Khurana|AUTHOR Sameer Khurana]], [[Maryam Najafian|AUTHOR Maryam Najafian]], [[Ahmed Ali|AUTHOR Ahmed Ali]], [[Tuka Al Hanai|AUTHOR Tuka Al Hanai]], [[Yonatan Belinkov|AUTHOR Yonatan Belinkov]], [[James Glass|AUTHOR James Glass]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Wednesday, 23 Aug. 2017, A2|<|
|Chair: |Yifan Gong, Izhak Shafran|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170901.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-2-1|PAPER Wed-O-7-2-1 — Speech Representation Learning Using Unsupervised Data-Driven Modulation Filtering for Robust ASR]]</div>|<div class="cpsessionviewpapertitle">Speech Representation Learning Using Unsupervised Data-Driven Modulation Filtering for Robust ASR</div><div class="cpsessionviewpaperauthor">[[Purvi Agrawal|AUTHOR Purvi Agrawal]], [[Sriram Ganapathy|AUTHOR Sriram Ganapathy]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170642.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-2-2|PAPER Wed-O-7-2-2 — Combined Multi-Channel NMF-Based Robust Beamforming for Noisy Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Combined Multi-Channel NMF-Based Robust Beamforming for Noisy Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Masato Mimura|AUTHOR Masato Mimura]], [[Yoshiaki Bando|AUTHOR Yoshiaki Bando]], [[Kazuki Shimada|AUTHOR Kazuki Shimada]], [[Shinsuke Sakai|AUTHOR Shinsuke Sakai]], [[Kazuyoshi Yoshii|AUTHOR Kazuyoshi Yoshii]], [[Tatsuya Kawahara|AUTHOR Tatsuya Kawahara]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170305.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-2-3|PAPER Wed-O-7-2-3 — Recognizing Multi-Talker Speech with Permutation Invariant Training]]</div>|<div class="cpsessionviewpapertitle">Recognizing Multi-Talker Speech with Permutation Invariant Training</div><div class="cpsessionviewpaperauthor">[[Dong Yu|AUTHOR Dong Yu]], [[Xuankai Chang|AUTHOR Xuankai Chang]], [[Yanmin Qian|AUTHOR Yanmin Qian]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170061.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-2-4|PAPER Wed-O-7-2-4 — Coupled Initialization of Multi-Channel Non-Negative Matrix Factorization Based on Spatial and Spectral Information]]</div>|<div class="cpsessionviewpapertitle">Coupled Initialization of Multi-Channel Non-Negative Matrix Factorization Based on Spatial and Spectral Information</div><div class="cpsessionviewpaperauthor">[[Yuuki Tachioka|AUTHOR Yuuki Tachioka]], [[Tomohiro Narita|AUTHOR Tomohiro Narita]], [[Iori Miura|AUTHOR Iori Miura]], [[Takanobu Uramoto|AUTHOR Takanobu Uramoto]], [[Natsuki Monta|AUTHOR Natsuki Monta]], [[Shingo Uenohara|AUTHOR Shingo Uenohara]], [[Ken’ichi Furuya|AUTHOR Ken’ichi Furuya]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]], [[Jonathan Le Roux|AUTHOR Jonathan Le Roux]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170211.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-2-5|PAPER Wed-O-7-2-5 — Channel Compensation in the Generalised Vector Taylor Series Approach to Robust ASR]]</div>|<div class="cpsessionviewpapertitle">Channel Compensation in the Generalised Vector Taylor Series Approach to Robust ASR</div><div class="cpsessionviewpaperauthor">[[Erfan Loweimi|AUTHOR Erfan Loweimi]], [[Jon Barker|AUTHOR Jon Barker]], [[Thomas Hain|AUTHOR Thomas Hain]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-2-6|PAPER Wed-O-7-2-6 — Robust Speech Recognition via Anchor Word Representations]]</div>|<div class="cpsessionviewpapertitle">Robust Speech Recognition via Anchor Word Representations</div><div class="cpsessionviewpaperauthor">[[Brian King|AUTHOR Brian King]], [[I-Fan Chen|AUTHOR I-Fan Chen]], [[Yonatan Vaizman|AUTHOR Yonatan Vaizman]], [[Yuzong Liu|AUTHOR Yuzong Liu]], [[Roland Maas|AUTHOR Roland Maas]], [[Sree Hari Krishnan Parthasarathi|AUTHOR Sree Hari Krishnan Parthasarathi]], [[Björn Hoffmeister|AUTHOR Björn Hoffmeister]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Wednesday, 23 Aug. 2017, B4|<|
|Chair: |Ville Hautamaki, Lin-shan Lee|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170518.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-4-1|PAPER Wed-O-7-4-1 — Towards Zero-Shot Frame Semantic Parsing for Domain Scaling]]</div>|<div class="cpsessionviewpapertitle">Towards Zero-Shot Frame Semantic Parsing for Domain Scaling</div><div class="cpsessionviewpaperauthor">[[Ankur Bapna|AUTHOR Ankur Bapna]], [[Gokhan Tür|AUTHOR Gokhan Tür]], [[Dilek Hakkani-Tür|AUTHOR Dilek Hakkani-Tür]], [[Larry Heck|AUTHOR Larry Heck]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171075.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-4-2|PAPER Wed-O-7-4-2 — ClockWork-RNN Based Architectures for Slot Filling]]</div>|<div class="cpsessionviewpapertitle">ClockWork-RNN Based Architectures for Slot Filling</div><div class="cpsessionviewpaperauthor">[[Despoina Georgiadou|AUTHOR Despoina Georgiadou]], [[Vassilios Diakoloukas|AUTHOR Vassilios Diakoloukas]], [[Vassilios Tsiaras|AUTHOR Vassilios Tsiaras]], [[Vassilios Digalakis|AUTHOR Vassilios Digalakis]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171482.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-4-3|PAPER Wed-O-7-4-3 — Investigating the Effect of ASR Tuning on Named Entity Recognition]]</div>|<div class="cpsessionviewpapertitle">Investigating the Effect of ASR Tuning on Named Entity Recognition</div><div class="cpsessionviewpaperauthor">[[Mohamed Ameur Ben Jannet|AUTHOR Mohamed Ameur Ben Jannet]], [[Olivier Galibert|AUTHOR Olivier Galibert]], [[Martine Adda-Decker|AUTHOR Martine Adda-Decker]], [[Sophie Rosset|AUTHOR Sophie Rosset]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171480.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-4-4|PAPER Wed-O-7-4-4 — Label-Dependency Coding in Simple Recurrent Networks for Spoken Language Understanding]]</div>|<div class="cpsessionviewpapertitle">Label-Dependency Coding in Simple Recurrent Networks for Spoken Language Understanding</div><div class="cpsessionviewpaperauthor">[[Marco Dinarelli|AUTHOR Marco Dinarelli]], [[Vedran Vukotic|AUTHOR Vedran Vukotic]], [[Christian Raymond|AUTHOR Christian Raymond]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170590.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-4-5|PAPER Wed-O-7-4-5 — Minimum Semantic Error Cost Training of Deep Long Short-Term Memory Networks for Topic Spotting on Conversational Speech]]</div>|<div class="cpsessionviewpapertitle">Minimum Semantic Error Cost Training of Deep Long Short-Term Memory Networks for Topic Spotting on Conversational Speech</div><div class="cpsessionviewpaperauthor">[[Zhong Meng|AUTHOR Zhong Meng]], [[Biing-Hwang Juang|AUTHOR Biing-Hwang Juang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171093.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-4-6|PAPER Wed-O-7-4-6 — Topic Identification for Speech Without ASR]]</div>|<div class="cpsessionviewpapertitle">Topic Identification for Speech Without ASR</div><div class="cpsessionviewpaperauthor">[[Chunxi Liu|AUTHOR Chunxi Liu]], [[Jan Trmal|AUTHOR Jan Trmal]], [[Matthew Wiesner|AUTHOR Matthew Wiesner]], [[Craig Harman|AUTHOR Craig Harman]], [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Wednesday, 23 Aug. 2017, C6|<|
|Chair: |Gabriel Skantze, Timo Baumann|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171326.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-6-1|PAPER Wed-O-7-6-1 — An End-to-End Trainable Neural Network Model with Belief Tracking for Task-Oriented Dialog]]</div>|<div class="cpsessionviewpapertitle">An End-to-End Trainable Neural Network Model with Belief Tracking for Task-Oriented Dialog</div><div class="cpsessionviewpaperauthor">[[Bing Liu|AUTHOR Bing Liu]], [[Ian Lane|AUTHOR Ian Lane]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171060.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-6-2|PAPER Wed-O-7-6-2 — Deep Reinforcement Learning of Dialogue Policies with Less Weight Updates]]</div>|<div class="cpsessionviewpapertitle">Deep Reinforcement Learning of Dialogue Policies with Less Weight Updates</div><div class="cpsessionviewpaperauthor">[[Heriberto Cuayáhuitl|AUTHOR Heriberto Cuayáhuitl]], [[Seunghak Yu|AUTHOR Seunghak Yu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171574.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-6-3|PAPER Wed-O-7-6-3 — Towards End-to-End Spoken Dialogue Systems with Turn Embeddings]]</div>|<div class="cpsessionviewpapertitle">Towards End-to-End Spoken Dialogue Systems with Turn Embeddings</div><div class="cpsessionviewpaperauthor">[[Ali Orkan Bayer|AUTHOR Ali Orkan Bayer]], [[Evgeny A. Stepanov|AUTHOR Evgeny A. Stepanov]], [[Giuseppe Riccardi|AUTHOR Giuseppe Riccardi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170501.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-6-4|PAPER Wed-O-7-6-4 — Speech and Text Analysis for Multimodal Addressee Detection in Human-Human-Computer Interaction]]</div>|<div class="cpsessionviewpapertitle">Speech and Text Analysis for Multimodal Addressee Detection in Human-Human-Computer Interaction</div><div class="cpsessionviewpaperauthor">[[Oleg Akhtiamov|AUTHOR Oleg Akhtiamov]], [[Maxim Sidorov|AUTHOR Maxim Sidorov]], [[Alexey A. Karpov|AUTHOR Alexey A. Karpov]], [[Wolfgang Minker|AUTHOR Wolfgang Minker]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171205.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-6-5|PAPER Wed-O-7-6-5 — Rushing to Judgement: How do Laypeople Rate Caller Engagement in Thin-Slice Videos of Human–Machine Dialog?]]</div>|<div class="cpsessionviewpapertitle">Rushing to Judgement: How do Laypeople Rate Caller Engagement in Thin-Slice Videos of Human–Machine Dialog?</div><div class="cpsessionviewpaperauthor">[[Vikram Ramanarayanan|AUTHOR Vikram Ramanarayanan]], [[Chee Wee Leong|AUTHOR Chee Wee Leong]], [[David Suendermann-Oeft|AUTHOR David Suendermann-Oeft]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170753.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-6-6|PAPER Wed-O-7-6-6 — Hyperarticulation of Corrections in Multilingual Dialogue Systems]]</div>|<div class="cpsessionviewpapertitle">Hyperarticulation of Corrections in Multilingual Dialogue Systems</div><div class="cpsessionviewpaperauthor">[[Ivan Kraljevski|AUTHOR Ivan Kraljevski]], [[Diane Hirschfeld|AUTHOR Diane Hirschfeld]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Wednesday, 23 Aug. 2017, D8|<|
|Chair: |Izhak Shafran, Helen Meng|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171436.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-8-1|PAPER Wed-O-7-8-1 — Multitask Sequence-to-Sequence Models for Grapheme-to-Phoneme Conversion]]</div>|<div class="cpsessionviewpapertitle">Multitask Sequence-to-Sequence Models for Grapheme-to-Phoneme Conversion</div><div class="cpsessionviewpaperauthor">[[Benjamin Milde|AUTHOR Benjamin Milde]], [[Christoph Schmidt|AUTHOR Christoph Schmidt]], [[Joachim Köhler|AUTHOR Joachim Köhler]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170588.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-8-2|PAPER Wed-O-7-8-2 — Acoustic Data-Driven Lexicon Learning Based on a Greedy Pronunciation Selection Framework]]</div>|<div class="cpsessionviewpapertitle">Acoustic Data-Driven Lexicon Learning Based on a Greedy Pronunciation Selection Framework</div><div class="cpsessionviewpaperauthor">[[Xiaohui Zhang|AUTHOR Xiaohui Zhang]], [[Vimal Manohar|AUTHOR Vimal Manohar]], [[Daniel Povey|AUTHOR Daniel Povey]], [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171081.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-8-3|PAPER Wed-O-7-8-3 — Semi-Supervised Learning of a Pronunciation Dictionary from Disjoint Phonemic Transcripts and Text]]</div>|<div class="cpsessionviewpapertitle">Semi-Supervised Learning of a Pronunciation Dictionary from Disjoint Phonemic Transcripts and Text</div><div class="cpsessionviewpaperauthor">[[Takahiro Shinozaki|AUTHOR Takahiro Shinozaki]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]], [[Daichi Mochihashi|AUTHOR Daichi Mochihashi]], [[Graham Neubig|AUTHOR Graham Neubig]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170103.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-8-4|PAPER Wed-O-7-8-4 — Improved Subword Modeling for WFST-Based Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Improved Subword Modeling for WFST-Based Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Peter Smit|AUTHOR Peter Smit]], [[Sami Virpioja|AUTHOR Sami Virpioja]], [[Mikko Kurimo|AUTHOR Mikko Kurimo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170047.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-8-5|PAPER Wed-O-7-8-5 — Pronunciation Learning with RNN-Transducers]]</div>|<div class="cpsessionviewpapertitle">Pronunciation Learning with RNN-Transducers</div><div class="cpsessionviewpaperauthor">[[Antoine Bruguier|AUTHOR Antoine Bruguier]], [[Danushen Gnanapragasam|AUTHOR Danushen Gnanapragasam]], [[Leif Johnson|AUTHOR Leif Johnson]], [[Kanishka Rao|AUTHOR Kanishka Rao]], [[Françoise Beaufays|AUTHOR Françoise Beaufays]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171117.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-7-8-6|PAPER Wed-O-7-8-6 — Learning Similarity Functions for Pronunciation Variations]]</div>|<div class="cpsessionviewpapertitle">Learning Similarity Functions for Pronunciation Variations</div><div class="cpsessionviewpaperauthor">[[Einat Naaman|AUTHOR Einat Naaman]], [[Yossi Adi|AUTHOR Yossi Adi]], [[Joseph Keshet|AUTHOR Joseph Keshet]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Wednesday, 23 Aug. 2017, Aula Magna|<|
|Chair: |Nicholas Evans, Karthika Vijayan|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170256.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-1-1|PAPER Wed-O-8-1-1 — Detection of Replay Attacks Using Single Frequency Filtering Cepstral Coefficients]]</div>|<div class="cpsessionviewpapertitle">Detection of Replay Attacks Using Single Frequency Filtering Cepstral Coefficients</div><div class="cpsessionviewpaperauthor">[[K.N.R.K. Raju Alluri|AUTHOR K.N.R.K. Raju Alluri]], [[Sivanand Achanta|AUTHOR Sivanand Achanta]], [[Sudarsana Reddy Kadiri|AUTHOR Sudarsana Reddy Kadiri]], [[Suryakanth V. Gangashetty|AUTHOR Suryakanth V. Gangashetty]], [[Anil Kumar Vuppala|AUTHOR Anil Kumar Vuppala]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171393.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-1-2|PAPER Wed-O-8-1-2 — Unsupervised Representation Learning Using Convolutional Restricted Boltzmann Machine for Spoof Speech Detection]]</div>|<div class="cpsessionviewpapertitle">Unsupervised Representation Learning Using Convolutional Restricted Boltzmann Machine for Spoof Speech Detection</div><div class="cpsessionviewpaperauthor">[[Hardik B. Sailor|AUTHOR Hardik B. Sailor]], [[Madhu R. Kamble|AUTHOR Madhu R. Kamble]], [[Hemant A. Patil|AUTHOR Hemant A. Patil]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170836.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-1-3|PAPER Wed-O-8-1-3 — Independent Modelling of High and Low Energy Speech Frames for Spoofing Detection]]</div>|<div class="cpsessionviewpapertitle">Independent Modelling of High and Low Energy Speech Frames for Spoofing Detection</div><div class="cpsessionviewpaperauthor">[[Gajan Suthokumar|AUTHOR Gajan Suthokumar]], [[Kaavya Sriskandaraja|AUTHOR Kaavya Sriskandaraja]], [[Vidhyasaharan Sethu|AUTHOR Vidhyasaharan Sethu]], [[Chamith Wijenayake|AUTHOR Chamith Wijenayake]], [[Eliathamby Ambikairajah|AUTHOR Eliathamby Ambikairajah]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171758.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-1-4|PAPER Wed-O-8-1-4 — Improving Speaker Verification Performance in Presence of Spoofing Attacks Using Out-of-Domain Spoofed Data]]</div>|<div class="cpsessionviewpapertitle">Improving Speaker Verification Performance in Presence of Spoofing Attacks Using Out-of-Domain Spoofed Data</div><div class="cpsessionviewpaperauthor">[[Achintya Kr. Sarkar|AUTHOR Achintya Kr. Sarkar]], [[Md. Sahidullah|AUTHOR Md. Sahidullah]], [[Zheng-Hua Tan|AUTHOR Zheng-Hua Tan]], [[Tomi Kinnunen|AUTHOR Tomi Kinnunen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170950.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-1-5|PAPER Wed-O-8-1-5 — VoxCeleb: A Large-Scale Speaker Identification Dataset]]</div>|<div class="cpsessionviewpapertitle">VoxCeleb: A Large-Scale Speaker Identification Dataset</div><div class="cpsessionviewpaperauthor">[[Arsha Nagrani|AUTHOR Arsha Nagrani]], [[Joon Son Chung|AUTHOR Joon Son Chung]], [[Andrew Zisserman|AUTHOR Andrew Zisserman]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171521.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-1-6|PAPER Wed-O-8-1-6 — Call My Net Corpus: A Multilingual Corpus for Evaluation of Speaker Recognition Technology]]</div>|<div class="cpsessionviewpapertitle">Call My Net Corpus: A Multilingual Corpus for Evaluation of Speaker Recognition Technology</div><div class="cpsessionviewpaperauthor">[[Karen Jones|AUTHOR Karen Jones]], [[Stephanie Strassel|AUTHOR Stephanie Strassel]], [[Kevin Walker|AUTHOR Kevin Walker]], [[David Graff|AUTHOR David Graff]], [[Jonathan Wright|AUTHOR Jonathan Wright]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Wednesday, 23 Aug. 2017, E10|<|
|Chair: |Yannick Estève, Dilek Hakkani-Tür|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171203.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-10-1|PAPER Wed-O-8-10-1 — Effectively Building Tera Scale MaxEnt Language Models Incorporating Non-Linguistic Signals]]</div>|<div class="cpsessionviewpapertitle">Effectively Building Tera Scale MaxEnt Language Models Incorporating Non-Linguistic Signals</div><div class="cpsessionviewpaperauthor">[[Fadi Biadsy|AUTHOR Fadi Biadsy]], [[Mohammadreza Ghodsi|AUTHOR Mohammadreza Ghodsi]], [[Diamantino Caseiro|AUTHOR Diamantino Caseiro]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171598.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-10-2|PAPER Wed-O-8-10-2 — Semi-Supervised Adaptation of RNNLMs by Fine-Tuning with Domain-Specific Auxiliary Features]]</div>|<div class="cpsessionviewpapertitle">Semi-Supervised Adaptation of RNNLMs by Fine-Tuning with Domain-Specific Auxiliary Features</div><div class="cpsessionviewpaperauthor">[[Salil Deena|AUTHOR Salil Deena]], [[Raymond W.M. Ng|AUTHOR Raymond W.M. Ng]], [[Pranava Madhyastha|AUTHOR Pranava Madhyastha]], [[Lucia Specia|AUTHOR Lucia Specia]], [[Thomas Hain|AUTHOR Thomas Hain]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170147.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-10-3|PAPER Wed-O-8-10-3 — Approximated and Domain-Adapted LSTM Language Models for First-Pass Decoding in Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Approximated and Domain-Adapted LSTM Language Models for First-Pass Decoding in Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Mittul Singh|AUTHOR Mittul Singh]], [[Youssef Oualil|AUTHOR Youssef Oualil]], [[Dietrich Klakow|AUTHOR Dietrich Klakow]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170493.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-10-4|PAPER Wed-O-8-10-4 — Sparse Non-Negative Matrix Language Modeling: Maximum Entropy Flexibility on the Cheap]]</div>|<div class="cpsessionviewpapertitle">Sparse Non-Negative Matrix Language Modeling: Maximum Entropy Flexibility on the Cheap</div><div class="cpsessionviewpaperauthor">[[Ciprian Chelba|AUTHOR Ciprian Chelba]], [[Diamantino Caseiro|AUTHOR Diamantino Caseiro]], [[Fadi Biadsy|AUTHOR Fadi Biadsy]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170426.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-10-5|PAPER Wed-O-8-10-5 — Multi-Scale Context Adaptation for Improving Child Automatic Speech Recognition in Child-Adult Spoken Interactions]]</div>|<div class="cpsessionviewpapertitle">Multi-Scale Context Adaptation for Improving Child Automatic Speech Recognition in Child-Adult Spoken Interactions</div><div class="cpsessionviewpaperauthor">[[Manoj Kumar|AUTHOR Manoj Kumar]], [[Daniel Bone|AUTHOR Daniel Bone]], [[Kelly McWilliams|AUTHOR Kelly McWilliams]], [[Shanna Williams|AUTHOR Shanna Williams]], [[Thomas D. Lyon|AUTHOR Thomas D. Lyon]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171790.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-10-6|PAPER Wed-O-8-10-6 — Using Knowledge Graph and Search Query Click Logs in Statistical Language Model for Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Using Knowledge Graph and Search Query Click Logs in Statistical Language Model for Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Weiwu Zhu|AUTHOR Weiwu Zhu]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Wednesday, 23 Aug. 2017, B4|<|
|Chair: |Isabel Trancoso, Nicholas Ruiz|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170503.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-4-1|PAPER Wed-O-8-4-1 — Sequence-to-Sequence Models Can Directly Translate Foreign Speech]]</div>|<div class="cpsessionviewpapertitle">Sequence-to-Sequence Models Can Directly Translate Foreign Speech</div><div class="cpsessionviewpaperauthor">[[Ron J. Weiss|AUTHOR Ron J. Weiss]], [[Jan Chorowski|AUTHOR Jan Chorowski]], [[Navdeep Jaitly|AUTHOR Navdeep Jaitly]], [[Yonghui Wu|AUTHOR Yonghui Wu]], [[Zhifeng Chen|AUTHOR Zhifeng Chen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170944.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-4-2|PAPER Wed-O-8-4-2 — Structured-Based Curriculum Learning for End-to-End English-Japanese Speech Translation]]</div>|<div class="cpsessionviewpapertitle">Structured-Based Curriculum Learning for End-to-End English-Japanese Speech Translation</div><div class="cpsessionviewpaperauthor">[[Takatomo Kano|AUTHOR Takatomo Kano]], [[Sakriani Sakti|AUTHOR Sakriani Sakti]], [[Satoshi Nakamura|AUTHOR Satoshi Nakamura]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-4-3|PAPER Wed-O-8-4-3 — Assessing the Tolerance of Neural Machine Translation Systems Against Speech Recognition Errors]]</div>|<div class="cpsessionviewpapertitle">Assessing the Tolerance of Neural Machine Translation Systems Against Speech Recognition Errors</div><div class="cpsessionviewpaperauthor">[[Nicholas Ruiz|AUTHOR Nicholas Ruiz]], [[Mattia Antonino Di Gangi|AUTHOR Mattia Antonino Di Gangi]], [[Nicola Bertoldi|AUTHOR Nicola Bertoldi]], [[Marcello Federico|AUTHOR Marcello Federico]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170896.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-4-4|PAPER Wed-O-8-4-4 — Toward Expressive Speech Translation: A Unified Sequence-to-Sequence LSTMs Approach for Translating Words and Emphasis]]</div>|<div class="cpsessionviewpapertitle">Toward Expressive Speech Translation: A Unified Sequence-to-Sequence LSTMs Approach for Translating Words and Emphasis</div><div class="cpsessionviewpaperauthor">[[Quoc Truong Do|AUTHOR Quoc Truong Do]], [[Sakriani Sakti|AUTHOR Sakriani Sakti]], [[Satoshi Nakamura|AUTHOR Satoshi Nakamura]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171320.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-4-5|PAPER Wed-O-8-4-5 — NMT-Based Segmentation and Punctuation Insertion for Real-Time Spoken Language Translation]]</div>|<div class="cpsessionviewpapertitle">NMT-Based Segmentation and Punctuation Insertion for Real-Time Spoken Language Translation</div><div class="cpsessionviewpaperauthor">[[Eunah Cho|AUTHOR Eunah Cho]], [[Jan Niehues|AUTHOR Jan Niehues]], [[Alex Waibel|AUTHOR Alex Waibel]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Wednesday, 23 Aug. 2017, C6|<|
|Chair: |Hynek Boril, Reinhold Haeb-Umbach|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170187.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-6-1|PAPER Wed-O-8-6-1 — Tight Integration of Spatial and Spectral Features for BSS with Deep Clustering Embeddings]]</div>|<div class="cpsessionviewpapertitle">Tight Integration of Spatial and Spectral Features for BSS with Deep Clustering Embeddings</div><div class="cpsessionviewpaperauthor">[[Lukas Drude|AUTHOR Lukas Drude]], [[Reinhold Haeb-Umbach|AUTHOR Reinhold Haeb-Umbach]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170667.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-6-2|PAPER Wed-O-8-6-2 — Speaker-Aware Neural Network Based Beamformer for Speaker Extraction in Speech Mixtures]]</div>|<div class="cpsessionviewpapertitle">Speaker-Aware Neural Network Based Beamformer for Speaker Extraction in Speech Mixtures</div><div class="cpsessionviewpaperauthor">[[Kateřina Žmolíková|AUTHOR Kateřina Žmolíková]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]], [[Takuya Higuchi|AUTHOR Takuya Higuchi]], [[Atsunori Ogawa|AUTHOR Atsunori Ogawa]], [[Tomohiro Nakatani|AUTHOR Tomohiro Nakatani]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171186.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-6-3|PAPER Wed-O-8-6-3 — Eigenvector-Based Speech Mask Estimation Using Logistic Regression]]</div>|<div class="cpsessionviewpapertitle">Eigenvector-Based Speech Mask Estimation Using Logistic Regression</div><div class="cpsessionviewpaperauthor">[[Lukas Pfeifenberger|AUTHOR Lukas Pfeifenberger]], [[Matthias Zöhrer|AUTHOR Matthias Zöhrer]], [[Franz Pernkopf|AUTHOR Franz Pernkopf]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171458.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-6-4|PAPER Wed-O-8-6-4 — Real-Time Speech Enhancement with GCC-NMF]]</div>|<div class="cpsessionviewpapertitle">Real-Time Speech Enhancement with GCC-NMF</div><div class="cpsessionviewpaperauthor">[[Sean U.N. Wood|AUTHOR Sean U.N. Wood]], [[Jean Rouat|AUTHOR Jean Rouat]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171464.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-6-5|PAPER Wed-O-8-6-5 — Coherence-Based Dual-Channel Noise Reduction Algorithm in a Complex Noisy Environment]]</div>|<div class="cpsessionviewpapertitle">Coherence-Based Dual-Channel Noise Reduction Algorithm in a Complex Noisy Environment</div><div class="cpsessionviewpaperauthor">[[Youna Ji|AUTHOR Youna Ji]], [[Jun Byun|AUTHOR Jun Byun]], [[Young-cheol Park|AUTHOR Young-cheol Park]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171659.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-6-6|PAPER Wed-O-8-6-6 — Glottal Model Based Speech Beamforming for ad-hoc Microphone Arrays]]</div>|<div class="cpsessionviewpapertitle">Glottal Model Based Speech Beamforming for ad-hoc Microphone Arrays</div><div class="cpsessionviewpaperauthor">[[Yang Zhang|AUTHOR Yang Zhang]], [[Dinei Florêncio|AUTHOR Dinei Florêncio]], [[Mark Hasegawa-Johnson|AUTHOR Mark Hasegawa-Johnson]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Wednesday, 23 Aug. 2017, D8|<|
|Chair: |Phil Green, Torbjørn Svendsen|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-8-1|PAPER Wed-O-8-8-1 — Acoustic Assessment of Disordered Voice with Continuous Speech Based on Utterance-Level ASR Posterior Features]]</div>|<div class="cpsessionviewpapertitle">Acoustic Assessment of Disordered Voice with Continuous Speech Based on Utterance-Level ASR Posterior Features</div><div class="cpsessionviewpaperauthor">[[Yuanyuan Liu|AUTHOR Yuanyuan Liu]], [[Tan Lee|AUTHOR Tan Lee]], [[P.C. Ching|AUTHOR P.C. Ching]], [[Thomas K.T. Law|AUTHOR Thomas K.T. Law]], [[Kathy Y.S. Lee|AUTHOR Kathy Y.S. Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170303.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-8-2|PAPER Wed-O-8-8-2 — Multi-Stage DNN Training for Automatic Recognition of Dysarthric Speech]]</div>|<div class="cpsessionviewpapertitle">Multi-Stage DNN Training for Automatic Recognition of Dysarthric Speech</div><div class="cpsessionviewpaperauthor">[[Emre Yılmaz|AUTHOR Emre Yılmaz]], [[Mario Ganzeboom|AUTHOR Mario Ganzeboom]], [[Catia Cucchiarini|AUTHOR Catia Cucchiarini]], [[Helmer Strik|AUTHOR Helmer Strik]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170455.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-8-3|PAPER Wed-O-8-8-3 — Improving Child Speech Disorder Assessment by Incorporating Out-of-Domain Adult Speech]]</div>|<div class="cpsessionviewpapertitle">Improving Child Speech Disorder Assessment by Incorporating Out-of-Domain Adult Speech</div><div class="cpsessionviewpaperauthor">[[Daniel Smith|AUTHOR Daniel Smith]], [[Alex Sneddon|AUTHOR Alex Sneddon]], [[Lauren Ward|AUTHOR Lauren Ward]], [[Andreas Duenser|AUTHOR Andreas Duenser]], [[Jill Freyne|AUTHOR Jill Freyne]], [[David Silvera-Tawil|AUTHOR David Silvera-Tawil]], [[Angela Morgan|AUTHOR Angela Morgan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170878.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-8-4|PAPER Wed-O-8-8-4 — On Improving Acoustic Models for TORGO Dysarthric Speech Database]]</div>|<div class="cpsessionviewpapertitle">On Improving Acoustic Models for TORGO Dysarthric Speech Database</div><div class="cpsessionviewpaperauthor">[[Neethu Mariam Joy|AUTHOR Neethu Mariam Joy]], [[S. Umesh|AUTHOR S. Umesh]], [[Basil Abraham|AUTHOR Basil Abraham]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171251.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-8-5|PAPER Wed-O-8-8-5 — Glottal Source Features for Automatic Speech-Based Depression Assessment]]</div>|<div class="cpsessionviewpapertitle">Glottal Source Features for Automatic Speech-Based Depression Assessment</div><div class="cpsessionviewpaperauthor">[[Olympia Simantiraki|AUTHOR Olympia Simantiraki]], [[Paulos Charonyktakis|AUTHOR Paulos Charonyktakis]], [[Anastasia Pampouchidou|AUTHOR Anastasia Pampouchidou]], [[Manolis Tsiknakis|AUTHOR Manolis Tsiknakis]], [[Martin Cooke|AUTHOR Martin Cooke]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171712.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-O-8-8-6|PAPER Wed-O-8-8-6 — Speech Processing Approach for Diagnosing Dementia in an Early Stage]]</div>|<div class="cpsessionviewpapertitle">Speech Processing Approach for Diagnosing Dementia in an Early Stage</div><div class="cpsessionviewpaperauthor">[[Roozbeh Sadeghian|AUTHOR Roozbeh Sadeghian]], [[J. David Schaffer|AUTHOR J. David Schaffer]], [[Stephen A. Zahorian|AUTHOR Stephen A. Zahorian]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Wednesday, 23 Aug. 2017, Poster 1|<|
|Chair: |Kris Demuynck|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170166.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-1-1|PAPER Wed-P-6-1-1 — Developing On-Line Speaker Diarization System]]</div>|<div class="cpsessionviewpapertitle">Developing On-Line Speaker Diarization System</div><div class="cpsessionviewpaperauthor">[[Dimitrios Dimitriadis|AUTHOR Dimitrios Dimitriadis]], [[Petr Fousek|AUTHOR Petr Fousek]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-1-2|PAPER Wed-P-6-1-2 — Comparison of Non-Parametric Bayesian Mixture Models for Syllable Clustering and Zero-Resource Speech Processing]]</div>|<div class="cpsessionviewpapertitle">Comparison of Non-Parametric Bayesian Mixture Models for Syllable Clustering and Zero-Resource Speech Processing</div><div class="cpsessionviewpaperauthor">[[Shreyas Seshadri|AUTHOR Shreyas Seshadri]], [[Ulpu Remes|AUTHOR Ulpu Remes]], [[Okko Räsänen|AUTHOR Okko Räsänen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171541.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-1-3|PAPER Wed-P-6-1-3 — Automatic Evaluation of Children Reading Aloud on Sentences and Pseudowords]]</div>|<div class="cpsessionviewpapertitle">Automatic Evaluation of Children Reading Aloud on Sentences and Pseudowords</div><div class="cpsessionviewpaperauthor">[[Jorge Proença|AUTHOR Jorge Proença]], [[Carla Lopes|AUTHOR Carla Lopes]], [[Michael Tjalve|AUTHOR Michael Tjalve]], [[Andreas Stolcke|AUTHOR Andreas Stolcke]], [[Sara Candeias|AUTHOR Sara Candeias]], [[Fernando Perdigão|AUTHOR Fernando Perdigão]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170388.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-1-4|PAPER Wed-P-6-1-4 — Off-Topic Spoken Response Detection with Word Embeddings]]</div>|<div class="cpsessionviewpapertitle">Off-Topic Spoken Response Detection with Word Embeddings</div><div class="cpsessionviewpaperauthor">[[Su-Youn Yoon|AUTHOR Su-Youn Yoon]], [[Chong Min Lee|AUTHOR Chong Min Lee]], [[Ikkyu Choi|AUTHOR Ikkyu Choi]], [[Xinhao Wang|AUTHOR Xinhao Wang]], [[Matthew Mulholland|AUTHOR Matthew Mulholland]], [[Keelan Evanini|AUTHOR Keelan Evanini]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170464.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-1-5|PAPER Wed-P-6-1-5 — Improving Mispronunciation Detection for Non-Native Learners with Multisource Information and LSTM-Based Deep Models]]</div>|<div class="cpsessionviewpapertitle">Improving Mispronunciation Detection for Non-Native Learners with Multisource Information and LSTM-Based Deep Models</div><div class="cpsessionviewpaperauthor">[[Wei Li|AUTHOR Wei Li]], [[Nancy F. Chen|AUTHOR Nancy F. Chen]], [[Sabato Marco Siniscalchi|AUTHOR Sabato Marco Siniscalchi]], [[Chin-Hui Lee|AUTHOR Chin-Hui Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170750.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-1-6|PAPER Wed-P-6-1-6 — Automatic Explanation Spot Estimation Method Targeted at Text and Figures in Lecture Slides]]</div>|<div class="cpsessionviewpapertitle">Automatic Explanation Spot Estimation Method Targeted at Text and Figures in Lecture Slides</div><div class="cpsessionviewpaperauthor">[[Shoko Tsujimura|AUTHOR Shoko Tsujimura]], [[Kazumasa Yamamoto|AUTHOR Kazumasa Yamamoto]], [[Seiichi Nakagawa|AUTHOR Seiichi Nakagawa]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170952.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-1-7|PAPER Wed-P-6-1-7 — Multiview Representation Learning via Deep CCA for Silent Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Multiview Representation Learning via Deep CCA for Silent Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Myungjong Kim|AUTHOR Myungjong Kim]], [[Beiming Cao|AUTHOR Beiming Cao]], [[Ted Mau|AUTHOR Ted Mau]], [[Jun Wang|AUTHOR Jun Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-1-8|PAPER Wed-P-6-1-8 — Use of Graphemic Lexicons for Spoken Language Assessment]]</div>|<div class="cpsessionviewpapertitle">Use of Graphemic Lexicons for Spoken Language Assessment</div><div class="cpsessionviewpaperauthor">[[K.M. Knill|AUTHOR K.M. Knill]], [[Mark J.F. Gales|AUTHOR Mark J.F. Gales]], [[K. Kyriakopoulos|AUTHOR K. Kyriakopoulos]], [[A. Ragni|AUTHOR A. Ragni]], [[Y. Wang|AUTHOR Y. Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171079.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-1-9|PAPER Wed-P-6-1-9 — Distilling Knowledge from an Ensemble of Models for Punctuation Prediction]]</div>|<div class="cpsessionviewpapertitle">Distilling Knowledge from an Ensemble of Models for Punctuation Prediction</div><div class="cpsessionviewpaperauthor">[[Jiangyan Yi|AUTHOR Jiangyan Yi]], [[Jianhua Tao|AUTHOR Jianhua Tao]], [[Zhengqi Wen|AUTHOR Zhengqi Wen]], [[Ya Li|AUTHOR Ya Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171274.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-1-10|PAPER Wed-P-6-1-10 — A Mostly Data-Driven Approach to Inverse Text Normalization]]</div>|<div class="cpsessionviewpapertitle">A Mostly Data-Driven Approach to Inverse Text Normalization</div><div class="cpsessionviewpaperauthor">[[Ernest Pusateri|AUTHOR Ernest Pusateri]], [[Bharat Ram Ambati|AUTHOR Bharat Ram Ambati]], [[Elizabeth Brooks|AUTHOR Elizabeth Brooks]], [[Ondrej Platek|AUTHOR Ondrej Platek]], [[Donald McAllaster|AUTHOR Donald McAllaster]], [[Venki Nagesha|AUTHOR Venki Nagesha]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171567.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-1-11|PAPER Wed-P-6-1-11 — Mismatched Crowdsourcing from Multiple Annotator Languages for Recognizing Zero-Resourced Languages: A Nullspace Clustering Approach]]</div>|<div class="cpsessionviewpapertitle">Mismatched Crowdsourcing from Multiple Annotator Languages for Recognizing Zero-Resourced Languages: A Nullspace Clustering Approach</div><div class="cpsessionviewpaperauthor">[[Wenda Chen|AUTHOR Wenda Chen]], [[Mark Hasegawa-Johnson|AUTHOR Mark Hasegawa-Johnson]], [[Nancy F. Chen|AUTHOR Nancy F. Chen]], [[Boon Pang Lim|AUTHOR Boon Pang Lim]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171710.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-1-12|PAPER Wed-P-6-1-12 — Experiments in Character-Level Neural Network Models for Punctuation]]</div>|<div class="cpsessionviewpapertitle">Experiments in Character-Level Neural Network Models for Punctuation</div><div class="cpsessionviewpaperauthor">[[William Gale|AUTHOR William Gale]], [[Sarangarajan Parthasarathy|AUTHOR Sarangarajan Parthasarathy]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171778.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-1-13|PAPER Wed-P-6-1-13 — Multi-Channel Apollo Mission Speech Transcripts Calibration]]</div>|<div class="cpsessionviewpapertitle">Multi-Channel Apollo Mission Speech Transcripts Calibration</div><div class="cpsessionviewpaperauthor">[[Lakshmish Kaushik|AUTHOR Lakshmish Kaushik]], [[Abhijeet Sangwan|AUTHOR Abhijeet Sangwan]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Wednesday, 23 Aug. 2017, Poster 2|<|
|Chair: |Mitchell McLaren|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170530.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-2-1|PAPER Wed-P-6-2-1 — Calibration Approaches for Language Detection]]</div>|<div class="cpsessionviewpapertitle">Calibration Approaches for Language Detection</div><div class="cpsessionviewpaperauthor">[[Mitchell McLaren|AUTHOR Mitchell McLaren]], [[Luciana Ferrer|AUTHOR Luciana Ferrer]], [[Diego Castan|AUTHOR Diego Castan]], [[Aaron Lawson|AUTHOR Aaron Lawson]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170286.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-2-2|PAPER Wed-P-6-2-2 — Bidirectional Modelling for Short Duration Language Identification]]</div>|<div class="cpsessionviewpapertitle">Bidirectional Modelling for Short Duration Language Identification</div><div class="cpsessionviewpaperauthor">[[Sarith Fernando|AUTHOR Sarith Fernando]], [[Vidhyasaharan Sethu|AUTHOR Vidhyasaharan Sethu]], [[Eliathamby Ambikairajah|AUTHOR Eliathamby Ambikairajah]], [[Julien Epps|AUTHOR Julien Epps]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170553.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-2-3|PAPER Wed-P-6-2-3 — Conditional Generative Adversarial Nets Classifier for Spoken Language Identification]]</div>|<div class="cpsessionviewpapertitle">Conditional Generative Adversarial Nets Classifier for Spoken Language Identification</div><div class="cpsessionviewpaperauthor">[[Peng Shen|AUTHOR Peng Shen]], [[Xugang Lu|AUTHOR Xugang Lu]], [[Sheng Li|AUTHOR Sheng Li]], [[Hisashi Kawai|AUTHOR Hisashi Kawai]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171314.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-2-4|PAPER Wed-P-6-2-4 — Tied Hidden Factors in Neural Networks for End-to-End Speaker Recognition]]</div>|<div class="cpsessionviewpapertitle">Tied Hidden Factors in Neural Networks for End-to-End Speaker Recognition</div><div class="cpsessionviewpaperauthor">[[Antonio Miguel|AUTHOR Antonio Miguel]], [[Jorge Llombart|AUTHOR Jorge Llombart]], [[Alfonso Ortega|AUTHOR Alfonso Ortega]], [[Eduardo Lleida|AUTHOR Eduardo Lleida]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170923.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-2-5|PAPER Wed-P-6-2-5 — Speaker Clustering by Iteratively Finding Discriminative Feature Space and Cluster Labels]]</div>|<div class="cpsessionviewpapertitle">Speaker Clustering by Iteratively Finding Discriminative Feature Space and Cluster Labels</div><div class="cpsessionviewpaperauthor">[[Sungrack Yun|AUTHOR Sungrack Yun]], [[Hye Jin Jang|AUTHOR Hye Jin Jang]], [[Taesu Kim|AUTHOR Taesu Kim]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170084.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-2-6|PAPER Wed-P-6-2-6 — Domain Adaptation of PLDA Models in Broadcast Diarization by Means of Unsupervised Speaker Clustering]]</div>|<div class="cpsessionviewpapertitle">Domain Adaptation of PLDA Models in Broadcast Diarization by Means of Unsupervised Speaker Clustering</div><div class="cpsessionviewpaperauthor">[[Ignacio Viñals|AUTHOR Ignacio Viñals]], [[Alfonso Ortega|AUTHOR Alfonso Ortega]], [[Jesús Villalba|AUTHOR Jesús Villalba]], [[Antonio Miguel|AUTHOR Antonio Miguel]], [[Eduardo Lleida|AUTHOR Eduardo Lleida]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170407.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-2-7|PAPER Wed-P-6-2-7 — LSTM Neural Network-Based Speaker Segmentation Using Acoustic and Language Modelling]]</div>|<div class="cpsessionviewpapertitle">LSTM Neural Network-Based Speaker Segmentation Using Acoustic and Language Modelling</div><div class="cpsessionviewpaperauthor">[[Miquel India|AUTHOR Miquel India]], [[José A.R. Fonollosa|AUTHOR José A.R. Fonollosa]], [[Javier Hernando|AUTHOR Javier Hernando]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171311.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-2-8|PAPER Wed-P-6-2-8 — Acoustic Pairing of Original and Dubbed Voices in the Context of Video Game Localization]]</div>|<div class="cpsessionviewpapertitle">Acoustic Pairing of Original and Dubbed Voices in the Context of Video Game Localization</div><div class="cpsessionviewpaperauthor">[[Adrien Gresse|AUTHOR Adrien Gresse]], [[Mickael Rouvier|AUTHOR Mickael Rouvier]], [[Richard Dufour|AUTHOR Richard Dufour]], [[Vincent Labatut|AUTHOR Vincent Labatut]], [[Jean-François Bonastre|AUTHOR Jean-François Bonastre]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170152.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-2-9|PAPER Wed-P-6-2-9 — Homogeneity Measure Impact on Target and Non-Target Trials in Forensic Voice Comparison]]</div>|<div class="cpsessionviewpapertitle">Homogeneity Measure Impact on Target and Non-Target Trials in Forensic Voice Comparison</div><div class="cpsessionviewpaperauthor">[[Moez Ajili|AUTHOR Moez Ajili]], [[Jean-François Bonastre|AUTHOR Jean-François Bonastre]], [[Waad Ben Kheder|AUTHOR Waad Ben Kheder]], [[Solange Rossato|AUTHOR Solange Rossato]], [[Juliette Kahn|AUTHOR Juliette Kahn]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171023.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-2-10|PAPER Wed-P-6-2-10 — Null-Hypothesis LLR: A Proposal for Forensic Automatic Speaker Recognition]]</div>|<div class="cpsessionviewpapertitle">Null-Hypothesis LLR: A Proposal for Forensic Automatic Speaker Recognition</div><div class="cpsessionviewpaperauthor">[[Yosef A. Solewicz|AUTHOR Yosef A. Solewicz]], [[Michael Jessen|AUTHOR Michael Jessen]], [[David van der Vloed|AUTHOR David van der Vloed]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170997.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-2-11|PAPER Wed-P-6-2-11 — The Opensesame NIST 2016 Speaker Recognition Evaluation System]]</div>|<div class="cpsessionviewpapertitle">The Opensesame NIST 2016 Speaker Recognition Evaluation System</div><div class="cpsessionviewpaperauthor">[[Gang Liu|AUTHOR Gang Liu]], [[Qi Qian|AUTHOR Qi Qian]], [[Zhibin Wang|AUTHOR Zhibin Wang]], [[Qingen Zhao|AUTHOR Qingen Zhao]], [[Tianzhou Wang|AUTHOR Tianzhou Wang]], [[Hao Li|AUTHOR Hao Li]], [[Jian Xue|AUTHOR Jian Xue]], [[Shenghuo Zhu|AUTHOR Shenghuo Zhu]], [[Rong Jin|AUTHOR Rong Jin]], [[Tuo Zhao|AUTHOR Tuo Zhao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171307.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-2-12|PAPER Wed-P-6-2-12 — IITG-Indigo System for NIST 2016 SRE Challenge]]</div>|<div class="cpsessionviewpapertitle">IITG-Indigo System for NIST 2016 SRE Challenge</div><div class="cpsessionviewpaperauthor">[[Nagendra Kumar|AUTHOR Nagendra Kumar]], [[Rohan Kumar Das|AUTHOR Rohan Kumar Das]], [[Sarfaraz Jelil|AUTHOR Sarfaraz Jelil]], [[Dhanush B.K.|AUTHOR Dhanush B.K.]], [[H. Kashyap|AUTHOR H. Kashyap]], [[K. Sri Rama Murty|AUTHOR K. Sri Rama Murty]], [[Sriram Ganapathy|AUTHOR Sriram Ganapathy]], [[Rohit Sinha|AUTHOR Rohit Sinha]], [[S.R. Mahadeva Prasanna|AUTHOR S.R. Mahadeva Prasanna]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-2-13|PAPER Wed-P-6-2-13 — Locally Weighted Linear Discriminant Analysis for Robust Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Locally Weighted Linear Discriminant Analysis for Robust Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Abhinav Misra|AUTHOR Abhinav Misra]], [[Shivesh Ranjan|AUTHOR Shivesh Ranjan]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170545.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-2-14|PAPER Wed-P-6-2-14 — Recursive Whitening Transformation for Speaker Recognition on Language Mismatched Condition]]</div>|<div class="cpsessionviewpapertitle">Recursive Whitening Transformation for Speaker Recognition on Language Mismatched Condition</div><div class="cpsessionviewpaperauthor">[[Suwon Shon|AUTHOR Suwon Shon]], [[Seongkyu Mun|AUTHOR Seongkyu Mun]], [[Hanseok Ko|AUTHOR Hanseok Ko]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Wednesday, 23 Aug. 2017, Poster 3|<|
|Chair: |Roland Kuhn|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171592.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-3-1|PAPER Wed-P-6-3-1 — Query-by-Example Search with Discriminative Neural Acoustic Word Embeddings]]</div>|<div class="cpsessionviewpapertitle">Query-by-Example Search with Discriminative Neural Acoustic Word Embeddings</div><div class="cpsessionviewpaperauthor">[[Shane Settle|AUTHOR Shane Settle]], [[Keith Levin|AUTHOR Keith Levin]], [[Herman Kamper|AUTHOR Herman Kamper]], [[Karen Livescu|AUTHOR Karen Livescu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170634.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-3-2|PAPER Wed-P-6-3-2 — Constructing Acoustic Distances Between Subwords and States Obtained from a Deep Neural Network for Spoken Term Detection]]</div>|<div class="cpsessionviewpapertitle">Constructing Acoustic Distances Between Subwords and States Obtained from a Deep Neural Network for Spoken Term Detection</div><div class="cpsessionviewpaperauthor">[[Daisuke Kaneko|AUTHOR Daisuke Kaneko]], [[Ryota Konno|AUTHOR Ryota Konno]], [[Kazunori Kojima|AUTHOR Kazunori Kojima]], [[Kazuyo Tanaka|AUTHOR Kazuyo Tanaka]], [[Shi-wook Lee|AUTHOR Shi-wook Lee]], [[Yoshiaki Itoh|AUTHOR Yoshiaki Itoh]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171367.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-3-3|PAPER Wed-P-6-3-3 — Fast and Accurate OOV Decoder on High-Level Features]]</div>|<div class="cpsessionviewpapertitle">Fast and Accurate OOV Decoder on High-Level Features</div><div class="cpsessionviewpaperauthor">[[Yuri Khokhlov|AUTHOR Yuri Khokhlov]], [[Natalia Tomashenko|AUTHOR Natalia Tomashenko]], [[Ivan Medennikov|AUTHOR Ivan Medennikov]], [[Aleksei Romanenko|AUTHOR Aleksei Romanenko]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170612.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-3-4|PAPER Wed-P-6-3-4 — Exploring the Use of Significant Words Language Modeling for Spoken Document Retrieval]]</div>|<div class="cpsessionviewpapertitle">Exploring the Use of Significant Words Language Modeling for Spoken Document Retrieval</div><div class="cpsessionviewpaperauthor">[[Ying-Wen Chen|AUTHOR Ying-Wen Chen]], [[Kuan-Yu Chen|AUTHOR Kuan-Yu Chen]], [[Hsin-Min Wang|AUTHOR Hsin-Min Wang]], [[Berlin Chen|AUTHOR Berlin Chen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170893.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-3-5|PAPER Wed-P-6-3-5 — Incorporating Acoustic Features for Spontaneous Speech Driven Content Retrieval]]</div>|<div class="cpsessionviewpapertitle">Incorporating Acoustic Features for Spontaneous Speech Driven Content Retrieval</div><div class="cpsessionviewpaperauthor">[[Hiroto Tasaki|AUTHOR Hiroto Tasaki]], [[Tomoyosi Akiba|AUTHOR Tomoyosi Akiba]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170862.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-3-6|PAPER Wed-P-6-3-6 — Order-Preserving Abstractive Summarization for Spoken Content Based on Connectionist Temporal Classification]]</div>|<div class="cpsessionviewpapertitle">Order-Preserving Abstractive Summarization for Spoken Content Based on Connectionist Temporal Classification</div><div class="cpsessionviewpaperauthor">[[Bo-Ru Lu|AUTHOR Bo-Ru Lu]], [[Frank Shyu|AUTHOR Frank Shyu]], [[Yun-Nung Chen|AUTHOR Yun-Nung Chen]], [[Hung-Yi Lee|AUTHOR Hung-Yi Lee]], [[Lin-Shan Lee|AUTHOR Lin-Shan Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171752.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-3-7|PAPER Wed-P-6-3-7 — Automatic Alignment Between Classroom Lecture Utterances and Slide Components]]</div>|<div class="cpsessionviewpapertitle">Automatic Alignment Between Classroom Lecture Utterances and Slide Components</div><div class="cpsessionviewpaperauthor">[[Masatoshi Tsuchiya|AUTHOR Masatoshi Tsuchiya]], [[Ryo Minamiguchi|AUTHOR Ryo Minamiguchi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171183.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-3-8|PAPER Wed-P-6-3-8 — Compensating Gender Variability in Query-by-Example Search on Speech Using Voice Conversion]]</div>|<div class="cpsessionviewpapertitle">Compensating Gender Variability in Query-by-Example Search on Speech Using Voice Conversion</div><div class="cpsessionviewpaperauthor">[[Paula Lopez-Otero|AUTHOR Paula Lopez-Otero]], [[Laura Docio-Fernandez|AUTHOR Laura Docio-Fernandez]], [[Carmen Garcia-Mateo|AUTHOR Carmen Garcia-Mateo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170516.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-3-9|PAPER Wed-P-6-3-9 — Zero-Shot Learning Across Heterogeneous Overlapping Domains]]</div>|<div class="cpsessionviewpapertitle">Zero-Shot Learning Across Heterogeneous Overlapping Domains</div><div class="cpsessionviewpaperauthor">[[Anjishnu Kumar|AUTHOR Anjishnu Kumar]], [[Pavankumar Reddy Muddireddy|AUTHOR Pavankumar Reddy Muddireddy]], [[Markus Dreyer|AUTHOR Markus Dreyer]], [[Björn Hoffmeister|AUTHOR Björn Hoffmeister]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170392.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-3-10|PAPER Wed-P-6-3-10 — Hierarchical Recurrent Neural Network for Story Segmentation]]</div>|<div class="cpsessionviewpapertitle">Hierarchical Recurrent Neural Network for Story Segmentation</div><div class="cpsessionviewpaperauthor">[[Emiru Tsunoo|AUTHOR Emiru Tsunoo]], [[Peter Bell|AUTHOR Peter Bell]], [[Steve Renals|AUTHOR Steve Renals]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171231.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-3-11|PAPER Wed-P-6-3-11 — Evaluating Automatic Topic Segmentation as a Segment Retrieval Task]]</div>|<div class="cpsessionviewpapertitle">Evaluating Automatic Topic Segmentation as a Segment Retrieval Task</div><div class="cpsessionviewpaperauthor">[[Abdessalam Bouchekif|AUTHOR Abdessalam Bouchekif]], [[Delphine Charlet|AUTHOR Delphine Charlet]], [[Géraldine Damnati|AUTHOR Géraldine Damnati]], [[Nathalie Camelin|AUTHOR Nathalie Camelin]], [[Yannick Estève|AUTHOR Yannick Estève]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170650.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-3-12|PAPER Wed-P-6-3-12 — Improving Speech Recognizers by Refining Broadcast Data with Inaccurate Subtitle Timestamps]]</div>|<div class="cpsessionviewpapertitle">Improving Speech Recognizers by Refining Broadcast Data with Inaccurate Subtitle Timestamps</div><div class="cpsessionviewpaperauthor">[[Jeong-Uk Bang|AUTHOR Jeong-Uk Bang]], [[Mu-Yeol Choi|AUTHOR Mu-Yeol Choi]], [[Sang-Hun Kim|AUTHOR Sang-Hun Kim]], [[Oh-Wook Kwon|AUTHOR Oh-Wook Kwon]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171087.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-3-13|PAPER Wed-P-6-3-13 — A Relevance Score Estimation for Spoken Term Detection Based on RNN-Generated Pronunciation Embeddings]]</div>|<div class="cpsessionviewpapertitle">A Relevance Score Estimation for Spoken Term Detection Based on RNN-Generated Pronunciation Embeddings</div><div class="cpsessionviewpaperauthor">[[Jan Švec|AUTHOR Jan Švec]], [[Josef V. Psutka|AUTHOR Josef V. Psutka]], [[Luboš Šmídl|AUTHOR Luboš Šmídl]], [[Jan Trmal|AUTHOR Jan Trmal]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Wednesday, 23 Aug. 2017, Poster 4|<|
|Chair: |Preeti Rao|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170036.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-4-1|PAPER Wed-P-6-4-1 — Predicting Automatic Speech Recognition Performance Over Communication Channels from Instrumental Speech Quality and Intelligibility Scores]]</div>|<div class="cpsessionviewpapertitle">Predicting Automatic Speech Recognition Performance Over Communication Channels from Instrumental Speech Quality and Intelligibility Scores</div><div class="cpsessionviewpaperauthor">[[Laura Fernández Gallardo|AUTHOR Laura Fernández Gallardo]], [[Sebastian Möller|AUTHOR Sebastian Möller]], [[John Beerends|AUTHOR John Beerends]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170105.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-4-2|PAPER Wed-P-6-4-2 — Speech Intelligibility in Cars: The Effect of Speaking Style, Noise and Listener Age]]</div>|<div class="cpsessionviewpapertitle">Speech Intelligibility in Cars: The Effect of Speaking Style, Noise and Listener Age</div><div class="cpsessionviewpaperauthor">[[Cassia Valentini Botinhao|AUTHOR Cassia Valentini Botinhao]], [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170170.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-4-3|PAPER Wed-P-6-4-3 — Predicting Speech Intelligibility Using a Gammachirp Envelope Distortion Index Based on the Signal-to-Distortion Ratio]]</div>|<div class="cpsessionviewpapertitle">Predicting Speech Intelligibility Using a Gammachirp Envelope Distortion Index Based on the Signal-to-Distortion Ratio</div><div class="cpsessionviewpaperauthor">[[Katsuhiko Yamamoto|AUTHOR Katsuhiko Yamamoto]], [[Toshio Irino|AUTHOR Toshio Irino]], [[Toshie Matsui|AUTHOR Toshie Matsui]], [[Shoko Araki|AUTHOR Shoko Araki]], [[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]], [[Tomohiro Nakatani|AUTHOR Tomohiro Nakatani]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170281.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-4-4|PAPER Wed-P-6-4-4 — Intelligibilities of Mandarin Chinese Sentences with Spectral “Holes”]]</div>|<div class="cpsessionviewpapertitle">Intelligibilities of Mandarin Chinese Sentences with Spectral “Holes”</div><div class="cpsessionviewpaperauthor">[[Yafan Chen|AUTHOR Yafan Chen]], [[Yong Xu|AUTHOR Yong Xu]], [[Jun Yang|AUTHOR Jun Yang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170500.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-4-5|PAPER Wed-P-6-4-5 — The Effect of Situation-Specific Non-Speech Acoustic Cues on the Intelligibility of Speech in Noise]]</div>|<div class="cpsessionviewpapertitle">The Effect of Situation-Specific Non-Speech Acoustic Cues on the Intelligibility of Speech in Noise</div><div class="cpsessionviewpaperauthor">[[Lauren Ward|AUTHOR Lauren Ward]], [[Ben Shirley|AUTHOR Ben Shirley]], [[Yan Tang|AUTHOR Yan Tang]], [[William J. Davies|AUTHOR William J. Davies]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-4-6|PAPER Wed-P-6-4-6 — On the Use of Band Importance Weighting in the Short-Time Objective Intelligibility Measure]]</div>|<div class="cpsessionviewpapertitle">On the Use of Band Importance Weighting in the Short-Time Objective Intelligibility Measure</div><div class="cpsessionviewpaperauthor">[[Asger Heidemann Andersen|AUTHOR Asger Heidemann Andersen]], [[Jan Mark de Haan|AUTHOR Jan Mark de Haan]], [[Zheng-Hua Tan|AUTHOR Zheng-Hua Tan]], [[Jesper Jensen|AUTHOR Jesper Jensen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171168.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-6-4-7|PAPER Wed-P-6-4-7 — Listening in the Dips: Comparing Relevant Features for Speech Recognition in Humans and Machines]]</div>|<div class="cpsessionviewpapertitle">Listening in the Dips: Comparing Relevant Features for Speech Recognition in Humans and Machines</div><div class="cpsessionviewpaperauthor">[[Constantin Spille|AUTHOR Constantin Spille]], [[Bernd T. Meyer|AUTHOR Bernd T. Meyer]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Wednesday, 23 Aug. 2017, Poster 2|<|
|Chair: |Rachid Ridouane|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171720.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-2-1|PAPER Wed-P-7-2-1 — Mental Representation of Japanese Mora; Focusing on its Intrinsic Duration]]</div>|<div class="cpsessionviewpapertitle">Mental Representation of Japanese Mora; Focusing on its Intrinsic Duration</div><div class="cpsessionviewpaperauthor">[[Kosuke Sugai|AUTHOR Kosuke Sugai]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170765.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-2-2|PAPER Wed-P-7-2-2 — Temporal Dynamics of Lateral Channel Formation in /l/: 3D EMA Data from Australian English]]</div>|<div class="cpsessionviewpapertitle">Temporal Dynamics of Lateral Channel Formation in /l/: 3D EMA Data from Australian English</div><div class="cpsessionviewpaperauthor">[[Jia Ying|AUTHOR Jia Ying]], [[Christopher Carignan|AUTHOR Christopher Carignan]], [[Jason A. Shaw|AUTHOR Jason A. Shaw]], [[Michael Proctor|AUTHOR Michael Proctor]], [[Donald Derrick|AUTHOR Donald Derrick]], [[Catherine T. Best|AUTHOR Catherine T. Best]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171154.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-2-3|PAPER Wed-P-7-2-3 — Vowel and Consonant Sequences in three Bavarian Dialects of Austria]]</div>|<div class="cpsessionviewpapertitle">Vowel and Consonant Sequences in three Bavarian Dialects of Austria</div><div class="cpsessionviewpaperauthor">[[Nicola Klingler|AUTHOR Nicola Klingler]], [[Sylvia Moosmüller|AUTHOR Sylvia Moosmüller]], [[Hannes Scheutz|AUTHOR Hannes Scheutz]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171609.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-2-4|PAPER Wed-P-7-2-4 — Acoustic Cues to the Singleton-Geminate Contrast: The Case of Libyan Arabic Sonorants]]</div>|<div class="cpsessionviewpapertitle">Acoustic Cues to the Singleton-Geminate Contrast: The Case of Libyan Arabic Sonorants</div><div class="cpsessionviewpaperauthor">[[Amel Issa|AUTHOR Amel Issa]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170838.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-2-5|PAPER Wed-P-7-2-5 — Mel-Cepstral Distortion of German Vowels in Different Information Density Contexts]]</div>|<div class="cpsessionviewpapertitle">Mel-Cepstral Distortion of German Vowels in Different Information Density Contexts</div><div class="cpsessionviewpaperauthor">[[Erika Brandt|AUTHOR Erika Brandt]], [[Frank Zimmerer|AUTHOR Frank Zimmerer]], [[Bistra Andreeva|AUTHOR Bistra Andreeva]], [[Bernd Möbius|AUTHOR Bernd Möbius]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-2-6|PAPER Wed-P-7-2-6 — Effect of Formant and F0 Discontinuity on Perceived Vowel Duration: Impacts for Concatenative Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Effect of Formant and F0 Discontinuity on Perceived Vowel Duration: Impacts for Concatenative Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Tomáš Bořil|AUTHOR Tomáš Bořil]], [[Pavel Šturm|AUTHOR Pavel Šturm]], [[Radek Skarnitzl|AUTHOR Radek Skarnitzl]], [[Jan Volín|AUTHOR Jan Volín]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170578.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-2-7|PAPER Wed-P-7-2-7 — An Ultrasound Study of Alveolar and Retroflex Consonants in Arrernte: Stressed and Unstressed Syllables]]</div>|<div class="cpsessionviewpapertitle">An Ultrasound Study of Alveolar and Retroflex Consonants in Arrernte: Stressed and Unstressed Syllables</div><div class="cpsessionviewpaperauthor">[[Marija Tabain|AUTHOR Marija Tabain]], [[Richard Beare|AUTHOR Richard Beare]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171140.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-2-8|PAPER Wed-P-7-2-8 — Reshaping the Transformed LF Model: Generating the Glottal Source from the Waveshape Parameter R,,d,,]]</div>|<div class="cpsessionviewpapertitle">Reshaping the Transformed LF Model: Generating the Glottal Source from the Waveshape Parameter R,,d,,</div><div class="cpsessionviewpaperauthor">[[Christer Gobl|AUTHOR Christer Gobl]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170722.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-2-9|PAPER Wed-P-7-2-9 — Kinematic Signatures of Prosody in Lombard Speech]]</div>|<div class="cpsessionviewpapertitle">Kinematic Signatures of Prosody in Lombard Speech</div><div class="cpsessionviewpaperauthor">[[Štefan Beňuš|AUTHOR Štefan Beňuš]], [[Juraj Šimko|AUTHOR Juraj Šimko]], [[Mona Lehtinen|AUTHOR Mona Lehtinen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171285.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-2-10|PAPER Wed-P-7-2-10 — What do Finnish and Central Bavarian Have in Common? Towards an Acoustically Based Quantity Typology]]</div>|<div class="cpsessionviewpapertitle">What do Finnish and Central Bavarian Have in Common? Towards an Acoustically Based Quantity Typology</div><div class="cpsessionviewpaperauthor">[[Markus Jochim|AUTHOR Markus Jochim]], [[Felicitas Kleber|AUTHOR Felicitas Kleber]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171027.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-2-11|PAPER Wed-P-7-2-11 — Locating Burst Onsets Using SFF Envelope and Phase Information]]</div>|<div class="cpsessionviewpapertitle">Locating Burst Onsets Using SFF Envelope and Phase Information</div><div class="cpsessionviewpaperauthor">[[Bhanu Teja Nellore|AUTHOR Bhanu Teja Nellore]], [[RaviShankar Prasad|AUTHOR RaviShankar Prasad]], [[Sudarsana Reddy Kadiri|AUTHOR Sudarsana Reddy Kadiri]], [[Suryakanth V. Gangashetty|AUTHOR Suryakanth V. Gangashetty]], [[B. Yegnanarayana|AUTHOR B. Yegnanarayana]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170876.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-2-12|PAPER Wed-P-7-2-12 — A Preliminary Phonetic Investigation of Alphabetic Words in Mandarin Chinese]]</div>|<div class="cpsessionviewpapertitle">A Preliminary Phonetic Investigation of Alphabetic Words in Mandarin Chinese</div><div class="cpsessionviewpaperauthor">[[Hongwei Ding|AUTHOR Hongwei Ding]], [[Yuanyuan Zhang|AUTHOR Yuanyuan Zhang]], [[Hongchao Liu|AUTHOR Hongchao Liu]], [[Chu-Ren Huang|AUTHOR Chu-Ren Huang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171306.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-2-13|PAPER Wed-P-7-2-13 — A Quantitative Measure of the Impact of Coarticulation on Phone Discriminability]]</div>|<div class="cpsessionviewpapertitle">A Quantitative Measure of the Impact of Coarticulation on Phone Discriminability</div><div class="cpsessionviewpaperauthor">[[Thomas Schatz|AUTHOR Thomas Schatz]], [[Rory Turnbull|AUTHOR Rory Turnbull]], [[Francis Bach|AUTHOR Francis Bach]], [[Emmanuel Dupoux|AUTHOR Emmanuel Dupoux]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Wednesday, 23 Aug. 2017, Poster 3|<|
|Chair: |Prasanta Ghosh, Unto Laine|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-3-1|PAPER Wed-P-7-3-1 — Sinusoidal Partials Tracking for Singing Analysis Using the Heuristic of the Minimal Frequency and Magnitude Difference]]</div>|<div class="cpsessionviewpapertitle">Sinusoidal Partials Tracking for Singing Analysis Using the Heuristic of the Minimal Frequency and Magnitude Difference</div><div class="cpsessionviewpaperauthor">[[Kin Wah Edward Lin|AUTHOR Kin Wah Edward Lin]], [[Hans Anderson|AUTHOR Hans Anderson]], [[Clifford So|AUTHOR Clifford So]], [[Simon Lui|AUTHOR Simon Lui]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170101.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-3-2|PAPER Wed-P-7-3-2 — Audio Scene Classification with Deep Recurrent Neural Networks]]</div>|<div class="cpsessionviewpapertitle">Audio Scene Classification with Deep Recurrent Neural Networks</div><div class="cpsessionviewpaperauthor">[[Huy Phan|AUTHOR Huy Phan]], [[Philipp Koch|AUTHOR Philipp Koch]], [[Fabrice Katzberg|AUTHOR Fabrice Katzberg]], [[Marco Maass|AUTHOR Marco Maass]], [[Radoslaw Mazur|AUTHOR Radoslaw Mazur]], [[Alfred Mertins|AUTHOR Alfred Mertins]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170119.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-3-3|PAPER Wed-P-7-3-3 — Automatic Time-Frequency Analysis of Echolocation Signals Using the Matched Gaussian Multitaper Spectrogram]]</div>|<div class="cpsessionviewpapertitle">Automatic Time-Frequency Analysis of Echolocation Signals Using the Matched Gaussian Multitaper Spectrogram</div><div class="cpsessionviewpaperauthor">[[Maria Sandsten|AUTHOR Maria Sandsten]], [[Isabella Reinhold|AUTHOR Isabella Reinhold]], [[Josefin Starkhammar|AUTHOR Josefin Starkhammar]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170213.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-3-4|PAPER Wed-P-7-3-4 — Classification-Based Detection of Glottal Closure Instants from Speech Signals]]</div>|<div class="cpsessionviewpapertitle">Classification-Based Detection of Glottal Closure Instants from Speech Signals</div><div class="cpsessionviewpaperauthor">[[Jindřich Matoušek|AUTHOR Jindřich Matoušek]], [[Daniel Tihelka|AUTHOR Daniel Tihelka]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170222.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-3-5|PAPER Wed-P-7-3-5 — A Domain Knowledge-Assisted Nonlinear Model for Head-Related Transfer Functions Based on Bottleneck Deep Neural Network]]</div>|<div class="cpsessionviewpapertitle">A Domain Knowledge-Assisted Nonlinear Model for Head-Related Transfer Functions Based on Bottleneck Deep Neural Network</div><div class="cpsessionviewpaperauthor">[[Xiaoke Qi|AUTHOR Xiaoke Qi]], [[Jianhua Tao|AUTHOR Jianhua Tao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170315.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-3-6|PAPER Wed-P-7-3-6 — Laryngeal Articulation During Trumpet Performance: An Exploratory Study]]</div>|<div class="cpsessionviewpapertitle">Laryngeal Articulation During Trumpet Performance: An Exploratory Study</div><div class="cpsessionviewpaperauthor">[[Luis M.T. Jesus|AUTHOR Luis M.T. Jesus]], [[Bruno Rocha|AUTHOR Bruno Rocha]], [[Andreia Hall|AUTHOR Andreia Hall]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170395.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-3-7|PAPER Wed-P-7-3-7 — Matrix of Polynomials Model Based Polynomial Dictionary Learning Method for Acoustic Impulse Response Modeling]]</div>|<div class="cpsessionviewpapertitle">Matrix of Polynomials Model Based Polynomial Dictionary Learning Method for Acoustic Impulse Response Modeling</div><div class="cpsessionviewpaperauthor">[[Jian Guan|AUTHOR Jian Guan]], [[Xuan Wang|AUTHOR Xuan Wang]], [[Pengming Feng|AUTHOR Pengming Feng]], [[Jing Dong|AUTHOR Jing Dong]], [[Wenwu Wang|AUTHOR Wenwu Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170431.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-3-8|PAPER Wed-P-7-3-8 — Acoustic Scene Classification Using a CNN-SuperVector System Trained with Auditory and Spectrogram Image Features]]</div>|<div class="cpsessionviewpapertitle">Acoustic Scene Classification Using a CNN-SuperVector System Trained with Auditory and Spectrogram Image Features</div><div class="cpsessionviewpaperauthor">[[Rakib Hyder|AUTHOR Rakib Hyder]], [[Shabnam Ghaffarzadegan|AUTHOR Shabnam Ghaffarzadegan]], [[Zhe Feng|AUTHOR Zhe Feng]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]], [[Taufiq Hasan|AUTHOR Taufiq Hasan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170485.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-3-9|PAPER Wed-P-7-3-9 — An Environmental Feature Representation for Robust Speech Recognition and for Environment Identification]]</div>|<div class="cpsessionviewpapertitle">An Environmental Feature Representation for Robust Speech Recognition and for Environment Identification</div><div class="cpsessionviewpaperauthor">[[Xue Feng|AUTHOR Xue Feng]], [[Brigitte Richardson|AUTHOR Brigitte Richardson]], [[Scott Amman|AUTHOR Scott Amman]], [[James Glass|AUTHOR James Glass]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170486.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-3-10|PAPER Wed-P-7-3-10 — Attention and Localization Based on a Deep Convolutional Recurrent Model for Weakly Supervised Audio Tagging]]</div>|<div class="cpsessionviewpapertitle">Attention and Localization Based on a Deep Convolutional Recurrent Model for Weakly Supervised Audio Tagging</div><div class="cpsessionviewpaperauthor">[[Yong Xu|AUTHOR Yong Xu]], [[Qiuqiang Kong|AUTHOR Qiuqiang Kong]], [[Qiang Huang|AUTHOR Qiang Huang]], [[Wenwu Wang|AUTHOR Wenwu Wang]], [[Mark D. Plumbley|AUTHOR Mark D. Plumbley]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170866.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-3-11|PAPER Wed-P-7-3-11 — An Audio Based Piano Performance Evaluation Method Using Deep Neural Network Based Acoustic Modeling]]</div>|<div class="cpsessionviewpapertitle">An Audio Based Piano Performance Evaluation Method Using Deep Neural Network Based Acoustic Modeling</div><div class="cpsessionviewpaperauthor">[[Jing Pan|AUTHOR Jing Pan]], [[Ming Li|AUTHOR Ming Li]], [[Zhanmei Song|AUTHOR Zhanmei Song]], [[Xin Li|AUTHOR Xin Li]], [[Xiaolin Liu|AUTHOR Xiaolin Liu]], [[Hua Yi|AUTHOR Hua Yi]], [[Manman Zhu|AUTHOR Manman Zhu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-3-12|PAPER Wed-P-7-3-12 — Music Tempo Estimation Using Sub-Band Synchrony]]</div>|<div class="cpsessionviewpapertitle">Music Tempo Estimation Using Sub-Band Synchrony</div><div class="cpsessionviewpaperauthor">[[Shreyan Chowdhury|AUTHOR Shreyan Chowdhury]], [[Tanaya Guha|AUTHOR Tanaya Guha]], [[Rajesh M. Hegde|AUTHOR Rajesh M. Hegde]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171469.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-3-13|PAPER Wed-P-7-3-13 — A Transfer Learning Based Feature Extractor for Polyphonic Sound Event Detection Using Connectionist Temporal Classification]]</div>|<div class="cpsessionviewpapertitle">A Transfer Learning Based Feature Extractor for Polyphonic Sound Event Detection Using Connectionist Temporal Classification</div><div class="cpsessionviewpaperauthor">[[Yun Wang|AUTHOR Yun Wang]], [[Florian Metze|AUTHOR Florian Metze]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171590.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-3-14|PAPER Wed-P-7-3-14 — A Note Based Query By Humming System Using Convolutional Neural Network]]</div>|<div class="cpsessionviewpapertitle">A Note Based Query By Humming System Using Convolutional Neural Network</div><div class="cpsessionviewpaperauthor">[[Naziba Mostafa|AUTHOR Naziba Mostafa]], [[Pascale Fung|AUTHOR Pascale Fung]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170831.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-3-15|PAPER Wed-P-7-3-15 — Unsupervised Filterbank Learning Using Convolutional Restricted Boltzmann Machine for Environmental Sound Classification]]</div>|<div class="cpsessionviewpapertitle">Unsupervised Filterbank Learning Using Convolutional Restricted Boltzmann Machine for Environmental Sound Classification</div><div class="cpsessionviewpaperauthor">[[Hardik B. Sailor|AUTHOR Hardik B. Sailor]], [[Dharmesh M. Agrawal|AUTHOR Dharmesh M. Agrawal]], [[Hemant A. Patil|AUTHOR Hemant A. Patil]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171422.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-3-16|PAPER Wed-P-7-3-16 — Novel Shifted Real Spectrum for Exact Signal Reconstruction]]</div>|<div class="cpsessionviewpapertitle">Novel Shifted Real Spectrum for Exact Signal Reconstruction</div><div class="cpsessionviewpaperauthor">[[Meet H. Soni|AUTHOR Meet H. Soni]], [[Rishabh Tak|AUTHOR Rishabh Tak]], [[Hemant A. Patil|AUTHOR Hemant A. Patil]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Wednesday, 23 Aug. 2017, Poster 4|<|
|Chair: |Jan Rusz|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170112.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-4-1|PAPER Wed-P-7-4-1 — Manual and Automatic Transcriptions in Dementia Detection from Speech]]</div>|<div class="cpsessionviewpapertitle">Manual and Automatic Transcriptions in Dementia Detection from Speech</div><div class="cpsessionviewpaperauthor">[[Jochen Weiner|AUTHOR Jochen Weiner]], [[Mathis Engelbart|AUTHOR Mathis Engelbart]], [[Tanja Schultz|AUTHOR Tanja Schultz]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170120.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-4-2|PAPER Wed-P-7-4-2 — An Affect Prediction Approach Through Depression Severity Parameter Incorporation in Neural Networks]]</div>|<div class="cpsessionviewpapertitle">An Affect Prediction Approach Through Depression Severity Parameter Incorporation in Neural Networks</div><div class="cpsessionviewpaperauthor">[[Rahul Gupta|AUTHOR Rahul Gupta]], [[Saurabh Sahu|AUTHOR Saurabh Sahu]], [[Carol Espy-Wilson|AUTHOR Carol Espy-Wilson]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170216.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-4-3|PAPER Wed-P-7-4-3 — Cross-Database Models for the Classification of Dysarthria Presence]]</div>|<div class="cpsessionviewpapertitle">Cross-Database Models for the Classification of Dysarthria Presence</div><div class="cpsessionviewpaperauthor">[[Stephanie Gillespie|AUTHOR Stephanie Gillespie]], [[Yash-Yee Logan|AUTHOR Yash-Yee Logan]], [[Elliot Moore|AUTHOR Elliot Moore]], [[Jacqueline Laures-Gore|AUTHOR Jacqueline Laures-Gore]], [[Scott Russell|AUTHOR Scott Russell]], [[Rupal Patel|AUTHOR Rupal Patel]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170381.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-4-4|PAPER Wed-P-7-4-4 — Acoustic Evaluation of Nasality in Cerebellar Syndromes]]</div>|<div class="cpsessionviewpapertitle">Acoustic Evaluation of Nasality in Cerebellar Syndromes</div><div class="cpsessionviewpaperauthor">[[M. Novotný|AUTHOR M. Novotný]], [[Jan Rusz|AUTHOR Jan Rusz]], [[K. Spálenka|AUTHOR K. Spálenka]], [[Jiří Klempíř|AUTHOR Jiří Klempíř]], [[D. Horáková|AUTHOR D. Horáková]], [[Evžen Růžička|AUTHOR Evžen Růžička]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170409.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-4-5|PAPER Wed-P-7-4-5 — Emotional Speech of Mentally and Physically Disabled Individuals: Introducing the EmotAsS Database and First Findings]]</div>|<div class="cpsessionviewpapertitle">Emotional Speech of Mentally and Physically Disabled Individuals: Introducing the EmotAsS Database and First Findings</div><div class="cpsessionviewpaperauthor">[[Simone Hantke|AUTHOR Simone Hantke]], [[Hesam Sagha|AUTHOR Hesam Sagha]], [[Nicholas Cummins|AUTHOR Nicholas Cummins]], [[Björn Schuller|AUTHOR Björn Schuller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170621.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-4-6|PAPER Wed-P-7-4-6 — Phonological Markers of Oxytocin and MDMA Ingestion]]</div>|<div class="cpsessionviewpapertitle">Phonological Markers of Oxytocin and MDMA Ingestion</div><div class="cpsessionviewpaperauthor">[[Carla Agurto|AUTHOR Carla Agurto]], [[Raquel Norel|AUTHOR Raquel Norel]], [[Rachel Ostrand|AUTHOR Rachel Ostrand]], [[Gillinder Bedi|AUTHOR Gillinder Bedi]], [[Harriet de Wit|AUTHOR Harriet de Wit]], [[Matthew J. Baggott|AUTHOR Matthew J. Baggott]], [[Matthew G. Kirkpatrick|AUTHOR Matthew G. Kirkpatrick]], [[Margaret Wardle|AUTHOR Margaret Wardle]], [[Guillermo A. Cecchi|AUTHOR Guillermo A. Cecchi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-4-7|PAPER Wed-P-7-4-7 — An Avatar-Based System for Identifying Individuals Likely to Develop Dementia]]</div>|<div class="cpsessionviewpapertitle">An Avatar-Based System for Identifying Individuals Likely to Develop Dementia</div><div class="cpsessionviewpaperauthor">[[Bahman Mirheidari|AUTHOR Bahman Mirheidari]], [[Daniel Blackburn|AUTHOR Daniel Blackburn]], [[Kirsty Harkness|AUTHOR Kirsty Harkness]], [[Traci Walker|AUTHOR Traci Walker]], [[Annalena Venneri|AUTHOR Annalena Venneri]], [[Markus Reuber|AUTHOR Markus Reuber]], [[Heidi Christensen|AUTHOR Heidi Christensen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171015.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-4-8|PAPER Wed-P-7-4-8 — Cross-Domain Classification of Drowsiness in Speech: The Case of Alcohol Intoxication and Sleep Deprivation]]</div>|<div class="cpsessionviewpapertitle">Cross-Domain Classification of Drowsiness in Speech: The Case of Alcohol Intoxication and Sleep Deprivation</div><div class="cpsessionviewpaperauthor">[[Yue Zhang|AUTHOR Yue Zhang]], [[Felix Weninger|AUTHOR Felix Weninger]], [[Björn Schuller|AUTHOR Björn Schuller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171201.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-4-9|PAPER Wed-P-7-4-9 — Depression Detection Using Automatic Transcriptions of De-Identified Speech]]</div>|<div class="cpsessionviewpapertitle">Depression Detection Using Automatic Transcriptions of De-Identified Speech</div><div class="cpsessionviewpaperauthor">[[Paula Lopez-Otero|AUTHOR Paula Lopez-Otero]], [[Laura Docio-Fernandez|AUTHOR Laura Docio-Fernandez]], [[Alberto Abad|AUTHOR Alberto Abad]], [[Carmen Garcia-Mateo|AUTHOR Carmen Garcia-Mateo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171572.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-4-10|PAPER Wed-P-7-4-10 — An N-Gram Based Approach to the Automatic Diagnosis of Alzheimer’s Disease from Spoken Language]]</div>|<div class="cpsessionviewpapertitle">An N-Gram Based Approach to the Automatic Diagnosis of Alzheimer’s Disease from Spoken Language</div><div class="cpsessionviewpaperauthor">[[Sebastian Wankerl|AUTHOR Sebastian Wankerl]], [[Elmar Nöth|AUTHOR Elmar Nöth]], [[Stefan Evert|AUTHOR Stefan Evert]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171599.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-4-11|PAPER Wed-P-7-4-11 — Exploiting Intra-Annotator Rating Consistency Through Copeland’s Method for Estimation of Ground Truth Labels in Couples’ Therapy]]</div>|<div class="cpsessionviewpapertitle">Exploiting Intra-Annotator Rating Consistency Through Copeland’s Method for Estimation of Ground Truth Labels in Couples’ Therapy</div><div class="cpsessionviewpaperauthor">[[Karel Mundnich|AUTHOR Karel Mundnich]], [[Md. Nasir|AUTHOR Md. Nasir]], [[Panayiotis Georgiou|AUTHOR Panayiotis Georgiou]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170850.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-7-4-12|PAPER Wed-P-7-4-12 — Rhythmic Characteristics of Parkinsonian Speech: A Study on Mandarin and Polish]]</div>|<div class="cpsessionviewpapertitle">Rhythmic Characteristics of Parkinsonian Speech: A Study on Mandarin and Polish</div><div class="cpsessionviewpaperauthor">[[Massimo Pettorino|AUTHOR Massimo Pettorino]], [[Wentao Gu|AUTHOR Wentao Gu]], [[Paweł Półrola|AUTHOR Paweł Półrola]], [[Ping Fan|AUTHOR Ping Fan]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Wednesday, 23 Aug. 2017, Poster 1|<|
|Chair: |Stefanie Jannedy|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170291.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-1-1|PAPER Wed-P-8-1-1 — Trisyllabic Tone 3 Sandhi Patterns in Mandarin Produced by Cantonese Speakers]]</div>|<div class="cpsessionviewpapertitle">Trisyllabic Tone 3 Sandhi Patterns in Mandarin Produced by Cantonese Speakers</div><div class="cpsessionviewpaperauthor">[[Jung-Yueh Tu|AUTHOR Jung-Yueh Tu]], [[Janice Wing-Sze Wong|AUTHOR Janice Wing-Sze Wong]], [[Jih-Ho Cha|AUTHOR Jih-Ho Cha]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170840.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-1-2|PAPER Wed-P-8-1-2 — Intonation of Contrastive Topic in Estonian]]</div>|<div class="cpsessionviewpapertitle">Intonation of Contrastive Topic in Estonian</div><div class="cpsessionviewpaperauthor">[[Heete Sahkai|AUTHOR Heete Sahkai]], [[Meelis Mihkla|AUTHOR Meelis Mihkla]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171235.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-1-3|PAPER Wed-P-8-1-3 — Reanalyze Fundamental Frequency Peak Delay in Mandarin]]</div>|<div class="cpsessionviewpapertitle">Reanalyze Fundamental Frequency Peak Delay in Mandarin</div><div class="cpsessionviewpaperauthor">[[Lixia Hao|AUTHOR Lixia Hao]], [[Wei Zhang|AUTHOR Wei Zhang]], [[Yanlu Xie|AUTHOR Yanlu Xie]], [[Jinsong Zhang|AUTHOR Jinsong Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171430.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-1-4|PAPER Wed-P-8-1-4 — How Does the Absence of Shared Knowledge Between Interlocutors Affect the Production of French Prosodic Forms?]]</div>|<div class="cpsessionviewpapertitle">How Does the Absence of Shared Knowledge Between Interlocutors Affect the Production of French Prosodic Forms?</div><div class="cpsessionviewpaperauthor">[[Amandine Michelas|AUTHOR Amandine Michelas]], [[Cecile Cau|AUTHOR Cecile Cau]], [[Maud Champagne-Lavau|AUTHOR Maud Champagne-Lavau]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171500.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-1-5|PAPER Wed-P-8-1-5 — Three Dimensions of Sentence Prosody and Their (Non-)Interactions]]</div>|<div class="cpsessionviewpapertitle">Three Dimensions of Sentence Prosody and Their (Non-)Interactions</div><div class="cpsessionviewpaperauthor">[[Michael Wagner|AUTHOR Michael Wagner]], [[Michael McAuliffe|AUTHOR Michael McAuliffe]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170710.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-1-6|PAPER Wed-P-8-1-6 — Using Prosody to Classify Discourse Relations]]</div>|<div class="cpsessionviewpapertitle">Using Prosody to Classify Discourse Relations</div><div class="cpsessionviewpaperauthor">[[Janine Kleinhans|AUTHOR Janine Kleinhans]], [[Mireia Farrús|AUTHOR Mireia Farrús]], [[Agustín Gravano|AUTHOR Agustín Gravano]], [[Juan Manuel Pérez|AUTHOR Juan Manuel Pérez]], [[Catherine Lai|AUTHOR Catherine Lai]], [[Leo Wanner|AUTHOR Leo Wanner]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171585.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-1-7|PAPER Wed-P-8-1-7 — Canonical Correlation Analysis and Prediction of Perceived Rhythmic Prominences and Pitch Tones in Speech]]</div>|<div class="cpsessionviewpapertitle">Canonical Correlation Analysis and Prediction of Perceived Rhythmic Prominences and Pitch Tones in Speech</div><div class="cpsessionviewpaperauthor">[[Elizabeth Godoy|AUTHOR Elizabeth Godoy]], [[James R. Williamson|AUTHOR James R. Williamson]], [[Thomas F. Quatieri|AUTHOR Thomas F. Quatieri]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171237.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-1-8|PAPER Wed-P-8-1-8 — Evaluation of Spectral Tilt Measures for Sentence Prominence Under Different Noise Conditions]]</div>|<div class="cpsessionviewpapertitle">Evaluation of Spectral Tilt Measures for Sentence Prominence Under Different Noise Conditions</div><div class="cpsessionviewpaperauthor">[[Sofoklis Kakouros|AUTHOR Sofoklis Kakouros]], [[Okko Räsänen|AUTHOR Okko Räsänen]], [[Paavo Alku|AUTHOR Paavo Alku]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171578.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-1-9|PAPER Wed-P-8-1-9 — Creaky Voice as a Function of Tonal Categories and Prosodic Boundaries]]</div>|<div class="cpsessionviewpapertitle">Creaky Voice as a Function of Tonal Categories and Prosodic Boundaries</div><div class="cpsessionviewpaperauthor">[[Jianjing Kuang|AUTHOR Jianjing Kuang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170417.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-1-10|PAPER Wed-P-8-1-10 — The Acoustics of Word Stress in Czech as a Function of Speaking Style]]</div>|<div class="cpsessionviewpapertitle">The Acoustics of Word Stress in Czech as a Function of Speaking Style</div><div class="cpsessionviewpaperauthor">[[Radek Skarnitzl|AUTHOR Radek Skarnitzl]], [[Anders Eriksson|AUTHOR Anders Eriksson]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170177.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-1-11|PAPER Wed-P-8-1-11 — What You See is What You Get Prosodically Less — Visibility Shapes Prosodic Prominence Production in Spontaneous Interaction]]</div>|<div class="cpsessionviewpapertitle">What You See is What You Get Prosodically Less — Visibility Shapes Prosodic Prominence Production in Spontaneous Interaction</div><div class="cpsessionviewpaperauthor">[[Petra Wagner|AUTHOR Petra Wagner]], [[Nataliya Bryhadyr|AUTHOR Nataliya Bryhadyr]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171167.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-1-12|PAPER Wed-P-8-1-12 — Focus Acoustics in Mandarin Nominals]]</div>|<div class="cpsessionviewpapertitle">Focus Acoustics in Mandarin Nominals</div><div class="cpsessionviewpaperauthor">[[Yu-Yin Hsu|AUTHOR Yu-Yin Hsu]], [[Anqi Xu|AUTHOR Anqi Xu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171502.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-1-13|PAPER Wed-P-8-1-13 — Exploring Multidimensionality: Acoustic and Articulatory Correlates of Swedish Word Accents]]</div>|<div class="cpsessionviewpapertitle">Exploring Multidimensionality: Acoustic and Articulatory Correlates of Swedish Word Accents</div><div class="cpsessionviewpaperauthor">[[Malin Svensson Lundmark|AUTHOR Malin Svensson Lundmark]], [[Gilbert Ambrazaitis|AUTHOR Gilbert Ambrazaitis]], [[Otto Ewald|AUTHOR Otto Ewald]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171279.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-1-14|PAPER Wed-P-8-1-14 — The Perception of English Intonation Patterns by German L2 Speakers of English]]</div>|<div class="cpsessionviewpapertitle">The Perception of English Intonation Patterns by German L2 Speakers of English</div><div class="cpsessionviewpaperauthor">[[Karin Puga|AUTHOR Karin Puga]], [[Robert Fuchs|AUTHOR Robert Fuchs]], [[Jane Setter|AUTHOR Jane Setter]], [[Peggy Mok|AUTHOR Peggy Mok]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Wednesday, 23 Aug. 2017, Poster 2|<|
|Chair: |Emily Provost|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170104.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-2-1|PAPER Wed-P-8-2-1 — The Perception of Emotions in Noisified Nonsense Speech]]</div>|<div class="cpsessionviewpapertitle">The Perception of Emotions in Noisified Nonsense Speech</div><div class="cpsessionviewpaperauthor">[[Emilia Parada-Cabaleiro|AUTHOR Emilia Parada-Cabaleiro]], [[Alice Baird|AUTHOR Alice Baird]], [[Anton Batliner|AUTHOR Anton Batliner]], [[Nicholas Cummins|AUTHOR Nicholas Cummins]], [[Simone Hantke|AUTHOR Simone Hantke]], [[Björn Schuller|AUTHOR Björn Schuller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170218.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-2-2|PAPER Wed-P-8-2-2 — Attention Networks for Modeling Behaviors in Addiction Counseling]]</div>|<div class="cpsessionviewpapertitle">Attention Networks for Modeling Behaviors in Addiction Counseling</div><div class="cpsessionviewpaperauthor">[[James Gibson|AUTHOR James Gibson]], [[Doğan Can|AUTHOR Doğan Can]], [[Panayiotis Georgiou|AUTHOR Panayiotis Georgiou]], [[David C. Atkins|AUTHOR David C. Atkins]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170466.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-2-3|PAPER Wed-P-8-2-3 — Computational Analysis of Acoustic Descriptors in Psychotic Patients]]</div>|<div class="cpsessionviewpapertitle">Computational Analysis of Acoustic Descriptors in Psychotic Patients</div><div class="cpsessionviewpaperauthor">[[Torsten Wörtwein|AUTHOR Torsten Wörtwein]], [[Tadas Baltrušaitis|AUTHOR Tadas Baltrušaitis]], [[Eugene Laksana|AUTHOR Eugene Laksana]], [[Luciana Pennant|AUTHOR Luciana Pennant]], [[Elizabeth S. Liebson|AUTHOR Elizabeth S. Liebson]], [[Dost Öngür|AUTHOR Dost Öngür]], [[Justin T. Baker|AUTHOR Justin T. Baker]], [[Louis-Philippe Morency|AUTHOR Louis-Philippe Morency]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170562.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-2-4|PAPER Wed-P-8-2-4 — Modeling Perceivers Neural-Responses Using Lobe-Dependent Convolutional Neural Network to Improve Speech Emotion Recognition]]</div>|<div class="cpsessionviewpapertitle">Modeling Perceivers Neural-Responses Using Lobe-Dependent Convolutional Neural Network to Improve Speech Emotion Recognition</div><div class="cpsessionviewpaperauthor">[[Ya-Tse Wu|AUTHOR Ya-Tse Wu]], [[Hsuan-Yu Chen|AUTHOR Hsuan-Yu Chen]], [[Yu-Hsien Liao|AUTHOR Yu-Hsien Liao]], [[Li-Wei Kuo|AUTHOR Li-Wei Kuo]], [[Chi-Chun Lee|AUTHOR Chi-Chun Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-2-5|PAPER Wed-P-8-2-5 — Implementing Gender-Dependent Vowel-Level Analysis for Boosting Speech-Based Depression Recognition]]</div>|<div class="cpsessionviewpapertitle">Implementing Gender-Dependent Vowel-Level Analysis for Boosting Speech-Based Depression Recognition</div><div class="cpsessionviewpaperauthor">[[Bogdan Vlasenko|AUTHOR Bogdan Vlasenko]], [[Hesam Sagha|AUTHOR Hesam Sagha]], [[Nicholas Cummins|AUTHOR Nicholas Cummins]], [[Björn Schuller|AUTHOR Björn Schuller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171379.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-2-6|PAPER Wed-P-8-2-6 — Bilingual Word Embeddings for Cross-Lingual Personality Recognition Using Convolutional Neural Nets]]</div>|<div class="cpsessionviewpapertitle">Bilingual Word Embeddings for Cross-Lingual Personality Recognition Using Convolutional Neural Nets</div><div class="cpsessionviewpaperauthor">[[Farhad Bin Siddique|AUTHOR Farhad Bin Siddique]], [[Pascale Fung|AUTHOR Pascale Fung]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170994.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-2-7|PAPER Wed-P-8-2-7 — Emotion Category Mapping to Emotional Space by Cross-Corpus Emotion Labeling]]</div>|<div class="cpsessionviewpapertitle">Emotion Category Mapping to Emotional Space by Cross-Corpus Emotion Labeling</div><div class="cpsessionviewpaperauthor">[[Yoshiko Arimoto|AUTHOR Yoshiko Arimoto]], [[Hiroki Mori|AUTHOR Hiroki Mori]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171194.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-2-8|PAPER Wed-P-8-2-8 — Big Five vs. Prosodic Features as Cues to Detect Abnormality in SSPNET-Personality Corpus]]</div>|<div class="cpsessionviewpapertitle">Big Five vs. Prosodic Features as Cues to Detect Abnormality in SSPNET-Personality Corpus</div><div class="cpsessionviewpaperauthor">[[Cedric Fayet|AUTHOR Cedric Fayet]], [[Arnaud Delhay|AUTHOR Arnaud Delhay]], [[Damien Lolive|AUTHOR Damien Lolive]], [[Pierre-François Marteau|AUTHOR Pierre-François Marteau]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171584.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-2-9|PAPER Wed-P-8-2-9 — Speech Rate Comparison When Talking to a System and Talking to a Human: A Study from a Speech-to-Speech, Machine Translation Mediated Map Task]]</div>|<div class="cpsessionviewpapertitle">Speech Rate Comparison When Talking to a System and Talking to a Human: A Study from a Speech-to-Speech, Machine Translation Mediated Map Task</div><div class="cpsessionviewpaperauthor">[[Hayakawa Akira|AUTHOR Hayakawa Akira]], [[Carl Vogel|AUTHOR Carl Vogel]], [[Saturnino Luz|AUTHOR Saturnino Luz]], [[Nick Campbell|AUTHOR Nick Campbell]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171621.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-2-10|PAPER Wed-P-8-2-10 — Approaching Human Performance in Behavior Estimation in Couples Therapy Using Deep Sentence Embeddings]]</div>|<div class="cpsessionviewpapertitle">Approaching Human Performance in Behavior Estimation in Couples Therapy Using Deep Sentence Embeddings</div><div class="cpsessionviewpaperauthor">[[Shao-Yen Tseng|AUTHOR Shao-Yen Tseng]], [[Brian Baucom|AUTHOR Brian Baucom]], [[Panayiotis Georgiou|AUTHOR Panayiotis Georgiou]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171641.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-2-11|PAPER Wed-P-8-2-11 — Complexity in Speech and its Relation to Emotional Bond in Therapist-Patient Interactions During Suicide Risk Assessment Interviews]]</div>|<div class="cpsessionviewpapertitle">Complexity in Speech and its Relation to Emotional Bond in Therapist-Patient Interactions During Suicide Risk Assessment Interviews</div><div class="cpsessionviewpaperauthor">[[Md. Nasir|AUTHOR Md. Nasir]], [[Brian Baucom|AUTHOR Brian Baucom]], [[Craig J. Bryan|AUTHOR Craig J. Bryan]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]], [[Panayiotis Georgiou|AUTHOR Panayiotis Georgiou]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171707.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-2-12|PAPER Wed-P-8-2-12 — An Investigation of Emotion Dynamics and Kalman Filtering for Speech-Based Emotion Prediction]]</div>|<div class="cpsessionviewpapertitle">An Investigation of Emotion Dynamics and Kalman Filtering for Speech-Based Emotion Prediction</div><div class="cpsessionviewpaperauthor">[[Zhaocheng Huang|AUTHOR Zhaocheng Huang]], [[Julien Epps|AUTHOR Julien Epps]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Wednesday, 23 Aug. 2017, Poster 3|<|
|Chair: |Jose David Lopes, Heriberto Cuayahuitl|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170638.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-3-1|PAPER Wed-P-8-3-1 — Zero-Shot Learning for Natural Language Understanding Using Domain-Independent Sequential Structure and Question Types]]</div>|<div class="cpsessionviewpapertitle">Zero-Shot Learning for Natural Language Understanding Using Domain-Independent Sequential Structure and Question Types</div><div class="cpsessionviewpaperauthor">[[Kugatsu Sadamitsu|AUTHOR Kugatsu Sadamitsu]], [[Yukinori Homma|AUTHOR Yukinori Homma]], [[Ryuichiro Higashinaka|AUTHOR Ryuichiro Higashinaka]], [[Yoshihiro Matsuo|AUTHOR Yoshihiro Matsuo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170269.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-3-2|PAPER Wed-P-8-3-2 — Parallel Hierarchical Attention Networks with Shared Memory Reader for Multi-Stream Conversational Document Classification]]</div>|<div class="cpsessionviewpapertitle">Parallel Hierarchical Attention Networks with Shared Memory Reader for Multi-Stream Conversational Document Classification</div><div class="cpsessionviewpaperauthor">[[Naoki Sawada|AUTHOR Naoki Sawada]], [[Ryo Masumura|AUTHOR Ryo Masumura]], [[Hiromitsu Nishizaki|AUTHOR Hiromitsu Nishizaki]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170357.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-3-3|PAPER Wed-P-8-3-3 — Internal Memory Gate for Recurrent Neural Networks with Application to Spoken Language Understanding]]</div>|<div class="cpsessionviewpapertitle">Internal Memory Gate for Recurrent Neural Networks with Application to Spoken Language Understanding</div><div class="cpsessionviewpaperauthor">[[Mohamed Morchid|AUTHOR Mohamed Morchid]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170422.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-3-4|PAPER Wed-P-8-3-4 — Character-Based Embedding Models and Reranking Strategies for Understanding Natural Language Meal Descriptions]]</div>|<div class="cpsessionviewpapertitle">Character-Based Embedding Models and Reranking Strategies for Understanding Natural Language Meal Descriptions</div><div class="cpsessionviewpaperauthor">[[Mandy Korpusik|AUTHOR Mandy Korpusik]], [[Zachary Collins|AUTHOR Zachary Collins]], [[James Glass|AUTHOR James Glass]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171029.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-3-5|PAPER Wed-P-8-3-5 — Quaternion Denoising Encoder-Decoder for Theme Identification of Telephone Conversations]]</div>|<div class="cpsessionviewpapertitle">Quaternion Denoising Encoder-Decoder for Theme Identification of Telephone Conversations</div><div class="cpsessionviewpaperauthor">[[Titouan Parcollet|AUTHOR Titouan Parcollet]], [[Mohamed Morchid|AUTHOR Mohamed Morchid]], [[Georges Linarès|AUTHOR Georges Linarès]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171178.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-3-6|PAPER Wed-P-8-3-6 — ASR Error Management for Improving Spoken Language Understanding]]</div>|<div class="cpsessionviewpapertitle">ASR Error Management for Improving Spoken Language Understanding</div><div class="cpsessionviewpaperauthor">[[Edwin Simonnet|AUTHOR Edwin Simonnet]], [[Sahar Ghannay|AUTHOR Sahar Ghannay]], [[Nathalie Camelin|AUTHOR Nathalie Camelin]], [[Yannick Estève|AUTHOR Yannick Estève]], [[Renato De Mori|AUTHOR Renato De Mori]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171321.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-3-7|PAPER Wed-P-8-3-7 — Jointly Trained Sequential Labeling and Classification by Sparse Attention Neural Networks]]</div>|<div class="cpsessionviewpapertitle">Jointly Trained Sequential Labeling and Classification by Sparse Attention Neural Networks</div><div class="cpsessionviewpaperauthor">[[Mingbo Ma|AUTHOR Mingbo Ma]], [[Kai Zhao|AUTHOR Kai Zhao]], [[Liang Huang|AUTHOR Liang Huang]], [[Bing Xiang|AUTHOR Bing Xiang]], [[Bowen Zhou|AUTHOR Bowen Zhou]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171525.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-3-8|PAPER Wed-P-8-3-8 — To Plan or not to Plan? Discourse Planning in Slot-Value Informed Sequence to Sequence Models for Language Generation]]</div>|<div class="cpsessionviewpapertitle">To Plan or not to Plan? Discourse Planning in Slot-Value Informed Sequence to Sequence Models for Language Generation</div><div class="cpsessionviewpaperauthor">[[Neha Nayak|AUTHOR Neha Nayak]], [[Dilek Hakkani-Tür|AUTHOR Dilek Hakkani-Tür]], [[Marilyn Walker|AUTHOR Marilyn Walker]], [[Larry Heck|AUTHOR Larry Heck]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-3-9|PAPER Wed-P-8-3-9 — Online Adaptation of an Attention-Based Neural Network for Natural Language Generation]]</div>|<div class="cpsessionviewpapertitle">Online Adaptation of an Attention-Based Neural Network for Natural Language Generation</div><div class="cpsessionviewpaperauthor">[[Matthieu Riou|AUTHOR Matthieu Riou]], [[Bassam Jabaian|AUTHOR Bassam Jabaian]], [[Stéphane Huet|AUTHOR Stéphane Huet]], [[Fabrice Lefèvre|AUTHOR Fabrice Lefèvre]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170275.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-3-10|PAPER Wed-P-8-3-10 — Spanish Sign Language Recognition with Different Topology Hidden Markov Models]]</div>|<div class="cpsessionviewpapertitle">Spanish Sign Language Recognition with Different Topology Hidden Markov Models</div><div class="cpsessionviewpaperauthor">[[Carlos-D. Martínez-Hinarejos|AUTHOR Carlos-D. Martínez-Hinarejos]], [[Zuzanna Parcheta|AUTHOR Zuzanna Parcheta]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171382.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-3-11|PAPER Wed-P-8-3-11 — OpenMM: An Open-Source Multimodal Feature Extraction Tool]]</div>|<div class="cpsessionviewpapertitle">OpenMM: An Open-Source Multimodal Feature Extraction Tool</div><div class="cpsessionviewpaperauthor">[[Michelle Renee Morales|AUTHOR Michelle Renee Morales]], [[Stefan Scherer|AUTHOR Stefan Scherer]], [[Rivka Levitan|AUTHOR Rivka Levitan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171496.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-3-12|PAPER Wed-P-8-3-12 — Speaker Dependency Analysis, Audiovisual Fusion Cues and a Multimodal BLSTM for Conversational Engagement Recognition]]</div>|<div class="cpsessionviewpapertitle">Speaker Dependency Analysis, Audiovisual Fusion Cues and a Multimodal BLSTM for Conversational Engagement Recognition</div><div class="cpsessionviewpaperauthor">[[Yuyun Huang|AUTHOR Yuyun Huang]], [[Emer Gilmartin|AUTHOR Emer Gilmartin]], [[Nick Campbell|AUTHOR Nick Campbell]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Wednesday, 23 Aug. 2017, Poster 4|<|
|Chair: |Chandra Sekhar Seelamantula|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170063.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-4-1|PAPER Wed-P-8-4-1 — Voice Conversion from Unaligned Corpora Using Variational Autoencoding Wasserstein Generative Adversarial Networks]]</div>|<div class="cpsessionviewpapertitle">Voice Conversion from Unaligned Corpora Using Variational Autoencoding Wasserstein Generative Adversarial Networks</div><div class="cpsessionviewpaperauthor">[[Chin-Cheng Hsu|AUTHOR Chin-Cheng Hsu]], [[Hsin-Te Hwang|AUTHOR Hsin-Te Hwang]], [[Yi-Chiao Wu|AUTHOR Yi-Chiao Wu]], [[Yu Tsao|AUTHOR Yu Tsao]], [[Hsin-Min Wang|AUTHOR Hsin-Min Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170133.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-4-2|PAPER Wed-P-8-4-2 — CAB: An Energy-Based Speaker Clustering Model for Rapid Adaptation in Non-Parallel Voice Conversion]]</div>|<div class="cpsessionviewpapertitle">CAB: An Energy-Based Speaker Clustering Model for Rapid Adaptation in Non-Parallel Voice Conversion</div><div class="cpsessionviewpaperauthor">[[Toru Nakashika|AUTHOR Toru Nakashika]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170664.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-4-3|PAPER Wed-P-8-4-3 — Phoneme-Discriminative Features for Dysarthric Speech Conversion]]</div>|<div class="cpsessionviewpapertitle">Phoneme-Discriminative Features for Dysarthric Speech Conversion</div><div class="cpsessionviewpaperauthor">[[Ryo Aihara|AUTHOR Ryo Aihara]], [[Tetsuya Takiguchi|AUTHOR Tetsuya Takiguchi]], [[Yasuo Ariki|AUTHOR Yasuo Ariki]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170694.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-4-4|PAPER Wed-P-8-4-4 — Denoising Recurrent Neural Network for Deep Bidirectional LSTM Based Voice Conversion]]</div>|<div class="cpsessionviewpapertitle">Denoising Recurrent Neural Network for Deep Bidirectional LSTM Based Voice Conversion</div><div class="cpsessionviewpaperauthor">[[Jie Wu|AUTHOR Jie Wu]], [[D.-Y. Huang|AUTHOR D.-Y. Huang]], [[Lei Xie|AUTHOR Lei Xie]], [[Haizhou Li|AUTHOR Haizhou Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170841.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-4-5|PAPER Wed-P-8-4-5 — Speaker Dependent Approach for Enhancing a Glossectomy Patient’s Speech via GMM-Based Voice Conversion]]</div>|<div class="cpsessionviewpapertitle">Speaker Dependent Approach for Enhancing a Glossectomy Patient’s Speech via GMM-Based Voice Conversion</div><div class="cpsessionviewpaperauthor">[[Kei Tanaka|AUTHOR Kei Tanaka]], [[Sunao Hara|AUTHOR Sunao Hara]], [[Masanobu Abe|AUTHOR Masanobu Abe]], [[Masaaki Sato|AUTHOR Masaaki Sato]], [[Shogo Minagi|AUTHOR Shogo Minagi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170962.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-4-6|PAPER Wed-P-8-4-6 — Generative Adversarial Network-Based Postfilter for STFT Spectrograms]]</div>|<div class="cpsessionviewpapertitle">Generative Adversarial Network-Based Postfilter for STFT Spectrograms</div><div class="cpsessionviewpaperauthor">[[Takuhiro Kaneko|AUTHOR Takuhiro Kaneko]], [[Shinji Takaki|AUTHOR Shinji Takaki]], [[Hirokazu Kameoka|AUTHOR Hirokazu Kameoka]], [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171288.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-4-7|PAPER Wed-P-8-4-7 — Generative Adversarial Network-Based Glottal Waveform Model for Statistical Parametric Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Generative Adversarial Network-Based Glottal Waveform Model for Statistical Parametric Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Bajibabu Bollepalli|AUTHOR Bajibabu Bollepalli]], [[Lauri Juvela|AUTHOR Lauri Juvela]], [[Paavo Alku|AUTHOR Paavo Alku]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170984.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-4-8|PAPER Wed-P-8-4-8 — Emotional Voice Conversion with Adaptive Scales F0 Based on Wavelet Transform Using Limited Amount of Emotional Data]]</div>|<div class="cpsessionviewpapertitle">Emotional Voice Conversion with Adaptive Scales F0 Based on Wavelet Transform Using Limited Amount of Emotional Data</div><div class="cpsessionviewpaperauthor">[[Zhaojie Luo|AUTHOR Zhaojie Luo]], [[Jinhui Chen|AUTHOR Jinhui Chen]], [[Tetsuya Takiguchi|AUTHOR Tetsuya Takiguchi]], [[Yasuo Ariki|AUTHOR Yasuo Ariki]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171038.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-4-9|PAPER Wed-P-8-4-9 — Speaker Adaptation in DNN-Based Speech Synthesis Using d-Vectors]]</div>|<div class="cpsessionviewpapertitle">Speaker Adaptation in DNN-Based Speech Synthesis Using d-Vectors</div><div class="cpsessionviewpaperauthor">[[Rama Doddipatla|AUTHOR Rama Doddipatla]], [[Norbert Braunschweiler|AUTHOR Norbert Braunschweiler]], [[Ranniery Maia|AUTHOR Ranniery Maia]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171122.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-4-10|PAPER Wed-P-8-4-10 — Spectro-Temporal Modelling with Time-Frequency LSTM and Structured Output Layer for Voice Conversion]]</div>|<div class="cpsessionviewpapertitle">Spectro-Temporal Modelling with Time-Frequency LSTM and Structured Output Layer for Voice Conversion</div><div class="cpsessionviewpaperauthor">[[Runnan Li|AUTHOR Runnan Li]], [[Zhiyong Wu|AUTHOR Zhiyong Wu]], [[Yishuang Ning|AUTHOR Yishuang Ning]], [[Lifa Sun|AUTHOR Lifa Sun]], [[Helen Meng|AUTHOR Helen Meng]], [[Lianhong Cai|AUTHOR Lianhong Cai]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171538.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-P-8-4-11|PAPER Wed-P-8-4-11 — Segment Level Voice Conversion with Recurrent Neural Networks]]</div>|<div class="cpsessionviewpapertitle">Segment Level Voice Conversion with Recurrent Neural Networks</div><div class="cpsessionviewpaperauthor">[[Miguel Varela Ramos|AUTHOR Miguel Varela Ramos]], [[Alan W. Black|AUTHOR Alan W. Black]], [[Ramon Fernandez Astudillo|AUTHOR Ramon Fernandez Astudillo]], [[Isabel Trancoso|AUTHOR Isabel Trancoso]], [[Nuno Fonseca|AUTHOR Nuno Fonseca]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, 13:30–15:30, Wednesday, 23 Aug. 2017, E306|<|
| ||
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172022.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-S&T-6-A-1|PAPER Wed-S&T-6-A-1 — Creating a Voice for MiRo, the World’s First Commercial Biomimetic Robot]]</div>|<div class="cpsessionviewpapertitle">Creating a Voice for MiRo, the World’s First Commercial Biomimetic Robot</div><div class="cpsessionviewpaperauthor">[[Roger K. Moore|AUTHOR Roger K. Moore]], [[Ben Mitchinson|AUTHOR Ben Mitchinson]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172023.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-S&T-6-A-2|PAPER Wed-S&T-6-A-2 — A Thematicity-Based Prosody Enrichment Tool for CTS]]</div>|<div class="cpsessionviewpapertitle">A Thematicity-Based Prosody Enrichment Tool for CTS</div><div class="cpsessionviewpaperauthor">[[Mónica Domínguez|AUTHOR Mónica Domínguez]], [[Mireia Farrús|AUTHOR Mireia Farrús]], [[Leo Wanner|AUTHOR Leo Wanner]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172024.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-S&T-6-A-3|PAPER Wed-S&T-6-A-3 — WebSubDub — Experimental System for Creating High-Quality Alternative Audio Track for TV Broadcasting]]</div>|<div class="cpsessionviewpapertitle">WebSubDub — Experimental System for Creating High-Quality Alternative Audio Track for TV Broadcasting</div><div class="cpsessionviewpaperauthor">[[Martin Grůber|AUTHOR Martin Grůber]], [[Jindřich Matoušek|AUTHOR Jindřich Matoušek]], [[Zdeněk Hanzlíček|AUTHOR Zdeněk Hanzlíček]], [[Jakub Vít|AUTHOR Jakub Vít]], [[Daniel Tihelka|AUTHOR Daniel Tihelka]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172026.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-S&T-6-A-4|PAPER Wed-S&T-6-A-4 — Voice Conservation and TTS System for People Facing Total Laryngectomy]]</div>|<div class="cpsessionviewpapertitle">Voice Conservation and TTS System for People Facing Total Laryngectomy</div><div class="cpsessionviewpaperauthor">[[Markéta Jůzová|AUTHOR Markéta Jůzová]], [[Daniel Tihelka|AUTHOR Daniel Tihelka]], [[Jindřich Matoušek|AUTHOR Jindřich Matoušek]], [[Zdeněk Hanzlíček|AUTHOR Zdeněk Hanzlíček]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172042.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-S&T-6-A-5|PAPER Wed-S&T-6-A-5 — TBT (Toolkit to Build TTS): A High Performance Framework to Build Multiple Language HTS Voice]]</div>|<div class="cpsessionviewpapertitle">TBT (Toolkit to Build TTS): A High Performance Framework to Build Multiple Language HTS Voice</div><div class="cpsessionviewpaperauthor">[[Atish Shankar Ghone|AUTHOR Atish Shankar Ghone]], [[Rachana Nerpagar|AUTHOR Rachana Nerpagar]], [[Pranaw Kumar|AUTHOR Pranaw Kumar]], [[Arun Baby|AUTHOR Arun Baby]], [[Aswin Shanmugam|AUTHOR Aswin Shanmugam]], [[Sasikumar M.|AUTHOR Sasikumar M.]], [[Hema A. Murthy|AUTHOR Hema A. Murthy]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-S&T-6-A-6|PAPER Wed-S&T-6-A-6 — SIAK — A Game for Foreign Language Pronunciation Learning]]</div>|<div class="cpsessionviewpapertitle">SIAK — A Game for Foreign Language Pronunciation Learning</div><div class="cpsessionviewpaperauthor">[[Reima Karhila|AUTHOR Reima Karhila]], [[Sari Ylinen|AUTHOR Sari Ylinen]], [[Seppo Enarvi|AUTHOR Seppo Enarvi]], [[Kalle Palomäki|AUTHOR Kalle Palomäki]], [[Aleksander Nikulin|AUTHOR Aleksander Nikulin]], [[Olli Rantula|AUTHOR Olli Rantula]], [[Vertti Viitanen|AUTHOR Vertti Viitanen]], [[Krupakar Dhinakaran|AUTHOR Krupakar Dhinakaran]], [[Anna-Riikka Smolander|AUTHOR Anna-Riikka Smolander]], [[Heini Kallio|AUTHOR Heini Kallio]], [[Katja Junttila|AUTHOR Katja Junttila]], [[Maria Uther|AUTHOR Maria Uther]], [[Perttu Hämäläinen|AUTHOR Perttu Hämäläinen]], [[Mikko Kurimo|AUTHOR Mikko Kurimo]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, 13:30–15:30, Wednesday, 23 Aug. 2017, E397|<|
| ||
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172029.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-S&T-6-B-1|PAPER Wed-S&T-6-B-1 — Integrating the Talkamatic Dialogue Manager with Alexa]]</div>|<div class="cpsessionviewpapertitle">Integrating the Talkamatic Dialogue Manager with Alexa</div><div class="cpsessionviewpaperauthor">[[Staffan Larsson|AUTHOR Staffan Larsson]], [[Alex Berman|AUTHOR Alex Berman]], [[Andreas Krona|AUTHOR Andreas Krona]], [[Fredrik Kronlid|AUTHOR Fredrik Kronlid]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172031.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-S&T-6-B-2|PAPER Wed-S&T-6-B-2 — A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator]]</div>|<div class="cpsessionviewpapertitle">A Robust Medical Speech-to-Speech/Speech-to-Sign Phraselator</div><div class="cpsessionviewpaperauthor">[[Farhia Ahmed|AUTHOR Farhia Ahmed]], [[Pierrette Bouillon|AUTHOR Pierrette Bouillon]], [[Chelle Destefano|AUTHOR Chelle Destefano]], [[Johanna Gerlach|AUTHOR Johanna Gerlach]], [[Sonia Halimi|AUTHOR Sonia Halimi]], [[Angela Hooper|AUTHOR Angela Hooper]], [[Manny Rayner|AUTHOR Manny Rayner]], [[Hervé Spechbach|AUTHOR Hervé Spechbach]], [[Irene Strasly|AUTHOR Irene Strasly]], [[Nikos Tsourakis|AUTHOR Nikos Tsourakis]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-S&T-6-B-3|PAPER Wed-S&T-6-B-3 — Towards an Autarkic Embedded Cognitive User Interface]]</div>|<div class="cpsessionviewpapertitle">Towards an Autarkic Embedded Cognitive User Interface</div><div class="cpsessionviewpaperauthor">[[Frank Duckhorn|AUTHOR Frank Duckhorn]], [[Markus Huber|AUTHOR Markus Huber]], [[Werner Meyer|AUTHOR Werner Meyer]], [[Oliver Jokisch|AUTHOR Oliver Jokisch]], [[Constanze Tschöpe|AUTHOR Constanze Tschöpe]], [[Matthias Wolff|AUTHOR Matthias Wolff]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-S&T-6-B-4|PAPER Wed-S&T-6-B-4 — Nora the Empathetic Psychologist]]</div>|<div class="cpsessionviewpapertitle">Nora the Empathetic Psychologist</div><div class="cpsessionviewpaperauthor">[[Genta Indra Winata|AUTHOR Genta Indra Winata]], [[Onno Kampman|AUTHOR Onno Kampman]], [[Yang Yang|AUTHOR Yang Yang]], [[Anik Dey|AUTHOR Anik Dey]], [[Pascale Fung|AUTHOR Pascale Fung]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS172057.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-S&T-6-B-5|PAPER Wed-S&T-6-B-5 — Modifying Amazon’s Alexa ASR Grammar and Lexicon — A Case Study]]</div>|<div class="cpsessionviewpapertitle">Modifying Amazon’s Alexa ASR Grammar and Lexicon — A Case Study</div><div class="cpsessionviewpaperauthor">[[Hassan Alam|AUTHOR Hassan Alam]], [[Aman Kumar|AUTHOR Aman Kumar]], [[Manan Vyas|AUTHOR Manan Vyas]], [[Tina Werner|AUTHOR Tina Werner]], [[Rachmat Hartono|AUTHOR Rachmat Hartono]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Wednesday, 23 Aug. 2017, F11|<|
|Chair: |Elika Bergelson, Sho Tsuji|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170636.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-6-11-1|PAPER Wed-SS-6-11-1 — SLPAnnotator: Tools for Implementing Sign Language Phonetic Annotation]]</div>|<div class="cpsessionviewpapertitle">SLPAnnotator: Tools for Implementing Sign Language Phonetic Annotation</div><div class="cpsessionviewpaperauthor">[[Kathleen Currie Hall|AUTHOR Kathleen Currie Hall]], [[Scott Mackie|AUTHOR Scott Mackie]], [[Michael Fry|AUTHOR Michael Fry]], [[Oksana Tkachman|AUTHOR Oksana Tkachman]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171287.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-6-11-2|PAPER Wed-SS-6-11-2 — The LENA System Applied to Swedish: Reliability of the Adult Word Count Estimate]]</div>|<div class="cpsessionviewpapertitle">The LENA System Applied to Swedish: Reliability of the Adult Word Count Estimate</div><div class="cpsessionviewpaperauthor">[[Iris-Corinna Schwarz|AUTHOR Iris-Corinna Schwarz]], [[Noor Botros|AUTHOR Noor Botros]], [[Alekzandra Lord|AUTHOR Alekzandra Lord]], [[Amelie Marcusson|AUTHOR Amelie Marcusson]], [[Henrik Tidelius|AUTHOR Henrik Tidelius]], [[Ellen Marklund|AUTHOR Ellen Marklund]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171409.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-6-11-3|PAPER Wed-SS-6-11-3 — What do Babies Hear? Analyses of Child- and Adult-Directed Speech]]</div>|<div class="cpsessionviewpapertitle">What do Babies Hear? Analyses of Child- and Adult-Directed Speech</div><div class="cpsessionviewpaperauthor">[[Marisa Casillas|AUTHOR Marisa Casillas]], [[Andrei Amatuni|AUTHOR Andrei Amatuni]], [[Amanda Seidl|AUTHOR Amanda Seidl]], [[Melanie Soderstrom|AUTHOR Melanie Soderstrom]], [[Anne S. Warlaumont|AUTHOR Anne S. Warlaumont]], [[Elika Bergelson|AUTHOR Elika Bergelson]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171418.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-6-11-4|PAPER Wed-SS-6-11-4 — A New Workflow for Semi-Automatized Annotations: Tests with Long-Form Naturalistic Recordings of Childrens Language Environments]]</div>|<div class="cpsessionviewpapertitle">A New Workflow for Semi-Automatized Annotations: Tests with Long-Form Naturalistic Recordings of Children’s Language Environments</div><div class="cpsessionviewpaperauthor">[[Marisa Casillas|AUTHOR Marisa Casillas]], [[Elika Bergelson|AUTHOR Elika Bergelson]], [[Anne S. Warlaumont|AUTHOR Anne S. Warlaumont]], [[Alejandrina Cristia|AUTHOR Alejandrina Cristia]], [[Melanie Soderstrom|AUTHOR Melanie Soderstrom]], [[Mark VanDam|AUTHOR Mark VanDam]], [[Han Sloetjes|AUTHOR Han Sloetjes]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171443.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-6-11-5|PAPER Wed-SS-6-11-5 — Top-Down versus Bottom-Up Theories of Phonological Acquisition: A Big Data Approach]]</div>|<div class="cpsessionviewpapertitle">Top-Down versus Bottom-Up Theories of Phonological Acquisition: A Big Data Approach</div><div class="cpsessionviewpaperauthor">[[Christina Bergmann|AUTHOR Christina Bergmann]], [[Sho Tsuji|AUTHOR Sho Tsuji]], [[Alejandrina Cristia|AUTHOR Alejandrina Cristia]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171468.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-6-11-6|PAPER Wed-SS-6-11-6 — Which Acoustic and Phonological Factors Shape Infants’ Vowel Discrimination? Exploiting Natural Variation in InPhonDB]]</div>|<div class="cpsessionviewpapertitle">Which Acoustic and Phonological Factors Shape Infants’ Vowel Discrimination? Exploiting Natural Variation in InPhonDB</div><div class="cpsessionviewpaperauthor">[[Sho Tsuji|AUTHOR Sho Tsuji]], [[Alejandrina Cristia|AUTHOR Alejandrina Cristia]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|10:00–12:00, Wednesday, 23 Aug. 2017, A2|<|
|Chair: |Alexey Karpov, Kristiina Jokinen|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-6-2-1|PAPER Wed-SS-6-2-1 — Team ELISA System for DARPA LORELEI Speech Evaluation 2016]]</div>|<div class="cpsessionviewpapertitle">Team ELISA System for DARPA LORELEI Speech Evaluation 2016</div><div class="cpsessionviewpaperauthor">[[Pavlos Papadopoulos|AUTHOR Pavlos Papadopoulos]], [[Ruchir Travadi|AUTHOR Ruchir Travadi]], [[Colin Vaz|AUTHOR Colin Vaz]], [[Nikolaos Malandrakis|AUTHOR Nikolaos Malandrakis]], [[Ulf Hermjakob|AUTHOR Ulf Hermjakob]], [[Nima Pourdamghani|AUTHOR Nima Pourdamghani]], [[Michael Pust|AUTHOR Michael Pust]], [[Boliang Zhang|AUTHOR Boliang Zhang]], [[Xiaoman Pan|AUTHOR Xiaoman Pan]], [[Di Lu|AUTHOR Di Lu]], [[Ying Lin|AUTHOR Ying Lin]], [[Ondřej Glembek|AUTHOR Ondřej Glembek]], [[Murali Karthick Baskar|AUTHOR Murali Karthick Baskar]], [[Martin Karafiát|AUTHOR Martin Karafiát]], [[Lukáš Burget|AUTHOR Lukáš Burget]], [[Mark Hasegawa-Johnson|AUTHOR Mark Hasegawa-Johnson]], [[Heng Ji|AUTHOR Heng Ji]], [[Jonathan May|AUTHOR Jonathan May]], [[Kevin Knight|AUTHOR Kevin Knight]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171558.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-6-2-2|PAPER Wed-SS-6-2-2 — First Results in Developing a Medieval Latin Language Charter Dictation System for the East-Central Europe Region]]</div>|<div class="cpsessionviewpapertitle">First Results in Developing a Medieval Latin Language Charter Dictation System for the East-Central Europe Region</div><div class="cpsessionviewpaperauthor">[[Péter Mihajlik|AUTHOR Péter Mihajlik]], [[Lili Szabó|AUTHOR Lili Szabó]], [[Balázs Tarján|AUTHOR Balázs Tarján]], [[András Balog|AUTHOR András Balog]], [[Krisztina Rábai|AUTHOR Krisztina Rábai]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170215.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-6-2-3|PAPER Wed-SS-6-2-3 — The Motivation and Development of MPAi, a Māori Pronunciation Aid]]</div>|<div class="cpsessionviewpapertitle">The Motivation and Development of MPAi, a Māori Pronunciation Aid</div><div class="cpsessionviewpaperauthor">[[C.I. Watson|AUTHOR C.I. Watson]], [[P.J. Keegan|AUTHOR P.J. Keegan]], [[M.A. Maclagan|AUTHOR M.A. Maclagan]], [[R. Harlow|AUTHOR R. Harlow]], [[J. King|AUTHOR J. King]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170300.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-6-2-4|PAPER Wed-SS-6-2-4 — On the Linguistic Relevance of Speech Units Learned by Unsupervised Acoustic Modeling]]</div>|<div class="cpsessionviewpapertitle">On the Linguistic Relevance of Speech Units Learned by Unsupervised Acoustic Modeling</div><div class="cpsessionviewpaperauthor">[[Siyuan Feng|AUTHOR Siyuan Feng]], [[Tan Lee|AUTHOR Tan Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170582.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-6-2-5|PAPER Wed-SS-6-2-5 — Deep Auto-Encoder Based Multi-Task Learning Using Probabilistic Transcriptions]]</div>|<div class="cpsessionviewpapertitle">Deep Auto-Encoder Based Multi-Task Learning Using Probabilistic Transcriptions</div><div class="cpsessionviewpaperauthor">[[Amit Das|AUTHOR Amit Das]], [[Mark Hasegawa-Johnson|AUTHOR Mark Hasegawa-Johnson]], [[Karel Veselý|AUTHOR Karel Veselý]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170160.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-6-2-6|PAPER Wed-SS-6-2-6 — Areal and Phylogenetic Features for Multilingual Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Areal and Phylogenetic Features for Multilingual Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Alexander Gutkin|AUTHOR Alexander Gutkin]], [[Richard Sproat|AUTHOR Richard Sproat]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Wednesday, 23 Aug. 2017, Poster 1|<|
|Chair: |Shyam Agrawal, Oddur Kjartansson|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171407.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-1-1|PAPER Wed-SS-7-1-1 — The ABAIR Initiative: Bringing Spoken Irish into the Digital Space]]</div>|<div class="cpsessionviewpapertitle">The ABAIR Initiative: Bringing Spoken Irish into the Digital Space</div><div class="cpsessionviewpaperauthor">[[Ailbhe Ní Chasaide|AUTHOR Ailbhe Ní Chasaide]], [[Neasa Ní Chiaráin|AUTHOR Neasa Ní Chiaráin]], [[Christoph Wendler|AUTHOR Christoph Wendler]], [[Harald Berthelsen|AUTHOR Harald Berthelsen]], [[Andy Murphy|AUTHOR Andy Murphy]], [[Christer Gobl|AUTHOR Christer Gobl]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170880.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-1-2|PAPER Wed-SS-7-1-2 — Very Low Resource Radio Browsing for Agile Developmental and Humanitarian Monitoring]]</div>|<div class="cpsessionviewpapertitle">Very Low Resource Radio Browsing for Agile Developmental and Humanitarian Monitoring</div><div class="cpsessionviewpaperauthor">[[Armin Saeb|AUTHOR Armin Saeb]], [[Raghav Menon|AUTHOR Raghav Menon]], [[Hugh Cameron|AUTHOR Hugh Cameron]], [[William Kibira|AUTHOR William Kibira]], [[John Quinn|AUTHOR John Quinn]], [[Thomas Niesler|AUTHOR Thomas Niesler]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170226.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-1-3|PAPER Wed-SS-7-1-3 — Extracting Situation Frames from Non-English Speech: Evaluation Framework and Pilot Results]]</div>|<div class="cpsessionviewpapertitle">Extracting Situation Frames from Non-English Speech: Evaluation Framework and Pilot Results</div><div class="cpsessionviewpaperauthor">[[Nikolaos Malandrakis|AUTHOR Nikolaos Malandrakis]], [[Ondřej Glembek|AUTHOR Ondřej Glembek]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170855.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-1-4|PAPER Wed-SS-7-1-4 — Eliciting Meaningful Units from Speech]]</div>|<div class="cpsessionviewpapertitle">Eliciting Meaningful Units from Speech</div><div class="cpsessionviewpaperauthor">[[Daniil Kocharov|AUTHOR Daniil Kocharov]], [[Tatiana Kachkovskaia|AUTHOR Tatiana Kachkovskaia]], [[Pavel Skrelin|AUTHOR Pavel Skrelin]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171476.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-1-5|PAPER Wed-SS-7-1-5 — Unsupervised Speech Signal to Symbol Transformation for Zero Resource Speech Applications]]</div>|<div class="cpsessionviewpapertitle">Unsupervised Speech Signal to Symbol Transformation for Zero Resource Speech Applications</div><div class="cpsessionviewpaperauthor">[[Saurabhchand Bhati|AUTHOR Saurabhchand Bhati]], [[Shekhar Nayak|AUTHOR Shekhar Nayak]], [[K. Sri Rama Murty|AUTHOR K. Sri Rama Murty]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170268.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-1-6|PAPER Wed-SS-7-1-6 — Machine Assisted Analysis of Vowel Length Contrasts in Wolof]]</div>|<div class="cpsessionviewpapertitle">Machine Assisted Analysis of Vowel Length Contrasts in Wolof</div><div class="cpsessionviewpaperauthor">[[Elodie Gauthier|AUTHOR Elodie Gauthier]], [[Laurent Besacier|AUTHOR Laurent Besacier]], [[Sylvie Voisin|AUTHOR Sylvie Voisin]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171262.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-1-7|PAPER Wed-SS-7-1-7 — Leveraging Text Data for Word Segmentation for Underresourced Languages]]</div>|<div class="cpsessionviewpapertitle">Leveraging Text Data for Word Segmentation for Underresourced Languages</div><div class="cpsessionviewpaperauthor">[[Thomas Glarner|AUTHOR Thomas Glarner]], [[Benedikt Boenninghoff|AUTHOR Benedikt Boenninghoff]], [[Oliver Walter|AUTHOR Oliver Walter]], [[Reinhold Haeb-Umbach|AUTHOR Reinhold Haeb-Umbach]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171129.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-1-8|PAPER Wed-SS-7-1-8 — Improving DNN Bluetooth Narrowband Acoustic Models by Cross-Bandwidth and Cross-Lingual Initialization]]</div>|<div class="cpsessionviewpapertitle">Improving DNN Bluetooth Narrowband Acoustic Models by Cross-Bandwidth and Cross-Lingual Initialization</div><div class="cpsessionviewpaperauthor">[[Xiaodan Zhuang|AUTHOR Xiaodan Zhuang]], [[Arnab Ghoshal|AUTHOR Arnab Ghoshal]], [[Antti-Veikko Rosti|AUTHOR Antti-Veikko Rosti]], [[Matthias Paulik|AUTHOR Matthias Paulik]], [[Daben Liu|AUTHOR Daben Liu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-1-9|PAPER Wed-SS-7-1-9 — Joint Estimation of Articulatory Features and Acoustic Models for Low-Resource Languages]]</div>|<div class="cpsessionviewpapertitle">Joint Estimation of Articulatory Features and Acoustic Models for Low-Resource Languages</div><div class="cpsessionviewpaperauthor">[[Basil Abraham|AUTHOR Basil Abraham]], [[S. Umesh|AUTHOR S. Umesh]], [[Neethu Mariam Joy|AUTHOR Neethu Mariam Joy]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171009.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-1-10|PAPER Wed-SS-7-1-10 — Transfer Learning and Distillation Techniques to Improve the Acoustic Modeling of Low Resource Languages]]</div>|<div class="cpsessionviewpapertitle">Transfer Learning and Distillation Techniques to Improve the Acoustic Modeling of Low Resource Languages</div><div class="cpsessionviewpaperauthor">[[Basil Abraham|AUTHOR Basil Abraham]], [[Tejaswi Seeram|AUTHOR Tejaswi Seeram]], [[S. Umesh|AUTHOR S. Umesh]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170903.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-1-11|PAPER Wed-SS-7-1-11 — Building an ASR Corpus Using Althingi’s Parliamentary Speeches]]</div>|<div class="cpsessionviewpapertitle">Building an ASR Corpus Using Althingi’s Parliamentary Speeches</div><div class="cpsessionviewpaperauthor">[[Inga Rún Helgadóttir|AUTHOR Inga Rún Helgadóttir]], [[Róbert Kjaran|AUTHOR Róbert Kjaran]], [[Anna Björk Nikulásdóttir|AUTHOR Anna Björk Nikulásdóttir]], [[Jón Guðnason|AUTHOR Jón Guðnason]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170928.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-1-12|PAPER Wed-SS-7-1-12 — Implementation of a Radiology Speech Recognition System for Estonian Using Open Source Software]]</div>|<div class="cpsessionviewpapertitle">Implementation of a Radiology Speech Recognition System for Estonian Using Open Source Software</div><div class="cpsessionviewpaperauthor">[[Tanel Alumäe|AUTHOR Tanel Alumäe]], [[Andrus Paats|AUTHOR Andrus Paats]], [[Ivo Fridolin|AUTHOR Ivo Fridolin]], [[Einar Meister|AUTHOR Einar Meister]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171352.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-1-13|PAPER Wed-SS-7-1-13 — Building ASR Corpora Using Eyra]]</div>|<div class="cpsessionviewpapertitle">Building ASR Corpora Using Eyra</div><div class="cpsessionviewpaperauthor">[[Jón Guðnason|AUTHOR Jón Guðnason]], [[Matthías Pétursson|AUTHOR Matthías Pétursson]], [[Róbert Kjaran|AUTHOR Róbert Kjaran]], [[Simon Klüpfel|AUTHOR Simon Klüpfel]], [[Anna Björk Nikulásdóttir|AUTHOR Anna Björk Nikulásdóttir]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171139.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-1-14|PAPER Wed-SS-7-1-14 — Rapid Development of TTS Corpora for Four South African Languages]]</div>|<div class="cpsessionviewpapertitle">Rapid Development of TTS Corpora for Four South African Languages</div><div class="cpsessionviewpaperauthor">[[Daniel van Niekerk|AUTHOR Daniel van Niekerk]], [[Charl van Heerden|AUTHOR Charl van Heerden]], [[Marelie Davel|AUTHOR Marelie Davel]], [[Neil Kleynhans|AUTHOR Neil Kleynhans]], [[Oddur Kjartansson|AUTHOR Oddur Kjartansson]], [[Martin Jansche|AUTHOR Martin Jansche]], [[Linne Ha|AUTHOR Linne Ha]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170037.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-1-15|PAPER Wed-SS-7-1-15 — Uniform Multilingual Multi-Speaker Acoustic Model for Statistical Parametric Speech Synthesis of Low-Resourced Languages]]</div>|<div class="cpsessionviewpapertitle">Uniform Multilingual Multi-Speaker Acoustic Model for Statistical Parametric Speech Synthesis of Low-Resourced Languages</div><div class="cpsessionviewpaperauthor">[[Alexander Gutkin|AUTHOR Alexander Gutkin]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171398.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-1-16|PAPER Wed-SS-7-1-16 — Nativization of Foreign Names in TTS for Automatic Reading of World News in Swahili]]</div>|<div class="cpsessionviewpapertitle">Nativization of Foreign Names in TTS for Automatic Reading of World News in Swahili</div><div class="cpsessionviewpaperauthor">[[Joseph Mendelson|AUTHOR Joseph Mendelson]], [[Pilar Oplustil|AUTHOR Pilar Oplustil]], [[Oliver Watts|AUTHOR Oliver Watts]], [[Simon King|AUTHOR Simon King]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Wednesday, 23 Aug. 2017, F11|<|
|Chair: |Alejandrina Cristia, Kristina Nilsson Björkenstam|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170520.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-11-1|PAPER Wed-SS-7-11-1 — Multi-Task Learning for Mispronunciation Detection on Singapore Children’s Mandarin Speech]]</div>|<div class="cpsessionviewpapertitle">Multi-Task Learning for Mispronunciation Detection on Singapore Children’s Mandarin Speech</div><div class="cpsessionviewpaperauthor">[[Rong Tong|AUTHOR Rong Tong]], [[Nancy F. Chen|AUTHOR Nancy F. Chen]], [[Bin Ma|AUTHOR Bin Ma]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170937.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-11-2|PAPER Wed-SS-7-11-2 — Relating Unsupervised Word Segmentation to Reported Vocabulary Acquisition]]</div>|<div class="cpsessionviewpapertitle">Relating Unsupervised Word Segmentation to Reported Vocabulary Acquisition</div><div class="cpsessionviewpaperauthor">[[Elin Larsen|AUTHOR Elin Larsen]], [[Alejandrina Cristia|AUTHOR Alejandrina Cristia]], [[Emmanuel Dupoux|AUTHOR Emmanuel Dupoux]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171143.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-11-3|PAPER Wed-SS-7-11-3 — Modelling the Informativeness of Non-Verbal Cues in Parent-Child Interaction]]</div>|<div class="cpsessionviewpapertitle">Modelling the Informativeness of Non-Verbal Cues in Parent-Child Interaction</div><div class="cpsessionviewpaperauthor">[[Mats Wirén|AUTHOR Mats Wirén]], [[Kristina N. Björkenstam|AUTHOR Kristina N. Björkenstam]], [[Robert Östling|AUTHOR Robert Östling]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171289.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-11-4|PAPER Wed-SS-7-11-4 — Computational Simulations of Temporal Vocalization Behavior in Adult-Child Interaction]]</div>|<div class="cpsessionviewpapertitle">Computational Simulations of Temporal Vocalization Behavior in Adult-Child Interaction</div><div class="cpsessionviewpaperauthor">[[Ellen Marklund|AUTHOR Ellen Marklund]], [[David Pagmar|AUTHOR David Pagmar]], [[Tove Gerholm|AUTHOR Tove Gerholm]], [[Lisa Gustavsson|AUTHOR Lisa Gustavsson]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171634.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-11-5|PAPER Wed-SS-7-11-5 — Approximating Phonotactic Input in Children’s Linguistic Environments from Orthographic Transcripts]]</div>|<div class="cpsessionviewpapertitle">Approximating Phonotactic Input in Children’s Linguistic Environments from Orthographic Transcripts</div><div class="cpsessionviewpaperauthor">[[Sofia Strömbergsson|AUTHOR Sofia Strömbergsson]], [[Jens Edlund|AUTHOR Jens Edlund]], [[Jana Götze|AUTHOR Jana Götze]], [[Kristina Nilsson Björkenstam|AUTHOR Kristina Nilsson Björkenstam]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171689.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-7-11-6|PAPER Wed-SS-7-11-6 — Learning Weakly Supervised Multimodal Phoneme Embeddings]]</div>|<div class="cpsessionviewpapertitle">Learning Weakly Supervised Multimodal Phoneme Embeddings</div><div class="cpsessionviewpaperauthor">[[Rahma Chaabouni|AUTHOR Rahma Chaabouni]], [[Ewan Dunbar|AUTHOR Ewan Dunbar]], [[Neil Zeghidour|AUTHOR Neil Zeghidour]], [[Emmanuel Dupoux|AUTHOR Emmanuel Dupoux]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Wednesday, 23 Aug. 2017, F11|<|
|Chair: |Melissa Barkat-Defradas, Benjamin Weiss|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Wed-SS-8-11-10|PAPER Wed-SS-8-11-10 — Introduction]]</div>|<div class="cpsessionviewpapertitle">Introduction</div><div class="cpsessionviewpaperauthor"></div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170130.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-8-11-1|PAPER Wed-SS-8-11-1 — Personalized Quantification of Voice Attractiveness in Multidimensional Merit Space]]</div>|<div class="cpsessionviewpapertitle">Personalized Quantification of Voice Attractiveness in Multidimensional Merit Space</div><div class="cpsessionviewpaperauthor">[[Yasunari Obuchi|AUTHOR Yasunari Obuchi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170142.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-8-11-2|PAPER Wed-SS-8-11-2 — The Role of Temporal Amplitude Modulations in the Political Arena: Hillary Clinton vs. Donald Trump]]</div>|<div class="cpsessionviewpapertitle">The Role of Temporal Amplitude Modulations in the Political Arena: Hillary Clinton vs. Donald Trump</div><div class="cpsessionviewpaperauthor">[[Hans Rutger Bosker|AUTHOR Hans Rutger Bosker]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170326.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-8-11-3|PAPER Wed-SS-8-11-3 — Perceptual Ratings of Voice Likability Collected Through In-Lab Listening Tests vs. Mobile-Based Crowdsourcing]]</div>|<div class="cpsessionviewpapertitle">Perceptual Ratings of Voice Likability Collected Through In-Lab Listening Tests vs. Mobile-Based Crowdsourcing</div><div class="cpsessionviewpaperauthor">[[Laura Fernández Gallardo|AUTHOR Laura Fernández Gallardo]], [[Rafael Zequeira Jiménez|AUTHOR Rafael Zequeira Jiménez]], [[Sebastian Möller|AUTHOR Sebastian Möller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170367.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-8-11-4|PAPER Wed-SS-8-11-4 — Attractiveness of French Voices for German Listeners — Results from Native and Non-Native Read Speech]]</div>|<div class="cpsessionviewpapertitle">Attractiveness of French Voices for German Listeners — Results from Native and Non-Native Read Speech</div><div class="cpsessionviewpaperauthor">[[Jürgen Trouvain|AUTHOR Jürgen Trouvain]], [[Frank Zimmerer|AUTHOR Frank Zimmerer]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS170833.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-8-11-5|PAPER Wed-SS-8-11-5 — Social Attractiveness in Dialogs]]</div>|<div class="cpsessionviewpapertitle">Social Attractiveness in Dialogs</div><div class="cpsessionviewpaperauthor">[[Antje Schweitzer|AUTHOR Antje Schweitzer]], [[Natalie Lewandowski|AUTHOR Natalie Lewandowski]], [[Daniel Duran|AUTHOR Daniel Duran]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171349.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-8-11-6|PAPER Wed-SS-8-11-6 — A Gender Bias in the Acoustic-Melodic Features of Charismatic Speech?]]</div>|<div class="cpsessionviewpapertitle">A Gender Bias in the Acoustic-Melodic Features of Charismatic Speech?</div><div class="cpsessionviewpaperauthor">[[Eszter Novák-Tót|AUTHOR Eszter Novák-Tót]], [[Oliver Niebuhr|AUTHOR Oliver Niebuhr]], [[Aoju Chen|AUTHOR Aoju Chen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171520.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-8-11-7|PAPER Wed-SS-8-11-7 — Pitch Convergence as an Effect of Perceived Attractiveness and Likability]]</div>|<div class="cpsessionviewpapertitle">Pitch Convergence as an Effect of Perceived Attractiveness and Likability</div><div class="cpsessionviewpaperauthor">[[Jan Michalsky|AUTHOR Jan Michalsky]], [[Heike Schoormann|AUTHOR Heike Schoormann]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171691.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-8-11-8|PAPER Wed-SS-8-11-8 — Does Posh English Sound Attractive?]]</div>|<div class="cpsessionviewpapertitle">Does Posh English Sound Attractive?</div><div class="cpsessionviewpaperauthor">[[Li Jiao|AUTHOR Li Jiao]], [[Chengxia Wang|AUTHOR Chengxia Wang]], [[Cristiane Hsu|AUTHOR Cristiane Hsu]], [[Peter Birkholz|AUTHOR Peter Birkholz]], [[Yi Xu|AUTHOR Yi Xu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2017/PDF/AUTHOR/IS171697.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-SS-8-11-9|PAPER Wed-SS-8-11-9 — Large-Scale Speaker Ranking from Crowdsourced Pairwise Listener Ratings]]</div>|<div class="cpsessionviewpapertitle">Large-Scale Speaker Ranking from Crowdsourced Pairwise Listener Ratings</div><div class="cpsessionviewpaperauthor">[[Timo Baumann|AUTHOR Timo Baumann]]</div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Wed-SS-8-11-11|PAPER Wed-SS-8-11-11 — Discussion]]</div>|<div class="cpsessionviewpapertitle">Discussion</div><div class="cpsessionviewpaperauthor"></div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}}
</p></div>
<div class="cpsupportpage">
This HTML index file is based on the [ext[TiddlyWiki|http://www.tiddlywiki.com]] web application.
You can browse the table of contents, author index, and individual paper details, and launch the paper PDF file to a separate window.
</div>
|cpsupportpagetable|k
|cptightlineheight|k
|cptablecelltopbottomspace2|k
|PDF Reader |This publication has been designed for use with Adobe Reader 8 or later to view the PDF files.|
|^Support |If you have problems with this publication, please contact Causal Productions at:<div class="cpmailingaddress">Causal Productions Pty Ltd<br>PO Box<$link to="$:/causal/Causal Productions Configurator Control Panel"> </$link>100<br>Rundle Mall<br>SA 5000<br>Australia</div>|
|Phone |+61 8 8295 8200|
|Fax |+61 8 8295 8299|
|E-mail |[ext[info@causalproductions.com|mailto:info@causalproductions.com]]|
|Web |[ext[http://www.causalproductions.com|http://www.causalproductions.com]]|
\rules except wikilink
<div class="cppublicationname">INTERSPEECH 2017</div><div class="cppublicationdatevenue">August 20–24, 2017 ■ Stockholm, Sweden<span><a href="http://www.interspeech2017.org" target="_blank"><$button style="color:black">Web Site</$button></a></span></div>
|cpborderless|k
|cpwelcomepageconferencetable|k
|cph3|k
|<hr>|<|<|
| <div class="cpwelcomepagespaceaboveiconwithoutconferencename icon_size_on_welcome_page">{{$:/causal/image/IS2017 WELCOME.SVG}}</div> |<div class="cpwelcomepageconferencelinks">[[Conference Information]]<br>[[Session List]]<br>[[Author Index]] </div> |
|<hr>|<|<|
|[[Copyright Statement]] |[[Support]] |
<div class="cpwelcomepagecopyright">
{{$:/causal/publication/Copyright Statement}}
</div>